summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorMichael Klishin <klishinm@vmware.com>2021-12-29 11:00:25 +0300
committerGitHub <noreply@github.com>2021-12-29 11:00:25 +0300
commit0a054bf59e443b5ace9094488ed3215c256a6ecd (patch)
treeb2dded5ae60755b20be41f50d164c28acea4ccd1
parente89dec54e9f8758f0e7262fb80e2af7726c648b5 (diff)
parent4993fefa08fa0785385d23465d1f6de20bd4649c (diff)
downloadrabbitmq-server-git-confirm_on_ack_cli.tar.gz
Merge branch 'master' into confirm_on_ack_cliconfirm_on_ack_cli
-rw-r--r--.bazelrc57
-rw-r--r--.bazelversion1
-rw-r--r--.dockerignore1
-rw-r--r--.github/PULL_REQUEST_TEMPLATE.md31
-rw-r--r--.github/SECURITY.md71
-rw-r--r--.github/dependabot.yaml7
-rw-r--r--.github/mergify.yml49
-rw-r--r--.github/workflows/base-images.yaml54
-rw-r--r--.github/workflows/maintenance.yaml45
-rw-r--r--.github/workflows/oci.yaml114
-rw-r--r--.github/workflows/perform-bazel-execution-comparison.yaml111
-rw-r--r--.github/workflows/rabbitmq_peer_discovery_aws.yaml68
-rw-r--r--.github/workflows/test-erlang-git.yaml66
-rw-r--r--.github/workflows/test-erlang-otp-22.3.yaml8348
-rw-r--r--.github/workflows/test-erlang-otp-23.1.yaml5099
-rw-r--r--.github/workflows/test-mixed-versions.yaml115
-rw-r--r--.github/workflows/test.yaml109
-rw-r--r--.github/workflows/update-bazel-erlang.yaml45
-rw-r--r--.github/workflows/update-otp-for-oci.yaml67
-rw-r--r--.github/workflows/update-rbe-images.yaml60
-rw-r--r--.gitignore12
-rw-r--r--BAZEL.md70
-rw-r--r--BUILD.bats5
-rw-r--r--BUILD.bazel166
-rw-r--r--BUILD.inet_tcp_proxy8
-rw-r--r--BUILD.package_generic_unix46
-rw-r--r--BUILD.ranch46
-rw-r--r--BUILD.trust_store_http15
-rw-r--r--CONTRIBUTING.md33
-rw-r--r--Makefile56
-rw-r--r--README.md20
-rw-r--r--WORKSPACE.bazel107
-rw-r--r--ci/dockerfiles/22.3/erlang_elixir18
-rw-r--r--ci/dockerfiles/23.1/erlang_elixir18
-rw-r--r--ci/dockerfiles/ci42
-rw-r--r--ci/dockerfiles/ci-base20
-rw-r--r--ci/dockerfiles/ci-dep19
-rwxr-xr-xci/scripts/collect.sh7
-rwxr-xr-xci/scripts/ct-suite.sh28
-rwxr-xr-xci/scripts/dialyze.sh8
-rwxr-xr-xci/scripts/fetch_secondary_umbrellas.sh45
-rwxr-xr-xci/scripts/finish.sh5
-rwxr-xr-xci/scripts/package_generic_unix.sh14
-rwxr-xr-xci/scripts/rabbitmq_cli.sh49
-rwxr-xr-xci/scripts/tests.sh26
-rwxr-xr-xci/scripts/validate-workflow.sh26
-rwxr-xr-xci/scripts/xref.sh8
-rw-r--r--deps/amqp10_client/BUILD.bazel102
-rw-r--r--deps/amqp10_client/Makefile4
-rw-r--r--deps/amqp10_client/activemq.bzl19
-rw-r--r--deps/amqp10_client/src/amqp10_client.erl6
-rw-r--r--deps/amqp10_client/src/amqp10_client.hrl2
-rw-r--r--deps/amqp10_client/src/amqp10_client_app.erl2
-rw-r--r--deps/amqp10_client/src/amqp10_client_connection.erl23
-rw-r--r--deps/amqp10_client/src/amqp10_client_connection_sup.erl2
-rw-r--r--deps/amqp10_client/src/amqp10_client_connections_sup.erl2
-rw-r--r--deps/amqp10_client/src/amqp10_client_frame_reader.erl12
-rw-r--r--deps/amqp10_client/src/amqp10_client_session.erl11
-rw-r--r--deps/amqp10_client/src/amqp10_client_sessions_sup.erl2
-rw-r--r--deps/amqp10_client/src/amqp10_client_sup.erl2
-rw-r--r--deps/amqp10_client/src/amqp10_client_types.erl2
-rw-r--r--deps/amqp10_client/src/amqp10_msg.erl2
-rw-r--r--deps/amqp10_client/test/activemq_ct_helpers.erl2
-rw-r--r--deps/amqp10_client/test/mock_server.erl2
-rw-r--r--deps/amqp10_client/test/msg_SUITE.erl2
-rw-r--r--deps/amqp10_client/test/system_SUITE.erl2
-rw-r--r--deps/amqp10_common/BUILD.bazel114
-rw-r--r--deps/amqp10_common/CONTRIBUTING.md4
-rw-r--r--deps/amqp10_common/Makefile4
-rw-r--r--deps/amqp10_common/src/amqp10_binary_generator.erl2
-rw-r--r--deps/amqp10_common/src/amqp10_binary_parser.erl28
-rw-r--r--deps/amqp10_common/src/amqp10_framing.erl2
-rw-r--r--deps/amqp10_common/test/binary_parser_SUITE.erl59
-rw-r--r--deps/amqp_client/BUILD.bazel97
-rw-r--r--deps/amqp_client/CONTRIBUTING.md4
-rw-r--r--deps/amqp_client/Makefile6
-rw-r--r--deps/amqp_client/erlang.mk7746
-rw-r--r--deps/amqp_client/include/amqp_client.hrl2
-rw-r--r--deps/amqp_client/include/amqp_client_internal.hrl5
-rw-r--r--deps/amqp_client/rabbitmq-components.mk359
-rw-r--r--deps/amqp_client/src/amqp_auth_mechanisms.erl2
-rw-r--r--deps/amqp_client/src/amqp_channel.erl24
-rw-r--r--deps/amqp_client/src/amqp_channel_sup.erl15
-rw-r--r--deps/amqp_client/src/amqp_channel_sup_sup.erl2
-rw-r--r--deps/amqp_client/src/amqp_channels_manager.erl4
-rw-r--r--deps/amqp_client/src/amqp_client.erl2
-rw-r--r--deps/amqp_client/src/amqp_connection.erl30
-rw-r--r--deps/amqp_client/src/amqp_connection_sup.erl2
-rw-r--r--deps/amqp_client/src/amqp_connection_type_sup.erl2
-rw-r--r--deps/amqp_client/src/amqp_direct_connection.erl18
-rw-r--r--deps/amqp_client/src/amqp_direct_consumer.erl2
-rw-r--r--deps/amqp_client/src/amqp_gen_connection.erl16
-rw-r--r--deps/amqp_client/src/amqp_gen_consumer.erl2
-rw-r--r--deps/amqp_client/src/amqp_main_reader.erl2
-rw-r--r--deps/amqp_client/src/amqp_network_connection.erl37
-rw-r--r--deps/amqp_client/src/amqp_rpc_client.erl2
-rw-r--r--deps/amqp_client/src/amqp_rpc_server.erl2
-rw-r--r--deps/amqp_client/src/amqp_selective_consumer.erl2
-rw-r--r--deps/amqp_client/src/amqp_ssl.erl47
-rw-r--r--deps/amqp_client/src/amqp_sup.erl2
-rw-r--r--deps/amqp_client/src/amqp_uri.erl2
-rw-r--r--deps/amqp_client/src/amqp_util.erl12
-rw-r--r--deps/amqp_client/src/rabbit_routing_util.erl2
-rw-r--r--deps/amqp_client/src/uri_parser.erl2
-rw-r--r--deps/amqp_client/test/system_SUITE.erl176
-rw-r--r--deps/amqp_client/test/unit_SUITE.erl30
-rw-r--r--deps/rabbit/BUILD.bazel1080
-rw-r--r--deps/rabbit/Makefile24
-rw-r--r--deps/rabbit/apps/rabbitmq_prelaunch/.gitignore1
-rw-r--r--deps/rabbit/apps/rabbitmq_prelaunch/BUILD.bazel45
-rw-r--r--deps/rabbit/apps/rabbitmq_prelaunch/Makefile7
-rw-r--r--deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_boot_state.erl57
-rw-r--r--deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_boot_state_sup.erl10
-rw-r--r--deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_boot_state_systemd.erl175
-rw-r--r--deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_boot_state_xterm_titlebar.erl99
-rw-r--r--deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_logger_fmt_helpers.erl194
-rw-r--r--deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_logger_json_fmt.erl119
-rw-r--r--deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_logger_std_h.erl905
-rw-r--r--deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_logger_text_fmt.erl100
-rw-r--r--deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_prelaunch.erl49
-rw-r--r--deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_prelaunch_conf.erl211
-rw-r--r--deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_prelaunch_dist.erl57
-rw-r--r--deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_prelaunch_early_logging.erl635
-rw-r--r--deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_prelaunch_erlang_compat.erl29
-rw-r--r--deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_prelaunch_errors.erl38
-rw-r--r--deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_prelaunch_sighandler.erl8
-rw-r--r--deps/rabbit/apps/rabbitmq_prelaunch/test/rabbit_logger_std_h_SUITE.erl264
-rw-r--r--deps/rabbit/bats.bzl36
-rw-r--r--deps/rabbit/docs/rabbitmq-diagnostics.86
-rw-r--r--deps/rabbit/docs/rabbitmq-echopid.84
-rw-r--r--deps/rabbit/docs/rabbitmq-env.conf.54
-rw-r--r--deps/rabbit/docs/rabbitmq-plugins.84
-rw-r--r--deps/rabbit/docs/rabbitmq-queues.814
-rw-r--r--deps/rabbit/docs/rabbitmq-server.84
-rw-r--r--deps/rabbit/docs/rabbitmq-server.service.example13
-rw-r--r--deps/rabbit/docs/rabbitmq-service.84
-rw-r--r--deps/rabbit/docs/rabbitmq-upgrade.827
-rw-r--r--deps/rabbit/docs/rabbitmq.conf.example87
-rw-r--r--deps/rabbit/docs/rabbitmqctl.852
-rw-r--r--deps/rabbit/erlang.mk7808
-rw-r--r--deps/rabbit/include/gm_specs.hrl7
-rw-r--r--deps/rabbit/include/rabbit_global_counters.hrl2
-rw-r--r--deps/rabbit/priv/schema/rabbit.schema456
-rw-r--r--deps/rabbit/rabbitmq-components.mk359
-rwxr-xr-xdeps/rabbit/scripts/rabbitmq-env6
-rw-r--r--deps/rabbit/scripts/rabbitmq-env.bat10
-rwxr-xr-xdeps/rabbit/scripts/rabbitmq-server4
-rw-r--r--deps/rabbit/scripts/rabbitmq-server.bat4
-rw-r--r--deps/rabbit/scripts/rabbitmq-service.bat6
-rwxr-xr-xdeps/rabbit/scripts/rabbitmq-streams18
-rw-r--r--deps/rabbit/scripts/rabbitmq-streams.bat18
-rwxr-xr-xdeps/rabbit/scripts/rabbitmq-tanzu23
-rw-r--r--deps/rabbit/scripts/rabbitmq-tanzu.bat56
-rw-r--r--deps/rabbit/src/amqqueue.erl11
-rw-r--r--deps/rabbit/src/amqqueue_v1.erl11
-rw-r--r--deps/rabbit/src/background_gc.erl2
-rw-r--r--deps/rabbit/src/code_server_cache.erl4
-rw-r--r--deps/rabbit/src/gatherer.erl2
-rw-r--r--deps/rabbit/src/gm.erl2
-rw-r--r--deps/rabbit/src/internal_user.erl2
-rw-r--r--deps/rabbit/src/internal_user_v1.erl2
-rw-r--r--deps/rabbit/src/lager_exchange_backend.erl233
-rw-r--r--deps/rabbit/src/lqueue.erl2
-rw-r--r--deps/rabbit/src/mirrored_supervisor_sups.erl2
-rw-r--r--deps/rabbit/src/pg_local.erl2
-rw-r--r--deps/rabbit/src/pid_recomposition.erl71
-rw-r--r--deps/rabbit/src/rabbit.erl346
-rw-r--r--deps/rabbit/src/rabbit_access_control.erl75
-rw-r--r--deps/rabbit/src/rabbit_alarm.erl12
-rw-r--r--deps/rabbit/src/rabbit_amqqueue.erl269
-rw-r--r--deps/rabbit/src/rabbit_amqqueue_process.erl39
-rw-r--r--deps/rabbit/src/rabbit_amqqueue_sup.erl8
-rw-r--r--deps/rabbit/src/rabbit_amqqueue_sup_sup.erl4
-rw-r--r--deps/rabbit/src/rabbit_auth_backend_internal.erl284
-rw-r--r--deps/rabbit/src/rabbit_auth_mechanism_amqplain.erl4
-rw-r--r--deps/rabbit/src/rabbit_auth_mechanism_cr_demo.erl4
-rw-r--r--deps/rabbit/src/rabbit_auth_mechanism_plain.erl4
-rw-r--r--deps/rabbit/src/rabbit_autoheal.erl49
-rw-r--r--deps/rabbit/src/rabbit_backing_queue.erl2
-rw-r--r--deps/rabbit/src/rabbit_basic.erl6
-rw-r--r--deps/rabbit/src/rabbit_binding.erl27
-rw-r--r--deps/rabbit/src/rabbit_boot_steps.erl6
-rw-r--r--deps/rabbit/src/rabbit_channel.erl345
-rw-r--r--deps/rabbit/src/rabbit_channel_interceptor.erl23
-rw-r--r--deps/rabbit/src/rabbit_channel_sup.erl4
-rw-r--r--deps/rabbit/src/rabbit_channel_sup_sup.erl4
-rw-r--r--deps/rabbit/src/rabbit_channel_tracking.erl25
-rw-r--r--deps/rabbit/src/rabbit_channel_tracking_handler.erl4
-rw-r--r--deps/rabbit/src/rabbit_classic_queue.erl48
-rw-r--r--deps/rabbit/src/rabbit_client_sup.erl4
-rw-r--r--deps/rabbit/src/rabbit_connection_helper_sup.erl4
-rw-r--r--deps/rabbit/src/rabbit_connection_sup.erl10
-rw-r--r--deps/rabbit/src/rabbit_connection_tracking.erl27
-rw-r--r--deps/rabbit/src/rabbit_connection_tracking_handler.erl4
-rw-r--r--deps/rabbit/src/rabbit_control_pbe.erl2
-rw-r--r--deps/rabbit/src/rabbit_core_ff.erl4
-rw-r--r--deps/rabbit/src/rabbit_core_metrics_gc.erl4
-rw-r--r--deps/rabbit/src/rabbit_credential_validation.erl4
-rw-r--r--deps/rabbit/src/rabbit_credential_validator.erl4
-rw-r--r--deps/rabbit/src/rabbit_credential_validator_accept_everything.erl4
-rw-r--r--deps/rabbit/src/rabbit_credential_validator_min_password_length.erl4
-rw-r--r--deps/rabbit/src/rabbit_credential_validator_password_regexp.erl4
-rw-r--r--deps/rabbit/src/rabbit_dead_letter.erl10
-rw-r--r--deps/rabbit/src/rabbit_definitions.erl166
-rw-r--r--deps/rabbit/src/rabbit_definitions_import_https.erl84
-rw-r--r--deps/rabbit/src/rabbit_definitions_import_local_filesystem.erl140
-rw-r--r--deps/rabbit/src/rabbit_diagnostics.erl2
-rw-r--r--deps/rabbit/src/rabbit_direct.erl8
-rw-r--r--deps/rabbit/src/rabbit_direct_reply_to.erl78
-rw-r--r--deps/rabbit/src/rabbit_disk_monitor.erl212
-rw-r--r--deps/rabbit/src/rabbit_epmd_monitor.erl8
-rw-r--r--deps/rabbit/src/rabbit_event_consumer.erl2
-rw-r--r--deps/rabbit/src/rabbit_exchange.erl8
-rw-r--r--deps/rabbit/src/rabbit_exchange_decorator.erl4
-rw-r--r--deps/rabbit/src/rabbit_exchange_parameters.erl4
-rw-r--r--deps/rabbit/src/rabbit_exchange_type_direct.erl4
-rw-r--r--deps/rabbit/src/rabbit_exchange_type_fanout.erl4
-rw-r--r--deps/rabbit/src/rabbit_exchange_type_headers.erl6
-rw-r--r--deps/rabbit/src/rabbit_exchange_type_invalid.erl4
-rw-r--r--deps/rabbit/src/rabbit_exchange_type_topic.erl4
-rw-r--r--deps/rabbit/src/rabbit_feature_flags.erl4
-rw-r--r--deps/rabbit/src/rabbit_ff_extra.erl2
-rw-r--r--deps/rabbit/src/rabbit_ff_registry.erl4
-rw-r--r--deps/rabbit/src/rabbit_fhc_helpers.erl2
-rw-r--r--deps/rabbit/src/rabbit_fifo.erl268
-rw-r--r--deps/rabbit/src/rabbit_fifo_client.erl92
-rw-r--r--deps/rabbit/src/rabbit_fifo_v0.erl17
-rw-r--r--deps/rabbit/src/rabbit_file.erl17
-rw-r--r--deps/rabbit/src/rabbit_framing.erl2
-rw-r--r--deps/rabbit/src/rabbit_global_counters.erl216
-rw-r--r--deps/rabbit/src/rabbit_guid.erl2
-rw-r--r--deps/rabbit/src/rabbit_health_check.erl13
-rw-r--r--deps/rabbit/src/rabbit_lager.erl723
-rw-r--r--deps/rabbit/src/rabbit_limiter.erl4
-rw-r--r--deps/rabbit/src/rabbit_log_channel.erl120
-rw-r--r--deps/rabbit/src/rabbit_log_connection.erl120
-rw-r--r--deps/rabbit/src/rabbit_log_feature_flags.erl120
-rw-r--r--deps/rabbit/src/rabbit_log_mirroring.erl122
-rw-r--r--deps/rabbit/src/rabbit_log_prelaunch.erl120
-rw-r--r--deps/rabbit/src/rabbit_log_queue.erl120
-rw-r--r--deps/rabbit/src/rabbit_log_tail.erl2
-rw-r--r--deps/rabbit/src/rabbit_log_upgrade.erl122
-rw-r--r--deps/rabbit/src/rabbit_logger_exchange_h.erl176
-rw-r--r--deps/rabbit/src/rabbit_looking_glass.erl36
-rw-r--r--deps/rabbit/src/rabbit_maintenance.erl83
-rw-r--r--deps/rabbit/src/rabbit_memory_monitor.erl2
-rw-r--r--deps/rabbit/src/rabbit_metrics.erl2
-rw-r--r--deps/rabbit/src/rabbit_mirror_queue_coordinator.erl10
-rw-r--r--deps/rabbit/src/rabbit_mirror_queue_master.erl16
-rw-r--r--deps/rabbit/src/rabbit_mirror_queue_misc.erl201
-rw-r--r--deps/rabbit/src/rabbit_mirror_queue_mode.erl2
-rw-r--r--deps/rabbit/src/rabbit_mirror_queue_mode_all.erl4
-rw-r--r--deps/rabbit/src/rabbit_mirror_queue_mode_exactly.erl4
-rw-r--r--deps/rabbit/src/rabbit_mirror_queue_mode_nodes.erl4
-rw-r--r--deps/rabbit/src/rabbit_mirror_queue_slave.erl25
-rw-r--r--deps/rabbit/src/rabbit_mirror_queue_sync.erl81
-rw-r--r--deps/rabbit/src/rabbit_mnesia.erl54
-rw-r--r--deps/rabbit/src/rabbit_mnesia_rename.erl16
-rw-r--r--deps/rabbit/src/rabbit_msg_file.erl4
-rw-r--r--deps/rabbit/src/rabbit_msg_record.erl13
-rw-r--r--deps/rabbit/src/rabbit_msg_store.erl39
-rw-r--r--deps/rabbit/src/rabbit_msg_store_ets_index.erl6
-rw-r--r--deps/rabbit/src/rabbit_msg_store_gc.erl4
-rw-r--r--deps/rabbit/src/rabbit_networking.erl93
-rw-r--r--deps/rabbit/src/rabbit_node_monitor.erl83
-rw-r--r--deps/rabbit/src/rabbit_nodes.erl57
-rw-r--r--deps/rabbit/src/rabbit_osiris_metrics.erl74
-rw-r--r--deps/rabbit/src/rabbit_parameter_validation.erl30
-rw-r--r--deps/rabbit/src/rabbit_password.erl4
-rw-r--r--deps/rabbit/src/rabbit_password_hashing_md5.erl2
-rw-r--r--deps/rabbit/src/rabbit_password_hashing_sha256.erl2
-rw-r--r--deps/rabbit/src/rabbit_password_hashing_sha512.erl2
-rw-r--r--deps/rabbit/src/rabbit_peer_discovery.erl65
-rw-r--r--deps/rabbit/src/rabbit_peer_discovery_classic_config.erl61
-rw-r--r--deps/rabbit/src/rabbit_peer_discovery_dns.erl4
-rw-r--r--deps/rabbit/src/rabbit_plugins.erl18
-rw-r--r--deps/rabbit/src/rabbit_policies.erl14
-rw-r--r--deps/rabbit/src/rabbit_policy.erl193
-rw-r--r--deps/rabbit/src/rabbit_policy_merge_strategy.erl2
-rw-r--r--deps/rabbit/src/rabbit_prelaunch_cluster.erl21
-rw-r--r--deps/rabbit/src/rabbit_prelaunch_enabled_plugins_file.erl31
-rw-r--r--deps/rabbit/src/rabbit_prelaunch_feature_flags.erl25
-rw-r--r--deps/rabbit/src/rabbit_prelaunch_logging.erl1749
-rw-r--r--deps/rabbit/src/rabbit_prequeue.erl4
-rw-r--r--deps/rabbit/src/rabbit_priority_queue.erl4
-rw-r--r--deps/rabbit/src/rabbit_queue_consumers.erl21
-rw-r--r--deps/rabbit/src/rabbit_queue_decorator.erl2
-rw-r--r--deps/rabbit/src/rabbit_queue_index.erl54
-rw-r--r--deps/rabbit/src/rabbit_queue_location_client_local.erl2
-rw-r--r--deps/rabbit/src/rabbit_queue_location_min_masters.erl2
-rw-r--r--deps/rabbit/src/rabbit_queue_location_random.erl2
-rw-r--r--deps/rabbit/src/rabbit_queue_location_validator.erl2
-rw-r--r--deps/rabbit/src/rabbit_queue_master_location_misc.erl2
-rw-r--r--deps/rabbit/src/rabbit_queue_master_locator.erl2
-rw-r--r--deps/rabbit/src/rabbit_queue_type.erl71
-rw-r--r--deps/rabbit/src/rabbit_queue_type_util.erl36
-rw-r--r--deps/rabbit/src/rabbit_quorum_memory_manager.erl4
-rw-r--r--deps/rabbit/src/rabbit_quorum_queue.erl263
-rw-r--r--deps/rabbit/src/rabbit_ra_registry.erl17
-rw-r--r--deps/rabbit/src/rabbit_reader.erl71
-rw-r--r--deps/rabbit/src/rabbit_recovery_terms.erl4
-rw-r--r--deps/rabbit/src/rabbit_restartable_sup.erl4
-rw-r--r--deps/rabbit/src/rabbit_router.erl15
-rw-r--r--deps/rabbit/src/rabbit_runtime_parameters.erl85
-rw-r--r--deps/rabbit/src/rabbit_ssl.erl46
-rw-r--r--deps/rabbit/src/rabbit_stream_coordinator.erl2123
-rw-r--r--deps/rabbit/src/rabbit_stream_coordinator.hrl63
-rw-r--r--deps/rabbit/src/rabbit_stream_queue.erl695
-rw-r--r--deps/rabbit/src/rabbit_sup.erl4
-rw-r--r--deps/rabbit/src/rabbit_sysmon_handler.erl2
-rw-r--r--deps/rabbit/src/rabbit_sysmon_minder.erl2
-rw-r--r--deps/rabbit/src/rabbit_table.erl10
-rw-r--r--deps/rabbit/src/rabbit_trace.erl39
-rw-r--r--deps/rabbit/src/rabbit_tracking.erl4
-rw-r--r--deps/rabbit/src/rabbit_upgrade.erl20
-rw-r--r--deps/rabbit/src/rabbit_upgrade_functions.erl6
-rw-r--r--deps/rabbit/src/rabbit_upgrade_preparation.erl2
-rw-r--r--deps/rabbit/src/rabbit_variable_queue.erl18
-rw-r--r--deps/rabbit/src/rabbit_version.erl2
-rw-r--r--deps/rabbit/src/rabbit_vhost.erl252
-rw-r--r--deps/rabbit/src/rabbit_vhost_limit.erl4
-rw-r--r--deps/rabbit/src/rabbit_vhost_msg_store.erl4
-rw-r--r--deps/rabbit/src/rabbit_vhost_process.erl6
-rw-r--r--deps/rabbit/src/rabbit_vhost_sup.erl4
-rw-r--r--deps/rabbit/src/rabbit_vhost_sup_sup.erl6
-rw-r--r--deps/rabbit/src/rabbit_vhost_sup_wrapper.erl4
-rw-r--r--deps/rabbit/src/rabbit_vm.erl11
-rw-r--r--deps/rabbit/src/supervised_lifecycle.erl2
-rw-r--r--deps/rabbit/src/tcp_listener.erl40
-rw-r--r--deps/rabbit/src/tcp_listener_sup.erl24
-rw-r--r--deps/rabbit/src/term_to_binary_compat.erl4
-rw-r--r--deps/rabbit/src/vhost.erl33
-rw-r--r--deps/rabbit/src/vhost_v1.erl2
-rw-r--r--deps/rabbit/test/backing_queue_SUITE.erl106
-rw-r--r--deps/rabbit/test/channel_interceptor_SUITE.erl53
-rw-r--r--deps/rabbit/test/channel_operation_timeout_SUITE.erl2
-rw-r--r--deps/rabbit/test/channel_operation_timeout_test_queue.erl6
-rw-r--r--deps/rabbit/test/cluster_SUITE.erl4
-rw-r--r--deps/rabbit/test/cluster_rename_SUITE.erl2
-rw-r--r--deps/rabbit/test/clustering_management_SUITE.erl8
-rw-r--r--deps/rabbit/test/config_schema_SUITE.erl2
-rw-r--r--deps/rabbit/test/config_schema_SUITE_data/rabbit.snippets205
-rw-r--r--deps/rabbit/test/consumer_timeout_SUITE.erl2
-rw-r--r--deps/rabbit/test/crashing_queues_SUITE.erl5
-rw-r--r--deps/rabbit/test/dead_lettering_SUITE.erl2
-rw-r--r--deps/rabbit/test/definition_import_SUITE.erl191
-rw-r--r--deps/rabbit/test/definition_import_SUITE_data/case14.json34
-rw-r--r--deps/rabbit/test/definition_import_SUITE_data/case15.json53
-rw-r--r--deps/rabbit/test/definition_import_SUITE_data/case16.json58
-rw-r--r--deps/rabbit/test/definition_import_SUITE_data/case18.json46
-rw-r--r--deps/rabbit/test/definition_import_SUITE_data/case9/case9a.json448
-rw-r--r--deps/rabbit/test/definition_import_SUITE_data/case9/case9b.json587
-rw-r--r--deps/rabbit/test/definition_import_SUITE_data/failing_case17.json19
-rw-r--r--deps/rabbit/test/definition_import_SUITE_data/failing_case19.json46
-rw-r--r--deps/rabbit/test/disconnect_detected_during_alarm_SUITE.erl2
-rw-r--r--deps/rabbit/test/dummy_event_receiver.erl4
-rw-r--r--deps/rabbit/test/dummy_interceptor.erl8
-rw-r--r--deps/rabbit/test/dummy_runtime_parameters.erl2
-rw-r--r--deps/rabbit/test/dummy_supervisor2.erl2
-rw-r--r--deps/rabbit/test/dynamic_ha_SUITE.erl2
-rw-r--r--deps/rabbit/test/dynamic_qq_SUITE.erl54
-rw-r--r--deps/rabbit/test/eager_sync_SUITE.erl8
-rw-r--r--deps/rabbit/test/feature_flags_SUITE.erl106
-rw-r--r--deps/rabbit/test/feature_flags_SUITE_data/my_plugin/BUILD.bazel13
-rw-r--r--deps/rabbit/test/feature_flags_SUITE_data/my_plugin/Makefile4
-rw-r--r--deps/rabbit/test/feature_flags_SUITE_data/my_plugin/erlang.mk1
-rw-r--r--deps/rabbit/test/feature_flags_SUITE_data/my_plugin/rabbitmq-components.mk1
-rw-r--r--deps/rabbit/test/feature_flags_SUITE_data/my_plugin/src/my_plugin.erl2
-rw-r--r--deps/rabbit/test/feature_flags_with_unpriveleged_user_SUITE.erl85
-rw-r--r--deps/rabbit/test/lazy_queue_SUITE.erl2
-rw-r--r--deps/rabbit/test/list_consumers_sanity_check_SUITE.erl2
-rw-r--r--deps/rabbit/test/list_queues_online_and_offline_SUITE.erl2
-rw-r--r--deps/rabbit/test/logging_SUITE.erl1332
-rw-r--r--deps/rabbit/test/maintenance_mode_SUITE.erl66
-rw-r--r--deps/rabbit/test/many_node_ha_SUITE.erl2
-rw-r--r--deps/rabbit/test/message_size_limit_SUITE.erl2
-rw-r--r--deps/rabbit/test/metrics_SUITE.erl2
-rw-r--r--deps/rabbit/test/mirrored_supervisor_SUITE.erl2
-rw-r--r--deps/rabbit/test/mirrored_supervisor_SUITE_gs.erl2
-rw-r--r--deps/rabbit/test/msg_store_SUITE.erl2
-rw-r--r--deps/rabbit/test/peer_discovery_classic_config_SUITE.erl58
-rw-r--r--deps/rabbit/test/peer_discovery_dns_SUITE.erl10
-rw-r--r--deps/rabbit/test/per_user_connection_channel_limit_SUITE.erl62
-rw-r--r--deps/rabbit/test/per_user_connection_channel_limit_partitions_SUITE.erl59
-rw-r--r--deps/rabbit/test/per_user_connection_channel_tracking_SUITE.erl21
-rw-r--r--deps/rabbit/test/per_user_connection_tracking_SUITE.erl2
-rw-r--r--deps/rabbit/test/per_vhost_connection_limit_SUITE.erl229
-rw-r--r--deps/rabbit/test/per_vhost_connection_limit_partitions_SUITE.erl28
-rw-r--r--deps/rabbit/test/per_vhost_msg_store_SUITE.erl2
-rw-r--r--deps/rabbit/test/per_vhost_queue_limit_SUITE.erl2
-rw-r--r--deps/rabbit/test/policy_SUITE.erl2
-rw-r--r--deps/rabbit/test/priority_queue_SUITE.erl298
-rw-r--r--deps/rabbit/test/priority_queue_recovery_SUITE.erl2
-rw-r--r--deps/rabbit/test/product_info_SUITE.erl2
-rw-r--r--deps/rabbit/test/proxy_protocol_SUITE.erl6
-rw-r--r--deps/rabbit/test/publisher_confirms_parallel_SUITE.erl14
-rw-r--r--deps/rabbit/test/queue_length_limits_SUITE.erl2
-rw-r--r--deps/rabbit/test/queue_master_location_SUITE.erl12
-rw-r--r--deps/rabbit/test/queue_parallel_SUITE.erl53
-rw-r--r--deps/rabbit/test/queue_type_SUITE.erl71
-rw-r--r--deps/rabbit/test/quorum_queue_SUITE.erl503
-rw-r--r--deps/rabbit/test/quorum_queue_utils.erl69
-rw-r--r--deps/rabbit/test/rabbit_auth_backend_context_propagation_mock.erl2
-rw-r--r--deps/rabbit/test/rabbit_core_metrics_gc_SUITE.erl2
-rw-r--r--deps/rabbit/test/rabbit_dummy_protocol_connection_info.erl2
-rw-r--r--deps/rabbit/test/rabbit_fifo_SUITE.erl124
-rw-r--r--deps/rabbit/test/rabbit_fifo_int_SUITE.erl50
-rw-r--r--deps/rabbit/test/rabbit_fifo_prop_SUITE.erl6
-rw-r--r--deps/rabbit/test/rabbit_foo_protocol_connection_info.erl2
-rw-r--r--deps/rabbit/test/rabbit_ha_test_consumer.erl8
-rw-r--r--deps/rabbit/test/rabbit_ha_test_producer.erl2
-rw-r--r--deps/rabbit/test/rabbit_msg_record_SUITE.erl21
-rw-r--r--deps/rabbit/test/rabbit_stream_coordinator_SUITE.erl1197
-rw-r--r--deps/rabbit/test/rabbit_stream_queue_SUITE.erl1184
-rw-r--r--deps/rabbit/test/rabbitmq-env.bats31
-rw-r--r--deps/rabbit/test/rabbitmq_queues_cli_integration_SUITE.erl40
-rw-r--r--deps/rabbit/test/rabbitmqctl_integration_SUITE.erl2
-rw-r--r--deps/rabbit/test/rabbitmqctl_shutdown_SUITE.erl2
-rw-r--r--deps/rabbit/test/signal_handling_SUITE.erl2
-rw-r--r--deps/rabbit/test/simple_ha_SUITE.erl42
-rw-r--r--deps/rabbit/test/single_active_consumer_SUITE.erl2
-rw-r--r--deps/rabbit/test/sync_detection_SUITE.erl2
-rw-r--r--deps/rabbit/test/term_to_binary_compat_prop_SUITE.erl4
-rw-r--r--deps/rabbit/test/topic_permission_SUITE.erl2
-rw-r--r--deps/rabbit/test/unit_access_control_SUITE.erl2
-rw-r--r--deps/rabbit/test/unit_access_control_authn_authz_context_propagation_SUITE.erl2
-rw-r--r--deps/rabbit/test/unit_access_control_credential_validation_SUITE.erl2
-rw-r--r--deps/rabbit/test/unit_amqp091_content_framing_SUITE.erl2
-rw-r--r--deps/rabbit/test/unit_amqp091_server_properties_SUITE.erl2
-rw-r--r--deps/rabbit/test/unit_app_management_SUITE.erl2
-rw-r--r--deps/rabbit/test/unit_classic_mirrored_queue_sync_throttling_SUITE.erl88
-rw-r--r--deps/rabbit/test/unit_classic_mirrored_queue_throughput_SUITE.erl29
-rw-r--r--deps/rabbit/test/unit_cluster_formation_locking_mocks_SUITE.erl2
-rw-r--r--deps/rabbit/test/unit_collections_SUITE.erl2
-rw-r--r--deps/rabbit/test/unit_config_value_encryption_SUITE.erl15
-rw-r--r--deps/rabbit/test/unit_connection_tracking_SUITE.erl2
-rw-r--r--deps/rabbit/test/unit_credit_flow_SUITE.erl2
-rw-r--r--deps/rabbit/test/unit_disk_monitor_SUITE.erl13
-rw-r--r--deps/rabbit/test/unit_disk_monitor_mocks_SUITE.erl4
-rw-r--r--deps/rabbit/test/unit_file_handle_cache_SUITE.erl2
-rw-r--r--deps/rabbit/test/unit_gen_server2_SUITE.erl2
-rw-r--r--deps/rabbit/test/unit_gm_SUITE.erl2
-rw-r--r--deps/rabbit/test/unit_log_config_SUITE.erl837
-rw-r--r--deps/rabbit/test/unit_log_management_SUITE.erl131
-rw-r--r--deps/rabbit/test/unit_operator_policy_SUITE.erl57
-rw-r--r--deps/rabbit/test/unit_pg_local_SUITE.erl2
-rw-r--r--deps/rabbit/test/unit_plugin_directories_SUITE.erl2
-rw-r--r--deps/rabbit/test/unit_plugin_versioning_SUITE.erl2
-rw-r--r--deps/rabbit/test/unit_policy_validators_SUITE.erl2
-rw-r--r--deps/rabbit/test/unit_priority_queue_SUITE.erl2
-rw-r--r--deps/rabbit/test/unit_queue_consumers_SUITE.erl2
-rw-r--r--deps/rabbit/test/unit_stats_and_metrics_SUITE.erl2
-rw-r--r--deps/rabbit/test/unit_supervisor2_SUITE.erl2
-rw-r--r--deps/rabbit/test/unit_vm_memory_monitor_SUITE.erl43
-rw-r--r--deps/rabbit/test/upgrade_preparation_SUITE.erl24
-rw-r--r--deps/rabbit/test/vhost_SUITE.erl16
-rw-r--r--deps/rabbit_common/BUILD.bazel239
-rw-r--r--deps/rabbit_common/CONTRIBUTING.md4
-rw-r--r--deps/rabbit_common/Makefile8
-rw-r--r--deps/rabbit_common/erlang.mk7746
-rw-r--r--deps/rabbit_common/include/logging.hrl19
-rw-r--r--deps/rabbit_common/include/rabbit.hrl12
-rw-r--r--deps/rabbit_common/include/rabbit_core_metrics.hrl2
-rw-r--r--deps/rabbit_common/include/rabbit_memory.hrl2
-rw-r--r--deps/rabbit_common/include/rabbit_misc.hrl2
-rw-r--r--deps/rabbit_common/include/rabbit_msg_store.hrl2
-rw-r--r--deps/rabbit_common/include/resource.hrl2
-rw-r--r--deps/rabbit_common/mk/rabbitmq-build.mk19
-rw-r--r--deps/rabbit_common/mk/rabbitmq-components.mk359
-rw-r--r--deps/rabbit_common/mk/rabbitmq-dist.mk12
-rw-r--r--deps/rabbit_common/mk/rabbitmq-early-test.mk1
-rw-r--r--deps/rabbit_common/mk/rabbitmq-hexpm.mk17
-rw-r--r--deps/rabbit_common/mk/rabbitmq-run.mk74
-rw-r--r--deps/rabbit_common/mk/rabbitmq-tools.mk69
-rw-r--r--deps/rabbit_common/src/app_utils.erl2
-rw-r--r--deps/rabbit_common/src/code_version.erl2
-rw-r--r--deps/rabbit_common/src/credit_flow.erl2
-rw-r--r--deps/rabbit_common/src/delegate.erl66
-rw-r--r--deps/rabbit_common/src/delegate_sup.erl2
-rw-r--r--deps/rabbit_common/src/file_handle_cache.erl6
-rw-r--r--deps/rabbit_common/src/file_handle_cache_stats.erl2
-rw-r--r--deps/rabbit_common/src/gen_server2.erl4
-rw-r--r--deps/rabbit_common/src/lager_forwarder_backend.erl120
-rw-r--r--deps/rabbit_common/src/mirrored_supervisor.erl105
-rw-r--r--deps/rabbit_common/src/mirrored_supervisor_locks.erl33
-rw-r--r--deps/rabbit_common/src/mnesia_sync.erl2
-rw-r--r--deps/rabbit_common/src/pmon.erl2
-rw-r--r--deps/rabbit_common/src/priority_queue.erl2
-rw-r--r--deps/rabbit_common/src/rabbit_amqp_connection.erl6
-rw-r--r--deps/rabbit_common/src/rabbit_amqqueue_common.erl2
-rw-r--r--deps/rabbit_common/src/rabbit_auth_backend_dummy.erl2
-rw-r--r--deps/rabbit_common/src/rabbit_auth_mechanism.erl2
-rw-r--r--deps/rabbit_common/src/rabbit_authn_backend.erl2
-rw-r--r--deps/rabbit_common/src/rabbit_authz_backend.erl2
-rw-r--r--deps/rabbit_common/src/rabbit_basic_common.erl2
-rw-r--r--deps/rabbit_common/src/rabbit_binary_generator.erl4
-rw-r--r--deps/rabbit_common/src/rabbit_binary_parser.erl2
-rw-r--r--deps/rabbit_common/src/rabbit_cert_info.erl21
-rw-r--r--deps/rabbit_common/src/rabbit_channel_common.erl2
-rw-r--r--deps/rabbit_common/src/rabbit_command_assembler.erl2
-rw-r--r--deps/rabbit_common/src/rabbit_control_misc.erl2
-rw-r--r--deps/rabbit_common/src/rabbit_core_metrics.erl11
-rw-r--r--deps/rabbit_common/src/rabbit_data_coercion.erl11
-rw-r--r--deps/rabbit_common/src/rabbit_date_time.erl48
-rw-r--r--deps/rabbit_common/src/rabbit_env.erl348
-rw-r--r--deps/rabbit_common/src/rabbit_error_logger_handler.erl2
-rw-r--r--deps/rabbit_common/src/rabbit_event.erl2
-rw-r--r--deps/rabbit_common/src/rabbit_exchange_type.erl2
-rw-r--r--deps/rabbit_common/src/rabbit_heartbeat.erl2
-rw-r--r--deps/rabbit_common/src/rabbit_json.erl2
-rw-r--r--deps/rabbit_common/src/rabbit_log.erl128
-rw-r--r--deps/rabbit_common/src/rabbit_log_osiris_shim.erl26
-rw-r--r--deps/rabbit_common/src/rabbit_log_ra_shim.erl16
-rw-r--r--deps/rabbit_common/src/rabbit_misc.erl4
-rw-r--r--deps/rabbit_common/src/rabbit_msg_store_index.erl2
-rw-r--r--deps/rabbit_common/src/rabbit_net.erl38
-rw-r--r--deps/rabbit_common/src/rabbit_nodes_common.erl50
-rw-r--r--deps/rabbit_common/src/rabbit_password_hashing.erl2
-rw-r--r--deps/rabbit_common/src/rabbit_pbe.erl2
-rw-r--r--deps/rabbit_common/src/rabbit_peer_discovery_backend.erl6
-rw-r--r--deps/rabbit_common/src/rabbit_policy_validator.erl2
-rw-r--r--deps/rabbit_common/src/rabbit_queue_collector.erl2
-rw-r--r--deps/rabbit_common/src/rabbit_registry.erl2
-rw-r--r--deps/rabbit_common/src/rabbit_registry_class.erl2
-rw-r--r--deps/rabbit_common/src/rabbit_resource_monitor_misc.erl2
-rw-r--r--deps/rabbit_common/src/rabbit_runtime.erl6
-rw-r--r--deps/rabbit_common/src/rabbit_runtime_parameter.erl2
-rw-r--r--deps/rabbit_common/src/rabbit_ssl_options.erl6
-rw-r--r--deps/rabbit_common/src/rabbit_types.erl2
-rw-r--r--deps/rabbit_common/src/rabbit_writer.erl2
-rw-r--r--deps/rabbit_common/src/supervisor2.erl2
-rw-r--r--deps/rabbit_common/src/vm_memory_monitor.erl46
-rw-r--r--deps/rabbit_common/src/worker_pool.erl2
-rw-r--r--deps/rabbit_common/src/worker_pool_sup.erl2
-rw-r--r--deps/rabbit_common/src/worker_pool_worker.erl2
-rw-r--r--deps/rabbit_common/test/gen_server2_test_server.erl2
-rw-r--r--deps/rabbit_common/test/rabbit_env_SUITE.erl106
-rw-r--r--deps/rabbit_common/test/supervisor2_SUITE.erl2
-rw-r--r--deps/rabbit_common/test/unit_SUITE.erl45
-rw-r--r--deps/rabbit_common/test/unit_priority_queue_SUITE.erl2
-rw-r--r--deps/rabbit_common/test/worker_pool_SUITE.erl2
-rw-r--r--deps/rabbitmq_amqp1_0/BUILD.bazel107
-rw-r--r--deps/rabbitmq_amqp1_0/CONTRIBUTING.md4
-rw-r--r--deps/rabbitmq_amqp1_0/Makefile4
-rw-r--r--deps/rabbitmq_amqp1_0/erlang.mk7808
-rw-r--r--deps/rabbitmq_amqp1_0/rabbitmq-components.mk359
-rw-r--r--deps/rabbitmq_amqp1_0/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ListAmqp10ConnectionsCommand.erl11
-rw-r--r--deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0.erl2
-rw-r--r--deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_channel.erl2
-rw-r--r--deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_incoming_link.erl4
-rw-r--r--deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_link_util.erl2
-rw-r--r--deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_message.erl29
-rw-r--r--deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_outgoing_link.erl32
-rw-r--r--deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_reader.erl32
-rw-r--r--deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session.erl4
-rw-r--r--deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session_process.erl29
-rw-r--r--deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session_sup.erl2
-rw-r--r--deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session_sup_sup.erl2
-rw-r--r--deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_util.erl2
-rw-r--r--deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_writer.erl8
-rw-r--r--deps/rabbitmq_amqp1_0/test/amqp10_client_SUITE.erl2
-rw-r--r--deps/rabbitmq_amqp1_0/test/command_SUITE.erl2
-rw-r--r--deps/rabbitmq_amqp1_0/test/proxy_protocol_SUITE.erl6
-rw-r--r--deps/rabbitmq_amqp1_0/test/system_SUITE.erl77
-rwxr-xr-xdeps/rabbitmq_amqp1_0/test/system_SUITE_data/fsharp-tests/Program.fs46
-rwxr-xr-xdeps/rabbitmq_amqp1_0/test/system_SUITE_data/fsharp-tests/fsharp-tests.fsproj8
-rw-r--r--deps/rabbitmq_amqp1_0/test/system_SUITE_data/fsharp-tests/global.json5
-rw-r--r--deps/rabbitmq_amqp1_0/test/system_SUITE_data/java-tests/pom.xml8
-rw-r--r--deps/rabbitmq_amqp1_0/test/unit_SUITE.erl2
-rw-r--r--deps/rabbitmq_auth_backend_cache/BUILD.bazel81
-rw-r--r--deps/rabbitmq_auth_backend_cache/Makefile4
-rw-r--r--deps/rabbitmq_auth_backend_cache/erlang.mk7808
-rw-r--r--deps/rabbitmq_auth_backend_cache/include/rabbit_auth_backend_cache.hrl9
-rw-r--r--deps/rabbitmq_auth_backend_cache/rabbitmq-components.mk359
-rw-r--r--deps/rabbitmq_auth_backend_cache/src/rabbit_auth_backend_cache.erl2
-rw-r--r--deps/rabbitmq_auth_backend_cache/src/rabbit_auth_backend_cache_app.erl2
-rw-r--r--deps/rabbitmq_auth_backend_cache/src/rabbit_auth_cache.erl2
-rw-r--r--deps/rabbitmq_auth_backend_cache/src/rabbit_auth_cache_dict.erl10
-rw-r--r--deps/rabbitmq_auth_backend_cache/src/rabbit_auth_cache_ets.erl18
-rw-r--r--deps/rabbitmq_auth_backend_cache/src/rabbit_auth_cache_ets_segmented.erl10
-rw-r--r--deps/rabbitmq_auth_backend_cache/src/rabbit_auth_cache_ets_segmented_stateless.erl6
-rw-r--r--deps/rabbitmq_auth_backend_cache/test/config_schema_SUITE.erl2
-rw-r--r--deps/rabbitmq_auth_backend_cache/test/rabbit_auth_backend_cache_SUITE.erl2
-rw-r--r--deps/rabbitmq_auth_backend_cache/test/rabbit_auth_cache_SUITE.erl2
-rw-r--r--deps/rabbitmq_auth_backend_http/BUILD.bazel95
-rw-r--r--deps/rabbitmq_auth_backend_http/Makefile8
-rw-r--r--deps/rabbitmq_auth_backend_http/erlang.mk7808
-rw-r--r--deps/rabbitmq_auth_backend_http/examples/rabbitmq_auth_backend_spring_boot/pom.xml31
-rw-r--r--deps/rabbitmq_auth_backend_http/examples/rabbitmq_auth_backend_spring_boot/src/main/java/com/rabbitmq/examples/AuthBackendHttpController.java14
-rw-r--r--deps/rabbitmq_auth_backend_http/examples/rabbitmq_auth_backend_spring_boot_kotlin/src/main/resources/rabbitmq.conf2
-rw-r--r--deps/rabbitmq_auth_backend_http/priv/schema/rabbitmq_auth_backend_http.schema6
-rw-r--r--deps/rabbitmq_auth_backend_http/rabbitmq-components.mk359
-rw-r--r--deps/rabbitmq_auth_backend_http/src/rabbit_auth_backend_http.erl28
-rw-r--r--deps/rabbitmq_auth_backend_http/src/rabbit_auth_backend_http_app.erl2
-rw-r--r--deps/rabbitmq_auth_backend_http/test/auth_SUITE.erl2
-rw-r--r--deps/rabbitmq_auth_backend_http/test/config_schema_SUITE.erl2
-rw-r--r--deps/rabbitmq_auth_backend_http/test/config_schema_SUITE_data/rabbitmq_auth_backend_http.snippets17
-rw-r--r--deps/rabbitmq_auth_backend_http/test/unit_SUITE.erl2
-rw-r--r--deps/rabbitmq_auth_backend_ldap/BUILD.bazel114
-rw-r--r--deps/rabbitmq_auth_backend_ldap/Makefile4
-rw-r--r--deps/rabbitmq_auth_backend_ldap/erlang.mk7808
-rw-r--r--deps/rabbitmq_auth_backend_ldap/include/logging.hrl3
-rw-r--r--deps/rabbitmq_auth_backend_ldap/priv/schema/rabbitmq_auth_backend_ldap.schema16
-rw-r--r--deps/rabbitmq_auth_backend_ldap/rabbitmq-components.mk359
-rw-r--r--deps/rabbitmq_auth_backend_ldap/src/rabbit_auth_backend_ldap.erl51
-rw-r--r--deps/rabbitmq_auth_backend_ldap/src/rabbit_auth_backend_ldap_app.erl5
-rw-r--r--deps/rabbitmq_auth_backend_ldap/src/rabbit_auth_backend_ldap_util.erl2
-rw-r--r--deps/rabbitmq_auth_backend_ldap/src/rabbit_log_ldap.erl107
-rw-r--r--deps/rabbitmq_auth_backend_ldap/test/config_schema_SUITE.erl2
-rw-r--r--deps/rabbitmq_auth_backend_ldap/test/config_schema_SUITE_data/rabbitmq_auth_backend_ldap.snippets64
-rw-r--r--deps/rabbitmq_auth_backend_ldap/test/rabbit_ldap_seed.erl2
-rw-r--r--deps/rabbitmq_auth_backend_ldap/test/system_SUITE.erl2
-rw-r--r--deps/rabbitmq_auth_backend_ldap/test/unit_SUITE.erl2
-rw-r--r--deps/rabbitmq_auth_backend_oauth2/BUILD.bazel121
-rw-r--r--deps/rabbitmq_auth_backend_oauth2/Makefile9
-rw-r--r--deps/rabbitmq_auth_backend_oauth2/README.md78
-rw-r--r--deps/rabbitmq_auth_backend_oauth2/demo/rsa_docker/rabbitmq/rabbitmq.conf6
-rw-r--r--deps/rabbitmq_auth_backend_oauth2/erlang.mk7808
-rw-r--r--deps/rabbitmq_auth_backend_oauth2/priv/schema/rabbitmq_auth_backend_oauth2.schema128
-rw-r--r--deps/rabbitmq_auth_backend_oauth2/rabbitmq-components.mk359
-rw-r--r--deps/rabbitmq_auth_backend_oauth2/src/Elixir.RabbitMQ.CLI.Ctl.Commands.AddUaaKeyCommand.erl6
-rw-r--r--deps/rabbitmq_auth_backend_oauth2/src/rabbit_auth_backend_oauth2.erl4
-rw-r--r--deps/rabbitmq_auth_backend_oauth2/src/rabbit_auth_backend_oauth2_app.erl2
-rw-r--r--deps/rabbitmq_auth_backend_oauth2/src/rabbit_oauth2_scope.erl2
-rw-r--r--deps/rabbitmq_auth_backend_oauth2/src/uaa_jwks.erl27
-rw-r--r--deps/rabbitmq_auth_backend_oauth2/src/uaa_jwt.erl63
-rw-r--r--deps/rabbitmq_auth_backend_oauth2/src/uaa_jwt_jwk.erl2
-rw-r--r--deps/rabbitmq_auth_backend_oauth2/src/uaa_jwt_jwt.erl12
-rw-r--r--deps/rabbitmq_auth_backend_oauth2/src/wildcard.erl4
-rw-r--r--deps/rabbitmq_auth_backend_oauth2/test/add_uaa_key_command_SUITE.erl2
-rw-r--r--deps/rabbitmq_auth_backend_oauth2/test/config_schema_SUITE.erl49
-rw-r--r--deps/rabbitmq_auth_backend_oauth2/test/config_schema_SUITE_data/certs/cacert.pem1
-rw-r--r--deps/rabbitmq_auth_backend_oauth2/test/config_schema_SUITE_data/certs/cert.pem1
-rw-r--r--deps/rabbitmq_auth_backend_oauth2/test/config_schema_SUITE_data/certs/key.pem1
-rw-r--r--deps/rabbitmq_auth_backend_oauth2/test/config_schema_SUITE_data/rabbitmq_auth_backend_oauth2.snippets42
-rw-r--r--deps/rabbitmq_auth_backend_oauth2/test/jwks_SUITE.erl464
-rw-r--r--deps/rabbitmq_auth_backend_oauth2/test/jwks_http_app.erl22
-rw-r--r--deps/rabbitmq_auth_backend_oauth2/test/jwks_http_handler.erl14
-rw-r--r--deps/rabbitmq_auth_backend_oauth2/test/jwks_http_sup.erl11
-rw-r--r--deps/rabbitmq_auth_backend_oauth2/test/rabbit_auth_backend_oauth2_test_util.erl4
-rw-r--r--deps/rabbitmq_auth_backend_oauth2/test/scope_SUITE.erl2
-rw-r--r--deps/rabbitmq_auth_backend_oauth2/test/system_SUITE.erl28
-rw-r--r--deps/rabbitmq_auth_backend_oauth2/test/unit_SUITE.erl2
-rw-r--r--deps/rabbitmq_auth_backend_oauth2/test/wildcard_match_SUITE.erl2
-rw-r--r--deps/rabbitmq_auth_mechanism_ssl/BUILD.bazel42
-rw-r--r--deps/rabbitmq_auth_mechanism_ssl/Makefile4
-rw-r--r--deps/rabbitmq_auth_mechanism_ssl/README.md49
-rw-r--r--deps/rabbitmq_auth_mechanism_ssl/erlang.mk7808
-rw-r--r--deps/rabbitmq_auth_mechanism_ssl/rabbitmq-components.mk359
-rw-r--r--deps/rabbitmq_auth_mechanism_ssl/src/rabbit_auth_mechanism_ssl.erl8
-rw-r--r--deps/rabbitmq_auth_mechanism_ssl/src/rabbit_auth_mechanism_ssl_app.erl4
-rw-r--r--deps/rabbitmq_aws/BUILD.bazel67
-rw-r--r--deps/rabbitmq_aws/Makefile4
-rw-r--r--deps/rabbitmq_aws/README.md17
-rw-r--r--deps/rabbitmq_aws/erlang.mk7808
-rw-r--r--deps/rabbitmq_aws/include/rabbitmq_aws.hrl22
-rw-r--r--deps/rabbitmq_aws/priv/schema/rabbitmq_aws.schema19
-rw-r--r--deps/rabbitmq_aws/rabbitmq-components.mk359
-rw-r--r--deps/rabbitmq_aws/src/rabbitmq_aws.erl159
-rw-r--r--deps/rabbitmq_aws/src/rabbitmq_aws_config.erl102
-rw-r--r--deps/rabbitmq_aws/src/rabbitmq_aws_json.erl12
-rw-r--r--deps/rabbitmq_aws/src/rabbitmq_aws_sign.erl10
-rw-r--r--deps/rabbitmq_aws/test/src/rabbitmq_aws_config_tests.erl113
-rw-r--r--deps/rabbitmq_aws/test/src/rabbitmq_aws_tests.erl241
-rw-r--r--deps/rabbitmq_cli/.gitignore1
-rw-r--r--deps/rabbitmq_cli/BUILD.bazel42
-rw-r--r--deps/rabbitmq_cli/Makefile18
-rw-r--r--deps/rabbitmq_cli/elixir.bzl45
-rw-r--r--deps/rabbitmq_cli/erlang.mk7296
-rw-r--r--deps/rabbitmq_cli/lib/rabbit_common/records.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/auto_complete.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/command_behaviour.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/core/accepts_default_switches_and_timeout.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/core/accepts_no_positional_arguments.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/core/accepts_one_positional_argument.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/core/accepts_one_positive_integer_argument.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/core/accepts_two_positional_arguments.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/core/alarms.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/core/ansi.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/core/code_path.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/core/command_modules.ex4
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/core/config.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/core/data_coercion.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/core/distribution.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/core/doc_guide.ex6
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/core/erl_eval.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/core/exit_codes.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/core/feature_flags.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/core/helpers.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/core/input.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/core/listeners.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/core/log_files.ex5
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/core/memory.ex8
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/core/merges_default_virtual_host.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/core/merges_no_defaults.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/core/networking.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/core/node_name.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/core/os_pid.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/core/output.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/core/parser.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/core/paths.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/core/platform.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/core/requires_rabbit_app_running.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/core/requires_rabbit_app_stopped.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/core/validators.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/core/version.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/add_user_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/add_vhost_command.ex5
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/authenticate_user_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/autocomplete_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/await_online_nodes_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/await_startup_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/cancel_sync_queue_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/change_cluster_node_type_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/change_password_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_global_parameter_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_operator_policy_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_parameter_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_password_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_permissions_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_policy_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_topic_permissions_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_vhost_limits_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/close_all_connections_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/close_all_user_connections_command.ex50
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/close_connection_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/cluster_status_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/decode_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/delete_queue_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/delete_user_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/delete_vhost_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/enable_feature_flag_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/encode_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/environment_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/eval_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/eval_file_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/exec_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/export_definitions_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/force_boot_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/force_gc_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/force_reset_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/forget_cluster_node_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/help_command.ex4
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/hipe_compile_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/import_definitions_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/join_cluster_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_bindings_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_channels_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_ciphers_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_connections_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_consumers_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_exchanges_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_feature_flags_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_global_parameters_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_hashes_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_operator_policies_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_parameters_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_permissions_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_policies_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_queues_command.ex6
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_topic_permissions_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_unresponsive_queues_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_user_permissions_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_user_topic_permissions_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_users_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_vhost_limits_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_vhosts_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/node_health_check_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/ping_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/purge_queue_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/rename_cluster_node_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/report_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/reset_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/restart_vhost_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/resume_listeners_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/rotate_logs_command.ex16
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_cluster_name_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_disk_free_limit_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_global_parameter_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_log_level_command.ex4
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_operator_policy_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_parameter_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_permissions_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_policy_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_topic_permissions_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_user_tags_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_vhost_limits_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_vhost_tags_command.ex60
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_vm_memory_high_watermark_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/shutdown_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/start_app_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/status_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/stop_app_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/stop_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/suspend_listeners_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/sync_queue_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/trace_off_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/trace_on_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/update_cluster_nodes_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/version_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/wait_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/info_keys.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/rpc_stream.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/default_output.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/alarms_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/certificates_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_alarms_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_certificate_expiration_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_local_alarms_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_port_connectivity_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_port_listener_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_protocol_listener_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_running_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_virtual_hosts_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/cipher_suites_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/command_line_arguments_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/consume_event_stream_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/disable_auth_attempt_source_tracking_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/discover_peers_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/enable_auth_attempt_source_tracking_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/erlang_cookie_hash_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/erlang_cookie_sources_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/erlang_version_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/is_booting_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/is_running_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/list_network_interfaces_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/list_node_auth_attempt_stats_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/listeners_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/log_location_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/log_tail_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/log_tail_stream_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/maybe_stuck_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/memory_breakdown_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/observer_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/os_env_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/remote_shell_command.ex35
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/reset_node_auth_attempt_metrics_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/resolve_hostname_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/resolver_info_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/runtime_thread_stats_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/schema_info_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/server_version_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/tls_versions_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/diagnostics_helpers.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/formatter_behaviour.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/csv.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/erlang.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/formatter_helpers.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/inspect.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/json.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/json_stream.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/msacc.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/plugins.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/pretty_table.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/report.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/string.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/string_per_line.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/table.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/information_unit.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/commands/directories_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/commands/disable_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/commands/enable_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/commands/is_enabled.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/commands/list_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/commands/set_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/error_output.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/plugins_helpers.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/printer_behaviour.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/printers/file.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/printers/std_io.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/printers/std_io_raw.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/add_member_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/check_if_node_is_mirror_sync_critical_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/check_if_node_is_quorum_critical_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/delete_member_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/grow_command.ex6
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/peek_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/quorum_status_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/rebalance_command.ex16
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/reclaim_quorum_memory_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/shrink_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/streams/commands/add_replica_command.ex4
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/streams/commands/delete_replica_command.ex6
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/streams/commands/set_stream_retention_policy_command.ex20
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/streams/commands/stream_status_command.ex71
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/time_unit.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/upgrade/commands/await_online_quorum_plus_one_command.ex34
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/upgrade/commands/await_online_synchronized_mirror_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/upgrade/commands/drain_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/upgrade/commands/post_upgrade_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmq/cli/upgrade/commands/revive_command.ex2
-rw-r--r--deps/rabbitmq_cli/lib/rabbitmqctl.ex2
-rw-r--r--deps/rabbitmq_cli/mix.exs31
-rw-r--r--deps/rabbitmq_cli/rabbitmq-components.mk359
-rw-r--r--deps/rabbitmq_cli/rabbitmqctl.bzl163
-rw-r--r--deps/rabbitmq_cli/rabbitmqctl_test.bzl151
-rw-r--r--deps/rabbitmq_cli/test/ctl/clear_global_parameter_command_test.exs4
-rw-r--r--deps/rabbitmq_cli/test/ctl/clear_policy_command_test.exs4
-rw-r--r--deps/rabbitmq_cli/test/ctl/close_all_user_connections_command_test.exs90
-rw-r--r--deps/rabbitmq_cli/test/ctl/resume_listeners_command_test.exs16
-rw-r--r--deps/rabbitmq_cli/test/ctl/rotate_logs_command_test.exs40
-rw-r--r--deps/rabbitmq_cli/test/ctl/set_user_tags_command_test.exs22
-rw-r--r--deps/rabbitmq_cli/test/ctl/set_vhost_tags_command_test.exs139
-rw-r--r--deps/rabbitmq_cli/test/ctl/suspend_listeners_command_test.exs16
-rw-r--r--deps/rabbitmq_cli/test/diagnostics/log_location_command_test.exs31
-rw-r--r--deps/rabbitmq_cli/test/diagnostics/log_tail_command_test.exs23
-rw-r--r--deps/rabbitmq_cli/test/diagnostics/log_tail_stream_command_test.exs14
-rw-r--r--deps/rabbitmq_cli/test/diagnostics/remote_shell_command_test.exs40
-rw-r--r--deps/rabbitmq_cli/test/fixtures/plugins/plugins_with_version_requirements/mock_rabbitmq_plugin_for_3_8-0.1.0/ebin/mock_rabbitmq_plugin_for_3_8.app4
-rw-r--r--deps/rabbitmq_cli/test/plugins/enable_plugins_command_test.exs6
-rw-r--r--deps/rabbitmq_cli/test/plugins/set_plugins_command_test.exs6
-rw-r--r--deps/rabbitmq_cli/test/queues/stream_status_command_test.exs45
-rw-r--r--deps/rabbitmq_cli/test/streams/add_replica_command_test.exs15
-rw-r--r--deps/rabbitmq_cli/test/streams/delete_replica_command_test.exs24
-rw-r--r--deps/rabbitmq_cli/test/streams/set_stream_retention_policy_command_test.exs16
-rw-r--r--deps/rabbitmq_cli/test/test_helper.exs21
-rw-r--r--deps/rabbitmq_cli/test/upgrade/drain_command_test.exs2
-rw-r--r--deps/rabbitmq_cli/test/upgrade/revive_command_test.exs2
-rw-r--r--deps/rabbitmq_codegen/BUILD.bazel18
-rw-r--r--deps/rabbitmq_codegen/CONTRIBUTING.md4
-rw-r--r--deps/rabbitmq_consistent_hash_exchange/BUILD.bazel58
-rw-r--r--deps/rabbitmq_consistent_hash_exchange/Makefile4
-rw-r--r--deps/rabbitmq_consistent_hash_exchange/erlang.mk7808
-rw-r--r--deps/rabbitmq_consistent_hash_exchange/rabbitmq-components.mk359
-rw-r--r--deps/rabbitmq_consistent_hash_exchange/src/Elixir.RabbitMQ.CLI.Diagnostics.Commands.ConsistentHashExchangeRingStateCommand.erl7
-rw-r--r--deps/rabbitmq_consistent_hash_exchange/src/rabbit_exchange_type_consistent_hash.erl11
-rw-r--r--deps/rabbitmq_consistent_hash_exchange/test/rabbit_exchange_type_consistent_hash_SUITE.erl99
-rw-r--r--deps/rabbitmq_ct_client_helpers/.gitignore23
-rw-r--r--deps/rabbitmq_ct_client_helpers/BUILD.bazel11
-rw-r--r--deps/rabbitmq_ct_client_helpers/CODE_OF_CONDUCT.md44
-rw-r--r--deps/rabbitmq_ct_client_helpers/CONTRIBUTING.md38
-rw-r--r--deps/rabbitmq_ct_client_helpers/LICENSE4
-rw-r--r--deps/rabbitmq_ct_client_helpers/LICENSE-MPL-RabbitMQ373
-rw-r--r--deps/rabbitmq_ct_client_helpers/Makefile16
-rw-r--r--deps/rabbitmq_ct_client_helpers/WORKSPACE.bazel24
-rw-r--r--deps/rabbitmq_ct_client_helpers/erlang.mk (renamed from deps/amqp10_client/erlang.mk)366
-rw-r--r--deps/rabbitmq_ct_client_helpers/rabbitmq-components.mk (renamed from deps/amqp10_client/rabbitmq-components.mk)2
-rw-r--r--deps/rabbitmq_ct_client_helpers/src/rabbit_ct_client_helpers.erl302
-rw-r--r--deps/rabbitmq_ct_helpers/.gitignore27
-rw-r--r--deps/rabbitmq_ct_helpers/BUILD.bazel14
-rw-r--r--deps/rabbitmq_ct_helpers/CODE_OF_CONDUCT.md44
-rw-r--r--deps/rabbitmq_ct_helpers/CONTRIBUTING.md38
-rw-r--r--deps/rabbitmq_ct_helpers/LICENSE12
-rw-r--r--deps/rabbitmq_ct_helpers/LICENSE-APACHE2202
-rw-r--r--deps/rabbitmq_ct_helpers/LICENSE-MPL-RabbitMQ373
-rw-r--r--deps/rabbitmq_ct_helpers/Makefile25
-rw-r--r--deps/rabbitmq_ct_helpers/WORKSPACE.bazel18
-rw-r--r--deps/rabbitmq_ct_helpers/erlang.mk (renamed from deps/amqp10_common/erlang.mk)366
-rw-r--r--deps/rabbitmq_ct_helpers/include/rabbit_assert.hrl49
-rw-r--r--deps/rabbitmq_ct_helpers/include/rabbit_mgmt_test.hrl11
-rw-r--r--deps/rabbitmq_ct_helpers/rabbitmq-components.mk (renamed from deps/amqp10_common/rabbitmq-components.mk)6
-rw-r--r--deps/rabbitmq_ct_helpers/src/rabbit_control_helper.erl46
-rw-r--r--deps/rabbitmq_ct_helpers/src/rabbit_ct_broker_helpers.erl1921
-rw-r--r--deps/rabbitmq_ct_helpers/src/rabbit_ct_config_schema.erl107
-rw-r--r--deps/rabbitmq_ct_helpers/src/rabbit_ct_helpers.erl1056
-rw-r--r--deps/rabbitmq_ct_helpers/src/rabbit_ct_proper_helpers.erl21
-rw-r--r--deps/rabbitmq_ct_helpers/src/rabbit_ct_vm_helpers.erl1140
-rw-r--r--deps/rabbitmq_ct_helpers/src/rabbit_mgmt_test_util.erl323
-rw-r--r--deps/rabbitmq_ct_helpers/test/terraform_SUITE.erl166
-rw-r--r--deps/rabbitmq_ct_helpers/tools/terraform/autoscaling-group/main.tf78
-rw-r--r--deps/rabbitmq_ct_helpers/tools/terraform/autoscaling-group/outputs.tf5
-rwxr-xr-xdeps/rabbitmq_ct_helpers/tools/terraform/autoscaling-group/setup-vms.sh178
-rw-r--r--deps/rabbitmq_ct_helpers/tools/terraform/autoscaling-group/variables.tf80
-rw-r--r--deps/rabbitmq_ct_helpers/tools/terraform/direct-vms/main.tf234
-rw-r--r--deps/rabbitmq_ct_helpers/tools/terraform/direct-vms/outputs.tf59
-rwxr-xr-xdeps/rabbitmq_ct_helpers/tools/terraform/direct-vms/setup-vms.sh187
-rw-r--r--deps/rabbitmq_ct_helpers/tools/terraform/direct-vms/templates/setup-erlang.sh264
-rw-r--r--deps/rabbitmq_ct_helpers/tools/terraform/direct-vms/variables.tf147
-rw-r--r--deps/rabbitmq_ct_helpers/tools/terraform/vms-query/main.tf31
-rw-r--r--deps/rabbitmq_ct_helpers/tools/terraform/vms-query/outputs.tf21
-rwxr-xr-xdeps/rabbitmq_ct_helpers/tools/terraform/vms-query/query-vms.sh55
-rw-r--r--deps/rabbitmq_ct_helpers/tools/terraform/vms-query/variables.tf14
-rw-r--r--deps/rabbitmq_ct_helpers/tools/tls-certs/.gitignore1
-rw-r--r--deps/rabbitmq_ct_helpers/tools/tls-certs/Makefile70
-rw-r--r--deps/rabbitmq_ct_helpers/tools/tls-certs/openssl.cnf.in62
-rw-r--r--deps/rabbitmq_event_exchange/BUILD.bazel60
-rw-r--r--deps/rabbitmq_event_exchange/Makefile4
-rw-r--r--deps/rabbitmq_event_exchange/erlang.mk7808
-rw-r--r--deps/rabbitmq_event_exchange/rabbitmq-components.mk359
-rw-r--r--deps/rabbitmq_event_exchange/src/rabbit_event_exchange_decorator.erl2
-rw-r--r--deps/rabbitmq_event_exchange/src/rabbit_exchange_type_event.erl6
-rw-r--r--deps/rabbitmq_event_exchange/test/config_schema_SUITE.erl2
-rw-r--r--deps/rabbitmq_event_exchange/test/system_SUITE.erl2
-rw-r--r--deps/rabbitmq_event_exchange/test/unit_SUITE.erl2
-rw-r--r--deps/rabbitmq_federation/BUILD.bazel135
-rw-r--r--deps/rabbitmq_federation/Makefile4
-rw-r--r--deps/rabbitmq_federation/erlang.mk7808
-rw-r--r--deps/rabbitmq_federation/include/logging.hrl3
-rw-r--r--deps/rabbitmq_federation/include/rabbit_federation.hrl8
-rw-r--r--deps/rabbitmq_federation/rabbitmq-components.mk359
-rw-r--r--deps/rabbitmq_federation/src/Elixir.RabbitMQ.CLI.Ctl.Commands.FederationStatusCommand.erl4
-rw-r--r--deps/rabbitmq_federation/src/Elixir.RabbitMQ.CLI.Ctl.Commands.RestartFederationLinkCommand.erl4
-rw-r--r--deps/rabbitmq_federation/src/rabbit_federation_app.erl18
-rw-r--r--deps/rabbitmq_federation/src/rabbit_federation_db.erl2
-rw-r--r--deps/rabbitmq_federation/src/rabbit_federation_event.erl2
-rw-r--r--deps/rabbitmq_federation/src/rabbit_federation_exchange.erl2
-rw-r--r--deps/rabbitmq_federation/src/rabbit_federation_exchange_link.erl58
-rw-r--r--deps/rabbitmq_federation/src/rabbit_federation_exchange_link_sup_sup.erl7
-rw-r--r--deps/rabbitmq_federation/src/rabbit_federation_link_sup.erl2
-rw-r--r--deps/rabbitmq_federation/src/rabbit_federation_link_util.erl14
-rw-r--r--deps/rabbitmq_federation/src/rabbit_federation_parameters.erl9
-rw-r--r--deps/rabbitmq_federation/src/rabbit_federation_pg.erl25
-rw-r--r--deps/rabbitmq_federation/src/rabbit_federation_queue.erl5
-rw-r--r--deps/rabbitmq_federation/src/rabbit_federation_queue_link.erl49
-rw-r--r--deps/rabbitmq_federation/src/rabbit_federation_queue_link_sup_sup.erl9
-rw-r--r--deps/rabbitmq_federation/src/rabbit_federation_status.erl2
-rw-r--r--deps/rabbitmq_federation/src/rabbit_federation_sup.erl46
-rw-r--r--deps/rabbitmq_federation/src/rabbit_federation_upstream.erl6
-rw-r--r--deps/rabbitmq_federation/src/rabbit_federation_upstream_exchange.erl2
-rw-r--r--deps/rabbitmq_federation/src/rabbit_federation_util.erl2
-rw-r--r--deps/rabbitmq_federation/src/rabbit_log_federation.erl107
-rw-r--r--deps/rabbitmq_federation/test/exchange_SUITE.erl306
-rw-r--r--deps/rabbitmq_federation/test/federation_status_command_SUITE.erl29
-rw-r--r--deps/rabbitmq_federation/test/queue_SUITE.erl209
-rw-r--r--deps/rabbitmq_federation/test/rabbit_federation_status_SUITE.erl2
-rw-r--r--deps/rabbitmq_federation/test/rabbit_federation_test_util.erl50
-rw-r--r--deps/rabbitmq_federation/test/restart_federation_link_command_SUITE.erl2
-rw-r--r--deps/rabbitmq_federation/test/unit_SUITE.erl2
-rw-r--r--deps/rabbitmq_federation/test/unit_inbroker_SUITE.erl2
-rw-r--r--deps/rabbitmq_federation_management/BUILD.bazel64
-rw-r--r--deps/rabbitmq_federation_management/Makefile4
-rw-r--r--deps/rabbitmq_federation_management/erlang.mk7808
-rw-r--r--deps/rabbitmq_federation_management/priv/www/js/tmpl/federation-upstreams.ejs2
-rw-r--r--deps/rabbitmq_federation_management/rabbitmq-components.mk359
-rw-r--r--deps/rabbitmq_federation_management/src/rabbit_federation_mgmt.erl2
-rw-r--r--deps/rabbitmq_federation_management/test/federation_mgmt_SUITE.erl10
-rw-r--r--deps/rabbitmq_jms_topic_exchange/BUILD.bazel69
-rw-r--r--deps/rabbitmq_jms_topic_exchange/CONTRIBUTING.md4
-rw-r--r--deps/rabbitmq_jms_topic_exchange/LICENSES.txt736
-rw-r--r--deps/rabbitmq_jms_topic_exchange/Makefile4
-rw-r--r--deps/rabbitmq_jms_topic_exchange/erlang.mk7808
-rw-r--r--deps/rabbitmq_jms_topic_exchange/rabbitmq-components.mk359
-rw-r--r--deps/rabbitmq_jms_topic_exchange/src/rabbit_jms_topic_exchange.erl2
-rw-r--r--deps/rabbitmq_jms_topic_exchange/src/sjx_evaluator.erl2
-rw-r--r--deps/rabbitmq_jms_topic_exchange/test/rjms_topic_selector_SUITE.erl2
-rw-r--r--deps/rabbitmq_jms_topic_exchange/test/rjms_topic_selector_unit_SUITE.erl2
-rw-r--r--deps/rabbitmq_management/.gitignore8
-rw-r--r--deps/rabbitmq_management/BUILD.bazel203
-rw-r--r--deps/rabbitmq_management/Makefile4
-rw-r--r--deps/rabbitmq_management/README.md2
-rw-r--r--deps/rabbitmq_management/erlang.mk7808
-rw-r--r--deps/rabbitmq_management/include/rabbit_mgmt.hrl20
-rw-r--r--deps/rabbitmq_management/priv/www/api/index.html101
-rw-r--r--deps/rabbitmq_management/priv/www/js/dispatcher.js36
-rw-r--r--deps/rabbitmq_management/priv/www/js/formatters.js39
-rw-r--r--deps/rabbitmq_management/priv/www/js/global.js27
-rw-r--r--deps/rabbitmq_management/priv/www/js/main.js40
-rw-r--r--deps/rabbitmq_management/priv/www/js/tmpl/channel.ejs2
-rw-r--r--deps/rabbitmq_management/priv/www/js/tmpl/connection.ejs4
-rw-r--r--deps/rabbitmq_management/priv/www/js/tmpl/consumers.ejs4
-rw-r--r--deps/rabbitmq_management/priv/www/js/tmpl/policies.ejs9
-rw-r--r--deps/rabbitmq_management/priv/www/js/tmpl/queue.ejs40
-rw-r--r--deps/rabbitmq_management/priv/www/js/tmpl/queues.ejs10
-rw-r--r--deps/rabbitmq_management/rabbitmq-components.mk359
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_app.erl9
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_cors.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_csp.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_db.erl14
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_db_cache.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_db_cache_sup.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_dispatcher.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_extension.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_headers.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_hsts.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_load_definitions.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_reset_handler.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_stats.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_sup.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_sup_sup.erl39
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_util.erl21
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_aliveness_test.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_auth.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_auth_attempts.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_binding.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_bindings.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_channel.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_channels.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_channels_vhost.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_cluster_name.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_connection.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_connection_channels.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_connections.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_connections_vhost.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_consumers.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_definitions.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_exchange.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_exchange_publish.erl5
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_exchanges.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_extensions.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_feature_flag_enable.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_feature_flags.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_global_parameter.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_global_parameters.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_alarms.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_certificate_expiration.erl4
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_local_alarms.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_node_is_mirror_sync_critical.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_node_is_quorum_critical.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_port_listener.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_protocol_listener.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_virtual_hosts.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_healthchecks.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_limit.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_limits.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_login.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_node.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_node_memory.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_node_memory_ets.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_nodes.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_operator_policies.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_operator_policy.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_overview.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_parameter.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_parameters.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_permission.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_permissions.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_permissions_user.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_permissions_vhost.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_policies.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_policy.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_queue.erl16
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_queue_actions.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_queue_get.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_queue_purge.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_queues.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_rebalance_queues.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_redirect.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_reset.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_static.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_topic_permission.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_topic_permissions.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_topic_permissions_user.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_topic_permissions_vhost.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_user.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_user_limit.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_user_limits.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_users.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_users_bulk_delete.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_vhost.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_vhost_restart.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_vhosts.erl2
-rw-r--r--deps/rabbitmq_management/src/rabbit_mgmt_wm_whoami.erl2
-rw-r--r--deps/rabbitmq_management/test/cache_SUITE.erl2
-rw-r--r--deps/rabbitmq_management/test/clustering_SUITE.erl2
-rw-r--r--deps/rabbitmq_management/test/clustering_prop_SUITE.erl2
-rw-r--r--deps/rabbitmq_management/test/config_schema_SUITE.erl2
-rw-r--r--deps/rabbitmq_management/test/listener_config_SUITE.erl2
-rw-r--r--deps/rabbitmq_management/test/rabbit_mgmt_http_SUITE.erl80
-rw-r--r--deps/rabbitmq_management/test/rabbit_mgmt_http_health_checks_SUITE.erl11
-rw-r--r--deps/rabbitmq_management/test/rabbit_mgmt_only_http_SUITE.erl33
-rw-r--r--deps/rabbitmq_management/test/rabbit_mgmt_rabbitmqadmin_SUITE.erl4
-rw-r--r--deps/rabbitmq_management/test/rabbit_mgmt_runtime_parameters_util.erl2
-rw-r--r--deps/rabbitmq_management/test/rabbit_mgmt_stats_SUITE.erl2
-rw-r--r--deps/rabbitmq_management/test/rabbit_mgmt_test_db_SUITE.erl2
-rw-r--r--deps/rabbitmq_management/test/rabbit_mgmt_test_unit_SUITE.erl2
-rw-r--r--deps/rabbitmq_management/test/stats_SUITE.erl2
-rw-r--r--deps/rabbitmq_management_agent/BUILD.bazel111
-rw-r--r--deps/rabbitmq_management_agent/CONTRIBUTING.md4
-rw-r--r--deps/rabbitmq_management_agent/Makefile4
-rw-r--r--deps/rabbitmq_management_agent/erlang.mk7808
-rw-r--r--deps/rabbitmq_management_agent/include/rabbit_mgmt_agent.hrl9
-rw-r--r--deps/rabbitmq_management_agent/include/rabbit_mgmt_metrics.hrl17
-rw-r--r--deps/rabbitmq_management_agent/include/rabbit_mgmt_records.hrl17
-rw-r--r--deps/rabbitmq_management_agent/priv/schema/rabbitmq_management_agent.schema1
-rw-r--r--deps/rabbitmq_management_agent/rabbitmq-components.mk359
-rw-r--r--deps/rabbitmq_management_agent/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ResetStatsDbCommand.erl4
-rw-r--r--deps/rabbitmq_management_agent/src/exometer_slide.erl2
-rw-r--r--deps/rabbitmq_management_agent/src/rabbit_mgmt_agent_app.erl2
-rw-r--r--deps/rabbitmq_management_agent/src/rabbit_mgmt_agent_config.erl2
-rw-r--r--deps/rabbitmq_management_agent/src/rabbit_mgmt_agent_sup.erl16
-rw-r--r--deps/rabbitmq_management_agent/src/rabbit_mgmt_agent_sup_sup.erl37
-rw-r--r--deps/rabbitmq_management_agent/src/rabbit_mgmt_data.erl24
-rw-r--r--deps/rabbitmq_management_agent/src/rabbit_mgmt_data_compat.erl2
-rw-r--r--deps/rabbitmq_management_agent/src/rabbit_mgmt_db_handler.erl14
-rw-r--r--deps/rabbitmq_management_agent/src/rabbit_mgmt_external_stats.erl12
-rw-r--r--deps/rabbitmq_management_agent/src/rabbit_mgmt_ff.erl2
-rw-r--r--deps/rabbitmq_management_agent/src/rabbit_mgmt_format.erl17
-rw-r--r--deps/rabbitmq_management_agent/src/rabbit_mgmt_gc.erl4
-rw-r--r--deps/rabbitmq_management_agent/src/rabbit_mgmt_metrics_collector.erl34
-rw-r--r--deps/rabbitmq_management_agent/src/rabbit_mgmt_metrics_gc.erl2
-rw-r--r--deps/rabbitmq_management_agent/src/rabbit_mgmt_storage.erl2
-rw-r--r--deps/rabbitmq_management_agent/test/exometer_slide_SUITE.erl2
-rw-r--r--deps/rabbitmq_management_agent/test/metrics_SUITE.erl2
-rw-r--r--deps/rabbitmq_management_agent/test/rabbit_mgmt_gc_SUITE.erl23
-rw-r--r--deps/rabbitmq_management_agent/test/rabbit_mgmt_slide_SUITE.erl2
-rw-r--r--deps/rabbitmq_mqtt/.gitignore1
-rw-r--r--deps/rabbitmq_mqtt/BUILD.bazel177
-rw-r--r--deps/rabbitmq_mqtt/Makefile4
-rw-r--r--deps/rabbitmq_mqtt/erlang.mk7808
-rw-r--r--deps/rabbitmq_mqtt/include/mqtt_machine.hrl9
-rw-r--r--deps/rabbitmq_mqtt/include/mqtt_machine_v0.hrl (renamed from deps/rabbit_common/include/rabbit_log.hrl)4
-rw-r--r--deps/rabbitmq_mqtt/include/rabbit_mqtt.hrl2
-rw-r--r--deps/rabbitmq_mqtt/include/rabbit_mqtt_frame.hrl2
-rw-r--r--deps/rabbitmq_mqtt/include/rabbit_mqtt_retained_msg_store.hrl2
-rw-r--r--deps/rabbitmq_mqtt/rabbitmq-components.mk359
-rw-r--r--deps/rabbitmq_mqtt/src/Elixir.RabbitMQ.CLI.Ctl.Commands.DecommissionMqttNodeCommand.erl4
-rw-r--r--deps/rabbitmq_mqtt/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ListMqttConnectionsCommand.erl10
-rw-r--r--deps/rabbitmq_mqtt/src/mqtt_machine.erl152
-rw-r--r--deps/rabbitmq_mqtt/src/mqtt_machine_v0.erl134
-rw-r--r--deps/rabbitmq_mqtt/src/mqtt_node.erl22
-rw-r--r--deps/rabbitmq_mqtt/src/rabbit_mqtt.erl2
-rw-r--r--deps/rabbitmq_mqtt/src/rabbit_mqtt_collector.erl2
-rw-r--r--deps/rabbitmq_mqtt/src/rabbit_mqtt_connection_info.erl2
-rw-r--r--deps/rabbitmq_mqtt/src/rabbit_mqtt_connection_sup.erl6
-rw-r--r--deps/rabbitmq_mqtt/src/rabbit_mqtt_frame.erl2
-rw-r--r--deps/rabbitmq_mqtt/src/rabbit_mqtt_internal_event_handler.erl2
-rw-r--r--deps/rabbitmq_mqtt/src/rabbit_mqtt_processor.erl229
-rw-r--r--deps/rabbitmq_mqtt/src/rabbit_mqtt_reader.erl50
-rw-r--r--deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store.erl2
-rw-r--r--deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store_dets.erl2
-rw-r--r--deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store_ets.erl2
-rw-r--r--deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store_noop.erl2
-rw-r--r--deps/rabbitmq_mqtt/src/rabbit_mqtt_retainer.erl2
-rw-r--r--deps/rabbitmq_mqtt/src/rabbit_mqtt_retainer_sup.erl4
-rw-r--r--deps/rabbitmq_mqtt/src/rabbit_mqtt_sup.erl16
-rw-r--r--deps/rabbitmq_mqtt/src/rabbit_mqtt_util.erl2
-rw-r--r--deps/rabbitmq_mqtt/test/auth_SUITE.erl344
-rw-r--r--deps/rabbitmq_mqtt/test/cluster_SUITE.erl2
-rw-r--r--deps/rabbitmq_mqtt/test/command_SUITE.erl2
-rw-r--r--deps/rabbitmq_mqtt/test/config_schema_SUITE.erl2
-rw-r--r--deps/rabbitmq_mqtt/test/java_SUITE.erl2
-rw-r--r--deps/rabbitmq_mqtt/test/java_SUITE_data/src/test/java/com/rabbitmq/mqtt/test/MqttTest.java4
-rw-r--r--deps/rabbitmq_mqtt/test/mqtt_machine_SUITE.erl54
-rw-r--r--deps/rabbitmq_mqtt/test/processor_SUITE.erl2
-rw-r--r--deps/rabbitmq_mqtt/test/proxy_protocol_SUITE.erl6
-rw-r--r--deps/rabbitmq_mqtt/test/rabbit_auth_backend_mqtt_mock.erl14
-rw-r--r--deps/rabbitmq_mqtt/test/reader_SUITE.erl2
-rw-r--r--deps/rabbitmq_mqtt/test/retainer_SUITE.erl2
-rw-r--r--deps/rabbitmq_mqtt/test/util_SUITE.erl2
-rw-r--r--deps/rabbitmq_peer_discovery_aws/BUILD.bazel94
-rw-r--r--deps/rabbitmq_peer_discovery_aws/Makefile6
-rw-r--r--deps/rabbitmq_peer_discovery_aws/README.md2
-rw-r--r--deps/rabbitmq_peer_discovery_aws/erlang.mk7808
-rw-r--r--deps/rabbitmq_peer_discovery_aws/priv/schema/rabbitmq_peer_discovery_aws.schema2
-rw-r--r--deps/rabbitmq_peer_discovery_aws/rabbitmq-components.mk359
-rw-r--r--deps/rabbitmq_peer_discovery_aws/src/rabbit_peer_discovery_aws.erl104
-rw-r--r--deps/rabbitmq_peer_discovery_aws/src/rabbitmq_peer_discovery_aws.erl6
-rw-r--r--deps/rabbitmq_peer_discovery_aws/test/aws_ecs_util.erl358
-rw-r--r--deps/rabbitmq_peer_discovery_aws/test/config_schema_SUITE.erl2
-rw-r--r--deps/rabbitmq_peer_discovery_aws/test/integration_SUITE.erl257
-rw-r--r--deps/rabbitmq_peer_discovery_aws/test/integration_SUITE_data/task_definition.json77
-rw-r--r--deps/rabbitmq_peer_discovery_aws/test/unit_SUITE.erl (renamed from deps/rabbitmq_peer_discovery_aws/test/rabbitmq_peer_discovery_aws_SUITE.erl)50
-rw-r--r--deps/rabbitmq_peer_discovery_common/BUILD.bazel69
-rw-r--r--deps/rabbitmq_peer_discovery_common/Makefile4
-rw-r--r--deps/rabbitmq_peer_discovery_common/erlang.mk7808
-rw-r--r--deps/rabbitmq_peer_discovery_common/include/rabbit_peer_discovery.hrl8
-rw-r--r--deps/rabbitmq_peer_discovery_common/rabbitmq-components.mk359
-rw-r--r--deps/rabbitmq_peer_discovery_common/src/rabbit_peer_discovery_cleanup.erl63
-rw-r--r--deps/rabbitmq_peer_discovery_common/src/rabbit_peer_discovery_common_app.erl2
-rw-r--r--deps/rabbitmq_peer_discovery_common/src/rabbit_peer_discovery_common_sup.erl2
-rw-r--r--deps/rabbitmq_peer_discovery_common/src/rabbit_peer_discovery_config.erl13
-rw-r--r--deps/rabbitmq_peer_discovery_common/src/rabbit_peer_discovery_httpc.erl54
-rw-r--r--deps/rabbitmq_peer_discovery_common/src/rabbit_peer_discovery_util.erl62
-rw-r--r--deps/rabbitmq_peer_discovery_common/test/config_schema_SUITE.erl2
-rw-r--r--deps/rabbitmq_peer_discovery_consul/BUILD.bazel65
-rw-r--r--deps/rabbitmq_peer_discovery_consul/Makefile4
-rw-r--r--deps/rabbitmq_peer_discovery_consul/erlang.mk7808
-rw-r--r--deps/rabbitmq_peer_discovery_consul/include/rabbit_peer_discovery_consul.hrl2
-rw-r--r--deps/rabbitmq_peer_discovery_consul/rabbitmq-components.mk359
-rw-r--r--deps/rabbitmq_peer_discovery_consul/src/rabbit_peer_discovery_consul.erl108
-rw-r--r--deps/rabbitmq_peer_discovery_consul/src/rabbitmq_peer_discovery_consul.erl2
-rw-r--r--deps/rabbitmq_peer_discovery_consul/src/rabbitmq_peer_discovery_consul_app.erl2
-rw-r--r--deps/rabbitmq_peer_discovery_consul/src/rabbitmq_peer_discovery_consul_health_check_helper.erl2
-rw-r--r--deps/rabbitmq_peer_discovery_consul/src/rabbitmq_peer_discovery_consul_sup.erl2
-rw-r--r--deps/rabbitmq_peer_discovery_consul/test/config_schema_SUITE.erl2
-rw-r--r--deps/rabbitmq_peer_discovery_consul/test/rabbitmq_peer_discovery_consul_SUITE.erl8
-rw-r--r--deps/rabbitmq_peer_discovery_etcd/BUILD.bazel69
-rw-r--r--deps/rabbitmq_peer_discovery_etcd/Makefile4
-rw-r--r--deps/rabbitmq_peer_discovery_etcd/erlang.mk7808
-rw-r--r--deps/rabbitmq_peer_discovery_etcd/include/rabbit_peer_discovery_etcd.hrl2
-rw-r--r--deps/rabbitmq_peer_discovery_etcd/rabbitmq-components.mk359
-rw-r--r--deps/rabbitmq_peer_discovery_etcd/src/rabbit_peer_discovery_etcd.erl2
-rw-r--r--deps/rabbitmq_peer_discovery_etcd/src/rabbitmq_peer_discovery_etcd.erl2
-rw-r--r--deps/rabbitmq_peer_discovery_etcd/src/rabbitmq_peer_discovery_etcd_app.erl2
-rw-r--r--deps/rabbitmq_peer_discovery_etcd/src/rabbitmq_peer_discovery_etcd_sup.erl2
-rw-r--r--deps/rabbitmq_peer_discovery_etcd/src/rabbitmq_peer_discovery_etcd_v3_client.erl4
-rw-r--r--deps/rabbitmq_peer_discovery_etcd/test/config_schema_SUITE.erl2
-rw-r--r--deps/rabbitmq_peer_discovery_etcd/test/config_schema_SUITE_data/rabbitmq_peer_discovery_etcd.snippets6
-rw-r--r--deps/rabbitmq_peer_discovery_etcd/test/system_SUITE.erl2
-rw-r--r--deps/rabbitmq_peer_discovery_etcd/test/unit_SUITE.erl2
-rw-r--r--deps/rabbitmq_peer_discovery_k8s/BUILD.bazel65
-rw-r--r--deps/rabbitmq_peer_discovery_k8s/Makefile4
-rw-r--r--deps/rabbitmq_peer_discovery_k8s/erlang.mk7808
-rw-r--r--deps/rabbitmq_peer_discovery_k8s/include/rabbit_peer_discovery_k8s.hrl2
-rw-r--r--deps/rabbitmq_peer_discovery_k8s/rabbitmq-components.mk359
-rw-r--r--deps/rabbitmq_peer_discovery_k8s/src/rabbit_peer_discovery_k8s.erl115
-rw-r--r--deps/rabbitmq_peer_discovery_k8s/src/rabbitmq_peer_discovery_k8s.erl15
-rw-r--r--deps/rabbitmq_peer_discovery_k8s/src/rabbitmq_peer_discovery_k8s_app.erl2
-rw-r--r--deps/rabbitmq_peer_discovery_k8s/src/rabbitmq_peer_discovery_k8s_node_monitor.erl2
-rw-r--r--deps/rabbitmq_peer_discovery_k8s/src/rabbitmq_peer_discovery_k8s_sup.erl2
-rw-r--r--deps/rabbitmq_peer_discovery_k8s/test/config_schema_SUITE.erl2
-rw-r--r--deps/rabbitmq_peer_discovery_k8s/test/config_schema_SUITE_data/rabbitmq_peer_discovery_k8s.snippets8
-rw-r--r--deps/rabbitmq_peer_discovery_k8s/test/rabbitmq_peer_discovery_k8s_SUITE.erl60
-rw-r--r--deps/rabbitmq_prometheus/.dockerignore8
-rw-r--r--deps/rabbitmq_prometheus/BUILD.bazel73
-rw-r--r--deps/rabbitmq_prometheus/Dockerfile315
-rw-r--r--deps/rabbitmq_prometheus/Makefile173
-rw-r--r--deps/rabbitmq_prometheus/README.md23
-rw-r--r--deps/rabbitmq_prometheus/docker/docker-compose-dist-metrics.yml77
-rw-r--r--deps/rabbitmq_prometheus/docker/docker-compose-dist-tls.yml15
-rw-r--r--deps/rabbitmq_prometheus/docker/docker-compose-metrics.yml6
-rw-r--r--deps/rabbitmq_prometheus/docker/docker-compose-overview.yml19
-rw-r--r--deps/rabbitmq_prometheus/docker/docker-compose-qq.yml9
-rw-r--r--deps/rabbitmq_prometheus/docker/grafana/dashboards/Erlang-Distribution.json291
-rw-r--r--deps/rabbitmq_prometheus/docker/grafana/dashboards/Erlang-Distributions-Compare.json74
-rw-r--r--deps/rabbitmq_prometheus/docker/grafana/dashboards/Erlang-Memory-Allocators.json148
-rw-r--r--deps/rabbitmq_prometheus/docker/grafana/dashboards/RabbitMQ-Overview.json714
-rw-r--r--deps/rabbitmq_prometheus/docker/grafana/dashboards/RabbitMQ-PerfTest.json24
-rw-r--r--deps/rabbitmq_prometheus/docker/grafana/dashboards/RabbitMQ-Quorum-Queues-Raft.json150
-rw-r--r--deps/rabbitmq_prometheus/docker/grafana/dashboards/RabbitMQ-Stream.json3005
-rw-r--r--deps/rabbitmq_prometheus/docker/grafana/dashboards/inet_tcp_metrics.json1715
-rw-r--r--deps/rabbitmq_prometheus/docker/grafana/dashboards/rabbitmq-exporter_vs_rabbitmq-prometheus.json4
-rw-r--r--deps/rabbitmq_prometheus/docker/grafana/publish/rabbitmq-stream-14798.md32
-rw-r--r--deps/rabbitmq_prometheus/docker/grafana/publish/rabbitmq-stream-bottom-2021-07-29-original.pngbin0 -> 1003243 bytes
-rw-r--r--deps/rabbitmq_prometheus/docker/grafana/publish/rabbitmq-stream-bottom-2021-07-29.jpgbin0 -> 499192 bytes
-rw-r--r--deps/rabbitmq_prometheus/docker/grafana/publish/rabbitmq-stream-logo-2021-07-29-original.jpgbin0 -> 424228 bytes
-rw-r--r--deps/rabbitmq_prometheus/docker/grafana/publish/rabbitmq-stream-top-2021-07-29-original.pngbin0 -> 1375550 bytes
-rw-r--r--deps/rabbitmq_prometheus/docker/grafana/publish/rabbitmq-stream-top-2021-07-29.jpgbin0 -> 498885 bytes
-rw-r--r--deps/rabbitmq_prometheus/docker/prometheus.yml3
-rw-r--r--deps/rabbitmq_prometheus/docker/rabbitmq-dist-metrics-definitions.json49
-rw-r--r--deps/rabbitmq_prometheus/docker/rabbitmq-dist-metrics.conf32
-rw-r--r--deps/rabbitmq_prometheus/docker/rabbitmq-dist-tls.conf2
-rw-r--r--deps/rabbitmq_prometheus/docker/rabbitmq-overview.conf2
-rw-r--r--deps/rabbitmq_prometheus/docker/rabbitmq-qq.conf2
-rw-r--r--deps/rabbitmq_prometheus/erlang.mk7686
-rw-r--r--deps/rabbitmq_prometheus/metrics-detailed.md267
-rw-r--r--deps/rabbitmq_prometheus/metrics.md340
-rw-r--r--deps/rabbitmq_prometheus/priv/schema/rabbitmq_prometheus.schema35
-rw-r--r--deps/rabbitmq_prometheus/rabbitmq-components.mk359
-rw-r--r--deps/rabbitmq_prometheus/src/collectors/prometheus_rabbitmq_alarm_metrics_collector.erl80
-rw-r--r--deps/rabbitmq_prometheus/src/collectors/prometheus_rabbitmq_core_metrics_collector.erl435
-rw-r--r--deps/rabbitmq_prometheus/src/collectors/prometheus_rabbitmq_global_metrics_collector.erl50
-rw-r--r--deps/rabbitmq_prometheus/src/rabbit_prometheus_app.erl16
-rw-r--r--deps/rabbitmq_prometheus/src/rabbit_prometheus_dispatcher.erl20
-rw-r--r--deps/rabbitmq_prometheus/src/rabbit_prometheus_handler.erl67
-rw-r--r--deps/rabbitmq_prometheus/test/config_schema_SUITE.erl2
-rw-r--r--deps/rabbitmq_prometheus/test/config_schema_SUITE_data/rabbitmq_prometheus.snippets60
-rw-r--r--deps/rabbitmq_prometheus/test/rabbit_prometheus_http_SUITE.erl358
-rw-r--r--deps/rabbitmq_random_exchange/BUILD.bazel34
-rw-r--r--deps/rabbitmq_random_exchange/CONTRIBUTING.md4
-rw-r--r--deps/rabbitmq_random_exchange/Makefile4
-rw-r--r--deps/rabbitmq_random_exchange/erlang.mk7808
-rw-r--r--deps/rabbitmq_random_exchange/rabbitmq-components.mk359
-rw-r--r--deps/rabbitmq_random_exchange/src/rabbit_exchange_type_random.erl2
-rw-r--r--deps/rabbitmq_recent_history_exchange/BUILD.bazel53
-rw-r--r--deps/rabbitmq_recent_history_exchange/CONTRIBUTING.md4
-rw-r--r--deps/rabbitmq_recent_history_exchange/Makefile4
-rw-r--r--deps/rabbitmq_recent_history_exchange/erlang.mk7808
-rw-r--r--deps/rabbitmq_recent_history_exchange/include/rabbit_recent_history.hrl2
-rw-r--r--deps/rabbitmq_recent_history_exchange/rabbitmq-components.mk359
-rw-r--r--deps/rabbitmq_recent_history_exchange/src/rabbit_exchange_type_recent_history.erl4
-rw-r--r--deps/rabbitmq_recent_history_exchange/test/system_SUITE.erl2
-rw-r--r--deps/rabbitmq_sharding/BUILD.bazel56
-rw-r--r--deps/rabbitmq_sharding/CONTRIBUTING.md4
-rw-r--r--deps/rabbitmq_sharding/Makefile4
-rw-r--r--deps/rabbitmq_sharding/README.md54
-rw-r--r--deps/rabbitmq_sharding/erlang.mk7808
-rw-r--r--deps/rabbitmq_sharding/rabbitmq-components.mk359
-rw-r--r--deps/rabbitmq_sharding/src/rabbit_sharding_exchange_decorator.erl2
-rw-r--r--deps/rabbitmq_sharding/src/rabbit_sharding_exchange_type_modulus_hash.erl2
-rw-r--r--deps/rabbitmq_sharding/src/rabbit_sharding_interceptor.erl2
-rw-r--r--deps/rabbitmq_sharding/src/rabbit_sharding_policy_validator.erl2
-rw-r--r--deps/rabbitmq_sharding/src/rabbit_sharding_shard.erl4
-rw-r--r--deps/rabbitmq_sharding/src/rabbit_sharding_util.erl2
-rw-r--r--deps/rabbitmq_sharding/test/rabbit_hash_exchange_SUITE.erl (renamed from deps/rabbitmq_sharding/test/src/rabbit_hash_exchange_SUITE.erl)2
-rw-r--r--deps/rabbitmq_sharding/test/rabbit_sharding_SUITE.erl (renamed from deps/rabbitmq_sharding/test/src/rabbit_sharding_SUITE.erl)2
-rw-r--r--deps/rabbitmq_shovel/BUILD.bazel163
-rw-r--r--deps/rabbitmq_shovel/Makefile4
-rw-r--r--deps/rabbitmq_shovel/erlang.mk7808
-rw-r--r--deps/rabbitmq_shovel/include/logging.hrl3
-rw-r--r--deps/rabbitmq_shovel/include/rabbit_shovel.hrl2
-rw-r--r--deps/rabbitmq_shovel/rabbitmq-components.mk359
-rw-r--r--deps/rabbitmq_shovel/src/Elixir.RabbitMQ.CLI.Ctl.Commands.DeleteShovelCommand.erl7
-rw-r--r--deps/rabbitmq_shovel/src/Elixir.RabbitMQ.CLI.Ctl.Commands.RestartShovelCommand.erl4
-rw-r--r--deps/rabbitmq_shovel/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ShovelStatusCommand.erl4
-rw-r--r--deps/rabbitmq_shovel/src/rabbit_amqp091_shovel.erl17
-rw-r--r--deps/rabbitmq_shovel/src/rabbit_amqp10_shovel.erl16
-rw-r--r--deps/rabbitmq_shovel/src/rabbit_log_shovel.erl107
-rw-r--r--deps/rabbitmq_shovel/src/rabbit_shovel.erl2
-rw-r--r--deps/rabbitmq_shovel/src/rabbit_shovel_behaviour.erl8
-rw-r--r--deps/rabbitmq_shovel/src/rabbit_shovel_config.erl2
-rw-r--r--deps/rabbitmq_shovel/src/rabbit_shovel_dyn_worker_sup.erl15
-rw-r--r--deps/rabbitmq_shovel/src/rabbit_shovel_dyn_worker_sup_sup.erl48
-rw-r--r--deps/rabbitmq_shovel/src/rabbit_shovel_locks.erl32
-rw-r--r--deps/rabbitmq_shovel/src/rabbit_shovel_parameters.erl87
-rw-r--r--deps/rabbitmq_shovel/src/rabbit_shovel_status.erl17
-rw-r--r--deps/rabbitmq_shovel/src/rabbit_shovel_sup.erl2
-rw-r--r--deps/rabbitmq_shovel/src/rabbit_shovel_util.erl11
-rw-r--r--deps/rabbitmq_shovel/src/rabbit_shovel_worker.erl33
-rw-r--r--deps/rabbitmq_shovel/src/rabbit_shovel_worker_sup.erl2
-rw-r--r--deps/rabbitmq_shovel/test/amqp10_SUITE.erl2
-rw-r--r--deps/rabbitmq_shovel/test/amqp10_dynamic_SUITE.erl6
-rw-r--r--deps/rabbitmq_shovel/test/amqp10_shovel_SUITE.erl2
-rw-r--r--deps/rabbitmq_shovel/test/config_SUITE.erl2
-rw-r--r--deps/rabbitmq_shovel/test/configuration_SUITE.erl2
-rw-r--r--deps/rabbitmq_shovel/test/delete_shovel_command_SUITE.erl2
-rw-r--r--deps/rabbitmq_shovel/test/dynamic_SUITE.erl54
-rw-r--r--deps/rabbitmq_shovel/test/parameters_SUITE.erl30
-rw-r--r--deps/rabbitmq_shovel/test/shovel_status_command_SUITE.erl2
-rw-r--r--deps/rabbitmq_shovel/test/shovel_test_utils.erl2
-rw-r--r--deps/rabbitmq_shovel_management/BUILD.bazel74
-rw-r--r--deps/rabbitmq_shovel_management/Makefile4
-rw-r--r--deps/rabbitmq_shovel_management/erlang.mk7808
-rw-r--r--deps/rabbitmq_shovel_management/priv/www/js/shovel.js4
-rw-r--r--deps/rabbitmq_shovel_management/rabbitmq-components.mk359
-rw-r--r--deps/rabbitmq_shovel_management/src/rabbit_shovel_mgmt.erl2
-rw-r--r--deps/rabbitmq_shovel_management/test/http_SUITE.erl23
-rw-r--r--deps/rabbitmq_stomp/BUILD.bazel166
-rw-r--r--deps/rabbitmq_stomp/Makefile4
-rw-r--r--deps/rabbitmq_stomp/erlang.mk7808
-rw-r--r--deps/rabbitmq_stomp/include/rabbit_stomp.hrl2
-rw-r--r--deps/rabbitmq_stomp/include/rabbit_stomp_frame.hrl2
-rw-r--r--deps/rabbitmq_stomp/include/rabbit_stomp_headers.hrl3
-rw-r--r--deps/rabbitmq_stomp/rabbitmq-components.mk359
-rw-r--r--deps/rabbitmq_stomp/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ListStompConnectionsCommand.erl25
-rw-r--r--deps/rabbitmq_stomp/src/rabbit_stomp.erl39
-rw-r--r--deps/rabbitmq_stomp/src/rabbit_stomp_client_sup.erl21
-rw-r--r--deps/rabbitmq_stomp/src/rabbit_stomp_connection_info.erl18
-rw-r--r--deps/rabbitmq_stomp/src/rabbit_stomp_frame.erl37
-rw-r--r--deps/rabbitmq_stomp/src/rabbit_stomp_internal_event_handler.erl17
-rw-r--r--deps/rabbitmq_stomp/src/rabbit_stomp_processor.erl51
-rw-r--r--deps/rabbitmq_stomp/src/rabbit_stomp_reader.erl43
-rw-r--r--deps/rabbitmq_stomp/src/rabbit_stomp_sup.erl32
-rw-r--r--deps/rabbitmq_stomp/src/rabbit_stomp_util.erl19
-rw-r--r--deps/rabbitmq_stomp/test/command_SUITE.erl2
-rw-r--r--deps/rabbitmq_stomp/test/config_schema_SUITE.erl2
-rw-r--r--deps/rabbitmq_stomp/test/connections_SUITE.erl2
-rw-r--r--deps/rabbitmq_stomp/test/frame_SUITE.erl23
-rw-r--r--deps/rabbitmq_stomp/test/proxy_protocol_SUITE.erl8
-rw-r--r--deps/rabbitmq_stomp/test/python_SUITE.erl65
-rw-r--r--deps/rabbitmq_stomp/test/python_SUITE_data/deps/stomppy/Makefile2
-rw-r--r--deps/rabbitmq_stomp/test/python_SUITE_data/src/ack.py33
-rw-r--r--deps/rabbitmq_stomp/test/python_SUITE_data/src/amqp_headers.py18
-rw-r--r--deps/rabbitmq_stomp/test/python_SUITE_data/src/base.py382
-rw-r--r--deps/rabbitmq_stomp/test/python_SUITE_data/src/connect_disconnect.py135
-rw-r--r--deps/rabbitmq_stomp/test/python_SUITE_data/src/destinations.py174
-rw-r--r--deps/rabbitmq_stomp/test/python_SUITE_data/src/errors.py28
-rw-r--r--deps/rabbitmq_stomp/test/python_SUITE_data/src/implicit_connect.py (renamed from deps/rabbitmq_stomp/test/python_SUITE_data/src/connect_options.py)39
-rwxr-xr-xdeps/rabbitmq_stomp/test/python_SUITE_data/src/implicit_connect_runner.py9
-rw-r--r--deps/rabbitmq_stomp/test/python_SUITE_data/src/lifecycle.py187
-rwxr-xr-xdeps/rabbitmq_stomp/test/python_SUITE_data/src/main_runner.py (renamed from deps/rabbitmq_stomp/test/python_SUITE_data/src/test.py)8
-rw-r--r--deps/rabbitmq_stomp/test/python_SUITE_data/src/parsing.py8
-rw-r--r--deps/rabbitmq_stomp/test/python_SUITE_data/src/queue_properties.py8
-rw-r--r--deps/rabbitmq_stomp/test/python_SUITE_data/src/redelivered.py15
-rw-r--r--deps/rabbitmq_stomp/test/python_SUITE_data/src/reliability.py10
-rwxr-xr-xdeps/rabbitmq_stomp/test/python_SUITE_data/src/test_connect_options.py15
-rw-r--r--deps/rabbitmq_stomp/test/python_SUITE_data/src/test_runner.py2
-rw-r--r--deps/rabbitmq_stomp/test/python_SUITE_data/src/test_util.py4
-rw-r--r--deps/rabbitmq_stomp/test/python_SUITE_data/src/tls_connect_disconnect.py (renamed from deps/rabbitmq_stomp/test/python_SUITE_data/src/ssl_lifecycle.py)15
-rwxr-xr-xdeps/rabbitmq_stomp/test/python_SUITE_data/src/tls_runner.py (renamed from deps/rabbitmq_stomp/test/python_SUITE_data/src/test_ssl.py)4
-rw-r--r--deps/rabbitmq_stomp/test/python_SUITE_data/src/topic_permissions.py8
-rw-r--r--deps/rabbitmq_stomp/test/python_SUITE_data/src/transactions.py20
-rw-r--r--deps/rabbitmq_stomp/test/python_SUITE_data/src/unsubscribe.py88
-rw-r--r--deps/rabbitmq_stomp/test/python_SUITE_data/src/x_queue_name.py12
-rw-r--r--deps/rabbitmq_stomp/test/python_SUITE_data/src/x_queue_type_quorum.py11
-rw-r--r--deps/rabbitmq_stomp/test/python_SUITE_data/src/x_queue_type_stream.py72
-rw-r--r--deps/rabbitmq_stomp/test/src/rabbit_stomp_client.erl2
-rw-r--r--deps/rabbitmq_stomp/test/src/rabbit_stomp_publish_test.erl2
-rw-r--r--deps/rabbitmq_stomp/test/system_SUITE.erl (renamed from deps/rabbitmq_stomp/test/amqqueue_SUITE.erl)4
-rw-r--r--deps/rabbitmq_stomp/test/topic_SUITE.erl2
-rw-r--r--deps/rabbitmq_stomp/test/util_SUITE.erl2
-rw-r--r--deps/rabbitmq_stream/.gitignore2
-rw-r--r--deps/rabbitmq_stream/BUILD.bazel105
-rw-r--r--deps/rabbitmq_stream/CONTRIBUTING.md4
-rw-r--r--deps/rabbitmq_stream/Makefile13
-rw-r--r--deps/rabbitmq_stream/README.adoc27
-rw-r--r--deps/rabbitmq_stream/docs/PROTOCOL.adoc597
-rw-r--r--deps/rabbitmq_stream/erlang.mk7712
-rw-r--r--deps/rabbitmq_stream/include/rabbit_stream.hrl70
-rw-r--r--deps/rabbitmq_stream/include/rabbit_stream_metrics.hrl94
-rw-r--r--deps/rabbitmq_stream/priv/schema/rabbitmq_stream.schema59
-rw-r--r--deps/rabbitmq_stream/rabbitmq-components.mk359
-rw-r--r--deps/rabbitmq_stream/rebar.config12
-rw-r--r--deps/rabbitmq_stream/src/Elixir.RabbitMQ.CLI.Ctl.Commands.AddSuperStreamCommand.erl300
-rw-r--r--deps/rabbitmq_stream/src/Elixir.RabbitMQ.CLI.Ctl.Commands.DeleteSuperStreamCommand.erl97
-rw-r--r--deps/rabbitmq_stream/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ListStreamConnectionsCommand.erl80
-rw-r--r--deps/rabbitmq_stream/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ListStreamConsumersCommand.erl120
-rw-r--r--deps/rabbitmq_stream/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ListStreamPublishersCommand.erl120
-rw-r--r--deps/rabbitmq_stream/src/rabbit_stream.erl202
-rw-r--r--deps/rabbitmq_stream/src/rabbit_stream_connection_sup.erl37
-rw-r--r--deps/rabbitmq_stream/src/rabbit_stream_manager.erl829
-rw-r--r--deps/rabbitmq_stream/src/rabbit_stream_metrics.erl138
-rw-r--r--deps/rabbitmq_stream/src/rabbit_stream_metrics_gc.erl82
-rw-r--r--deps/rabbitmq_stream/src/rabbit_stream_reader.erl3886
-rw-r--r--deps/rabbitmq_stream/src/rabbit_stream_sup.erl116
-rw-r--r--deps/rabbitmq_stream/src/rabbit_stream_utils.erl275
-rw-r--r--deps/rabbitmq_stream/test/command_SUITE.erl136
-rw-r--r--deps/rabbitmq_stream/test/commands_SUITE.erl574
-rw-r--r--deps/rabbitmq_stream/test/config_schema_SUITE.erl47
-rw-r--r--deps/rabbitmq_stream/test/config_schema_SUITE_data/rabbitmq_stream.snippets24
-rw-r--r--deps/rabbitmq_stream/test/rabbit_stream_SUITE.erl613
-rw-r--r--deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/.mvn/wrapper/maven-wrapper.jarbin0 -> 50710 bytes
-rw-r--r--deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/Makefile4
-rw-r--r--deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/pom.xml25
-rw-r--r--deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/src/test/java/com/rabbitmq/stream/ClusterSizeTest.java22
-rw-r--r--deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/src/test/java/com/rabbitmq/stream/FailureTest.java104
-rw-r--r--deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/src/test/java/com/rabbitmq/stream/Host.java23
-rw-r--r--deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/src/test/java/com/rabbitmq/stream/LeaderLocatorTest.java12
-rw-r--r--deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/src/test/java/com/rabbitmq/stream/StreamTest.java21
-rw-r--r--deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/src/test/java/com/rabbitmq/stream/TestUtils.java54
-rw-r--r--deps/rabbitmq_stream/test/rabbit_stream_manager_SUITE.erl180
-rw-r--r--deps/rabbitmq_stream/test/rabbit_stream_utils_SUITE.erl73
-rw-r--r--deps/rabbitmq_stream_common/.gitignore56
-rw-r--r--deps/rabbitmq_stream_common/BUILD.bazel37
-rw-r--r--deps/rabbitmq_stream_common/CODE_OF_CONDUCT.md44
-rw-r--r--deps/rabbitmq_stream_common/CONTRIBUTING.md38
-rw-r--r--deps/rabbitmq_stream_common/LICENSE5
-rw-r--r--deps/rabbitmq_stream_common/LICENSE-MPL-RabbitMQ (renamed from packaging/debs/Debian/debian/copyright)43
-rw-r--r--deps/rabbitmq_stream_common/Makefile24
-rw-r--r--deps/rabbitmq_stream_common/README.adoc23
-rw-r--r--deps/rabbitmq_stream_common/include/rabbit_stream.hrl112
-rw-r--r--deps/rabbitmq_stream_common/rebar.config12
-rw-r--r--deps/rabbitmq_stream_common/src/rabbit_stream_core.erl1005
-rw-r--r--deps/rabbitmq_stream_common/test/rabbit_stream_core_SUITE.erl168
-rw-r--r--deps/rabbitmq_stream_management/.gitignore60
-rw-r--r--deps/rabbitmq_stream_management/BUILD.bazel73
-rw-r--r--deps/rabbitmq_stream_management/CODE_OF_CONDUCT.md44
-rw-r--r--deps/rabbitmq_stream_management/CONTRIBUTING.md38
-rw-r--r--deps/rabbitmq_stream_management/LICENSE5
-rw-r--r--deps/rabbitmq_stream_management/LICENSE-MPL-RabbitMQ370
-rw-r--r--deps/rabbitmq_stream_management/Makefile24
-rw-r--r--deps/rabbitmq_stream_management/README.adoc23
-rw-r--r--deps/rabbitmq_stream_management/priv/www/js/stream.js83
-rw-r--r--deps/rabbitmq_stream_management/priv/www/js/tmpl/streamConnection.ejs197
-rw-r--r--deps/rabbitmq_stream_management/priv/www/js/tmpl/streamConnections.ejs142
-rw-r--r--deps/rabbitmq_stream_management/priv/www/js/tmpl/streamConsumersList.ejs29
-rw-r--r--deps/rabbitmq_stream_management/priv/www/js/tmpl/streamPublishersList.ejs41
-rw-r--r--deps/rabbitmq_stream_management/rebar.config12
-rw-r--r--deps/rabbitmq_stream_management/src/rabbit_stream_connection_consumers_mgmt.erl72
-rw-r--r--deps/rabbitmq_stream_management/src/rabbit_stream_connection_mgmt.erl159
-rw-r--r--deps/rabbitmq_stream_management/src/rabbit_stream_connection_publishers_mgmt.erl72
-rw-r--r--deps/rabbitmq_stream_management/src/rabbit_stream_connections_mgmt.erl77
-rw-r--r--deps/rabbitmq_stream_management/src/rabbit_stream_connections_vhost_mgmt.erl65
-rw-r--r--deps/rabbitmq_stream_management/src/rabbit_stream_consumers_mgmt.erl85
-rw-r--r--deps/rabbitmq_stream_management/src/rabbit_stream_management_utils.erl38
-rw-r--r--deps/rabbitmq_stream_management/src/rabbit_stream_mgmt_db.erl167
-rw-r--r--deps/rabbitmq_stream_management/src/rabbit_stream_publishers_mgmt.erl108
-rw-r--r--deps/rabbitmq_stream_management/test/http_SUITE.erl120
-rw-r--r--deps/rabbitmq_stream_management/test/http_SUITE_data/.gitignore3
-rw-r--r--deps/rabbitmq_stream_management/test/http_SUITE_data/.mvn/wrapper/MavenWrapperDownloader.java117
-rw-r--r--deps/rabbitmq_stream_management/test/http_SUITE_data/.mvn/wrapper/maven-wrapper.jarbin0 -> 50710 bytes
-rw-r--r--deps/rabbitmq_stream_management/test/http_SUITE_data/.mvn/wrapper/maven-wrapper.properties2
-rw-r--r--deps/rabbitmq_stream_management/test/http_SUITE_data/Makefile15
-rwxr-xr-xdeps/rabbitmq_stream_management/test/http_SUITE_data/mvnw310
-rw-r--r--deps/rabbitmq_stream_management/test/http_SUITE_data/mvnw.cmd182
-rw-r--r--deps/rabbitmq_stream_management/test/http_SUITE_data/pom.xml144
-rw-r--r--deps/rabbitmq_stream_management/test/http_SUITE_data/src/test/java/com/rabbitmq/stream/HttpTest.java903
-rw-r--r--deps/rabbitmq_stream_management/test/http_SUITE_data/src/test/java/com/rabbitmq/stream/TestUtils.java260
-rw-r--r--deps/rabbitmq_stream_management/test/http_SUITE_data/src/test/resources/logback-test.xml13
-rw-r--r--deps/rabbitmq_top/BUILD.bazel44
-rw-r--r--deps/rabbitmq_top/CONTRIBUTING.md4
-rw-r--r--deps/rabbitmq_top/Makefile4
-rw-r--r--deps/rabbitmq_top/erlang.mk7808
-rw-r--r--deps/rabbitmq_top/rabbitmq-components.mk359
-rw-r--r--deps/rabbitmq_top/src/rabbit_top_app.erl2
-rw-r--r--deps/rabbitmq_top/src/rabbit_top_extension.erl2
-rw-r--r--deps/rabbitmq_top/src/rabbit_top_sup.erl2
-rw-r--r--deps/rabbitmq_top/src/rabbit_top_util.erl2
-rw-r--r--deps/rabbitmq_top/src/rabbit_top_wm_ets_tables.erl2
-rw-r--r--deps/rabbitmq_top/src/rabbit_top_wm_process.erl2
-rw-r--r--deps/rabbitmq_top/src/rabbit_top_wm_processes.erl2
-rw-r--r--deps/rabbitmq_top/src/rabbit_top_worker.erl2
-rw-r--r--deps/rabbitmq_tracing/BUILD.bazel70
-rw-r--r--deps/rabbitmq_tracing/Makefile4
-rw-r--r--deps/rabbitmq_tracing/erlang.mk7808
-rw-r--r--deps/rabbitmq_tracing/rabbitmq-components.mk359
-rw-r--r--deps/rabbitmq_tracing/src/rabbit_tracing_app.erl2
-rw-r--r--deps/rabbitmq_tracing/src/rabbit_tracing_consumer.erl6
-rw-r--r--deps/rabbitmq_tracing/src/rabbit_tracing_consumer_sup.erl2
-rw-r--r--deps/rabbitmq_tracing/src/rabbit_tracing_files.erl2
-rw-r--r--deps/rabbitmq_tracing/src/rabbit_tracing_mgmt.erl2
-rw-r--r--deps/rabbitmq_tracing/src/rabbit_tracing_sup.erl17
-rw-r--r--deps/rabbitmq_tracing/src/rabbit_tracing_traces.erl2
-rw-r--r--deps/rabbitmq_tracing/src/rabbit_tracing_util.erl2
-rw-r--r--deps/rabbitmq_tracing/src/rabbit_tracing_wm_file.erl2
-rw-r--r--deps/rabbitmq_tracing/src/rabbit_tracing_wm_files.erl2
-rw-r--r--deps/rabbitmq_tracing/src/rabbit_tracing_wm_trace.erl2
-rw-r--r--deps/rabbitmq_tracing/src/rabbit_tracing_wm_traces.erl17
-rw-r--r--deps/rabbitmq_tracing/test/rabbit_tracing_SUITE.erl2
-rw-r--r--deps/rabbitmq_trust_store/BUILD.bazel101
-rw-r--r--deps/rabbitmq_trust_store/CONTRIBUTING.md4
-rw-r--r--deps/rabbitmq_trust_store/Makefile4
-rw-r--r--deps/rabbitmq_trust_store/erlang.mk7808
-rw-r--r--deps/rabbitmq_trust_store/rabbitmq-components.mk359
-rw-r--r--deps/rabbitmq_trust_store/src/rabbit_trust_store.erl6
-rw-r--r--deps/rabbitmq_trust_store/src/rabbit_trust_store_app.erl4
-rw-r--r--deps/rabbitmq_trust_store/src/rabbit_trust_store_certificate_provider.erl4
-rw-r--r--deps/rabbitmq_trust_store/src/rabbit_trust_store_file_provider.erl2
-rw-r--r--deps/rabbitmq_trust_store/src/rabbit_trust_store_http_provider.erl2
-rw-r--r--deps/rabbitmq_trust_store/src/rabbit_trust_store_sup.erl2
-rw-r--r--deps/rabbitmq_trust_store/test/config_schema_SUITE.erl2
-rw-r--r--deps/rabbitmq_trust_store/test/system_SUITE.erl26
-rw-r--r--deps/rabbitmq_web_dispatch/BUILD.bazel88
-rw-r--r--deps/rabbitmq_web_dispatch/CONTRIBUTING.md4
-rw-r--r--deps/rabbitmq_web_dispatch/Makefile4
-rw-r--r--deps/rabbitmq_web_dispatch/erlang.mk7808
-rw-r--r--deps/rabbitmq_web_dispatch/rabbitmq-components.mk359
-rw-r--r--deps/rabbitmq_web_dispatch/src/rabbit_cowboy_middleware.erl2
-rw-r--r--deps/rabbitmq_web_dispatch/src/rabbit_cowboy_redirect.erl2
-rw-r--r--deps/rabbitmq_web_dispatch/src/rabbit_cowboy_stream_h.erl2
-rw-r--r--deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch.erl2
-rw-r--r--deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch_app.erl2
-rw-r--r--deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch_listing_handler.erl2
-rw-r--r--deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch_registry.erl10
-rw-r--r--deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch_sup.erl12
-rw-r--r--deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch_util.erl2
-rw-r--r--deps/rabbitmq_web_dispatch/src/webmachine_log.erl4
-rw-r--r--deps/rabbitmq_web_dispatch/test/rabbit_web_dispatch_SUITE.erl7
-rw-r--r--deps/rabbitmq_web_dispatch/test/rabbit_web_dispatch_unit_SUITE.erl2
-rw-r--r--deps/rabbitmq_web_mqtt/BUILD.bazel99
-rw-r--r--deps/rabbitmq_web_mqtt/Makefile4
-rw-r--r--deps/rabbitmq_web_mqtt/erlang.mk7808
-rw-r--r--deps/rabbitmq_web_mqtt/rabbitmq-components.mk359
-rw-r--r--deps/rabbitmq_web_mqtt/src/rabbit_web_mqtt_app.erl21
-rw-r--r--deps/rabbitmq_web_mqtt/src/rabbit_web_mqtt_connection_info.erl4
-rw-r--r--deps/rabbitmq_web_mqtt/src/rabbit_web_mqtt_connection_sup.erl11
-rw-r--r--deps/rabbitmq_web_mqtt/src/rabbit_web_mqtt_handler.erl123
-rw-r--r--deps/rabbitmq_web_mqtt/src/rabbit_web_mqtt_middleware.erl6
-rw-r--r--deps/rabbitmq_web_mqtt/src/rabbit_web_mqtt_stream_handler.erl41
-rw-r--r--deps/rabbitmq_web_mqtt/test/config_schema_SUITE.erl2
-rw-r--r--deps/rabbitmq_web_mqtt/test/proxy_protocol_SUITE.erl4
-rw-r--r--deps/rabbitmq_web_mqtt/test/src/rabbit_ws_test_util.erl2
-rw-r--r--deps/rabbitmq_web_mqtt/test/src/rfc6455_client.erl2
-rw-r--r--deps/rabbitmq_web_mqtt/test/system_SUITE.erl (renamed from deps/rabbitmq_web_mqtt/test/src/system_SUITE.erl)4
-rw-r--r--deps/rabbitmq_web_mqtt_examples/BUILD.bazel40
-rw-r--r--deps/rabbitmq_web_mqtt_examples/CONTRIBUTING.md4
-rw-r--r--deps/rabbitmq_web_mqtt_examples/Makefile4
-rw-r--r--deps/rabbitmq_web_mqtt_examples/erlang.mk7808
-rw-r--r--deps/rabbitmq_web_mqtt_examples/priv/echo.html2
-rw-r--r--deps/rabbitmq_web_mqtt_examples/rabbitmq-components.mk359
-rw-r--r--deps/rabbitmq_web_mqtt_examples/src/rabbit_web_mqtt_examples_app.erl4
-rw-r--r--deps/rabbitmq_web_stomp/BUILD.bazel119
-rw-r--r--deps/rabbitmq_web_stomp/CONTRIBUTING.md4
-rw-r--r--deps/rabbitmq_web_stomp/Makefile4
-rw-r--r--deps/rabbitmq_web_stomp/erlang.mk7808
-rw-r--r--deps/rabbitmq_web_stomp/rabbitmq-components.mk359
-rw-r--r--deps/rabbitmq_web_stomp/src/rabbit_web_stomp_app.erl2
-rw-r--r--deps/rabbitmq_web_stomp/src/rabbit_web_stomp_connection_sup.erl12
-rw-r--r--deps/rabbitmq_web_stomp/src/rabbit_web_stomp_handler.erl52
-rw-r--r--deps/rabbitmq_web_stomp/src/rabbit_web_stomp_internal_event_handler.erl2
-rw-r--r--deps/rabbitmq_web_stomp/src/rabbit_web_stomp_listener.erl19
-rw-r--r--deps/rabbitmq_web_stomp/src/rabbit_web_stomp_middleware.erl6
-rw-r--r--deps/rabbitmq_web_stomp/src/rabbit_web_stomp_stream_handler.erl41
-rw-r--r--deps/rabbitmq_web_stomp/src/rabbit_web_stomp_sup.erl2
-rw-r--r--deps/rabbitmq_web_stomp/test/amqp_stomp_SUITE.erl2
-rw-r--r--deps/rabbitmq_web_stomp/test/config_schema_SUITE.erl2
-rw-r--r--deps/rabbitmq_web_stomp/test/cowboy_websocket_SUITE.erl2
-rw-r--r--deps/rabbitmq_web_stomp/test/proxy_protocol_SUITE.erl4
-rw-r--r--deps/rabbitmq_web_stomp/test/raw_websocket_SUITE.erl2
-rw-r--r--deps/rabbitmq_web_stomp/test/src/rabbit_ws_test_util.erl2
-rw-r--r--deps/rabbitmq_web_stomp/test/src/rfc6455_client.erl2
-rw-r--r--deps/rabbitmq_web_stomp/test/src/stomp.erl2
-rw-r--r--deps/rabbitmq_web_stomp/test/unit_SUITE.erl2
-rw-r--r--deps/rabbitmq_web_stomp_examples/BUILD.bazel40
-rw-r--r--deps/rabbitmq_web_stomp_examples/Makefile4
-rw-r--r--deps/rabbitmq_web_stomp_examples/erlang.mk7808
-rw-r--r--deps/rabbitmq_web_stomp_examples/rabbitmq-components.mk359
-rw-r--r--deps/rabbitmq_web_stomp_examples/src/rabbit_web_stomp_examples_app.erl4
-rw-r--r--dist.bzl200
-rw-r--r--elixir_home.bzl13
-rw-r--r--erlang.mk367
-rw-r--r--erlang_ls.config28
-rw-r--r--mk/bazel.mk42
-rw-r--r--mk/github-actions.mk118
-rw-r--r--mk/rabbitmq-mix.mk8
-rw-r--r--mk/stats.mk17
-rw-r--r--packaging/Makefile86
-rw-r--r--packaging/RPMS/Fedora/Makefile171
-rw-r--r--packaging/RPMS/Fedora/rabbitmq-server.init192
-rw-r--r--packaging/RPMS/Fedora/rabbitmq-server.logrotate7
-rw-r--r--packaging/RPMS/Fedora/rabbitmq-server.service41
-rw-r--r--packaging/RPMS/Fedora/rabbitmq-server.spec520
-rw-r--r--packaging/RPMS/Fedora/rabbitmq-server.tmpfiles1
-rwxr-xr-xpackaging/RPMS/Fedora/scripts/compare-rpm-versions.py41
-rwxr-xr-xpackaging/RPMS/Fedora/scripts/format-package-version8
-rwxr-xr-xpackaging/RPMS/Fedora/scripts/parse-changelog.sh14
-rwxr-xr-xpackaging/RPMS/Fedora/scripts/update-changelog.sh56
-rw-r--r--packaging/debs/Debian/.gitignore3
-rw-r--r--packaging/debs/Debian/Makefile128
-rw-r--r--packaging/debs/Debian/debian/changelog547
-rw-r--r--packaging/debs/Debian/debian/compat1
-rw-r--r--packaging/debs/Debian/debian/control89
-rw-r--r--packaging/debs/Debian/debian/dirs9
-rw-r--r--packaging/debs/Debian/debian/postinst79
-rw-r--r--packaging/debs/Debian/debian/postrm75
-rw-r--r--packaging/debs/Debian/debian/rabbitmq-server.default9
-rw-r--r--packaging/debs/Debian/debian/rabbitmq-server.docs1
-rw-r--r--packaging/debs/Debian/debian/rabbitmq-server.init200
-rw-r--r--packaging/debs/Debian/debian/rabbitmq-server.logrotate7
-rw-r--r--packaging/debs/Debian/debian/rabbitmq-server.manpages4
-rw-r--r--packaging/debs/Debian/debian/rabbitmq-server.service39
-rwxr-xr-xpackaging/debs/Debian/debian/rules63
-rw-r--r--packaging/debs/Debian/debian/source/format1
-rw-r--r--packaging/debs/Debian/debian/watch4
-rwxr-xr-xpackaging/debs/Debian/scripts/compare-debian-versions.py22
-rwxr-xr-xpackaging/debs/Debian/scripts/format-package-version8
-rwxr-xr-xpackaging/debs/Debian/scripts/get-debian-package-files-list.sh33
-rwxr-xr-xpackaging/debs/Debian/scripts/update-changelog.sh36
-rw-r--r--packaging/debs/apt-repository/Makefile31
-rw-r--r--packaging/debs/apt-repository/README17
-rw-r--r--packaging/debs/apt-repository/README-real-repository130
-rw-r--r--packaging/debs/apt-repository/distributions7
-rw-r--r--packaging/debs/apt-repository/dupload.conf16
-rw-r--r--packaging/docker-image/10-default-guest-user.conf8
-rw-r--r--packaging/docker-image/Dockerfile94
-rw-r--r--packaging/docker-image/Makefile72
-rwxr-xr-xpackaging/docker-image/docker-entrypoint.sh422
-rw-r--r--packaging/docker-image/otp-versions/otp-max.yaml8
-rw-r--r--packaging/docker-image/otp-versions/otp-min.yaml6
-rw-r--r--packaging/windows-exe/Makefile36
-rw-r--r--packaging/windows-exe/plugins/ExecDos.dllbin6656 -> 0 bytes
-rwxr-xr-xpackaging/windows-exe/plugins/ShellLink.dllbin4608 -> 0 bytes
-rw-r--r--packaging/windows-exe/rabbitmq.icobin4286 -> 0 bytes
-rw-r--r--packaging/windows-exe/rabbitmq_nsi.in299
-rw-r--r--packaging/windows/Makefile66
-rw-r--r--packaging/windows/README-etc7
-rw-r--r--plugins.mk1
-rw-r--r--rabbitmq-components.mk172
-rw-r--r--rabbitmq.bzl235
-rw-r--r--rabbitmq_home.bzl151
-rw-r--r--rabbitmq_package_generic_unix.bzl19
-rw-r--r--rabbitmq_run.bzl68
-rw-r--r--rabbitmqctl.bzl32
-rw-r--r--release-notes/3.5.7.md169
-rw-r--r--release-notes/3.5.8.md30
-rw-r--r--release-notes/3.6.0.md462
-rw-r--r--release-notes/3.6.1.md280
-rw-r--r--release-notes/3.6.10.md183
-rw-r--r--release-notes/3.6.11.md236
-rw-r--r--release-notes/3.6.12.md67
-rw-r--r--release-notes/3.6.13.md119
-rw-r--r--release-notes/3.6.14.md48
-rw-r--r--release-notes/3.6.15.md142
-rw-r--r--release-notes/3.6.16.md143
-rw-r--r--release-notes/3.6.2.md375
-rw-r--r--release-notes/3.6.3.md218
-rw-r--r--release-notes/3.6.4.md77
-rw-r--r--release-notes/3.6.5.md24
-rw-r--r--release-notes/3.6.6.md234
-rw-r--r--release-notes/3.6.7.md413
-rw-r--r--release-notes/3.6.8.md59
-rw-r--r--release-notes/3.6.9.md63
-rw-r--r--release-notes/3.7.0.md475
-rw-r--r--release-notes/3.7.1.md121
-rw-r--r--release-notes/3.7.10.md248
-rw-r--r--release-notes/3.7.11.md139
-rw-r--r--release-notes/3.7.12.md209
-rw-r--r--release-notes/3.7.13.md177
-rw-r--r--release-notes/3.7.14.md100
-rw-r--r--release-notes/3.7.15.md255
-rw-r--r--release-notes/3.7.16.md174
-rw-r--r--release-notes/3.7.17.md122
-rw-r--r--release-notes/3.7.18.md163
-rw-r--r--release-notes/3.7.19.md114
-rw-r--r--release-notes/3.7.2.md30
-rw-r--r--release-notes/3.7.20.md163
-rw-r--r--release-notes/3.7.21.md98
-rw-r--r--release-notes/3.7.22.md100
-rw-r--r--release-notes/3.7.23.md78
-rw-r--r--release-notes/3.7.24.md114
-rw-r--r--release-notes/3.7.25.md115
-rw-r--r--release-notes/3.7.26.md79
-rw-r--r--release-notes/3.7.27.md83
-rw-r--r--release-notes/3.7.28.md64
-rw-r--r--release-notes/3.7.3.md110
-rw-r--r--release-notes/3.7.4.md117
-rw-r--r--release-notes/3.7.5.md201
-rw-r--r--release-notes/3.7.6.md122
-rw-r--r--release-notes/3.7.7.md86
-rw-r--r--release-notes/3.7.8.md339
-rw-r--r--release-notes/3.7.9.md251
-rw-r--r--release-notes/3.8.0.md288
-rw-r--r--release-notes/3.8.1.md219
-rw-r--r--release-notes/3.8.10.md351
-rw-r--r--release-notes/3.8.11.md72
-rw-r--r--release-notes/3.8.12.md214
-rw-r--r--release-notes/3.8.13.md155
-rw-r--r--release-notes/3.8.14.md77
-rw-r--r--release-notes/3.8.15.md175
-rw-r--r--release-notes/3.8.16.md87
-rw-r--r--release-notes/3.8.17.md145
-rw-r--r--release-notes/3.8.18.md179
-rw-r--r--release-notes/3.8.19.md79
-rw-r--r--release-notes/3.8.2.md204
-rw-r--r--release-notes/3.8.20.md124
-rw-r--r--release-notes/3.8.21.md62
-rw-r--r--release-notes/3.8.22.md83
-rw-r--r--release-notes/3.8.23.md72
-rw-r--r--release-notes/3.8.24.md139
-rw-r--r--release-notes/3.8.25.md67
-rw-r--r--release-notes/3.8.26.md64
-rw-r--r--release-notes/3.8.3.md305
-rw-r--r--release-notes/3.8.4.md413
-rw-r--r--release-notes/3.8.5.md315
-rw-r--r--release-notes/3.8.6.md306
-rw-r--r--release-notes/3.8.7.md113
-rw-r--r--release-notes/3.8.8.md153
-rw-r--r--release-notes/3.8.9.md135
-rw-r--r--release-notes/3.9.0.md242
-rw-r--r--release-notes/3.9.1.md70
-rw-r--r--release-notes/3.9.10.md55
-rw-r--r--release-notes/3.9.11.md85
-rw-r--r--release-notes/3.9.2.md49
-rw-r--r--release-notes/3.9.3.md51
-rw-r--r--release-notes/3.9.4.md81
-rw-r--r--release-notes/3.9.5.md56
-rw-r--r--release-notes/3.9.6.md69
-rw-r--r--release-notes/3.9.7.md64
-rw-r--r--release-notes/3.9.8.md83
-rw-r--r--release-notes/3.9.9.md93
-rw-r--r--release-notes/README-1.1.0-alpha.txt84
-rw-r--r--release-notes/README-1.1.1.txt61
-rw-r--r--release-notes/README-1.2.0.txt63
-rw-r--r--release-notes/README-1.3.0.txt96
-rw-r--r--release-notes/README-1.4.0.txt108
-rw-r--r--release-notes/README-1.5.0.txt143
-rw-r--r--release-notes/README-1.5.1.txt81
-rw-r--r--release-notes/README-1.5.2.txt79
-rw-r--r--release-notes/README-1.5.3.txt79
-rw-r--r--release-notes/README-1.5.4.txt81
-rw-r--r--release-notes/README-1.5.5.txt91
-rw-r--r--release-notes/README-1.6.0.txt86
-rw-r--r--release-notes/README-1.7.0.txt105
-rw-r--r--release-notes/README-1.7.1.txt123
-rw-r--r--release-notes/README-1.7.2.txt75
-rw-r--r--release-notes/README-1.8.0.txt208
-rw-r--r--release-notes/README-1.8.1.txt69
-rw-r--r--release-notes/README-2.0.0.txt98
-rw-r--r--release-notes/README-2.1.0.txt53
-rw-r--r--release-notes/README-2.1.1.txt60
-rw-r--r--release-notes/README-2.2.0.txt106
-rw-r--r--release-notes/README-2.3.0.txt163
-rw-r--r--release-notes/README-2.3.1.txt36
-rw-r--r--release-notes/README-2.4.0.txt143
-rw-r--r--release-notes/README-2.4.1.txt87
-rw-r--r--release-notes/README-2.5.0.txt135
-rw-r--r--release-notes/README-2.5.1.txt34
-rw-r--r--release-notes/README-2.6.0.txt181
-rw-r--r--release-notes/README-2.6.1.txt59
-rw-r--r--release-notes/README-2.7.0.txt142
-rw-r--r--release-notes/README-2.7.1.txt105
-rw-r--r--release-notes/README-2.8.0.txt202
-rw-r--r--release-notes/README-2.8.1.txt41
-rw-r--r--release-notes/README-2.8.2.txt106
-rw-r--r--release-notes/README-2.8.3.txt89
-rw-r--r--release-notes/README-2.8.4.txt30
-rw-r--r--release-notes/README-2.8.5.txt47
-rw-r--r--release-notes/README-2.8.6.txt64
-rw-r--r--release-notes/README-2.8.7.txt70
-rw-r--r--release-notes/README-3.0.0.txt244
-rw-r--r--release-notes/README-3.0.1.txt66
-rw-r--r--release-notes/README-3.0.2.txt88
-rw-r--r--release-notes/README-3.0.3.txt55
-rw-r--r--release-notes/README-3.0.4.txt25
-rw-r--r--release-notes/README-3.1.0.txt173
-rw-r--r--release-notes/README-3.1.1.txt64
-rw-r--r--release-notes/README-3.1.2.txt90
-rw-r--r--release-notes/README-3.1.3.txt12
-rw-r--r--release-notes/README-3.1.4.txt91
-rw-r--r--release-notes/README-3.1.5.txt39
-rw-r--r--release-notes/README-3.2.0.txt191
-rw-r--r--release-notes/README-3.2.1.txt55
-rw-r--r--release-notes/README-3.2.2.txt60
-rw-r--r--release-notes/README-3.2.3.txt64
-rw-r--r--release-notes/README-3.2.4.txt87
-rw-r--r--release-notes/README-3.3.0.txt244
-rw-r--r--release-notes/README-3.3.1.txt107
-rw-r--r--release-notes/README-3.3.2.txt118
-rw-r--r--release-notes/README-3.3.3.txt46
-rw-r--r--release-notes/README-3.3.4.txt46
-rw-r--r--release-notes/README-3.3.5.txt99
-rw-r--r--release-notes/README-3.4.0.txt269
-rw-r--r--release-notes/README-3.4.1.txt69
-rw-r--r--release-notes/README-3.4.2.txt66
-rw-r--r--release-notes/README-3.4.3.txt107
-rw-r--r--release-notes/README-3.4.4.txt104
-rw-r--r--release-notes/README-3.5.0.txt170
-rw-r--r--release-notes/README.md16
-rwxr-xr-xscripts/bazel/kill_orphaned_ct_run.sh7
-rw-r--r--scripts/bazel/rabbitmq-run.sh192
-rwxr-xr-xscripts/rabbitmq-server-ha.ocf2423
-rw-r--r--tools/BUILD.bazel5
-rw-r--r--tools/erlang_ls.bzl38
-rw-r--r--user-template.bazelrc17
-rw-r--r--workflow_sources/base_image/workflow.yml49
-rw-r--r--workflow_sources/base_values.yml10
-rw-r--r--workflow_sources/deps.yml430
-rw-r--r--workflow_sources/test/ct.lib.yml131
-rw-r--r--workflow_sources/test/dep.star22
-rw-r--r--workflow_sources/test/finish.lib.yml82
-rw-r--r--workflow_sources/test/helpers.star13
-rw-r--r--workflow_sources/test/prepare.lib.yml143
-rw-r--r--workflow_sources/test/rabbitmq_cli.lib.yml39
-rw-r--r--workflow_sources/test/tests.lib.yml40
-rw-r--r--workflow_sources/test/util.star58
-rw-r--r--workflow_sources/test/workflow.yml16
-rw-r--r--workspace_helpers.bzl279
1897 files changed, 85569 insertions, 362073 deletions
diff --git a/.bazelrc b/.bazelrc
new file mode 100644
index 0000000000..816e431ba0
--- /dev/null
+++ b/.bazelrc
@@ -0,0 +1,57 @@
+build --incompatible_strict_action_env
+build --local_test_jobs=1
+
+build:buildbuddy --bes_results_url=https://app.buildbuddy.io/invocation/
+build:buildbuddy --bes_backend=grpcs://remote.buildbuddy.io
+build:buildbuddy --remote_cache=grpcs://remote.buildbuddy.io
+build:buildbuddy --remote_timeout=1200
+build:buildbuddy --grpc_keepalive_time=360s
+build:buildbuddy --grpc_keepalive_timeout=360s
+build:buildbuddy --remote_download_minimal
+build:buildbuddy --build_metadata=REPO_URL=https://github.com/rabbitmq/rabbitmq-server.git
+
+build:rbe --config=buildbuddy
+
+build:rbe --remote_executor=grpcs://remote.buildbuddy.io
+
+build:rbe --host_java_toolchain=@bazel_tools//tools/jdk:toolchain_hostjdk8
+build:rbe --java_toolchain=@bazel_tools//tools/jdk:toolchain_hostjdk8
+build:rbe --action_env=BAZEL_DO_NOT_DETECT_CPP_TOOLCHAIN=1
+
+build:rbe --@bazel-erlang//:erlang_home=/usr/lib/erlang
+build:rbe --//:elixir_home=/usr/local
+
+build:rbe --spawn_strategy=remote
+build:rbe --test_strategy=""
+build:rbe --jobs=100
+
+build:rbe-23 --config=rbe
+build:rbe-23 --host_javabase=@rbe_23//java:jdk
+build:rbe-23 --javabase=@rbe_23//java:jdk
+build:rbe-23 --host_java_toolchain=@bazel_tools//tools/jdk:toolchain_hostjdk8
+build:rbe-23 --java_toolchain=@bazel_tools//tools/jdk:toolchain_hostjdk8
+build:rbe-23 --crosstool_top=@rbe_23//cc:toolchain
+build:rbe-23 --extra_toolchains=@rbe_23//config:cc-toolchain
+
+build:rbe-23 --host_platform=@rbe_23//config:platform
+build:rbe-23 --platforms=@rbe_23//config:platform
+build:rbe-23 --extra_execution_platforms=@rbe_23//config:platform
+
+build:rbe-23 --@bazel-erlang//:erlang_version=23
+
+build:rbe-24 --config=rbe
+build:rbe-24 --host_javabase=@rbe_24//java:jdk
+build:rbe-24 --javabase=@rbe_24//java:jdk
+build:rbe-24 --crosstool_top=@rbe_24//cc:toolchain
+build:rbe-24 --extra_toolchains=@rbe_24//config:cc-toolchain
+
+build:rbe-24 --host_platform=@rbe_24//config:platform
+build:rbe-24 --platforms=@rbe_24//config:platform
+build:rbe-24 --extra_execution_platforms=@rbe_24//config:platform
+
+build:rbe-24 --@bazel-erlang//:erlang_version=24
+
+# Try importing a user-specific .bazelrc
+# You can create your own by copying and editing the user-template.bazelrc template:
+# cp user-template.bazelrc user.bazelrc
+try-import %workspace%/user.bazelrc
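
For local use, user.bazelrc can be seeded the same way the CI workflows further down seed theirs. A minimal sketch, assuming a personal BuildBuddy API key; the key placeholder and the ROLE=LOCAL metadata are illustrative, not values taken from this repository:

# Append personal options to user.bazelrc, mirroring the CONFIGURE BAZEL steps in the CI jobs below
cat << EOF >> user.bazelrc
build:buildbuddy --remote_header=x-buildbuddy-api-key=REPLACE_WITH_YOUR_KEY
build:buildbuddy --build_metadata=ROLE=LOCAL
EOF
# then opt into one of the remote-execution configs defined above, e.g.:
# bazelisk test //deps/rabbit:rabbit_stream_queue_SUITE --config=rbe-24
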
diff --git a/.bazelversion b/.bazelversion
new file mode 100644
index 0000000000..a0f9a4b4bc
--- /dev/null
+++ b/.bazelversion
@@ -0,0 +1 @@
+latest
diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 0000000000..fab07de50e
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1 @@
+/deps
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
index 230febf8a2..c8fd6629e6 100644
--- a/.github/PULL_REQUEST_TEMPLATE.md
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -1,14 +1,11 @@
## Proposed Changes
-Please describe the big picture of your changes here to communicate to the
-RabbitMQ team why we should accept this pull request. If it fixes a bug or
-resolves a feature request, be sure to link to that issue.
+Please describe the big picture of your changes here to communicate to the RabbitMQ team why we should accept this pull request.
+If it fixes a bug or resolves a feature request, be sure to link to that issue.
-A pull request that doesn't explain **why** the change was made has a much
-lower chance of being accepted.
+A pull request that doesn't explain **why** the change was made has a much lower chance of being accepted.
-If English isn't your first language, don't worry about it and try to
-communicate the problem you are trying to solve to the best of your abilities.
+If English isn't your first language, don't worry about it and try to communicate the problem you are trying to solve to the best of your abilities.
As long as we can understand the intent, it's all good.
## Types of Changes
@@ -21,23 +18,23 @@ _Put an `x` in the boxes that apply_
- [ ] Breaking change (fix or feature that would cause an observable behavior change in existing systems)
- [ ] Documentation improvements (corrections, new content, etc)
- [ ] Cosmetic change (whitespace, formatting, etc)
+- [ ] Build system and/or CI
## Checklist
-_Put an `x` in the boxes that apply. You can also fill these out after creating
-the PR. If you're unsure about any of them, don't hesitate to ask on the
-mailing list. We're here to help! This is simply a reminder of what we are
-going to look for before merging your code._
+_Put an `x` in the boxes that apply.
+You can also fill these out after creating the PR.
+If you're unsure about any of them, don't hesitate to ask on the mailing list.
+We're here to help!
+This is simply a reminder of what we are going to look for before merging your code._
- [ ] I have read the `CONTRIBUTING.md` document
- [ ] I have signed the CLA (see https://cla.pivotal.io/sign/rabbitmq)
-- [ ] All tests pass locally with my changes
- [ ] I have added tests that prove my fix is effective or that my feature works
-- [ ] I have added necessary documentation (if appropriate)
-- [ ] Any dependent changes have been merged and published in related repositories
+- [ ] All tests pass locally with my changes
+- [ ] If relevant, I have added necessary documentation to https://github.com/rabbitmq/rabbitmq-website
+- [ ] If relevant, I have added this change to the release notes for the first version(s) expected to include it
## Further Comments
-If this is a relatively large or complex change, kick off the discussion by
-explaining why you chose the solution you did and what alternatives you
-considered, etc.
+If this is a relatively large or complex change, kick off the discussion by explaining why you chose the solution you did and what alternatives you considered, etc.
diff --git a/.github/SECURITY.md b/.github/SECURITY.md
new file mode 100644
index 0000000000..121dd38c47
--- /dev/null
+++ b/.github/SECURITY.md
@@ -0,0 +1,71 @@
+# Security Policy
+
+Team RabbitMQ will investigate all responsibly disclosed vulnerabilities that affect
+a recent version in one of the [supported release series](https://www.rabbitmq.com/versions.html).
+We ask all reporters to provide a reasonable amount of information that can be used to reproduce
+the observed behavior.
+
+## Reporting a Vulnerability
+
+The RabbitMQ core team greatly appreciates responsible vulnerability reports
+from security researchers and our user community.
+
+To responsibly disclose a vulnerability, please email `security@rabbitmq.com` or
+[sign up for RabbitMQ community Slack](https://rabbitmq-slack.herokuapp.com) and
+send a DM to @michaelklishin. For reports received via Slack, a separate private
+channel will be set up so that multiple RabbitMQ maintainers can access the disclosed
+information.
+
+In case you'd prefer to encrypt your report, use the [RabbitMQ release signing public key](https://github.com/rabbitmq/signing-keys/releases).
+
+When reporting a vulnerability, please include the following information:
+
+ * Supported RabbitMQ version used
+ * Any relevant environment information (e.g. operating system and Erlang version used)
+ * A set of steps to reproduce the problem
+ * Why you think this behavior is a security vulnerability
+
+A received vulnerability report will be acknowledged by a RabbitMQ core team or VMware R&D staff member.
+
+As the security issue moves from triage to an identified fix to release planning, we will keep the reporter updated.
+
+### When Should I Report a Vulnerability?
+
+ * You think you discovered a potential security vulnerability in RabbitMQ
+ * You think you discovered a potential security vulnerability in one of RabbitMQ client libraries or dependencies
+ * For projects with their own vulnerability reporting and disclosure process (e.g. Erlang/OTP), please report it directly there
+
+### When Should I NOT Report a Vulnerability?
+
+ * Not enough information is available to triage (try to reliably reproduce) the issue
+ * You need help tuning RabbitMQ for security. See [Commercial Services](https://www.rabbitmq.com/services.html)
+ * You need help applying security related updates. See [Upgrades](https://www.rabbitmq.com/upgrade.html)
+
+### On Security Scan Dumps
+
+A warning from a security scanner does not necessarily indicate a vulnerability in RabbitMQ.
+Many such warnings are specific to a particular environment. The RabbitMQ core team does not have
+access to most commercial security scanners or enough information about the deployment,
+so **security scan results alone will not be considered sufficient evidence** of a vulnerability.
+
+
+### Irresponsible Disclosure
+
+Publicly disclosed vulnerabilities (e.g. publicly filed issues with reproduction steps or scripts)
+are considered irresponsibly disclosed and will be removed or otherwise taken private.
+
+
+## Public Disclosure Timing
+
+A public disclosure date is negotiated by the RabbitMQ core team and the vulnerability reporter.
+In most cases disclosure happens within two weeks after a patch release of RabbitMQ is made available.
+When VMware products that depend on RabbitMQ are also affected, the disclosure period can be extended
+further to allow those projects to ship patch releases.
+
+
+## Tanzu RabbitMQ
+
+[Tanzu RabbitMQ](https://tanzu.vmware.com/rabbitmq) is covered by the [VMware Security Response Policy](https://www.vmware.com/support/policies/security_response.html).
+
+Vulnerabilities found in Tanzu RabbitMQ can be reported to the RabbitMQ core team or
+via the [VMware Security Response Center](https://www.vmware.com/security/vsrc.html).
diff --git a/.github/dependabot.yaml b/.github/dependabot.yaml
new file mode 100644
index 0000000000..2c7d170839
--- /dev/null
+++ b/.github/dependabot.yaml
@@ -0,0 +1,7 @@
+version: 2
+updates:
+ # Maintain dependencies for GitHub Actions
+ - package-ecosystem: "github-actions"
+ directory: "/"
+ schedule:
+ interval: "daily"
diff --git a/.github/mergify.yml b/.github/mergify.yml
new file mode 100644
index 0000000000..a818304e74
--- /dev/null
+++ b/.github/mergify.yml
@@ -0,0 +1,49 @@
+pull_request_rules:
+ - name: Add bazel label if a Bazel file is modified
+ conditions:
+ - files~=\.(bazel|bzl)$
+ actions:
+ label:
+ add:
+ - bazel
+ - name: Add make label if a Make file is modified
+ conditions:
+ - files~=(Makefile|\.mk)$
+ actions:
+ label:
+ add:
+ - make
+ - name: Automatically backport to v3.9.x based on label
+ conditions:
+ - base=master
+ - label=backport-v3.9.x
+ - label!=backport-v3.8.x
+ actions:
+ backport:
+ branches:
+ - v3.9.x
+ assignees:
+ - "{{ author }}"
+ - name: Automatically backport to v3.9.x & v3.8.x based on labels
+ conditions:
+ - base=master
+ - label=backport-v3.9.x
+ - label=backport-v3.8.x
+ actions:
+ backport:
+ branches:
+ - v3.9.x
+ labels:
+ - backport-v3.8.x
+ assignees:
+ - "{{ author }}"
+ - name: Automatically backport to v3.8.x based on label
+ conditions:
+ - base=v3.9.x
+ - label=backport-v3.8.x
+ actions:
+ backport:
+ branches:
+ - v3.8.x
+ assignees:
+ - "{{ author }}"
diff --git a/.github/workflows/base-images.yaml b/.github/workflows/base-images.yaml
deleted file mode 100644
index ea670b73f5..0000000000
--- a/.github/workflows/base-images.yaml
+++ /dev/null
@@ -1,54 +0,0 @@
-name: Workflow Base Images
-on:
-- workflow_dispatch
-jobs:
- ci-base-22_3:
- name: ci-base-22_3
- runs-on: ubuntu-18.04
- steps:
- - name: CHECKOUT REPOSITORY
- uses: actions/checkout@v2
- - name: CREATE ERLANG+ELIXIR IMAGE (22.3)
- uses: docker/build-push-action@v1
- with:
- username: _json_key
- password: ${{ secrets.GCR_JSON_KEY }}
- registry: eu.gcr.io
- repository: cf-rabbitmq-core/erlang_elixir
- dockerfile: ci/dockerfiles/22.3/erlang_elixir
- tags: "22.3"
- - name: CREATE BASE CI IMAGE (22.3)
- uses: docker/build-push-action@v1
- with:
- username: _json_key
- password: ${{ secrets.GCR_JSON_KEY }}
- registry: eu.gcr.io
- repository: cf-rabbitmq-core/ci-base
- dockerfile: ci/dockerfiles/ci-base
- build_args: ERLANG_VERSION=22.3,SECONDARY_UMBRELLA_GITREFS=v3.7.28 v3.8.9
- tags: "22.3"
- ci-base-23_1:
- name: ci-base-23_1
- runs-on: ubuntu-18.04
- steps:
- - name: CHECKOUT REPOSITORY
- uses: actions/checkout@v2
- - name: CREATE ERLANG+ELIXIR IMAGE (23.1)
- uses: docker/build-push-action@v1
- with:
- username: _json_key
- password: ${{ secrets.GCR_JSON_KEY }}
- registry: eu.gcr.io
- repository: cf-rabbitmq-core/erlang_elixir
- dockerfile: ci/dockerfiles/23.1/erlang_elixir
- tags: "23.1"
- - name: CREATE BASE CI IMAGE (23.1)
- uses: docker/build-push-action@v1
- with:
- username: _json_key
- password: ${{ secrets.GCR_JSON_KEY }}
- registry: eu.gcr.io
- repository: cf-rabbitmq-core/ci-base
- dockerfile: ci/dockerfiles/ci-base
- build_args: ERLANG_VERSION=23.1,SECONDARY_UMBRELLA_GITREFS=v3.7.28 v3.8.9
- tags: "23.1"
diff --git a/.github/workflows/maintenance.yaml b/.github/workflows/maintenance.yaml
deleted file mode 100644
index 270f4bfb0f..0000000000
--- a/.github/workflows/maintenance.yaml
+++ /dev/null
@@ -1,45 +0,0 @@
-name: Maintenance
-on:
- schedule:
- - cron: '0 2 * * *'
-jobs:
- cleanup-images:
- name: cleanup-images
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - run: |
- gcloud auth configure-docker
- - name: CLEAN OLD BUILD IMAGES
- env:
- IMAGE: eu.gcr.io/cf-rabbitmq-core/ci
- run: |
- DATE=$(date -d "-1 week" '+%Y-%m-%d')
- C=0
- for digest in $(gcloud container images list-tags ${IMAGE} --limit=999999 --sort-by=TIMESTAMP \
- --filter="timestamp.datetime < '${DATE}'" --format='get(digest)'); do
- (
- set -x
- gcloud container images delete -q --force-delete-tags "${IMAGE}@${digest}"
- )
- C=$(expr $C + 1)
- done
- echo "Deleted ${C} images in ${IMAGE}." >&2
- - name: CLEAN OLD BUILD IMAGES
- env:
- IMAGE: eu.gcr.io/cf-rabbitmq-core/ci-rabbit
- run: |
- DATE=$(date -d "-1 week" '+%Y-%m-%d')
- C=0
- for digest in $(gcloud container images list-tags ${IMAGE} --limit=999999 --sort-by=TIMESTAMP \
- --filter="timestamp.datetime < '${DATE}'" --format='get(digest)'); do
- (
- set -x
- gcloud container images delete -q --force-delete-tags "${IMAGE}@${digest}"
- )
- C=$(expr $C + 1)
- done
- echo "Deleted ${C} images in ${IMAGE}." >&2
diff --git a/.github/workflows/oci.yaml b/.github/workflows/oci.yaml
new file mode 100644
index 0000000000..1771e2f0b4
--- /dev/null
+++ b/.github/workflows/oci.yaml
@@ -0,0 +1,114 @@
+# https://github.com/marketplace/actions/build-and-push-docker-images
+name: OCI
+on:
+ push:
+ paths:
+ - 'deps/**'
+ - 'packaging/**'
+ - 'scripts/**'
+ - Makefile
+ - plugins.mk
+ - rabbitmq-components.mk
+ - .github/workflows/oci.yaml
+ workflow_dispatch:
+env:
+ GENERIC_UNIX_ARCHIVE: ${{ github.workspace }}/PACKAGES/rabbitmq-server-generic-unix-${{ github.sha }}.tar.xz
+ RABBITMQ_VERSION: ${{ github.sha }}
+ VERSION: ${{ github.sha }}
+jobs:
+
+ # This job will build one docker image per supported Erlang major version.
+ # Each image will have two tags (one containing the Git commit SHA, one containing the branch name).
+ #
+ # For example, for Git commit SHA '111aaa' and branch name 'main' and maximum supported Erlang major version '24',
+ # the following tags will be pushed to Dockerhub:
+ #
+ # * 111aaa-otp-min (image OTP 23)
+ # * main-otp-min (image OTP 23)
+ # * 111aaa-otp-max (image OTP 24)
+ # * main-otp-max (image OTP 24)
+
+ build-publish-dev:
+ runs-on: ubuntu-latest
+ strategy:
+ fail-fast: false
+ matrix:
+ # Build image for every supported Erlang major version.
+ # Source of truth for OTP versions (min & max): https://www.rabbitmq.com/which-erlang.html
+ image_tag_suffix:
+ - otp-min
+ - otp-max
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v2.4.0
+
+ - name: Load version info
+ id: load-info
+ run: |
+ FILE=packaging/docker-image/otp-versions/${{ matrix.image_tag_suffix }}.yaml
+ echo "::set-output name=otp::$(yq eval '.otp' $FILE)"
+ echo "::set-output name=otp_sha256::$(yq eval '.otp_sha256' $FILE)"
+ echo "::set-output name=elixir::$(yq eval '.elixir' $FILE)"
+
+ - name: Set up Erlang & Elixir
+ uses: erlef/setup-beam@v1.9
+ with:
+ otp-version: ${{ steps.load-info.outputs.otp }}
+ elixir-version: ${{ steps.load-info.outputs.elixir }}
+
+ - name: Build generic unix package
+ run: |
+ make package-generic-unix PROJECT_VERSION=${{ github.sha }}
+
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v1
+
+ - name: Cache Docker layers
+ uses: actions/cache@v2
+ with:
+ path: /tmp/.buildx-cache
+ key: ${{ runner.os }}-${{ matrix.image_tag_suffix }}-buildx-${{ github.sha }}
+ restore-keys: |
+ ${{ runner.os }}-${{ matrix.image_tag_suffix }}-buildx-
+
+ - name: Login to DockerHub
+ uses: docker/login-action@v1
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKERHUB_PASSWORD }}
+
+ - name: Expand generic-unix-package
+ working-directory: packaging/docker-image
+ run: |
+ xzcat ${GENERIC_UNIX_ARCHIVE} | tar xvf -
+
+ - name: Compute image tags
+ id: compute-tags
+ run: |
+ echo "::set-output name=TAG_1::${{ github.sha }}-${{ matrix.image_tag_suffix }}"
+ echo "::set-output name=TAG_2::${GITHUB_REF##*/}-${{ matrix.image_tag_suffix }}"
+
+ - name: Build and push
+ uses: docker/build-push-action@v2
+ with:
+ context: packaging/docker-image
+ push: true
+ tags: |
+ pivotalrabbitmq/rabbitmq:${{ steps.compute-tags.outputs.TAG_1 }}
+ pivotalrabbitmq/rabbitmq:${{ steps.compute-tags.outputs.TAG_2 }}
+ build-args: |
+ SKIP_PGP_VERIFY=true
+ PGP_KEYSERVER=pgpkeys.eu
+ OTP_VERSION=${{ steps.load-info.outputs.otp }}
+ OTP_SHA256=${{ steps.load-info.outputs.otp_sha256 }}
+ RABBITMQ_BUILD=rabbitmq_server-${{ github.sha }}
+ cache-from: type=local,src=/tmp/.buildx-cache
+ cache-to: type=local,dest=/tmp/.buildx-cache-new
+
+ # Temp fix
+ # https://github.com/docker/build-push-action/issues/252
+ # https://github.com/moby/buildkit/issues/1896
+ - name: Move cache
+ run: |
+ rm -rf /tmp/.buildx-cache
+ mv /tmp/.buildx-cache-new /tmp/.buildx-cache
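
The matrix entries above map to small YAML files under packaging/docker-image/otp-versions/ that the workflow reads with yq before tagging images. A minimal sketch of inspecting those pins and pulling one of the resulting dev tags locally; the commit SHA is a placeholder:

# Inspect the version pins the workflow reads (keys taken from the yq calls above)
FILE=packaging/docker-image/otp-versions/otp-max.yaml
yq eval '.otp' "$FILE"
yq eval '.otp_sha256' "$FILE"
yq eval '.elixir' "$FILE"

# Pull one of the two tags pushed per matrix entry; <commit-sha> is a placeholder
docker pull "pivotalrabbitmq/rabbitmq:<commit-sha>-otp-max"
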
diff --git a/.github/workflows/perform-bazel-execution-comparison.yaml b/.github/workflows/perform-bazel-execution-comparison.yaml
new file mode 100644
index 0000000000..d17d3897e6
--- /dev/null
+++ b/.github/workflows/perform-bazel-execution-comparison.yaml
@@ -0,0 +1,111 @@
+name: Gather Bazel Execution Logs
+on:
+ workflow_dispatch:
+ inputs:
+ target:
+ description: 'A bazel label representing the test target'
+ required: true
+ default: '//deps/rabbit:rabbit_stream_queue_SUITE'
+jobs:
+ run-a:
+ name: Run A
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ erlang_version:
+ - "24"
+ include:
+ - erlang_version: "24"
+ cache_name: ci-bazel-cache-analysis
+ timeout-minutes: 120
+ steps:
+ - name: CHECKOUT REPOSITORY
+ uses: actions/checkout@v2.4.0
+ - name: CONFIGURE BAZEL
+ run: |
+ cat << EOF >> user.bazelrc
+ build:buildbuddy --remote_header=x-buildbuddy-api-key=${{ secrets.BUILDBUDDY_API_KEY }}
+
+ build:buildbuddy --build_metadata=ROLE=CI
+ build:buildbuddy --remote_instance_name=buildbuddy-io/buildbuddy/ci-${{ matrix.cache_name }}
+ EOF
+ - name: RUN TESTS
+ run: |
+ sudo sysctl -w net.ipv4.tcp_keepalive_time=60
+ sudo ethtool -K eth0 tso off gso off gro off tx off rx off lro off
+ bazelisk test ${{ github.event.inputs.target }} \
+ --config=rbe-${{ matrix.erlang_version }} \
+ --execution_log_binary_file=/tmp/exec.log
+ - name: SAVE EXECUTION LOG BINARY
+ uses: actions/upload-artifact@v2-preview
+ with:
+ name: execution-log-binary-A
+ path: /tmp/exec.log
+ run-b:
+ name: Run B
+ needs: run-a
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ erlang_version:
+ - "24"
+ include:
+ - erlang_version: "24"
+ cache_name: ci-bazel-cache-analysis
+ timeout-minutes: 120
+ steps:
+ - name: CHECKOUT REPOSITORY
+ uses: actions/checkout@v2.4.0
+ - name: CONFIGURE BAZEL
+ run: |
+ cat << EOF >> user.bazelrc
+ build:buildbuddy --remote_header=x-buildbuddy-api-key=${{ secrets.BUILDBUDDY_API_KEY }}
+
+ build:buildbuddy --build_metadata=ROLE=CI
+ build:buildbuddy --remote_instance_name=buildbuddy-io/buildbuddy/ci-${{ matrix.cache_name }}
+ EOF
+ - name: RUN TESTS
+ run: |
+ sudo sysctl -w net.ipv4.tcp_keepalive_time=60
+ sudo ethtool -K eth0 tso off gso off gro off tx off rx off lro off
+ bazelisk test ${{ github.event.inputs.target }} \
+ --config=rbe-${{ matrix.erlang_version }} \
+ --execution_log_binary_file=/tmp/exec.log
+ - name: SAVE EXECUTION LOG BINARY
+ uses: actions/upload-artifact@v2-preview
+ with:
+ name: execution-log-binary-B
+ path: /tmp/exec.log
+ parse-logs:
+ name: Parse Logs
+ needs: [run-a, run-b]
+ runs-on: ubuntu-latest
+ steps:
+ - name: CHECKOUT BAZEL
+ uses: actions/checkout@v2.4.0
+ with:
+ repository: bazelbuild/bazel
+ path: bazel
+ - name: MOUNT BAZEL CACHE
+ uses: actions/cache@v2
+ with:
+ path: "/home/runner/.cache/bazel"
+ key: bazel
+ - name: BUILD EXECLOG PARSER
+ working-directory: bazel
+ run: |
+ bazelisk build src/tools/execlog:parser
+ - name: FETCH LOGS
+ uses: actions/download-artifact@v2
+ - name: PARSE LOGS
+ run: |
+ bazel/bazel-bin/src/tools/execlog/parser \
+ --log_path=./execution-log-binary-A/exec.log \
+ --log_path=./execution-log-binary-B/exec.log \
+ --output_path=/tmp/execution-log-binary-A.log.txt \
+ --output_path=/tmp/execution-log-binary-B.log.txt
+ - name: SAVE PARSED LOGS
+ uses: actions/upload-artifact@v2-preview
+ with:
+ name: parsed-logs
+ path: /tmp/execution-log-binary-*.log.txt
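
Once the parsed-logs artifact produced by this workflow has been downloaded, the two text logs can be compared directly. A minimal sketch, with file names taken from the output_path values above:

# Compare the parsed execution logs from run A and run B; any diff hunks point
# at actions that were re-executed or produced different outputs between the runs
diff execution-log-binary-A.log.txt execution-log-binary-B.log.txt > exec-log.diff || true
wc -l exec-log.diff
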
diff --git a/.github/workflows/rabbitmq_peer_discovery_aws.yaml b/.github/workflows/rabbitmq_peer_discovery_aws.yaml
new file mode 100644
index 0000000000..6446ca3dff
--- /dev/null
+++ b/.github/workflows/rabbitmq_peer_discovery_aws.yaml
@@ -0,0 +1,68 @@
+name: Peer Discovery AWS Integration Test
+on:
+ push:
+ branches:
+ - master
+ - v3.9.x
+ - v3.8.x
+ paths:
+ - 'deps/rabbitmq_peer_discovery_aws/**'
+ - .github/workflows/rabbitmq_peer_discovery_aws.yaml
+ workflow_dispatch:
+jobs:
+ peer-discovery-aws-integration-test:
+ name: Integration Test
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ image_tag_suffix:
+ - otp-max
+ include:
+ - image_tag_suffix: otp-max
+ erlang_version: "24"
+ rbe_default_branch: linux-erlang-24.1
+ timeout-minutes: 45
+ steps:
+ - name: CHECKOUT REPOSITORY
+ uses: actions/checkout@v2.4.0
+ - name: WAIT FOR OCI IMAGE WORKFLOW
+ uses: lewagon/wait-on-check-action@v1.1.1
+ with:
+ ref: ${{ github.ref }}
+ check-name: build-publish-dev (${{ matrix.image_tag_suffix }})
+ repo-token: ${{ secrets.GITHUB_TOKEN }}
+ wait-interval: 30 # seconds
+ - name: MOUNT BAZEL CACHE
+ uses: actions/cache@v1
+ with:
+ path: "/home/runner/repo-cache/"
+ key: repo-cache
+ - name: CONFIGURE BAZEL
+ run: |
+ cat << EOF >> user.bazelrc
+ build:buildbuddy --remote_header=x-buildbuddy-api-key=${{ secrets.BUILDBUDDY_API_KEY }}
+
+ build:buildbuddy --build_metadata=ROLE=CI
+ build:buildbuddy --build_metadata=VISIBILITY=PRIVATE
+ build:buildbuddy --remote_instance_name=buildbuddy-io/buildbuddy/ci-aws-${{ matrix.erlang_version }}
+ build:buildbuddy --repository_cache=/home/runner/repo-cache/
+ build:buildbuddy --color=yes
+ build:buildbuddy --disk_cache=
+ EOF
+ #! - name: Setup tmate session
+ #! uses: mxschmitt/action-tmate@v3
+ - name: RUN INTEGRATION TESTS
+ run: |
+ sudo sysctl -w net.ipv4.tcp_keepalive_time=60
+ sudo ethtool -K eth0 tso off gso off gro off tx off rx off lro off
+
+ branch_or_tag="${GITHUB_REF##*/}"
+ bazelisk test //deps/rabbitmq_peer_discovery_aws:integration_SUITE \
+ --config=rbe-${{ matrix.erlang_version }} \
+ --test_tag_filters=aws \
+ --build_tests_only \
+ --test_env AWS_ACCESS_KEY_ID=${{ secrets.CONCOURSE_AWS_ACCESS_KEY_ID }} \
+ --test_env AWS_SECRET_ACCESS_KEY=${{ secrets.CONCOURSE_AWS_SECRET_ACCESS_KEY }} \
+ --test_env RABBITMQ_IMAGE="pivotalrabbitmq/rabbitmq:${{ github.sha }}-otp-max" \
+ --test_env AWS_ECS_CLUSTER_NAME="rabbitmq-peer-discovery-aws-actions-${branch_or_tag//./-}" \
+ --verbose_failures
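
The AWS_ECS_CLUSTER_NAME value above relies on a bash parameter expansion that replaces every dot in the branch or tag name with a dash, presumably to keep the name within the character set ECS accepts. A small illustration of that expansion, using a hypothetical branch name:

# "${var//./-}" replaces all occurrences of '.' with '-'
branch_or_tag="v3.9.x"
echo "rabbitmq-peer-discovery-aws-actions-${branch_or_tag//./-}"
# prints: rabbitmq-peer-discovery-aws-actions-v3-9-x
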
diff --git a/.github/workflows/test-erlang-git.yaml b/.github/workflows/test-erlang-git.yaml
new file mode 100644
index 0000000000..185f8c7e27
--- /dev/null
+++ b/.github/workflows/test-erlang-git.yaml
@@ -0,0 +1,66 @@
+name: Test Erlang Git Master
+on:
+ schedule:
+ - cron: '0 2 * * *'
+ workflow_dispatch:
+jobs:
+ test-erlang-git:
+ name: Test (Erlang Git Master)
+ runs-on: ubuntu-latest
+ timeout-minutes: 120
+ steps:
+ - name: CHECKOUT REPOSITORY
+ uses: actions/checkout@v2.4.0
+ - name: UPDATE RBE IMAGE SHA
+ env:
+ IMAGE: pivotalrabbitmq/rabbitmq-server-buildenv
+ TAG: linux-erlang-git-master
+ run: |
+ # buildbuddy caches the container image, so we must use a specific sha to ensure
+ # the latest is used
+ DIGEST="$(skopeo inspect --format '{{.Digest}}' docker://${IMAGE}:${TAG})"
+ echo "Will use ${IMAGE}@${DIGEST}"
+ sudo npm install --global --silent @bazel/buildozer
+ npx buildozer \
+ "dict_set exec_properties container-image:docker://${IMAGE}@${DIGEST}" \
+ //:erlang_git_platform
+ - name: MOUNT BAZEL CACHE
+ uses: actions/cache@v1
+ with:
+ path: "/home/runner/repo-cache/"
+ key: repo-cache
+ - name: CONFIGURE BAZEL
+ run: |
+ cat << EOF >> user.bazelrc
+ build:buildbuddy --remote_header=x-buildbuddy-api-key=${{ secrets.BUILDBUDDY_API_KEY }}
+
+ build:buildbuddy --build_metadata=ROLE=CI
+ build:buildbuddy --build_metadata=VISIBILITY=PUBLIC
+ build:buildbuddy --remote_instance_name=buildbuddy-io/buildbuddy/ci-erlang-git
+ build:buildbuddy --repository_cache=/home/runner/repo-cache/
+ build:buildbuddy --color=yes
+ build:buildbuddy --disk_cache=
+
+ build:rbe-git --crosstool_top=@buildbuddy_toolchain//:toolchain
+ build:rbe-git --extra_toolchains=@buildbuddy_toolchain//:cc_toolchain
+ build:rbe-git --javabase=@buildbuddy_toolchain//:javabase_jdk8
+ build:rbe-git --host_javabase=@buildbuddy_toolchain//:javabase_jdk8
+ build:rbe-git --java_toolchain=@buildbuddy_toolchain//:toolchain_jdk8
+ build:rbe-git --host_java_toolchain=@buildbuddy_toolchain//:toolchain_jdk8
+
+ build:rbe-git --host_platform=//:erlang_git_platform
+ build:rbe-git --platforms=//:erlang_git_platform
+ build:rbe-git --extra_execution_platforms=//:erlang_git_platform
+ build:rbe-git --@bazel-erlang//:erlang_home=/usr/local/lib/erlang
+ build:rbe-git --@bazel-erlang//:erlang_version=25
+ EOF
+ #! - name: Setup tmate session
+ #! uses: mxschmitt/action-tmate@v3
+ - name: RUN TESTS
+ run: |
+ sudo sysctl -w net.ipv4.tcp_keepalive_time=60
+ sudo ethtool -K eth0 tso off gso off gro off tx off rx off lro off
+ bazelisk test //... \
+ --config=rbe-git \
+ --test_tag_filters=-mixed-version-cluster,-exclusive,-aws \
+ --verbose_failures
diff --git a/.github/workflows/test-erlang-otp-22.3.yaml b/.github/workflows/test-erlang-otp-22.3.yaml
deleted file mode 100644
index 7a34c2c1d4..0000000000
--- a/.github/workflows/test-erlang-otp-22.3.yaml
+++ /dev/null
@@ -1,8348 +0,0 @@
-name: Test - Erlang 22.3
-on: push
-jobs:
- prepare:
- name: prepare
- runs-on: ubuntu-18.04
- outputs:
- build_start: ${{ steps.buildevents.outputs.build_start }}
- branch_or_tag_name: ${{ steps.buildevents.outputs.branch_or_tag_name }}
- steps:
- - name: RECORD BUILD START
- id: buildevents
- run: |
- echo "::set-output name=build_start::$(date +%s)"
- branch_or_tag_name=${GITHUB_REF#refs/*/}
- echo "::set-output name=branch_or_tag_name::$branch_or_tag_name"
- - name: CHECKOUT REPOSITORY
- uses: actions/checkout@v2
- - name: PREPARE BUILD IMAGE
- uses: docker/build-push-action@v1
- with:
- username: _json_key
- password: ${{ secrets.GCR_JSON_KEY }}
- registry: eu.gcr.io
- repository: cf-rabbitmq-core/ci
- dockerfile: ci/dockerfiles/ci
- build_args: ERLANG_VERSION=22.3,GITHUB_RUN_ID=${{ github.run_id }},BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }},GITHUB_SHA=${{ github.sha }},base_rmq_ref=master,current_rmq_ref=${{ steps.buildevents.outputs.branch_or_tag_name }},RABBITMQ_VERSION=3.9.0
- tags: erlang-22.3-rabbitmq-${{ github.sha }}
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RECORD STEP FINISH
- run: |
- docker run \
- --env project=prepare \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.build_start }} \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }} \
- ci/scripts/collect.sh
- xref:
- name: xref
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD XREF START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - name: CHECKOUT REPOSITORY
- uses: actions/checkout@v2
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-base:22.3
- - name: RUN XREF rabbit_common
- run: |
- docker run \
- --env project=rabbit_common \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbit
- run: |
- docker run \
- --env project=rabbit \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF amqp_client
- run: |
- docker run \
- --env project=amqp_client \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF amqp10_client
- run: |
- docker run \
- --env project=amqp10_client \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF amqp10_common
- run: |
- docker run \
- --env project=amqp10_common \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_amqp1_0
- run: |
- docker run \
- --env project=rabbitmq_amqp1_0 \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_auth_backend_cache
- run: |
- docker run \
- --env project=rabbitmq_auth_backend_cache \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_auth_backend_http
- run: |
- docker run \
- --env project=rabbitmq_auth_backend_http \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_auth_backend_ldap
- run: |
- docker run \
- --env project=rabbitmq_auth_backend_ldap \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_auth_backend_oauth2
- run: |
- docker run \
- --env project=rabbitmq_auth_backend_oauth2 \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_auth_mechanism_ssl
- run: |
- docker run \
- --env project=rabbitmq_auth_mechanism_ssl \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_aws
- run: |
- docker run \
- --env project=rabbitmq_aws \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_cli
- run: |
- docker run \
- --env project=rabbitmq_cli \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_consistent_hash_exchange
- run: |
- docker run \
- --env project=rabbitmq_consistent_hash_exchange \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_event_exchange
- run: |
- docker run \
- --env project=rabbitmq_event_exchange \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_federation
- run: |
- docker run \
- --env project=rabbitmq_federation \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_federation_management
- run: |
- docker run \
- --env project=rabbitmq_federation_management \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_jms_topic_exchange
- run: |
- docker run \
- --env project=rabbitmq_jms_topic_exchange \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_management
- run: |
- docker run \
- --env project=rabbitmq_management \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_management_agent
- run: |
- docker run \
- --env project=rabbitmq_management_agent \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_mqtt
- run: |
- docker run \
- --env project=rabbitmq_mqtt \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_peer_discovery_common
- run: |
- docker run \
- --env project=rabbitmq_peer_discovery_common \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_peer_discovery_aws
- run: |
- docker run \
- --env project=rabbitmq_peer_discovery_aws \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_peer_discovery_k8s
- run: |
- docker run \
- --env project=rabbitmq_peer_discovery_k8s \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_peer_discovery_consul
- run: |
- docker run \
- --env project=rabbitmq_peer_discovery_consul \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_peer_discovery_etcd
- run: |
- docker run \
- --env project=rabbitmq_peer_discovery_etcd \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_prometheus
- run: |
- docker run \
- --env project=rabbitmq_prometheus \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_random_exchange
- run: |
- docker run \
- --env project=rabbitmq_random_exchange \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_recent_history_exchange
- run: |
- docker run \
- --env project=rabbitmq_recent_history_exchange \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_sharding
- run: |
- docker run \
- --env project=rabbitmq_sharding \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_shovel
- run: |
- docker run \
- --env project=rabbitmq_shovel \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_shovel_management
- run: |
- docker run \
- --env project=rabbitmq_shovel_management \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_stomp
- run: |
- docker run \
- --env project=rabbitmq_stomp \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_top
- run: |
- docker run \
- --env project=rabbitmq_top \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_tracing
- run: |
- docker run \
- --env project=rabbitmq_tracing \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_trust_store
- run: |
- docker run \
- --env project=rabbitmq_trust_store \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_web_dispatch
- run: |
- docker run \
- --env project=rabbitmq_web_dispatch \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_web_mqtt
- run: |
- docker run \
- --env project=rabbitmq_web_mqtt \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_web_mqtt_examples
- run: |
- docker run \
- --env project=rabbitmq_web_mqtt_examples \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_web_stomp
- run: |
- docker run \
- --env project=rabbitmq_web_stomp \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_web_stomp_examples
- run: |
- docker run \
- --env project=rabbitmq_web_stomp_examples \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/xref.sh
- - name: RECORD STEP FINISH
- if: always()
- run: |
- docker run \
- --env project=xref \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ci/scripts:/workspace/rabbitmq/ci/scripts \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:22.3 \
- ci/scripts/collect.sh
- rabbit_common:
- name: rabbit_common
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbit_common \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbit_common-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbit-checks:
- name: rabbit-checks
- needs:
- - prepare
- runs-on: ubuntu-18.04
- outputs:
- step_start: ${{ steps.buildevents.outputs.step_start }}
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - name: CHECKOUT REPOSITORY
- uses: actions/checkout@v2
- - name: VALIDATE KNOWN CT SUITES
- env:
- project: rabbit
- run: |
- ci/scripts/validate-workflow.sh amqqueue_backward_compatibility backing_queue channel_interceptor channel_operation_timeout cluster cluster_rename clustering_management config_schema confirms_rejects consumer_timeout crashing_queues dead_lettering definition_import disconnect_detected_during_alarm dynamic_ha dynamic_qq eager_sync feature_flags lazy_queue list_consumers_sanity_check list_queues_online_and_offline maintenance_mode many_node_ha message_size_limit metrics mirrored_supervisor msg_store peer_discovery_classic_config peer_discovery_dns per_user_connection_channel_limit per_user_connection_channel_limit_partitions per_user_connection_channel_tracking per_user_connection_tracking per_vhost_connection_limit per_vhost_connection_limit_partitions per_vhost_msg_store per_vhost_queue_limit policy priority_queue priority_queue_recovery product_info proxy_protocol publisher_confirms_parallel queue_length_limits queue_master_location queue_parallel queue_type quorum_queue rabbit_confirms rabbit_core_metrics_gc rabbit_fifo rabbit_fifo_int rabbit_fifo_prop rabbit_fifo_v0 rabbit_msg_record rabbit_stream_queue rabbitmq_queues_cli_integration rabbitmqctl_integration rabbitmqctl_shutdown signal_handling simple_ha single_active_consumer sync_detection term_to_binary_compat_prop topic_permission unit_access_control unit_access_control_authn_authz_context_propagation unit_access_control_credential_validation unit_amqp091_content_framing unit_amqp091_server_properties unit_app_management unit_cluster_formation_locking_mocks unit_collections unit_config_value_encryption unit_connection_tracking unit_credit_flow unit_disk_monitor unit_disk_monitor_mocks unit_file_handle_cache unit_gen_server2 unit_gm unit_log_config unit_log_management unit_operator_policy unit_pg_local unit_plugin_directories unit_plugin_versioning unit_policy_validators unit_priority_queue unit_queue_consumers unit_stats_and_metrics unit_supervisor2 unit_vm_memory_monitor upgrade_preparation vhost
- - name: RUN CHECKS
- uses: docker/build-push-action@v1
- with:
- username: _json_key
- password: ${{ secrets.GCR_JSON_KEY }}
- registry: eu.gcr.io
- repository: cf-rabbitmq-core/ci-rabbit
- dockerfile: ci/dockerfiles/ci-dep
- build_args: IMAGE_TAG=erlang-22.3-rabbitmq-${{ github.sha }},BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }},project=rabbit
- tags: erlang-22.3-rabbitmq-${{ github.sha }}
- rabbit-ct-amqqueue_backward_compatibility:
- name: rabbit-ct-amqqueue_backward_compatibility
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-amqqueue_backward_compatibility
- run: |
- mkdir ct-amqqueue_backward_compatibility-logs && chmod 777 ct-amqqueue_backward_compatibility-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=amqqueue_backward_compatibility \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-amqqueue_backward_compatibility-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-amqqueue_backward_compatibility-logs
- path: ct-amqqueue_backward_compatibility-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-amqqueue_backward_compatibility [mixed v3.7.28]
- run: |
- mkdir ct-amqqueue_backward_compatibility-logs-v3.7.28 && chmod 777 ct-amqqueue_backward_compatibility-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=amqqueue_backward_compatibility \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-amqqueue_backward_compatibility-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-amqqueue_backward_compatibility-logs-mixed-v3.7.28
- path: ct-amqqueue_backward_compatibility-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-amqqueue_backward_compatibility [mixed v3.8.9]
- run: |
- mkdir ct-amqqueue_backward_compatibility-logs-v3.8.9 && chmod 777 ct-amqqueue_backward_compatibility-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=amqqueue_backward_compatibility \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-amqqueue_backward_compatibility-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-amqqueue_backward_compatibility-logs-mixed-v3.8.9
- path: ct-amqqueue_backward_compatibility-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-backing_queue:
- name: rabbit-ct-backing_queue
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-backing_queue
- run: |
- mkdir ct-backing_queue-logs && chmod 777 ct-backing_queue-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=backing_queue \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-backing_queue-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-backing_queue-logs
- path: ct-backing_queue-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-backing_queue [mixed v3.7.28]
- run: |
- mkdir ct-backing_queue-logs-v3.7.28 && chmod 777 ct-backing_queue-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=backing_queue \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-backing_queue-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-backing_queue-logs-mixed-v3.7.28
- path: ct-backing_queue-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-backing_queue [mixed v3.8.9]
- run: |
- mkdir ct-backing_queue-logs-v3.8.9 && chmod 777 ct-backing_queue-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=backing_queue \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-backing_queue-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-backing_queue-logs-mixed-v3.8.9
- path: ct-backing_queue-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-channel_interceptor:
- name: rabbit-ct-channel_interceptor
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-channel_interceptor
- run: |
- mkdir ct-channel_interceptor-logs && chmod 777 ct-channel_interceptor-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=channel_interceptor \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-channel_interceptor-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-channel_interceptor-logs
- path: ct-channel_interceptor-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-channel_interceptor [mixed v3.7.28]
- run: |
- mkdir ct-channel_interceptor-logs-v3.7.28 && chmod 777 ct-channel_interceptor-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=channel_interceptor \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-channel_interceptor-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-channel_interceptor-logs-mixed-v3.7.28
- path: ct-channel_interceptor-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-channel_interceptor [mixed v3.8.9]
- run: |
- mkdir ct-channel_interceptor-logs-v3.8.9 && chmod 777 ct-channel_interceptor-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=channel_interceptor \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-channel_interceptor-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-channel_interceptor-logs-mixed-v3.8.9
- path: ct-channel_interceptor-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-channel_operation_timeout:
- name: rabbit-ct-channel_operation_timeout
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-channel_operation_timeout
- run: |
- mkdir ct-channel_operation_timeout-logs && chmod 777 ct-channel_operation_timeout-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=channel_operation_timeout \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-channel_operation_timeout-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-channel_operation_timeout-logs
- path: ct-channel_operation_timeout-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-channel_operation_timeout [mixed v3.7.28]
- run: |
- mkdir ct-channel_operation_timeout-logs-v3.7.28 && chmod 777 ct-channel_operation_timeout-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=channel_operation_timeout \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-channel_operation_timeout-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-channel_operation_timeout-logs-mixed-v3.7.28
- path: ct-channel_operation_timeout-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-channel_operation_timeout [mixed v3.8.9]
- run: |
- mkdir ct-channel_operation_timeout-logs-v3.8.9 && chmod 777 ct-channel_operation_timeout-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=channel_operation_timeout \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-channel_operation_timeout-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-channel_operation_timeout-logs-mixed-v3.8.9
- path: ct-channel_operation_timeout-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-cluster:
- name: rabbit-ct-cluster
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-cluster
- run: |
- mkdir ct-cluster-logs && chmod 777 ct-cluster-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=cluster \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-cluster-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-cluster-logs
- path: ct-cluster-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-cluster [mixed v3.7.28]
- run: |
- mkdir ct-cluster-logs-v3.7.28 && chmod 777 ct-cluster-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=cluster \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-cluster-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-cluster-logs-mixed-v3.7.28
- path: ct-cluster-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-cluster [mixed v3.8.9]
- run: |
- mkdir ct-cluster-logs-v3.8.9 && chmod 777 ct-cluster-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=cluster \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-cluster-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-cluster-logs-mixed-v3.8.9
- path: ct-cluster-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-cluster_rename:
- name: rabbit-ct-cluster_rename
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-cluster_rename
- run: |
- mkdir ct-cluster_rename-logs && chmod 777 ct-cluster_rename-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=cluster_rename \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-cluster_rename-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-cluster_rename-logs
- path: ct-cluster_rename-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-cluster_rename [mixed v3.7.28]
- run: |
- mkdir ct-cluster_rename-logs-v3.7.28 && chmod 777 ct-cluster_rename-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=cluster_rename \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-cluster_rename-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-cluster_rename-logs-mixed-v3.7.28
- path: ct-cluster_rename-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-cluster_rename [mixed v3.8.9]
- run: |
- mkdir ct-cluster_rename-logs-v3.8.9 && chmod 777 ct-cluster_rename-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=cluster_rename \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-cluster_rename-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-cluster_rename-logs-mixed-v3.8.9
- path: ct-cluster_rename-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-clustering_management:
- name: rabbit-ct-clustering_management
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-clustering_management
- run: |
- mkdir ct-clustering_management-logs && chmod 777 ct-clustering_management-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=clustering_management \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-clustering_management-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-clustering_management-logs
- path: ct-clustering_management-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-clustering_management [mixed v3.7.28]
- run: |
- mkdir ct-clustering_management-logs-v3.7.28 && chmod 777 ct-clustering_management-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=clustering_management \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-clustering_management-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-clustering_management-logs-mixed-v3.7.28
- path: ct-clustering_management-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-clustering_management [mixed v3.8.9]
- run: |
- mkdir ct-clustering_management-logs-v3.8.9 && chmod 777 ct-clustering_management-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=clustering_management \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-clustering_management-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-clustering_management-logs-mixed-v3.8.9
- path: ct-clustering_management-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-config_schema:
- name: rabbit-ct-config_schema
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-config_schema
- run: |
- mkdir ct-config_schema-logs && chmod 777 ct-config_schema-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=config_schema \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-config_schema-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-config_schema-logs
- path: ct-config_schema-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-config_schema [mixed v3.7.28]
- run: |
- mkdir ct-config_schema-logs-v3.7.28 && chmod 777 ct-config_schema-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=config_schema \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-config_schema-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-config_schema-logs-mixed-v3.7.28
- path: ct-config_schema-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-config_schema [mixed v3.8.9]
- run: |
- mkdir ct-config_schema-logs-v3.8.9 && chmod 777 ct-config_schema-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=config_schema \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-config_schema-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-config_schema-logs-mixed-v3.8.9
- path: ct-config_schema-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-confirms_rejects:
- name: rabbit-ct-confirms_rejects
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-confirms_rejects
- run: |
- mkdir ct-confirms_rejects-logs && chmod 777 ct-confirms_rejects-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=confirms_rejects \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-confirms_rejects-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-confirms_rejects-logs
- path: ct-confirms_rejects-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-confirms_rejects [mixed v3.7.28]
- run: |
- mkdir ct-confirms_rejects-logs-v3.7.28 && chmod 777 ct-confirms_rejects-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=confirms_rejects \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-confirms_rejects-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-confirms_rejects-logs-mixed-v3.7.28
- path: ct-confirms_rejects-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-confirms_rejects [mixed v3.8.9]
- run: |
- mkdir ct-confirms_rejects-logs-v3.8.9 && chmod 777 ct-confirms_rejects-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=confirms_rejects \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-confirms_rejects-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-confirms_rejects-logs-mixed-v3.8.9
- path: ct-confirms_rejects-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-consumer_timeout:
- name: rabbit-ct-consumer_timeout
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-consumer_timeout
- run: |
- mkdir ct-consumer_timeout-logs && chmod 777 ct-consumer_timeout-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=consumer_timeout \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-consumer_timeout-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-consumer_timeout-logs
- path: ct-consumer_timeout-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-consumer_timeout [mixed v3.7.28]
- run: |
- mkdir ct-consumer_timeout-logs-v3.7.28 && chmod 777 ct-consumer_timeout-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=consumer_timeout \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-consumer_timeout-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-consumer_timeout-logs-mixed-v3.7.28
- path: ct-consumer_timeout-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-consumer_timeout [mixed v3.8.9]
- run: |
- mkdir ct-consumer_timeout-logs-v3.8.9 && chmod 777 ct-consumer_timeout-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=consumer_timeout \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-consumer_timeout-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-consumer_timeout-logs-mixed-v3.8.9
- path: ct-consumer_timeout-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-crashing_queues:
- name: rabbit-ct-crashing_queues
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-crashing_queues
- run: |
- mkdir ct-crashing_queues-logs && chmod 777 ct-crashing_queues-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=crashing_queues \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-crashing_queues-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-crashing_queues-logs
- path: ct-crashing_queues-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-crashing_queues [mixed v3.7.28]
- run: |
- mkdir ct-crashing_queues-logs-v3.7.28 && chmod 777 ct-crashing_queues-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=crashing_queues \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-crashing_queues-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-crashing_queues-logs-mixed-v3.7.28
- path: ct-crashing_queues-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-crashing_queues [mixed v3.8.9]
- run: |
- mkdir ct-crashing_queues-logs-v3.8.9 && chmod 777 ct-crashing_queues-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=crashing_queues \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-crashing_queues-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-crashing_queues-logs-mixed-v3.8.9
- path: ct-crashing_queues-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-dead_lettering:
- name: rabbit-ct-dead_lettering
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-dead_lettering
- run: |
- mkdir ct-dead_lettering-logs && chmod 777 ct-dead_lettering-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=dead_lettering \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-dead_lettering-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-dead_lettering-logs
- path: ct-dead_lettering-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-dead_lettering [mixed v3.7.28]
- run: |
- mkdir ct-dead_lettering-logs-v3.7.28 && chmod 777 ct-dead_lettering-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=dead_lettering \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-dead_lettering-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-dead_lettering-logs-mixed-v3.7.28
- path: ct-dead_lettering-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-dead_lettering [mixed v3.8.9]
- run: |
- mkdir ct-dead_lettering-logs-v3.8.9 && chmod 777 ct-dead_lettering-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=dead_lettering \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-dead_lettering-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-dead_lettering-logs-mixed-v3.8.9
- path: ct-dead_lettering-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-definition_import:
- name: rabbit-ct-definition_import
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-definition_import
- run: |
- mkdir ct-definition_import-logs && chmod 777 ct-definition_import-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=definition_import \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-definition_import-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-definition_import-logs
- path: ct-definition_import-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-definition_import [mixed v3.7.28]
- run: |
- mkdir ct-definition_import-logs-v3.7.28 && chmod 777 ct-definition_import-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=definition_import \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-definition_import-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-definition_import-logs-mixed-v3.7.28
- path: ct-definition_import-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-definition_import [mixed v3.8.9]
- run: |
- mkdir ct-definition_import-logs-v3.8.9 && chmod 777 ct-definition_import-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=definition_import \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-definition_import-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-definition_import-logs-mixed-v3.8.9
- path: ct-definition_import-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-disconnect_detected_during_alarm:
- name: rabbit-ct-disconnect_detected_during_alarm
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-disconnect_detected_during_alarm
- run: |
- mkdir ct-disconnect_detected_during_alarm-logs && chmod 777 ct-disconnect_detected_during_alarm-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=disconnect_detected_during_alarm \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-disconnect_detected_during_alarm-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-disconnect_detected_during_alarm-logs
- path: ct-disconnect_detected_during_alarm-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-disconnect_detected_during_alarm [mixed v3.7.28]
- run: |
- mkdir ct-disconnect_detected_during_alarm-logs-v3.7.28 && chmod 777 ct-disconnect_detected_during_alarm-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=disconnect_detected_during_alarm \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-disconnect_detected_during_alarm-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-disconnect_detected_during_alarm-logs-mixed-v3.7.28
- path: ct-disconnect_detected_during_alarm-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-disconnect_detected_during_alarm [mixed v3.8.9]
- run: |
- mkdir ct-disconnect_detected_during_alarm-logs-v3.8.9 && chmod 777 ct-disconnect_detected_during_alarm-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=disconnect_detected_during_alarm \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-disconnect_detected_during_alarm-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-disconnect_detected_during_alarm-logs-mixed-v3.8.9
- path: ct-disconnect_detected_during_alarm-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-dynamic_ha:
- name: rabbit-ct-dynamic_ha
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-dynamic_ha
- run: |
- mkdir ct-dynamic_ha-logs && chmod 777 ct-dynamic_ha-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=dynamic_ha \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-dynamic_ha-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-dynamic_ha-logs
- path: ct-dynamic_ha-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-dynamic_ha [mixed v3.7.28]
- run: |
- mkdir ct-dynamic_ha-logs-v3.7.28 && chmod 777 ct-dynamic_ha-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=dynamic_ha \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-dynamic_ha-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-dynamic_ha-logs-mixed-v3.7.28
- path: ct-dynamic_ha-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-dynamic_ha [mixed v3.8.9]
- run: |
- mkdir ct-dynamic_ha-logs-v3.8.9 && chmod 777 ct-dynamic_ha-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=dynamic_ha \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-dynamic_ha-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-dynamic_ha-logs-mixed-v3.8.9
- path: ct-dynamic_ha-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-dynamic_qq:
- name: rabbit-ct-dynamic_qq
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-dynamic_qq
- run: |
- mkdir ct-dynamic_qq-logs && chmod 777 ct-dynamic_qq-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=dynamic_qq \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-dynamic_qq-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-dynamic_qq-logs
- path: ct-dynamic_qq-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-dynamic_qq [mixed v3.7.28]
- run: |
- mkdir ct-dynamic_qq-logs-v3.7.28 && chmod 777 ct-dynamic_qq-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=dynamic_qq \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-dynamic_qq-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-dynamic_qq-logs-mixed-v3.7.28
- path: ct-dynamic_qq-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-dynamic_qq [mixed v3.8.9]
- run: |
- mkdir ct-dynamic_qq-logs-v3.8.9 && chmod 777 ct-dynamic_qq-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=dynamic_qq \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-dynamic_qq-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-dynamic_qq-logs-mixed-v3.8.9
- path: ct-dynamic_qq-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-eager_sync:
- name: rabbit-ct-eager_sync
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-eager_sync
- run: |
- mkdir ct-eager_sync-logs && chmod 777 ct-eager_sync-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=eager_sync \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-eager_sync-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-eager_sync-logs
- path: ct-eager_sync-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-eager_sync [mixed v3.7.28]
- run: |
- mkdir ct-eager_sync-logs-v3.7.28 && chmod 777 ct-eager_sync-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=eager_sync \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-eager_sync-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-eager_sync-logs-mixed-v3.7.28
- path: ct-eager_sync-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-eager_sync [mixed v3.8.9]
- run: |
- mkdir ct-eager_sync-logs-v3.8.9 && chmod 777 ct-eager_sync-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=eager_sync \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-eager_sync-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-eager_sync-logs-mixed-v3.8.9
- path: ct-eager_sync-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-feature_flags:
- name: rabbit-ct-feature_flags
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-feature_flags
- run: |
- mkdir ct-feature_flags-logs && chmod 777 ct-feature_flags-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=feature_flags \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-feature_flags-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-feature_flags-logs
- path: ct-feature_flags-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-feature_flags [mixed v3.7.28]
- run: |
- mkdir ct-feature_flags-logs-v3.7.28 && chmod 777 ct-feature_flags-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=feature_flags \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-feature_flags-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-feature_flags-logs-mixed-v3.7.28
- path: ct-feature_flags-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-feature_flags [mixed v3.8.9]
- run: |
- mkdir ct-feature_flags-logs-v3.8.9 && chmod 777 ct-feature_flags-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=feature_flags \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-feature_flags-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-feature_flags-logs-mixed-v3.8.9
- path: ct-feature_flags-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-lazy_queue:
- name: rabbit-ct-lazy_queue
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-lazy_queue
- run: |
- mkdir ct-lazy_queue-logs && chmod 777 ct-lazy_queue-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=lazy_queue \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-lazy_queue-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-lazy_queue-logs
- path: ct-lazy_queue-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-lazy_queue [mixed v3.7.28]
- run: |
- mkdir ct-lazy_queue-logs-v3.7.28 && chmod 777 ct-lazy_queue-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=lazy_queue \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-lazy_queue-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-lazy_queue-logs-mixed-v3.7.28
- path: ct-lazy_queue-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-lazy_queue [mixed v3.8.9]
- run: |
- mkdir ct-lazy_queue-logs-v3.8.9 && chmod 777 ct-lazy_queue-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=lazy_queue \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-lazy_queue-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-lazy_queue-logs-mixed-v3.8.9
- path: ct-lazy_queue-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-list_consumers_sanity_check:
- name: rabbit-ct-list_consumers_sanity_check
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-list_consumers_sanity_check
- run: |
- mkdir ct-list_consumers_sanity_check-logs && chmod 777 ct-list_consumers_sanity_check-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=list_consumers_sanity_check \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-list_consumers_sanity_check-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-list_consumers_sanity_check-logs
- path: ct-list_consumers_sanity_check-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-list_consumers_sanity_check [mixed v3.7.28]
- run: |
- mkdir ct-list_consumers_sanity_check-logs-v3.7.28 && chmod 777 ct-list_consumers_sanity_check-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=list_consumers_sanity_check \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-list_consumers_sanity_check-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-list_consumers_sanity_check-logs-mixed-v3.7.28
- path: ct-list_consumers_sanity_check-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-list_consumers_sanity_check [mixed v3.8.9]
- run: |
- mkdir ct-list_consumers_sanity_check-logs-v3.8.9 && chmod 777 ct-list_consumers_sanity_check-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=list_consumers_sanity_check \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-list_consumers_sanity_check-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-list_consumers_sanity_check-logs-mixed-v3.8.9
- path: ct-list_consumers_sanity_check-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-list_queues_online_and_offline:
- name: rabbit-ct-list_queues_online_and_offline
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-list_queues_online_and_offline
- run: |
- mkdir ct-list_queues_online_and_offline-logs && chmod 777 ct-list_queues_online_and_offline-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=list_queues_online_and_offline \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-list_queues_online_and_offline-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-list_queues_online_and_offline-logs
- path: ct-list_queues_online_and_offline-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-list_queues_online_and_offline [mixed v3.7.28]
- run: |
- mkdir ct-list_queues_online_and_offline-logs-v3.7.28 && chmod 777 ct-list_queues_online_and_offline-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=list_queues_online_and_offline \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-list_queues_online_and_offline-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-list_queues_online_and_offline-logs-mixed-v3.7.28
- path: ct-list_queues_online_and_offline-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-list_queues_online_and_offline [mixed v3.8.9]
- run: |
- mkdir ct-list_queues_online_and_offline-logs-v3.8.9 && chmod 777 ct-list_queues_online_and_offline-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=list_queues_online_and_offline \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-list_queues_online_and_offline-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-list_queues_online_and_offline-logs-mixed-v3.8.9
- path: ct-list_queues_online_and_offline-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-maintenance_mode:
- name: rabbit-ct-maintenance_mode
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-maintenance_mode
- run: |
- mkdir ct-maintenance_mode-logs && chmod 777 ct-maintenance_mode-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=maintenance_mode \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-maintenance_mode-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-maintenance_mode-logs
- path: ct-maintenance_mode-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-maintenance_mode [mixed v3.7.28]
- run: |
- mkdir ct-maintenance_mode-logs-v3.7.28 && chmod 777 ct-maintenance_mode-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=maintenance_mode \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-maintenance_mode-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-maintenance_mode-logs-mixed-v3.7.28
- path: ct-maintenance_mode-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-maintenance_mode [mixed v3.8.9]
- run: |
- mkdir ct-maintenance_mode-logs-v3.8.9 && chmod 777 ct-maintenance_mode-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=maintenance_mode \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-maintenance_mode-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-maintenance_mode-logs-mixed-v3.8.9
- path: ct-maintenance_mode-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-many_node_ha:
- name: rabbit-ct-many_node_ha
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-many_node_ha
- run: |
- mkdir ct-many_node_ha-logs && chmod 777 ct-many_node_ha-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=many_node_ha \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-many_node_ha-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-many_node_ha-logs
- path: ct-many_node_ha-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-many_node_ha [mixed v3.7.28]
- run: |
- mkdir ct-many_node_ha-logs-v3.7.28 && chmod 777 ct-many_node_ha-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=many_node_ha \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-many_node_ha-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-many_node_ha-logs-mixed-v3.7.28
- path: ct-many_node_ha-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-many_node_ha [mixed v3.8.9]
- run: |
- mkdir ct-many_node_ha-logs-v3.8.9 && chmod 777 ct-many_node_ha-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=many_node_ha \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-many_node_ha-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-many_node_ha-logs-mixed-v3.8.9
- path: ct-many_node_ha-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-message_size_limit:
- name: rabbit-ct-message_size_limit
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-message_size_limit
- run: |
- mkdir ct-message_size_limit-logs && chmod 777 ct-message_size_limit-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=message_size_limit \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-message_size_limit-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-message_size_limit-logs
- path: ct-message_size_limit-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-message_size_limit [mixed v3.7.28]
- run: |
- mkdir ct-message_size_limit-logs-v3.7.28 && chmod 777 ct-message_size_limit-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=message_size_limit \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-message_size_limit-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-message_size_limit-logs-mixed-v3.7.28
- path: ct-message_size_limit-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-message_size_limit [mixed v3.8.9]
- run: |
- mkdir ct-message_size_limit-logs-v3.8.9 && chmod 777 ct-message_size_limit-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=message_size_limit \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-message_size_limit-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-message_size_limit-logs-mixed-v3.8.9
- path: ct-message_size_limit-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-metrics:
- name: rabbit-ct-metrics
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-metrics
- run: |
- mkdir ct-metrics-logs && chmod 777 ct-metrics-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=metrics \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-metrics-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-metrics-logs
- path: ct-metrics-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-metrics [mixed v3.7.28]
- run: |
- mkdir ct-metrics-logs-v3.7.28 && chmod 777 ct-metrics-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=metrics \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-metrics-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-metrics-logs-mixed-v3.7.28
- path: ct-metrics-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-metrics [mixed v3.8.9]
- run: |
- mkdir ct-metrics-logs-v3.8.9 && chmod 777 ct-metrics-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=metrics \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-metrics-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-metrics-logs-mixed-v3.8.9
- path: ct-metrics-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-mirrored_supervisor:
- name: rabbit-ct-mirrored_supervisor
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-mirrored_supervisor
- run: |
- mkdir ct-mirrored_supervisor-logs && chmod 777 ct-mirrored_supervisor-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=mirrored_supervisor \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-mirrored_supervisor-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-mirrored_supervisor-logs
- path: ct-mirrored_supervisor-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-mirrored_supervisor [mixed v3.7.28]
- run: |
- mkdir ct-mirrored_supervisor-logs-v3.7.28 && chmod 777 ct-mirrored_supervisor-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=mirrored_supervisor \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-mirrored_supervisor-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-mirrored_supervisor-logs-mixed-v3.7.28
- path: ct-mirrored_supervisor-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-mirrored_supervisor [mixed v3.8.9]
- run: |
- mkdir ct-mirrored_supervisor-logs-v3.8.9 && chmod 777 ct-mirrored_supervisor-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=mirrored_supervisor \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-mirrored_supervisor-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-mirrored_supervisor-logs-mixed-v3.8.9
- path: ct-mirrored_supervisor-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-msg_store:
- name: rabbit-ct-msg_store
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-msg_store
- run: |
- mkdir ct-msg_store-logs && chmod 777 ct-msg_store-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=msg_store \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-msg_store-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-msg_store-logs
- path: ct-msg_store-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-msg_store [mixed v3.7.28]
- run: |
- mkdir ct-msg_store-logs-v3.7.28 && chmod 777 ct-msg_store-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=msg_store \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-msg_store-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-msg_store-logs-mixed-v3.7.28
- path: ct-msg_store-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-msg_store [mixed v3.8.9]
- run: |
- mkdir ct-msg_store-logs-v3.8.9 && chmod 777 ct-msg_store-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=msg_store \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-msg_store-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-msg_store-logs-mixed-v3.8.9
- path: ct-msg_store-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-peer_discovery_classic_config:
- name: rabbit-ct-peer_discovery_classic_config
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-peer_discovery_classic_config
- run: |
- mkdir ct-peer_discovery_classic_config-logs && chmod 777 ct-peer_discovery_classic_config-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=peer_discovery_classic_config \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-peer_discovery_classic_config-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-peer_discovery_classic_config-logs
- path: ct-peer_discovery_classic_config-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-peer_discovery_classic_config [mixed v3.7.28]
- run: |
- mkdir ct-peer_discovery_classic_config-logs-v3.7.28 && chmod 777 ct-peer_discovery_classic_config-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=peer_discovery_classic_config \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-peer_discovery_classic_config-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-peer_discovery_classic_config-logs-mixed-v3.7.28
- path: ct-peer_discovery_classic_config-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-peer_discovery_classic_config [mixed v3.8.9]
- run: |
- mkdir ct-peer_discovery_classic_config-logs-v3.8.9 && chmod 777 ct-peer_discovery_classic_config-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=peer_discovery_classic_config \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-peer_discovery_classic_config-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-peer_discovery_classic_config-logs-mixed-v3.8.9
- path: ct-peer_discovery_classic_config-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-peer_discovery_dns:
- name: rabbit-ct-peer_discovery_dns
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-peer_discovery_dns
- run: |
- mkdir ct-peer_discovery_dns-logs && chmod 777 ct-peer_discovery_dns-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=peer_discovery_dns \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-peer_discovery_dns-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-peer_discovery_dns-logs
- path: ct-peer_discovery_dns-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-peer_discovery_dns [mixed v3.7.28]
- run: |
- mkdir ct-peer_discovery_dns-logs-v3.7.28 && chmod 777 ct-peer_discovery_dns-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=peer_discovery_dns \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-peer_discovery_dns-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-peer_discovery_dns-logs-mixed-v3.7.28
- path: ct-peer_discovery_dns-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-peer_discovery_dns [mixed v3.8.9]
- run: |
- mkdir ct-peer_discovery_dns-logs-v3.8.9 && chmod 777 ct-peer_discovery_dns-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=peer_discovery_dns \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-peer_discovery_dns-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-peer_discovery_dns-logs-mixed-v3.8.9
- path: ct-peer_discovery_dns-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-per_user_connection_channel_limit:
- name: rabbit-ct-per_user_connection_channel_limit
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-per_user_connection_channel_limit
- run: |
- mkdir ct-per_user_connection_channel_limit-logs && chmod 777 ct-per_user_connection_channel_limit-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=per_user_connection_channel_limit \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-per_user_connection_channel_limit-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-per_user_connection_channel_limit-logs
- path: ct-per_user_connection_channel_limit-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-per_user_connection_channel_limit [mixed v3.7.28]
- run: |
- mkdir ct-per_user_connection_channel_limit-logs-v3.7.28 && chmod 777 ct-per_user_connection_channel_limit-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=per_user_connection_channel_limit \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-per_user_connection_channel_limit-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-per_user_connection_channel_limit-logs-mixed-v3.7.28
- path: ct-per_user_connection_channel_limit-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-per_user_connection_channel_limit [mixed v3.8.9]
- run: |
- mkdir ct-per_user_connection_channel_limit-logs-v3.8.9 && chmod 777 ct-per_user_connection_channel_limit-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=per_user_connection_channel_limit \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-per_user_connection_channel_limit-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-per_user_connection_channel_limit-logs-mixed-v3.8.9
- path: ct-per_user_connection_channel_limit-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-per_user_connection_channel_limit_partitions:
- name: rabbit-ct-per_user_connection_channel_limit_partitions
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-per_user_connection_channel_limit_partitions
- run: |
- mkdir ct-per_user_connection_channel_limit_partitions-logs && chmod 777 ct-per_user_connection_channel_limit_partitions-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=per_user_connection_channel_limit_partitions \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-per_user_connection_channel_limit_partitions-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-per_user_connection_channel_limit_partitions-logs
- path: ct-per_user_connection_channel_limit_partitions-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-per_user_connection_channel_limit_partitions [mixed v3.7.28]
- run: |
- mkdir ct-per_user_connection_channel_limit_partitions-logs-v3.7.28 && chmod 777 ct-per_user_connection_channel_limit_partitions-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=per_user_connection_channel_limit_partitions \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-per_user_connection_channel_limit_partitions-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-per_user_connection_channel_limit_partitions-logs-mixed-v3.7.28
- path: ct-per_user_connection_channel_limit_partitions-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-per_user_connection_channel_limit_partitions [mixed v3.8.9]
- run: |
- mkdir ct-per_user_connection_channel_limit_partitions-logs-v3.8.9 && chmod 777 ct-per_user_connection_channel_limit_partitions-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=per_user_connection_channel_limit_partitions \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-per_user_connection_channel_limit_partitions-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-per_user_connection_channel_limit_partitions-logs-mixed-v3.8.9
- path: ct-per_user_connection_channel_limit_partitions-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-per_user_connection_channel_tracking:
- name: rabbit-ct-per_user_connection_channel_tracking
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-per_user_connection_channel_tracking
- run: |
- mkdir ct-per_user_connection_channel_tracking-logs && chmod 777 ct-per_user_connection_channel_tracking-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=per_user_connection_channel_tracking \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-per_user_connection_channel_tracking-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-per_user_connection_channel_tracking-logs
- path: ct-per_user_connection_channel_tracking-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-per_user_connection_channel_tracking [mixed v3.7.28]
- run: |
- mkdir ct-per_user_connection_channel_tracking-logs-v3.7.28 && chmod 777 ct-per_user_connection_channel_tracking-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=per_user_connection_channel_tracking \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-per_user_connection_channel_tracking-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-per_user_connection_channel_tracking-logs-mixed-v3.7.28
- path: ct-per_user_connection_channel_tracking-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-per_user_connection_channel_tracking [mixed v3.8.9]
- run: |
- mkdir ct-per_user_connection_channel_tracking-logs-v3.8.9 && chmod 777 ct-per_user_connection_channel_tracking-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=per_user_connection_channel_tracking \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-per_user_connection_channel_tracking-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-per_user_connection_channel_tracking-logs-mixed-v3.8.9
- path: ct-per_user_connection_channel_tracking-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-per_user_connection_tracking:
- name: rabbit-ct-per_user_connection_tracking
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-per_user_connection_tracking
- run: |
- mkdir ct-per_user_connection_tracking-logs && chmod 777 ct-per_user_connection_tracking-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=per_user_connection_tracking \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-per_user_connection_tracking-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-per_user_connection_tracking-logs
- path: ct-per_user_connection_tracking-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-per_user_connection_tracking [mixed v3.7.28]
- run: |
- mkdir ct-per_user_connection_tracking-logs-v3.7.28 && chmod 777 ct-per_user_connection_tracking-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=per_user_connection_tracking \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-per_user_connection_tracking-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-per_user_connection_tracking-logs-mixed-v3.7.28
- path: ct-per_user_connection_tracking-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-per_user_connection_tracking [mixed v3.8.9]
- run: |
- mkdir ct-per_user_connection_tracking-logs-v3.8.9 && chmod 777 ct-per_user_connection_tracking-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=per_user_connection_tracking \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-per_user_connection_tracking-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-per_user_connection_tracking-logs-mixed-v3.8.9
- path: ct-per_user_connection_tracking-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-per_vhost_connection_limit:
- name: rabbit-ct-per_vhost_connection_limit
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-per_vhost_connection_limit
- run: |
- mkdir ct-per_vhost_connection_limit-logs && chmod 777 ct-per_vhost_connection_limit-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=per_vhost_connection_limit \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-per_vhost_connection_limit-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-per_vhost_connection_limit-logs
- path: ct-per_vhost_connection_limit-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-per_vhost_connection_limit [mixed v3.7.28]
- run: |
- mkdir ct-per_vhost_connection_limit-logs-v3.7.28 && chmod 777 ct-per_vhost_connection_limit-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=per_vhost_connection_limit \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-per_vhost_connection_limit-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-per_vhost_connection_limit-logs-mixed-v3.7.28
- path: ct-per_vhost_connection_limit-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-per_vhost_connection_limit [mixed v3.8.9]
- run: |
- mkdir ct-per_vhost_connection_limit-logs-v3.8.9 && chmod 777 ct-per_vhost_connection_limit-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=per_vhost_connection_limit \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-per_vhost_connection_limit-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-per_vhost_connection_limit-logs-mixed-v3.8.9
- path: ct-per_vhost_connection_limit-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-per_vhost_connection_limit_partitions:
- name: rabbit-ct-per_vhost_connection_limit_partitions
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-per_vhost_connection_limit_partitions
- run: |
- mkdir ct-per_vhost_connection_limit_partitions-logs && chmod 777 ct-per_vhost_connection_limit_partitions-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=per_vhost_connection_limit_partitions \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-per_vhost_connection_limit_partitions-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-per_vhost_connection_limit_partitions-logs
- path: ct-per_vhost_connection_limit_partitions-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-per_vhost_connection_limit_partitions [mixed v3.7.28]
- run: |
- mkdir ct-per_vhost_connection_limit_partitions-logs-v3.7.28 && chmod 777 ct-per_vhost_connection_limit_partitions-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=per_vhost_connection_limit_partitions \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-per_vhost_connection_limit_partitions-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-per_vhost_connection_limit_partitions-logs-mixed-v3.7.28
- path: ct-per_vhost_connection_limit_partitions-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-per_vhost_connection_limit_partitions [mixed v3.8.9]
- run: |
- mkdir ct-per_vhost_connection_limit_partitions-logs-v3.8.9 && chmod 777 ct-per_vhost_connection_limit_partitions-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=per_vhost_connection_limit_partitions \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-per_vhost_connection_limit_partitions-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-per_vhost_connection_limit_partitions-logs-mixed-v3.8.9
- path: ct-per_vhost_connection_limit_partitions-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-per_vhost_msg_store:
- name: rabbit-ct-per_vhost_msg_store
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-per_vhost_msg_store
- run: |
- mkdir ct-per_vhost_msg_store-logs && chmod 777 ct-per_vhost_msg_store-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=per_vhost_msg_store \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-per_vhost_msg_store-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-per_vhost_msg_store-logs
- path: ct-per_vhost_msg_store-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-per_vhost_msg_store [mixed v3.7.28]
- run: |
- mkdir ct-per_vhost_msg_store-logs-v3.7.28 && chmod 777 ct-per_vhost_msg_store-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=per_vhost_msg_store \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-per_vhost_msg_store-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-per_vhost_msg_store-logs-mixed-v3.7.28
- path: ct-per_vhost_msg_store-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-per_vhost_msg_store [mixed v3.8.9]
- run: |
- mkdir ct-per_vhost_msg_store-logs-v3.8.9 && chmod 777 ct-per_vhost_msg_store-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=per_vhost_msg_store \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-per_vhost_msg_store-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-per_vhost_msg_store-logs-mixed-v3.8.9
- path: ct-per_vhost_msg_store-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-per_vhost_queue_limit:
- name: rabbit-ct-per_vhost_queue_limit
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-per_vhost_queue_limit
- run: |
- mkdir ct-per_vhost_queue_limit-logs && chmod 777 ct-per_vhost_queue_limit-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=per_vhost_queue_limit \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-per_vhost_queue_limit-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-per_vhost_queue_limit-logs
- path: ct-per_vhost_queue_limit-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-per_vhost_queue_limit [mixed v3.7.28]
- run: |
- mkdir ct-per_vhost_queue_limit-logs-v3.7.28 && chmod 777 ct-per_vhost_queue_limit-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=per_vhost_queue_limit \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-per_vhost_queue_limit-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-per_vhost_queue_limit-logs-mixed-v3.7.28
- path: ct-per_vhost_queue_limit-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-per_vhost_queue_limit [mixed v3.8.9]
- run: |
- mkdir ct-per_vhost_queue_limit-logs-v3.8.9 && chmod 777 ct-per_vhost_queue_limit-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=per_vhost_queue_limit \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-per_vhost_queue_limit-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-per_vhost_queue_limit-logs-mixed-v3.8.9
- path: ct-per_vhost_queue_limit-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-policy:
- name: rabbit-ct-policy
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-policy
- run: |
- mkdir ct-policy-logs && chmod 777 ct-policy-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=policy \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-policy-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-policy-logs
- path: ct-policy-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-policy [mixed v3.7.28]
- run: |
- mkdir ct-policy-logs-v3.7.28 && chmod 777 ct-policy-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=policy \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-policy-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-policy-logs-mixed-v3.7.28
- path: ct-policy-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-policy [mixed v3.8.9]
- run: |
- mkdir ct-policy-logs-v3.8.9 && chmod 777 ct-policy-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=policy \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-policy-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-policy-logs-mixed-v3.8.9
- path: ct-policy-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-priority_queue:
- name: rabbit-ct-priority_queue
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-priority_queue
- run: |
- mkdir ct-priority_queue-logs && chmod 777 ct-priority_queue-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=priority_queue \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-priority_queue-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-priority_queue-logs
- path: ct-priority_queue-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-priority_queue [mixed v3.7.28]
- run: |
- mkdir ct-priority_queue-logs-v3.7.28 && chmod 777 ct-priority_queue-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=priority_queue \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-priority_queue-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-priority_queue-logs-mixed-v3.7.28
- path: ct-priority_queue-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-priority_queue [mixed v3.8.9]
- run: |
- mkdir ct-priority_queue-logs-v3.8.9 && chmod 777 ct-priority_queue-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=priority_queue \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-priority_queue-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-priority_queue-logs-mixed-v3.8.9
- path: ct-priority_queue-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-priority_queue_recovery:
- name: rabbit-ct-priority_queue_recovery
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-priority_queue_recovery
- run: |
- mkdir ct-priority_queue_recovery-logs && chmod 777 ct-priority_queue_recovery-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=priority_queue_recovery \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-priority_queue_recovery-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-priority_queue_recovery-logs
- path: ct-priority_queue_recovery-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-priority_queue_recovery [mixed v3.7.28]
- run: |
- mkdir ct-priority_queue_recovery-logs-v3.7.28 && chmod 777 ct-priority_queue_recovery-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=priority_queue_recovery \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-priority_queue_recovery-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-priority_queue_recovery-logs-mixed-v3.7.28
- path: ct-priority_queue_recovery-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-priority_queue_recovery [mixed v3.8.9]
- run: |
- mkdir ct-priority_queue_recovery-logs-v3.8.9 && chmod 777 ct-priority_queue_recovery-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=priority_queue_recovery \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-priority_queue_recovery-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-priority_queue_recovery-logs-mixed-v3.8.9
- path: ct-priority_queue_recovery-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-product_info:
- name: rabbit-ct-product_info
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-product_info
- run: |
- mkdir ct-product_info-logs && chmod 777 ct-product_info-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=product_info \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-product_info-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-product_info-logs
- path: ct-product_info-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-product_info [mixed v3.7.28]
- run: |
- mkdir ct-product_info-logs-v3.7.28 && chmod 777 ct-product_info-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=product_info \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-product_info-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-product_info-logs-mixed-v3.7.28
- path: ct-product_info-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-product_info [mixed v3.8.9]
- run: |
- mkdir ct-product_info-logs-v3.8.9 && chmod 777 ct-product_info-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=product_info \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-product_info-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-product_info-logs-mixed-v3.8.9
- path: ct-product_info-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-proxy_protocol:
- name: rabbit-ct-proxy_protocol
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-proxy_protocol
- run: |
- mkdir ct-proxy_protocol-logs && chmod 777 ct-proxy_protocol-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=proxy_protocol \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-proxy_protocol-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-proxy_protocol-logs
- path: ct-proxy_protocol-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-proxy_protocol [mixed v3.7.28]
- run: |
- mkdir ct-proxy_protocol-logs-v3.7.28 && chmod 777 ct-proxy_protocol-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=proxy_protocol \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-proxy_protocol-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-proxy_protocol-logs-mixed-v3.7.28
- path: ct-proxy_protocol-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-proxy_protocol [mixed v3.8.9]
- run: |
- mkdir ct-proxy_protocol-logs-v3.8.9 && chmod 777 ct-proxy_protocol-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=proxy_protocol \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-proxy_protocol-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-proxy_protocol-logs-mixed-v3.8.9
- path: ct-proxy_protocol-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-publisher_confirms_parallel:
- name: rabbit-ct-publisher_confirms_parallel
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-publisher_confirms_parallel
- run: |
- mkdir ct-publisher_confirms_parallel-logs && chmod 777 ct-publisher_confirms_parallel-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=publisher_confirms_parallel \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-publisher_confirms_parallel-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-publisher_confirms_parallel-logs
- path: ct-publisher_confirms_parallel-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-publisher_confirms_parallel [mixed v3.7.28]
- run: |
- mkdir ct-publisher_confirms_parallel-logs-v3.7.28 && chmod 777 ct-publisher_confirms_parallel-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=publisher_confirms_parallel \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-publisher_confirms_parallel-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-publisher_confirms_parallel-logs-mixed-v3.7.28
- path: ct-publisher_confirms_parallel-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-publisher_confirms_parallel [mixed v3.8.9]
- run: |
- mkdir ct-publisher_confirms_parallel-logs-v3.8.9 && chmod 777 ct-publisher_confirms_parallel-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=publisher_confirms_parallel \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-publisher_confirms_parallel-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-publisher_confirms_parallel-logs-mixed-v3.8.9
- path: ct-publisher_confirms_parallel-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-queue_length_limits:
- name: rabbit-ct-queue_length_limits
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-queue_length_limits
- run: |
- mkdir ct-queue_length_limits-logs && chmod 777 ct-queue_length_limits-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=queue_length_limits \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-queue_length_limits-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-queue_length_limits-logs
- path: ct-queue_length_limits-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-queue_length_limits [mixed v3.7.28]
- run: |
- mkdir ct-queue_length_limits-logs-v3.7.28 && chmod 777 ct-queue_length_limits-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=queue_length_limits \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-queue_length_limits-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-queue_length_limits-logs-mixed-v3.7.28
- path: ct-queue_length_limits-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-queue_length_limits [mixed v3.8.9]
- run: |
- mkdir ct-queue_length_limits-logs-v3.8.9 && chmod 777 ct-queue_length_limits-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=queue_length_limits \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-queue_length_limits-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-queue_length_limits-logs-mixed-v3.8.9
- path: ct-queue_length_limits-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-queue_master_location:
- name: rabbit-ct-queue_master_location
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-queue_master_location
- run: |
- mkdir ct-queue_master_location-logs && chmod 777 ct-queue_master_location-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=queue_master_location \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-queue_master_location-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-queue_master_location-logs
- path: ct-queue_master_location-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-queue_master_location [mixed v3.7.28]
- run: |
- mkdir ct-queue_master_location-logs-v3.7.28 && chmod 777 ct-queue_master_location-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=queue_master_location \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-queue_master_location-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-queue_master_location-logs-mixed-v3.7.28
- path: ct-queue_master_location-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-queue_master_location [mixed v3.8.9]
- run: |
- mkdir ct-queue_master_location-logs-v3.8.9 && chmod 777 ct-queue_master_location-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=queue_master_location \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-queue_master_location-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-queue_master_location-logs-mixed-v3.8.9
- path: ct-queue_master_location-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-queue_parallel:
- name: rabbit-ct-queue_parallel
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-queue_parallel
- run: |
- mkdir ct-queue_parallel-logs && chmod 777 ct-queue_parallel-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=queue_parallel \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-queue_parallel-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-queue_parallel-logs
- path: ct-queue_parallel-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-queue_parallel [mixed v3.7.28]
- run: |
- mkdir ct-queue_parallel-logs-v3.7.28 && chmod 777 ct-queue_parallel-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=queue_parallel \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-queue_parallel-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-queue_parallel-logs-mixed-v3.7.28
- path: ct-queue_parallel-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-queue_parallel [mixed v3.8.9]
- run: |
- mkdir ct-queue_parallel-logs-v3.8.9 && chmod 777 ct-queue_parallel-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=queue_parallel \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-queue_parallel-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-queue_parallel-logs-mixed-v3.8.9
- path: ct-queue_parallel-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-queue_type:
- name: rabbit-ct-queue_type
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-queue_type
- run: |
- mkdir ct-queue_type-logs && chmod 777 ct-queue_type-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=queue_type \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-queue_type-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-queue_type-logs
- path: ct-queue_type-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-queue_type [mixed v3.7.28]
- run: |
- mkdir ct-queue_type-logs-v3.7.28 && chmod 777 ct-queue_type-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=queue_type \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-queue_type-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-queue_type-logs-mixed-v3.7.28
- path: ct-queue_type-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-queue_type [mixed v3.8.9]
- run: |
- mkdir ct-queue_type-logs-v3.8.9 && chmod 777 ct-queue_type-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=queue_type \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-queue_type-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-queue_type-logs-mixed-v3.8.9
- path: ct-queue_type-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-quorum_queue:
- name: rabbit-ct-quorum_queue
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-quorum_queue
- run: |
- mkdir ct-quorum_queue-logs && chmod 777 ct-quorum_queue-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=quorum_queue \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-quorum_queue-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-quorum_queue-logs
- path: ct-quorum_queue-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-quorum_queue [mixed v3.7.28]
- run: |
- mkdir ct-quorum_queue-logs-v3.7.28 && chmod 777 ct-quorum_queue-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=quorum_queue \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-quorum_queue-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-quorum_queue-logs-mixed-v3.7.28
- path: ct-quorum_queue-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-quorum_queue [mixed v3.8.9]
- run: |
- mkdir ct-quorum_queue-logs-v3.8.9 && chmod 777 ct-quorum_queue-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=quorum_queue \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-quorum_queue-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-quorum_queue-logs-mixed-v3.8.9
- path: ct-quorum_queue-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-rabbit_confirms:
- name: rabbit-ct-rabbit_confirms
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-rabbit_confirms
- run: |
- mkdir ct-rabbit_confirms-logs && chmod 777 ct-rabbit_confirms-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbit_confirms \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-rabbit_confirms-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbit_confirms-logs
- path: ct-rabbit_confirms-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-rabbit_confirms [mixed v3.7.28]
- run: |
- mkdir ct-rabbit_confirms-logs-v3.7.28 && chmod 777 ct-rabbit_confirms-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbit_confirms \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-rabbit_confirms-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbit_confirms-logs-mixed-v3.7.28
- path: ct-rabbit_confirms-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-rabbit_confirms [mixed v3.8.9]
- run: |
- mkdir ct-rabbit_confirms-logs-v3.8.9 && chmod 777 ct-rabbit_confirms-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbit_confirms \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-rabbit_confirms-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbit_confirms-logs-mixed-v3.8.9
- path: ct-rabbit_confirms-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-rabbit_core_metrics_gc:
- name: rabbit-ct-rabbit_core_metrics_gc
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-rabbit_core_metrics_gc
- run: |
- mkdir ct-rabbit_core_metrics_gc-logs && chmod 777 ct-rabbit_core_metrics_gc-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbit_core_metrics_gc \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-rabbit_core_metrics_gc-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbit_core_metrics_gc-logs
- path: ct-rabbit_core_metrics_gc-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-rabbit_core_metrics_gc [mixed v3.7.28]
- run: |
- mkdir ct-rabbit_core_metrics_gc-logs-v3.7.28 && chmod 777 ct-rabbit_core_metrics_gc-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbit_core_metrics_gc \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-rabbit_core_metrics_gc-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbit_core_metrics_gc-logs-mixed-v3.7.28
- path: ct-rabbit_core_metrics_gc-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-rabbit_core_metrics_gc [mixed v3.8.9]
- run: |
- mkdir ct-rabbit_core_metrics_gc-logs-v3.8.9 && chmod 777 ct-rabbit_core_metrics_gc-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbit_core_metrics_gc \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-rabbit_core_metrics_gc-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbit_core_metrics_gc-logs-mixed-v3.8.9
- path: ct-rabbit_core_metrics_gc-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-rabbit_fifo:
- name: rabbit-ct-rabbit_fifo
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-rabbit_fifo
- run: |
- mkdir ct-rabbit_fifo-logs && chmod 777 ct-rabbit_fifo-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbit_fifo \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-rabbit_fifo-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbit_fifo-logs
- path: ct-rabbit_fifo-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-rabbit_fifo [mixed v3.7.28]
- run: |
- mkdir ct-rabbit_fifo-logs-v3.7.28 && chmod 777 ct-rabbit_fifo-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbit_fifo \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-rabbit_fifo-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbit_fifo-logs-mixed-v3.7.28
- path: ct-rabbit_fifo-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-rabbit_fifo [mixed v3.8.9]
- run: |
- mkdir ct-rabbit_fifo-logs-v3.8.9 && chmod 777 ct-rabbit_fifo-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbit_fifo \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-rabbit_fifo-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbit_fifo-logs-mixed-v3.8.9
- path: ct-rabbit_fifo-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-rabbit_fifo_int:
- name: rabbit-ct-rabbit_fifo_int
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-rabbit_fifo_int
- run: |
- mkdir ct-rabbit_fifo_int-logs && chmod 777 ct-rabbit_fifo_int-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbit_fifo_int \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-rabbit_fifo_int-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbit_fifo_int-logs
- path: ct-rabbit_fifo_int-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-rabbit_fifo_int [mixed v3.7.28]
- run: |
- mkdir ct-rabbit_fifo_int-logs-v3.7.28 && chmod 777 ct-rabbit_fifo_int-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbit_fifo_int \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-rabbit_fifo_int-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbit_fifo_int-logs-mixed-v3.7.28
- path: ct-rabbit_fifo_int-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-rabbit_fifo_int [mixed v3.8.9]
- run: |
- mkdir ct-rabbit_fifo_int-logs-v3.8.9 && chmod 777 ct-rabbit_fifo_int-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbit_fifo_int \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-rabbit_fifo_int-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbit_fifo_int-logs-mixed-v3.8.9
- path: ct-rabbit_fifo_int-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-rabbit_fifo_prop:
- name: rabbit-ct-rabbit_fifo_prop
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-rabbit_fifo_prop
- run: |
- mkdir ct-rabbit_fifo_prop-logs && chmod 777 ct-rabbit_fifo_prop-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbit_fifo_prop \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-rabbit_fifo_prop-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbit_fifo_prop-logs
- path: ct-rabbit_fifo_prop-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-rabbit_fifo_prop [mixed v3.7.28]
- run: |
- mkdir ct-rabbit_fifo_prop-logs-v3.7.28 && chmod 777 ct-rabbit_fifo_prop-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbit_fifo_prop \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-rabbit_fifo_prop-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbit_fifo_prop-logs-mixed-v3.7.28
- path: ct-rabbit_fifo_prop-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-rabbit_fifo_prop [mixed v3.8.9]
- run: |
- mkdir ct-rabbit_fifo_prop-logs-v3.8.9 && chmod 777 ct-rabbit_fifo_prop-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbit_fifo_prop \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-rabbit_fifo_prop-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbit_fifo_prop-logs-mixed-v3.8.9
- path: ct-rabbit_fifo_prop-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-rabbit_fifo_v0:
- name: rabbit-ct-rabbit_fifo_v0
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-rabbit_fifo_v0
- run: |
- mkdir ct-rabbit_fifo_v0-logs && chmod 777 ct-rabbit_fifo_v0-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbit_fifo_v0 \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-rabbit_fifo_v0-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbit_fifo_v0-logs
- path: ct-rabbit_fifo_v0-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-rabbit_fifo_v0 [mixed v3.7.28]
- run: |
- mkdir ct-rabbit_fifo_v0-logs-v3.7.28 && chmod 777 ct-rabbit_fifo_v0-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbit_fifo_v0 \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-rabbit_fifo_v0-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbit_fifo_v0-logs-mixed-v3.7.28
- path: ct-rabbit_fifo_v0-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-rabbit_fifo_v0 [mixed v3.8.9]
- run: |
- mkdir ct-rabbit_fifo_v0-logs-v3.8.9 && chmod 777 ct-rabbit_fifo_v0-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbit_fifo_v0 \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-rabbit_fifo_v0-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbit_fifo_v0-logs-mixed-v3.8.9
- path: ct-rabbit_fifo_v0-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-rabbit_msg_record:
- name: rabbit-ct-rabbit_msg_record
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-rabbit_msg_record
- run: |
- mkdir ct-rabbit_msg_record-logs && chmod 777 ct-rabbit_msg_record-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbit_msg_record \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-rabbit_msg_record-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbit_msg_record-logs
- path: ct-rabbit_msg_record-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-rabbit_msg_record [mixed v3.7.28]
- run: |
- mkdir ct-rabbit_msg_record-logs-v3.7.28 && chmod 777 ct-rabbit_msg_record-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbit_msg_record \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-rabbit_msg_record-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbit_msg_record-logs-mixed-v3.7.28
- path: ct-rabbit_msg_record-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-rabbit_msg_record [mixed v3.8.9]
- run: |
- mkdir ct-rabbit_msg_record-logs-v3.8.9 && chmod 777 ct-rabbit_msg_record-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbit_msg_record \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-rabbit_msg_record-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbit_msg_record-logs-mixed-v3.8.9
- path: ct-rabbit_msg_record-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-rabbit_stream_queue:
- name: rabbit-ct-rabbit_stream_queue
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-rabbit_stream_queue
- run: |
- mkdir ct-rabbit_stream_queue-logs && chmod 777 ct-rabbit_stream_queue-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbit_stream_queue \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-rabbit_stream_queue-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbit_stream_queue-logs
- path: ct-rabbit_stream_queue-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-rabbit_stream_queue [mixed v3.7.28]
- run: |
- mkdir ct-rabbit_stream_queue-logs-v3.7.28 && chmod 777 ct-rabbit_stream_queue-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbit_stream_queue \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-rabbit_stream_queue-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbit_stream_queue-logs-mixed-v3.7.28
- path: ct-rabbit_stream_queue-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-rabbit_stream_queue [mixed v3.8.9]
- run: |
- mkdir ct-rabbit_stream_queue-logs-v3.8.9 && chmod 777 ct-rabbit_stream_queue-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbit_stream_queue \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-rabbit_stream_queue-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbit_stream_queue-logs-mixed-v3.8.9
- path: ct-rabbit_stream_queue-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-rabbitmq_queues_cli_integration:
- name: rabbit-ct-rabbitmq_queues_cli_integration
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-rabbitmq_queues_cli_integration
- run: |
- mkdir ct-rabbitmq_queues_cli_integration-logs && chmod 777 ct-rabbitmq_queues_cli_integration-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbitmq_queues_cli_integration \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-rabbitmq_queues_cli_integration-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbitmq_queues_cli_integration-logs
- path: ct-rabbitmq_queues_cli_integration-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-rabbitmq_queues_cli_integration [mixed v3.7.28]
- run: |
- mkdir ct-rabbitmq_queues_cli_integration-logs-v3.7.28 && chmod 777 ct-rabbitmq_queues_cli_integration-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbitmq_queues_cli_integration \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-rabbitmq_queues_cli_integration-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbitmq_queues_cli_integration-logs-mixed-v3.7.28
- path: ct-rabbitmq_queues_cli_integration-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-rabbitmq_queues_cli_integration [mixed v3.8.9]
- run: |
- mkdir ct-rabbitmq_queues_cli_integration-logs-v3.8.9 && chmod 777 ct-rabbitmq_queues_cli_integration-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbitmq_queues_cli_integration \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-rabbitmq_queues_cli_integration-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbitmq_queues_cli_integration-logs-mixed-v3.8.9
- path: ct-rabbitmq_queues_cli_integration-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-rabbitmqctl_integration:
- name: rabbit-ct-rabbitmqctl_integration
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-rabbitmqctl_integration
- run: |
- mkdir ct-rabbitmqctl_integration-logs && chmod 777 ct-rabbitmqctl_integration-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbitmqctl_integration \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-rabbitmqctl_integration-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbitmqctl_integration-logs
- path: ct-rabbitmqctl_integration-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-rabbitmqctl_integration [mixed v3.7.28]
- run: |
- mkdir ct-rabbitmqctl_integration-logs-v3.7.28 && chmod 777 ct-rabbitmqctl_integration-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbitmqctl_integration \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-rabbitmqctl_integration-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbitmqctl_integration-logs-mixed-v3.7.28
- path: ct-rabbitmqctl_integration-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-rabbitmqctl_integration [mixed v3.8.9]
- run: |
- mkdir ct-rabbitmqctl_integration-logs-v3.8.9 && chmod 777 ct-rabbitmqctl_integration-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbitmqctl_integration \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-rabbitmqctl_integration-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbitmqctl_integration-logs-mixed-v3.8.9
- path: ct-rabbitmqctl_integration-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-rabbitmqctl_shutdown:
- name: rabbit-ct-rabbitmqctl_shutdown
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-rabbitmqctl_shutdown
- run: |
- mkdir ct-rabbitmqctl_shutdown-logs && chmod 777 ct-rabbitmqctl_shutdown-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbitmqctl_shutdown \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-rabbitmqctl_shutdown-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbitmqctl_shutdown-logs
- path: ct-rabbitmqctl_shutdown-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-rabbitmqctl_shutdown [mixed v3.7.28]
- run: |
- mkdir ct-rabbitmqctl_shutdown-logs-v3.7.28 && chmod 777 ct-rabbitmqctl_shutdown-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbitmqctl_shutdown \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-rabbitmqctl_shutdown-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbitmqctl_shutdown-logs-mixed-v3.7.28
- path: ct-rabbitmqctl_shutdown-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-rabbitmqctl_shutdown [mixed v3.8.9]
- run: |
- mkdir ct-rabbitmqctl_shutdown-logs-v3.8.9 && chmod 777 ct-rabbitmqctl_shutdown-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbitmqctl_shutdown \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-rabbitmqctl_shutdown-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbitmqctl_shutdown-logs-mixed-v3.8.9
- path: ct-rabbitmqctl_shutdown-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-signal_handling:
- name: rabbit-ct-signal_handling
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-signal_handling
- run: |
- mkdir ct-signal_handling-logs && chmod 777 ct-signal_handling-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=signal_handling \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-signal_handling-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-signal_handling-logs
- path: ct-signal_handling-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-signal_handling [mixed v3.7.28]
- run: |
- mkdir ct-signal_handling-logs-v3.7.28 && chmod 777 ct-signal_handling-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=signal_handling \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-signal_handling-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-signal_handling-logs-mixed-v3.7.28
- path: ct-signal_handling-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-signal_handling [mixed v3.8.9]
- run: |
- mkdir ct-signal_handling-logs-v3.8.9 && chmod 777 ct-signal_handling-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=signal_handling \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-signal_handling-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-signal_handling-logs-mixed-v3.8.9
- path: ct-signal_handling-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-simple_ha:
- name: rabbit-ct-simple_ha
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-simple_ha
- run: |
- mkdir ct-simple_ha-logs && chmod 777 ct-simple_ha-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=simple_ha \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-simple_ha-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-simple_ha-logs
- path: ct-simple_ha-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-simple_ha [mixed v3.7.28]
- run: |
- mkdir ct-simple_ha-logs-v3.7.28 && chmod 777 ct-simple_ha-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=simple_ha \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-simple_ha-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-simple_ha-logs-mixed-v3.7.28
- path: ct-simple_ha-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-simple_ha [mixed v3.8.9]
- run: |
- mkdir ct-simple_ha-logs-v3.8.9 && chmod 777 ct-simple_ha-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=simple_ha \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-simple_ha-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-simple_ha-logs-mixed-v3.8.9
- path: ct-simple_ha-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-single_active_consumer:
- name: rabbit-ct-single_active_consumer
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-single_active_consumer
- run: |
- mkdir ct-single_active_consumer-logs && chmod 777 ct-single_active_consumer-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=single_active_consumer \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-single_active_consumer-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-single_active_consumer-logs
- path: ct-single_active_consumer-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-single_active_consumer [mixed v3.7.28]
- run: |
- mkdir ct-single_active_consumer-logs-v3.7.28 && chmod 777 ct-single_active_consumer-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=single_active_consumer \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-single_active_consumer-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-single_active_consumer-logs-mixed-v3.7.28
- path: ct-single_active_consumer-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-single_active_consumer [mixed v3.8.9]
- run: |
- mkdir ct-single_active_consumer-logs-v3.8.9 && chmod 777 ct-single_active_consumer-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=single_active_consumer \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-single_active_consumer-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-single_active_consumer-logs-mixed-v3.8.9
- path: ct-single_active_consumer-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-sync_detection:
- name: rabbit-ct-sync_detection
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-sync_detection
- run: |
- mkdir ct-sync_detection-logs && chmod 777 ct-sync_detection-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=sync_detection \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-sync_detection-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-sync_detection-logs
- path: ct-sync_detection-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-sync_detection [mixed v3.7.28]
- run: |
- mkdir ct-sync_detection-logs-v3.7.28 && chmod 777 ct-sync_detection-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=sync_detection \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-sync_detection-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-sync_detection-logs-mixed-v3.7.28
- path: ct-sync_detection-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-sync_detection [mixed v3.8.9]
- run: |
- mkdir ct-sync_detection-logs-v3.8.9 && chmod 777 ct-sync_detection-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=sync_detection \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-sync_detection-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-sync_detection-logs-mixed-v3.8.9
- path: ct-sync_detection-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-term_to_binary_compat_prop:
- name: rabbit-ct-term_to_binary_compat_prop
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-term_to_binary_compat_prop
- run: |
- mkdir ct-term_to_binary_compat_prop-logs && chmod 777 ct-term_to_binary_compat_prop-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=term_to_binary_compat_prop \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-term_to_binary_compat_prop-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-term_to_binary_compat_prop-logs
- path: ct-term_to_binary_compat_prop-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-term_to_binary_compat_prop [mixed v3.7.28]
- run: |
- mkdir ct-term_to_binary_compat_prop-logs-v3.7.28 && chmod 777 ct-term_to_binary_compat_prop-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=term_to_binary_compat_prop \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-term_to_binary_compat_prop-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-term_to_binary_compat_prop-logs-mixed-v3.7.28
- path: ct-term_to_binary_compat_prop-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-term_to_binary_compat_prop [mixed v3.8.9]
- run: |
- mkdir ct-term_to_binary_compat_prop-logs-v3.8.9 && chmod 777 ct-term_to_binary_compat_prop-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=term_to_binary_compat_prop \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-term_to_binary_compat_prop-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-term_to_binary_compat_prop-logs-mixed-v3.8.9
- path: ct-term_to_binary_compat_prop-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-topic_permission:
- name: rabbit-ct-topic_permission
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-topic_permission
- run: |
- mkdir ct-topic_permission-logs && chmod 777 ct-topic_permission-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=topic_permission \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-topic_permission-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-topic_permission-logs
- path: ct-topic_permission-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-topic_permission [mixed v3.7.28]
- run: |
- mkdir ct-topic_permission-logs-v3.7.28 && chmod 777 ct-topic_permission-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=topic_permission \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-topic_permission-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-topic_permission-logs-mixed-v3.7.28
- path: ct-topic_permission-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-topic_permission [mixed v3.8.9]
- run: |
- mkdir ct-topic_permission-logs-v3.8.9 && chmod 777 ct-topic_permission-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=topic_permission \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-topic_permission-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-topic_permission-logs-mixed-v3.8.9
- path: ct-topic_permission-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-unit_access_control:
- name: rabbit-ct-unit_access_control
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_access_control
- run: |
- mkdir ct-unit_access_control-logs && chmod 777 ct-unit_access_control-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_access_control \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_access_control-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_access_control-logs
- path: ct-unit_access_control-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_access_control [mixed v3.7.28]
- run: |
- mkdir ct-unit_access_control-logs-v3.7.28 && chmod 777 ct-unit_access_control-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_access_control \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-unit_access_control-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_access_control-logs-mixed-v3.7.28
- path: ct-unit_access_control-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_access_control [mixed v3.8.9]
- run: |
- mkdir ct-unit_access_control-logs-v3.8.9 && chmod 777 ct-unit_access_control-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_access_control \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-unit_access_control-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_access_control-logs-mixed-v3.8.9
- path: ct-unit_access_control-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-unit_access_control_authn_authz_context_propagation:
- name: rabbit-ct-unit_access_control_authn_authz_context_propagation
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_access_control_authn_authz_context_propagation
- run: |
- mkdir ct-unit_access_control_authn_authz_context_propagation-logs && chmod 777 ct-unit_access_control_authn_authz_context_propagation-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_access_control_authn_authz_context_propagation \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_access_control_authn_authz_context_propagation-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_access_control_authn_authz_context_propagation-logs
- path: ct-unit_access_control_authn_authz_context_propagation-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_access_control_authn_authz_context_propagation [mixed v3.7.28]
- run: |
- mkdir ct-unit_access_control_authn_authz_context_propagation-logs-v3.7.28 && chmod 777 ct-unit_access_control_authn_authz_context_propagation-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_access_control_authn_authz_context_propagation \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-unit_access_control_authn_authz_context_propagation-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_access_control_authn_authz_context_propagation-logs-mixed-v3.7.28
- path: ct-unit_access_control_authn_authz_context_propagation-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_access_control_authn_authz_context_propagation [mixed v3.8.9]
- run: |
- mkdir ct-unit_access_control_authn_authz_context_propagation-logs-v3.8.9 && chmod 777 ct-unit_access_control_authn_authz_context_propagation-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_access_control_authn_authz_context_propagation \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-unit_access_control_authn_authz_context_propagation-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_access_control_authn_authz_context_propagation-logs-mixed-v3.8.9
- path: ct-unit_access_control_authn_authz_context_propagation-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-unit_access_control_credential_validation:
- name: rabbit-ct-unit_access_control_credential_validation
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_access_control_credential_validation
- run: |
- mkdir ct-unit_access_control_credential_validation-logs && chmod 777 ct-unit_access_control_credential_validation-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_access_control_credential_validation \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_access_control_credential_validation-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_access_control_credential_validation-logs
- path: ct-unit_access_control_credential_validation-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_access_control_credential_validation [mixed v3.7.28]
- run: |
- mkdir ct-unit_access_control_credential_validation-logs-v3.7.28 && chmod 777 ct-unit_access_control_credential_validation-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_access_control_credential_validation \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-unit_access_control_credential_validation-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_access_control_credential_validation-logs-mixed-v3.7.28
- path: ct-unit_access_control_credential_validation-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_access_control_credential_validation [mixed v3.8.9]
- run: |
- mkdir ct-unit_access_control_credential_validation-logs-v3.8.9 && chmod 777 ct-unit_access_control_credential_validation-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_access_control_credential_validation \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-unit_access_control_credential_validation-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_access_control_credential_validation-logs-mixed-v3.8.9
- path: ct-unit_access_control_credential_validation-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-unit_amqp091_content_framing:
- name: rabbit-ct-unit_amqp091_content_framing
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_amqp091_content_framing
- run: |
- mkdir ct-unit_amqp091_content_framing-logs && chmod 777 ct-unit_amqp091_content_framing-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_amqp091_content_framing \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_amqp091_content_framing-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_amqp091_content_framing-logs
- path: ct-unit_amqp091_content_framing-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_amqp091_content_framing [mixed v3.7.28]
- run: |
- mkdir ct-unit_amqp091_content_framing-logs-v3.7.28 && chmod 777 ct-unit_amqp091_content_framing-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_amqp091_content_framing \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-unit_amqp091_content_framing-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_amqp091_content_framing-logs-mixed-v3.7.28
- path: ct-unit_amqp091_content_framing-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_amqp091_content_framing [mixed v3.8.9]
- run: |
- mkdir ct-unit_amqp091_content_framing-logs-v3.8.9 && chmod 777 ct-unit_amqp091_content_framing-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_amqp091_content_framing \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-unit_amqp091_content_framing-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_amqp091_content_framing-logs-mixed-v3.8.9
- path: ct-unit_amqp091_content_framing-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-unit_amqp091_server_properties:
- name: rabbit-ct-unit_amqp091_server_properties
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_amqp091_server_properties
- run: |
- mkdir ct-unit_amqp091_server_properties-logs && chmod 777 ct-unit_amqp091_server_properties-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_amqp091_server_properties \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_amqp091_server_properties-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_amqp091_server_properties-logs
- path: ct-unit_amqp091_server_properties-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_amqp091_server_properties [mixed v3.7.28]
- run: |
- mkdir ct-unit_amqp091_server_properties-logs-v3.7.28 && chmod 777 ct-unit_amqp091_server_properties-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_amqp091_server_properties \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-unit_amqp091_server_properties-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_amqp091_server_properties-logs-mixed-v3.7.28
- path: ct-unit_amqp091_server_properties-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_amqp091_server_properties [mixed v3.8.9]
- run: |
- mkdir ct-unit_amqp091_server_properties-logs-v3.8.9 && chmod 777 ct-unit_amqp091_server_properties-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_amqp091_server_properties \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-unit_amqp091_server_properties-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_amqp091_server_properties-logs-mixed-v3.8.9
- path: ct-unit_amqp091_server_properties-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-unit_app_management:
- name: rabbit-ct-unit_app_management
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_app_management
- run: |
- mkdir ct-unit_app_management-logs && chmod 777 ct-unit_app_management-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_app_management \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_app_management-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_app_management-logs
- path: ct-unit_app_management-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_app_management [mixed v3.7.28]
- run: |
- mkdir ct-unit_app_management-logs-v3.7.28 && chmod 777 ct-unit_app_management-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_app_management \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-unit_app_management-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_app_management-logs-mixed-v3.7.28
- path: ct-unit_app_management-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_app_management [mixed v3.8.9]
- run: |
- mkdir ct-unit_app_management-logs-v3.8.9 && chmod 777 ct-unit_app_management-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_app_management \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-unit_app_management-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_app_management-logs-mixed-v3.8.9
- path: ct-unit_app_management-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-unit_cluster_formation_locking_mocks:
- name: rabbit-ct-unit_cluster_formation_locking_mocks
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_cluster_formation_locking_mocks
- run: |
- mkdir ct-unit_cluster_formation_locking_mocks-logs && chmod 777 ct-unit_cluster_formation_locking_mocks-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_cluster_formation_locking_mocks \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_cluster_formation_locking_mocks-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_cluster_formation_locking_mocks-logs
- path: ct-unit_cluster_formation_locking_mocks-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_cluster_formation_locking_mocks [mixed v3.7.28]
- run: |
- mkdir ct-unit_cluster_formation_locking_mocks-logs-v3.7.28 && chmod 777 ct-unit_cluster_formation_locking_mocks-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_cluster_formation_locking_mocks \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-unit_cluster_formation_locking_mocks-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_cluster_formation_locking_mocks-logs-mixed-v3.7.28
- path: ct-unit_cluster_formation_locking_mocks-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_cluster_formation_locking_mocks [mixed v3.8.9]
- run: |
- mkdir ct-unit_cluster_formation_locking_mocks-logs-v3.8.9 && chmod 777 ct-unit_cluster_formation_locking_mocks-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_cluster_formation_locking_mocks \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-unit_cluster_formation_locking_mocks-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_cluster_formation_locking_mocks-logs-mixed-v3.8.9
- path: ct-unit_cluster_formation_locking_mocks-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-unit_collections:
- name: rabbit-ct-unit_collections
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_collections
- run: |
- mkdir ct-unit_collections-logs && chmod 777 ct-unit_collections-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_collections \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_collections-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_collections-logs
- path: ct-unit_collections-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_collections [mixed v3.7.28]
- run: |
- mkdir ct-unit_collections-logs-v3.7.28 && chmod 777 ct-unit_collections-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_collections \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-unit_collections-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_collections-logs-mixed-v3.7.28
- path: ct-unit_collections-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_collections [mixed v3.8.9]
- run: |
- mkdir ct-unit_collections-logs-v3.8.9 && chmod 777 ct-unit_collections-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_collections \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-unit_collections-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_collections-logs-mixed-v3.8.9
- path: ct-unit_collections-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-unit_config_value_encryption:
- name: rabbit-ct-unit_config_value_encryption
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_config_value_encryption
- run: |
- mkdir ct-unit_config_value_encryption-logs && chmod 777 ct-unit_config_value_encryption-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_config_value_encryption \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_config_value_encryption-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_config_value_encryption-logs
- path: ct-unit_config_value_encryption-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_config_value_encryption [mixed v3.7.28]
- run: |
- mkdir ct-unit_config_value_encryption-logs-v3.7.28 && chmod 777 ct-unit_config_value_encryption-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_config_value_encryption \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-unit_config_value_encryption-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_config_value_encryption-logs-mixed-v3.7.28
- path: ct-unit_config_value_encryption-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_config_value_encryption [mixed v3.8.9]
- run: |
- mkdir ct-unit_config_value_encryption-logs-v3.8.9 && chmod 777 ct-unit_config_value_encryption-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_config_value_encryption \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-unit_config_value_encryption-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_config_value_encryption-logs-mixed-v3.8.9
- path: ct-unit_config_value_encryption-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-unit_connection_tracking:
- name: rabbit-ct-unit_connection_tracking
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_connection_tracking
- run: |
- mkdir ct-unit_connection_tracking-logs && chmod 777 ct-unit_connection_tracking-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_connection_tracking \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_connection_tracking-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_connection_tracking-logs
- path: ct-unit_connection_tracking-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_connection_tracking [mixed v3.7.28]
- run: |
- mkdir ct-unit_connection_tracking-logs-v3.7.28 && chmod 777 ct-unit_connection_tracking-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_connection_tracking \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-unit_connection_tracking-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_connection_tracking-logs-mixed-v3.7.28
- path: ct-unit_connection_tracking-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_connection_tracking [mixed v3.8.9]
- run: |
- mkdir ct-unit_connection_tracking-logs-v3.8.9 && chmod 777 ct-unit_connection_tracking-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_connection_tracking \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-unit_connection_tracking-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_connection_tracking-logs-mixed-v3.8.9
- path: ct-unit_connection_tracking-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-unit_credit_flow:
- name: rabbit-ct-unit_credit_flow
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_credit_flow
- run: |
- mkdir ct-unit_credit_flow-logs && chmod 777 ct-unit_credit_flow-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_credit_flow \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_credit_flow-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_credit_flow-logs
- path: ct-unit_credit_flow-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_credit_flow [mixed v3.7.28]
- run: |
- mkdir ct-unit_credit_flow-logs-v3.7.28 && chmod 777 ct-unit_credit_flow-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_credit_flow \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-unit_credit_flow-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_credit_flow-logs-mixed-v3.7.28
- path: ct-unit_credit_flow-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_credit_flow [mixed v3.8.9]
- run: |
- mkdir ct-unit_credit_flow-logs-v3.8.9 && chmod 777 ct-unit_credit_flow-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_credit_flow \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-unit_credit_flow-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_credit_flow-logs-mixed-v3.8.9
- path: ct-unit_credit_flow-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-unit_disk_monitor:
- name: rabbit-ct-unit_disk_monitor
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_disk_monitor
- run: |
- mkdir ct-unit_disk_monitor-logs && chmod 777 ct-unit_disk_monitor-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_disk_monitor \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_disk_monitor-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_disk_monitor-logs
- path: ct-unit_disk_monitor-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_disk_monitor [mixed v3.7.28]
- run: |
- mkdir ct-unit_disk_monitor-logs-v3.7.28 && chmod 777 ct-unit_disk_monitor-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_disk_monitor \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-unit_disk_monitor-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_disk_monitor-logs-mixed-v3.7.28
- path: ct-unit_disk_monitor-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_disk_monitor [mixed v3.8.9]
- run: |
- mkdir ct-unit_disk_monitor-logs-v3.8.9 && chmod 777 ct-unit_disk_monitor-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_disk_monitor \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-unit_disk_monitor-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_disk_monitor-logs-mixed-v3.8.9
- path: ct-unit_disk_monitor-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-unit_disk_monitor_mocks:
- name: rabbit-ct-unit_disk_monitor_mocks
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_disk_monitor_mocks
- run: |
- mkdir ct-unit_disk_monitor_mocks-logs && chmod 777 ct-unit_disk_monitor_mocks-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_disk_monitor_mocks \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_disk_monitor_mocks-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_disk_monitor_mocks-logs
- path: ct-unit_disk_monitor_mocks-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_disk_monitor_mocks [mixed v3.7.28]
- run: |
- mkdir ct-unit_disk_monitor_mocks-logs-v3.7.28 && chmod 777 ct-unit_disk_monitor_mocks-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_disk_monitor_mocks \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-unit_disk_monitor_mocks-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_disk_monitor_mocks-logs-mixed-v3.7.28
- path: ct-unit_disk_monitor_mocks-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_disk_monitor_mocks [mixed v3.8.9]
- run: |
- mkdir ct-unit_disk_monitor_mocks-logs-v3.8.9 && chmod 777 ct-unit_disk_monitor_mocks-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_disk_monitor_mocks \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-unit_disk_monitor_mocks-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_disk_monitor_mocks-logs-mixed-v3.8.9
- path: ct-unit_disk_monitor_mocks-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-unit_file_handle_cache:
- name: rabbit-ct-unit_file_handle_cache
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_file_handle_cache
- run: |
- mkdir ct-unit_file_handle_cache-logs && chmod 777 ct-unit_file_handle_cache-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_file_handle_cache \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_file_handle_cache-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_file_handle_cache-logs
- path: ct-unit_file_handle_cache-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_file_handle_cache [mixed v3.7.28]
- run: |
- mkdir ct-unit_file_handle_cache-logs-v3.7.28 && chmod 777 ct-unit_file_handle_cache-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_file_handle_cache \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-unit_file_handle_cache-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_file_handle_cache-logs-mixed-v3.7.28
- path: ct-unit_file_handle_cache-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_file_handle_cache [mixed v3.8.9]
- run: |
- mkdir ct-unit_file_handle_cache-logs-v3.8.9 && chmod 777 ct-unit_file_handle_cache-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_file_handle_cache \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-unit_file_handle_cache-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_file_handle_cache-logs-mixed-v3.8.9
- path: ct-unit_file_handle_cache-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-unit_gen_server2:
- name: rabbit-ct-unit_gen_server2
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_gen_server2
- run: |
- mkdir ct-unit_gen_server2-logs && chmod 777 ct-unit_gen_server2-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_gen_server2 \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_gen_server2-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_gen_server2-logs
- path: ct-unit_gen_server2-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_gen_server2 [mixed v3.7.28]
- run: |
- mkdir ct-unit_gen_server2-logs-v3.7.28 && chmod 777 ct-unit_gen_server2-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_gen_server2 \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-unit_gen_server2-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_gen_server2-logs-mixed-v3.7.28
- path: ct-unit_gen_server2-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_gen_server2 [mixed v3.8.9]
- run: |
- mkdir ct-unit_gen_server2-logs-v3.8.9 && chmod 777 ct-unit_gen_server2-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_gen_server2 \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-unit_gen_server2-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_gen_server2-logs-mixed-v3.8.9
- path: ct-unit_gen_server2-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-unit_gm:
- name: rabbit-ct-unit_gm
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_gm
- run: |
- mkdir ct-unit_gm-logs && chmod 777 ct-unit_gm-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_gm \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_gm-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_gm-logs
- path: ct-unit_gm-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_gm [mixed v3.7.28]
- run: |
- mkdir ct-unit_gm-logs-v3.7.28 && chmod 777 ct-unit_gm-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_gm \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-unit_gm-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_gm-logs-mixed-v3.7.28
- path: ct-unit_gm-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_gm [mixed v3.8.9]
- run: |
- mkdir ct-unit_gm-logs-v3.8.9 && chmod 777 ct-unit_gm-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_gm \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-unit_gm-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_gm-logs-mixed-v3.8.9
- path: ct-unit_gm-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-unit_log_config:
- name: rabbit-ct-unit_log_config
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_log_config
- run: |
- mkdir ct-unit_log_config-logs && chmod 777 ct-unit_log_config-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_log_config \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_log_config-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_log_config-logs
- path: ct-unit_log_config-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_log_config [mixed v3.7.28]
- run: |
- mkdir ct-unit_log_config-logs-v3.7.28 && chmod 777 ct-unit_log_config-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_log_config \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-unit_log_config-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_log_config-logs-mixed-v3.7.28
- path: ct-unit_log_config-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_log_config [mixed v3.8.9]
- run: |
- mkdir ct-unit_log_config-logs-v3.8.9 && chmod 777 ct-unit_log_config-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_log_config \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-unit_log_config-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_log_config-logs-mixed-v3.8.9
- path: ct-unit_log_config-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-unit_log_management:
- name: rabbit-ct-unit_log_management
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_log_management
- run: |
- mkdir ct-unit_log_management-logs && chmod 777 ct-unit_log_management-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_log_management \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_log_management-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_log_management-logs
- path: ct-unit_log_management-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_log_management [mixed v3.7.28]
- run: |
- mkdir ct-unit_log_management-logs-v3.7.28 && chmod 777 ct-unit_log_management-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_log_management \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-unit_log_management-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_log_management-logs-mixed-v3.7.28
- path: ct-unit_log_management-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_log_management [mixed v3.8.9]
- run: |
- mkdir ct-unit_log_management-logs-v3.8.9 && chmod 777 ct-unit_log_management-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_log_management \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-unit_log_management-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_log_management-logs-mixed-v3.8.9
- path: ct-unit_log_management-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-unit_operator_policy:
- name: rabbit-ct-unit_operator_policy
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_operator_policy
- run: |
- mkdir ct-unit_operator_policy-logs && chmod 777 ct-unit_operator_policy-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_operator_policy \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_operator_policy-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_operator_policy-logs
- path: ct-unit_operator_policy-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_operator_policy [mixed v3.7.28]
- run: |
- mkdir ct-unit_operator_policy-logs-v3.7.28 && chmod 777 ct-unit_operator_policy-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_operator_policy \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-unit_operator_policy-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_operator_policy-logs-mixed-v3.7.28
- path: ct-unit_operator_policy-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_operator_policy [mixed v3.8.9]
- run: |
- mkdir ct-unit_operator_policy-logs-v3.8.9 && chmod 777 ct-unit_operator_policy-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_operator_policy \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-unit_operator_policy-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_operator_policy-logs-mixed-v3.8.9
- path: ct-unit_operator_policy-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-unit_pg_local:
- name: rabbit-ct-unit_pg_local
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_pg_local
- run: |
- mkdir ct-unit_pg_local-logs && chmod 777 ct-unit_pg_local-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_pg_local \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_pg_local-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_pg_local-logs
- path: ct-unit_pg_local-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_pg_local [mixed v3.7.28]
- run: |
- mkdir ct-unit_pg_local-logs-v3.7.28 && chmod 777 ct-unit_pg_local-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_pg_local \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-unit_pg_local-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_pg_local-logs-mixed-v3.7.28
- path: ct-unit_pg_local-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_pg_local [mixed v3.8.9]
- run: |
- mkdir ct-unit_pg_local-logs-v3.8.9 && chmod 777 ct-unit_pg_local-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_pg_local \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-unit_pg_local-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_pg_local-logs-mixed-v3.8.9
- path: ct-unit_pg_local-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-unit_plugin_directories:
- name: rabbit-ct-unit_plugin_directories
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_plugin_directories
- run: |
- mkdir ct-unit_plugin_directories-logs && chmod 777 ct-unit_plugin_directories-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_plugin_directories \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_plugin_directories-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_plugin_directories-logs
- path: ct-unit_plugin_directories-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_plugin_directories [mixed v3.7.28]
- run: |
- mkdir ct-unit_plugin_directories-logs-v3.7.28 && chmod 777 ct-unit_plugin_directories-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_plugin_directories \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-unit_plugin_directories-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_plugin_directories-logs-mixed-v3.7.28
- path: ct-unit_plugin_directories-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_plugin_directories [mixed v3.8.9]
- run: |
- mkdir ct-unit_plugin_directories-logs-v3.8.9 && chmod 777 ct-unit_plugin_directories-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_plugin_directories \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-unit_plugin_directories-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_plugin_directories-logs-mixed-v3.8.9
- path: ct-unit_plugin_directories-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-unit_plugin_versioning:
- name: rabbit-ct-unit_plugin_versioning
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_plugin_versioning
- run: |
- mkdir ct-unit_plugin_versioning-logs && chmod 777 ct-unit_plugin_versioning-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_plugin_versioning \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_plugin_versioning-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_plugin_versioning-logs
- path: ct-unit_plugin_versioning-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_plugin_versioning [mixed v3.7.28]
- run: |
- mkdir ct-unit_plugin_versioning-logs-v3.7.28 && chmod 777 ct-unit_plugin_versioning-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_plugin_versioning \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-unit_plugin_versioning-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_plugin_versioning-logs-mixed-v3.7.28
- path: ct-unit_plugin_versioning-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_plugin_versioning [mixed v3.8.9]
- run: |
- mkdir ct-unit_plugin_versioning-logs-v3.8.9 && chmod 777 ct-unit_plugin_versioning-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_plugin_versioning \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-unit_plugin_versioning-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_plugin_versioning-logs-mixed-v3.8.9
- path: ct-unit_plugin_versioning-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-unit_policy_validators:
- name: rabbit-ct-unit_policy_validators
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_policy_validators
- run: |
- mkdir ct-unit_policy_validators-logs && chmod 777 ct-unit_policy_validators-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_policy_validators \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_policy_validators-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_policy_validators-logs
- path: ct-unit_policy_validators-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_policy_validators [mixed v3.7.28]
- run: |
- mkdir ct-unit_policy_validators-logs-v3.7.28 && chmod 777 ct-unit_policy_validators-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_policy_validators \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-unit_policy_validators-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_policy_validators-logs-mixed-v3.7.28
- path: ct-unit_policy_validators-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_policy_validators [mixed v3.8.9]
- run: |
- mkdir ct-unit_policy_validators-logs-v3.8.9 && chmod 777 ct-unit_policy_validators-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_policy_validators \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-unit_policy_validators-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_policy_validators-logs-mixed-v3.8.9
- path: ct-unit_policy_validators-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-unit_priority_queue:
- name: rabbit-ct-unit_priority_queue
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_priority_queue
- run: |
- mkdir ct-unit_priority_queue-logs && chmod 777 ct-unit_priority_queue-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_priority_queue \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_priority_queue-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_priority_queue-logs
- path: ct-unit_priority_queue-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_priority_queue [mixed v3.7.28]
- run: |
- mkdir ct-unit_priority_queue-logs-v3.7.28 && chmod 777 ct-unit_priority_queue-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_priority_queue \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-unit_priority_queue-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_priority_queue-logs-mixed-v3.7.28
- path: ct-unit_priority_queue-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_priority_queue [mixed v3.8.9]
- run: |
- mkdir ct-unit_priority_queue-logs-v3.8.9 && chmod 777 ct-unit_priority_queue-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_priority_queue \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-unit_priority_queue-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_priority_queue-logs-mixed-v3.8.9
- path: ct-unit_priority_queue-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-unit_queue_consumers:
- name: rabbit-ct-unit_queue_consumers
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_queue_consumers
- run: |
- mkdir ct-unit_queue_consumers-logs && chmod 777 ct-unit_queue_consumers-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_queue_consumers \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_queue_consumers-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_queue_consumers-logs
- path: ct-unit_queue_consumers-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_queue_consumers [mixed v3.7.28]
- run: |
- mkdir ct-unit_queue_consumers-logs-v3.7.28 && chmod 777 ct-unit_queue_consumers-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_queue_consumers \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-unit_queue_consumers-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_queue_consumers-logs-mixed-v3.7.28
- path: ct-unit_queue_consumers-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_queue_consumers [mixed v3.8.9]
- run: |
- mkdir ct-unit_queue_consumers-logs-v3.8.9 && chmod 777 ct-unit_queue_consumers-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_queue_consumers \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-unit_queue_consumers-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_queue_consumers-logs-mixed-v3.8.9
- path: ct-unit_queue_consumers-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-unit_stats_and_metrics:
- name: rabbit-ct-unit_stats_and_metrics
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_stats_and_metrics
- run: |
- mkdir ct-unit_stats_and_metrics-logs && chmod 777 ct-unit_stats_and_metrics-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_stats_and_metrics \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_stats_and_metrics-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_stats_and_metrics-logs
- path: ct-unit_stats_and_metrics-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_stats_and_metrics [mixed v3.7.28]
- run: |
- mkdir ct-unit_stats_and_metrics-logs-v3.7.28 && chmod 777 ct-unit_stats_and_metrics-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_stats_and_metrics \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-unit_stats_and_metrics-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_stats_and_metrics-logs-mixed-v3.7.28
- path: ct-unit_stats_and_metrics-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_stats_and_metrics [mixed v3.8.9]
- run: |
- mkdir ct-unit_stats_and_metrics-logs-v3.8.9 && chmod 777 ct-unit_stats_and_metrics-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_stats_and_metrics \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-unit_stats_and_metrics-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_stats_and_metrics-logs-mixed-v3.8.9
- path: ct-unit_stats_and_metrics-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-unit_supervisor2:
- name: rabbit-ct-unit_supervisor2
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_supervisor2
- run: |
- mkdir ct-unit_supervisor2-logs && chmod 777 ct-unit_supervisor2-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_supervisor2 \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_supervisor2-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_supervisor2-logs
- path: ct-unit_supervisor2-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_supervisor2 [mixed v3.7.28]
- run: |
- mkdir ct-unit_supervisor2-logs-v3.7.28 && chmod 777 ct-unit_supervisor2-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_supervisor2 \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-unit_supervisor2-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_supervisor2-logs-mixed-v3.7.28
- path: ct-unit_supervisor2-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_supervisor2 [mixed v3.8.9]
- run: |
- mkdir ct-unit_supervisor2-logs-v3.8.9 && chmod 777 ct-unit_supervisor2-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_supervisor2 \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-unit_supervisor2-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_supervisor2-logs-mixed-v3.8.9
- path: ct-unit_supervisor2-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-unit_vm_memory_monitor:
- name: rabbit-ct-unit_vm_memory_monitor
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_vm_memory_monitor
- run: |
- mkdir ct-unit_vm_memory_monitor-logs && chmod 777 ct-unit_vm_memory_monitor-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_vm_memory_monitor \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_vm_memory_monitor-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_vm_memory_monitor-logs
- path: ct-unit_vm_memory_monitor-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_vm_memory_monitor [mixed v3.7.28]
- run: |
- mkdir ct-unit_vm_memory_monitor-logs-v3.7.28 && chmod 777 ct-unit_vm_memory_monitor-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_vm_memory_monitor \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-unit_vm_memory_monitor-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_vm_memory_monitor-logs-mixed-v3.7.28
- path: ct-unit_vm_memory_monitor-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-unit_vm_memory_monitor [mixed v3.8.9]
- run: |
- mkdir ct-unit_vm_memory_monitor-logs-v3.8.9 && chmod 777 ct-unit_vm_memory_monitor-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_vm_memory_monitor \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-unit_vm_memory_monitor-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_vm_memory_monitor-logs-mixed-v3.8.9
- path: ct-unit_vm_memory_monitor-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-upgrade_preparation:
- name: rabbit-ct-upgrade_preparation
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-upgrade_preparation
- run: |
- mkdir ct-upgrade_preparation-logs && chmod 777 ct-upgrade_preparation-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=upgrade_preparation \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-upgrade_preparation-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-upgrade_preparation-logs
- path: ct-upgrade_preparation-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-upgrade_preparation [mixed v3.7.28]
- run: |
- mkdir ct-upgrade_preparation-logs-v3.7.28 && chmod 777 ct-upgrade_preparation-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=upgrade_preparation \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-upgrade_preparation-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-upgrade_preparation-logs-mixed-v3.7.28
- path: ct-upgrade_preparation-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-upgrade_preparation [mixed v3.8.9]
- run: |
- mkdir ct-upgrade_preparation-logs-v3.8.9 && chmod 777 ct-upgrade_preparation-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=upgrade_preparation \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-upgrade_preparation-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-upgrade_preparation-logs-mixed-v3.8.9
- path: ct-upgrade_preparation-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit-ct-vhost:
- name: rabbit-ct-vhost
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- - name: RUN ct-vhost
- run: |
- mkdir ct-vhost-logs && chmod 777 ct-vhost-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=vhost \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-vhost-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-vhost-logs
- path: ct-vhost-logs/*-ct-logs-*.tar.xz
- - name: RUN ct-vhost [mixed v3.7.28]
- run: |
- mkdir ct-vhost-logs-v3.7.28 && chmod 777 ct-vhost-logs-v3.7.28
- docker run \
- --env project=rabbit \
- --env CT_SUITE=vhost \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.7.28 \
- --volume ${PWD}/ct-vhost-logs-v3.7.28:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.7.28]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-vhost-logs-mixed-v3.7.28
- path: ct-vhost-logs-v3.7.28/*-ct-logs-*.tar.xz
- - name: RUN ct-vhost [mixed v3.8.9]
- run: |
- mkdir ct-vhost-logs-v3.8.9 && chmod 777 ct-vhost-logs-v3.8.9
- docker run \
- --env project=rabbit \
- --env CT_SUITE=vhost \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=v3.8.9 \
- --volume ${PWD}/ct-vhost-logs-v3.8.9:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed v3.8.9]
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-vhost-logs-mixed-v3.8.9
- path: ct-vhost-logs-v3.8.9/*-ct-logs-*.tar.xz
- rabbit:
- name: rabbit
- needs:
- - rabbit-checks
- - rabbit-ct-amqqueue_backward_compatibility
- - rabbit-ct-backing_queue
- - rabbit-ct-channel_interceptor
- - rabbit-ct-channel_operation_timeout
- - rabbit-ct-cluster
- - rabbit-ct-cluster_rename
- - rabbit-ct-clustering_management
- - rabbit-ct-config_schema
- - rabbit-ct-confirms_rejects
- - rabbit-ct-consumer_timeout
- - rabbit-ct-crashing_queues
- - rabbit-ct-dead_lettering
- - rabbit-ct-definition_import
- - rabbit-ct-disconnect_detected_during_alarm
- - rabbit-ct-dynamic_ha
- - rabbit-ct-dynamic_qq
- - rabbit-ct-eager_sync
- - rabbit-ct-feature_flags
- - rabbit-ct-lazy_queue
- - rabbit-ct-list_consumers_sanity_check
- - rabbit-ct-list_queues_online_and_offline
- - rabbit-ct-maintenance_mode
- - rabbit-ct-many_node_ha
- - rabbit-ct-message_size_limit
- - rabbit-ct-metrics
- - rabbit-ct-mirrored_supervisor
- - rabbit-ct-msg_store
- - rabbit-ct-peer_discovery_classic_config
- - rabbit-ct-peer_discovery_dns
- - rabbit-ct-per_user_connection_channel_limit
- - rabbit-ct-per_user_connection_channel_limit_partitions
- - rabbit-ct-per_user_connection_channel_tracking
- - rabbit-ct-per_user_connection_tracking
- - rabbit-ct-per_vhost_connection_limit
- - rabbit-ct-per_vhost_connection_limit_partitions
- - rabbit-ct-per_vhost_msg_store
- - rabbit-ct-per_vhost_queue_limit
- - rabbit-ct-policy
- - rabbit-ct-priority_queue
- - rabbit-ct-priority_queue_recovery
- - rabbit-ct-product_info
- - rabbit-ct-proxy_protocol
- - rabbit-ct-publisher_confirms_parallel
- - rabbit-ct-queue_length_limits
- - rabbit-ct-queue_master_location
- - rabbit-ct-queue_parallel
- - rabbit-ct-queue_type
- - rabbit-ct-quorum_queue
- - rabbit-ct-rabbit_confirms
- - rabbit-ct-rabbit_core_metrics_gc
- - rabbit-ct-rabbit_fifo
- - rabbit-ct-rabbit_fifo_int
- - rabbit-ct-rabbit_fifo_prop
- - rabbit-ct-rabbit_fifo_v0
- - rabbit-ct-rabbit_msg_record
- - rabbit-ct-rabbit_stream_queue
- - rabbit-ct-rabbitmq_queues_cli_integration
- - rabbit-ct-rabbitmqctl_integration
- - rabbit-ct-rabbitmqctl_shutdown
- - rabbit-ct-signal_handling
- - rabbit-ct-simple_ha
- - rabbit-ct-single_active_consumer
- - rabbit-ct-sync_detection
- - rabbit-ct-term_to_binary_compat_prop
- - rabbit-ct-topic_permission
- - rabbit-ct-unit_access_control
- - rabbit-ct-unit_access_control_authn_authz_context_propagation
- - rabbit-ct-unit_access_control_credential_validation
- - rabbit-ct-unit_amqp091_content_framing
- - rabbit-ct-unit_amqp091_server_properties
- - rabbit-ct-unit_app_management
- - rabbit-ct-unit_cluster_formation_locking_mocks
- - rabbit-ct-unit_collections
- - rabbit-ct-unit_config_value_encryption
- - rabbit-ct-unit_connection_tracking
- - rabbit-ct-unit_credit_flow
- - rabbit-ct-unit_disk_monitor
- - rabbit-ct-unit_disk_monitor_mocks
- - rabbit-ct-unit_file_handle_cache
- - rabbit-ct-unit_gen_server2
- - rabbit-ct-unit_gm
- - rabbit-ct-unit_log_config
- - rabbit-ct-unit_log_management
- - rabbit-ct-unit_operator_policy
- - rabbit-ct-unit_pg_local
- - rabbit-ct-unit_plugin_directories
- - rabbit-ct-unit_plugin_versioning
- - rabbit-ct-unit_policy_validators
- - rabbit-ct-unit_priority_queue
- - rabbit-ct-unit_queue_consumers
- - rabbit-ct-unit_stats_and_metrics
- - rabbit-ct-unit_supervisor2
- - rabbit-ct-unit_vm_memory_monitor
- - rabbit-ct-upgrade_preparation
- - rabbit-ct-vhost
- runs-on: ubuntu-18.04
- if: always()
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RECORD STEP FINISH
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }}
- docker run \
- --env project=rabbit \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ needs.rabbit-checks.outputs.step_start }} \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-22.3-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/collect.sh
- amqp_client:
- name: amqp_client
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=amqp_client \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: amqp_client-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- amqp10_common:
- name: amqp10_common
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=amqp10_common \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: amqp10_common-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_auth_backend_cache:
- name: rabbitmq_auth_backend_cache
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_auth_backend_cache \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_auth_backend_cache-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_auth_backend_http:
- name: rabbitmq_auth_backend_http
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_auth_backend_http \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_auth_backend_http-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_auth_backend_oauth2:
- name: rabbitmq_auth_backend_oauth2
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_auth_backend_oauth2 \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_auth_backend_oauth2-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_auth_mechanism_ssl:
- name: rabbitmq_auth_mechanism_ssl
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_auth_mechanism_ssl \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_auth_mechanism_ssl-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_aws:
- name: rabbitmq_aws
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_aws \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_aws-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_cli:
- name: rabbitmq_cli
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }}
- mkdir broker-logs && chmod 777 broker-logs
- docker run \
- --env project=rabbitmq_cli \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/broker-logs:/workspace/broker-logs \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }} \
- ci/scripts/rabbitmq_cli.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_cli-broker-logs
- path: broker-logs/broker-logs.tar.xz
- rabbitmq_consistent_hash_exchange:
- name: rabbitmq_consistent_hash_exchange
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_consistent_hash_exchange \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_consistent_hash_exchange-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_event_exchange:
- name: rabbitmq_event_exchange
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_event_exchange \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_event_exchange-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_federation:
- name: rabbitmq_federation
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_federation \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_federation-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_federation_management:
- name: rabbitmq_federation_management
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_federation_management \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_federation_management-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_jms_topic_exchange:
- name: rabbitmq_jms_topic_exchange
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_jms_topic_exchange \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_jms_topic_exchange-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_management:
- name: rabbitmq_management
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_management \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_management-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_management_agent:
- name: rabbitmq_management_agent
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_management_agent \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_management_agent-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_peer_discovery_common:
- name: rabbitmq_peer_discovery_common
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_peer_discovery_common \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_peer_discovery_common-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_peer_discovery_k8s:
- name: rabbitmq_peer_discovery_k8s
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_peer_discovery_k8s \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_peer_discovery_k8s-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_peer_discovery_consul:
- name: rabbitmq_peer_discovery_consul
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_peer_discovery_consul \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_peer_discovery_consul-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_prometheus:
- name: rabbitmq_prometheus
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_prometheus \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_prometheus-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_random_exchange:
- name: rabbitmq_random_exchange
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_random_exchange \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_random_exchange-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_recent_history_exchange:
- name: rabbitmq_recent_history_exchange
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_recent_history_exchange \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_recent_history_exchange-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_sharding:
- name: rabbitmq_sharding
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_sharding \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_sharding-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_shovel:
- name: rabbitmq_shovel
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_shovel \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_shovel-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_shovel_management:
- name: rabbitmq_shovel_management
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_shovel_management \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_shovel_management-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_stomp:
- name: rabbitmq_stomp
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_stomp \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_stomp-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_top:
- name: rabbitmq_top
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_top \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_top-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_tracing:
- name: rabbitmq_tracing
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_tracing \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_tracing-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_trust_store:
- name: rabbitmq_trust_store
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_trust_store \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_trust_store-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_web_dispatch:
- name: rabbitmq_web_dispatch
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_web_dispatch \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_web_dispatch-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_web_mqtt:
- name: rabbitmq_web_mqtt
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_web_mqtt \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_web_mqtt-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_web_mqtt_examples:
- name: rabbitmq_web_mqtt_examples
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_web_mqtt_examples \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_web_mqtt_examples-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_web_stomp:
- name: rabbitmq_web_stomp
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_web_stomp \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_web_stomp-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_web_stomp_examples:
- name: rabbitmq_web_stomp_examples
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_web_stomp_examples \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_web_stomp_examples-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- package-generic-unix:
- name: package-generic-unix
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PACKAGE GENERIC UNIX
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }}
- mkdir PACKAGES && chmod 777 PACKAGES
- docker run \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/PACKAGES:/workspace/rabbitmq/PACKAGES \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }} \
- ci/scripts/package_generic_unix.sh
- - name: UPLOAD GENERIC UNIX PACKAGE ARTIFACT
- uses: actions/upload-artifact@v2-preview
- with:
- name: rabbitmq-server-generic-unix.tar.xz
- path: PACKAGES/rabbitmq-server-generic-unix-*.tar.xz
- - name: RECORD STEP FINISH
- if: always()
- run: |
- docker run \
- --env project=packaging \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }} \
- ci/scripts/collect.sh
- finish:
- name: finish
- needs:
- - prepare
- - xref
- - rabbit_common
- - rabbit
- - amqp_client
- - amqp10_common
- - rabbitmq_auth_backend_cache
- - rabbitmq_auth_backend_http
- - rabbitmq_auth_backend_oauth2
- - rabbitmq_auth_mechanism_ssl
- - rabbitmq_aws
- - rabbitmq_cli
- - rabbitmq_consistent_hash_exchange
- - rabbitmq_event_exchange
- - rabbitmq_federation
- - rabbitmq_federation_management
- - rabbitmq_jms_topic_exchange
- - rabbitmq_management
- - rabbitmq_management_agent
- - rabbitmq_peer_discovery_common
- - rabbitmq_peer_discovery_k8s
- - rabbitmq_peer_discovery_consul
- - rabbitmq_prometheus
- - rabbitmq_random_exchange
- - rabbitmq_recent_history_exchange
- - rabbitmq_sharding
- - rabbitmq_shovel
- - rabbitmq_shovel_management
- - rabbitmq_stomp
- - rabbitmq_top
- - rabbitmq_tracing
- - rabbitmq_trust_store
- - rabbitmq_web_dispatch
- - rabbitmq_web_mqtt
- - rabbitmq_web_mqtt_examples
- - rabbitmq_web_stomp
- - rabbitmq_web_stomp_examples
- - package-generic-unix
- runs-on: ubuntu-18.04
- if: always()
- steps:
- - uses: technote-space/workflow-conclusion-action@v1
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RECORD BUILD FINISH
- run: |
- echo -n "${{ env.WORKFLOW_CONCLUSION }}" > conclusion
-
- gsutil cp conclusion \
- 'gs://monorepo_github_actions_conclusions/${{ github.sha }}/${{ github.workflow }}'
-
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }}
- docker run \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env BUILD_START=${{ needs.prepare.outputs.build_start }} \
- --env BUILD_RESULT=${{ env.WORKFLOW_CONCLUSION }} \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-22.3-rabbitmq-${{ github.sha }} \
- ci/scripts/finish.sh
diff --git a/.github/workflows/test-erlang-otp-23.1.yaml b/.github/workflows/test-erlang-otp-23.1.yaml
deleted file mode 100644
index 74c8664206..0000000000
--- a/.github/workflows/test-erlang-otp-23.1.yaml
+++ /dev/null
@@ -1,5099 +0,0 @@
-name: Test - Erlang 23.1
-on: push
-jobs:
- prepare:
- name: prepare
- runs-on: ubuntu-18.04
- outputs:
- build_start: ${{ steps.buildevents.outputs.build_start }}
- branch_or_tag_name: ${{ steps.buildevents.outputs.branch_or_tag_name }}
- steps:
- - name: RECORD BUILD START
- id: buildevents
- run: |
- echo "::set-output name=build_start::$(date +%s)"
- branch_or_tag_name=${GITHUB_REF#refs/*/}
- echo "::set-output name=branch_or_tag_name::$branch_or_tag_name"
- - name: CHECKOUT REPOSITORY
- uses: actions/checkout@v2
- - name: PREPARE BUILD IMAGE
- uses: docker/build-push-action@v1
- with:
- username: _json_key
- password: ${{ secrets.GCR_JSON_KEY }}
- registry: eu.gcr.io
- repository: cf-rabbitmq-core/ci
- dockerfile: ci/dockerfiles/ci
- build_args: ERLANG_VERSION=23.1,GITHUB_RUN_ID=${{ github.run_id }},BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }},GITHUB_SHA=${{ github.sha }},base_rmq_ref=master,current_rmq_ref=${{ steps.buildevents.outputs.branch_or_tag_name }},RABBITMQ_VERSION=3.9.0
- tags: erlang-23.1-rabbitmq-${{ github.sha }}
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RECORD STEP FINISH
- run: |
- docker run \
- --env project=prepare \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.build_start }} \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }} \
- ci/scripts/collect.sh
- xref:
- name: xref
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD XREF START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - name: CHECKOUT REPOSITORY
- uses: actions/checkout@v2
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-base:23.1
- - name: RUN XREF rabbit_common
- run: |
- docker run \
- --env project=rabbit_common \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbit
- run: |
- docker run \
- --env project=rabbit \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF amqp_client
- run: |
- docker run \
- --env project=amqp_client \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF amqp10_client
- run: |
- docker run \
- --env project=amqp10_client \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF amqp10_common
- run: |
- docker run \
- --env project=amqp10_common \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_amqp1_0
- run: |
- docker run \
- --env project=rabbitmq_amqp1_0 \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_auth_backend_cache
- run: |
- docker run \
- --env project=rabbitmq_auth_backend_cache \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_auth_backend_http
- run: |
- docker run \
- --env project=rabbitmq_auth_backend_http \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_auth_backend_ldap
- run: |
- docker run \
- --env project=rabbitmq_auth_backend_ldap \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_auth_backend_oauth2
- run: |
- docker run \
- --env project=rabbitmq_auth_backend_oauth2 \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_auth_mechanism_ssl
- run: |
- docker run \
- --env project=rabbitmq_auth_mechanism_ssl \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_aws
- run: |
- docker run \
- --env project=rabbitmq_aws \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_cli
- run: |
- docker run \
- --env project=rabbitmq_cli \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_consistent_hash_exchange
- run: |
- docker run \
- --env project=rabbitmq_consistent_hash_exchange \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_event_exchange
- run: |
- docker run \
- --env project=rabbitmq_event_exchange \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_federation
- run: |
- docker run \
- --env project=rabbitmq_federation \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_federation_management
- run: |
- docker run \
- --env project=rabbitmq_federation_management \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_jms_topic_exchange
- run: |
- docker run \
- --env project=rabbitmq_jms_topic_exchange \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_management
- run: |
- docker run \
- --env project=rabbitmq_management \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_management_agent
- run: |
- docker run \
- --env project=rabbitmq_management_agent \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_mqtt
- run: |
- docker run \
- --env project=rabbitmq_mqtt \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_peer_discovery_common
- run: |
- docker run \
- --env project=rabbitmq_peer_discovery_common \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_peer_discovery_aws
- run: |
- docker run \
- --env project=rabbitmq_peer_discovery_aws \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_peer_discovery_k8s
- run: |
- docker run \
- --env project=rabbitmq_peer_discovery_k8s \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_peer_discovery_consul
- run: |
- docker run \
- --env project=rabbitmq_peer_discovery_consul \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_peer_discovery_etcd
- run: |
- docker run \
- --env project=rabbitmq_peer_discovery_etcd \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_prometheus
- run: |
- docker run \
- --env project=rabbitmq_prometheus \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_random_exchange
- run: |
- docker run \
- --env project=rabbitmq_random_exchange \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_recent_history_exchange
- run: |
- docker run \
- --env project=rabbitmq_recent_history_exchange \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_sharding
- run: |
- docker run \
- --env project=rabbitmq_sharding \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_shovel
- run: |
- docker run \
- --env project=rabbitmq_shovel \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_shovel_management
- run: |
- docker run \
- --env project=rabbitmq_shovel_management \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_stomp
- run: |
- docker run \
- --env project=rabbitmq_stomp \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_top
- run: |
- docker run \
- --env project=rabbitmq_top \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_tracing
- run: |
- docker run \
- --env project=rabbitmq_tracing \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_trust_store
- run: |
- docker run \
- --env project=rabbitmq_trust_store \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_web_dispatch
- run: |
- docker run \
- --env project=rabbitmq_web_dispatch \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_web_mqtt
- run: |
- docker run \
- --env project=rabbitmq_web_mqtt \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_web_mqtt_examples
- run: |
- docker run \
- --env project=rabbitmq_web_mqtt_examples \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_web_stomp
- run: |
- docker run \
- --env project=rabbitmq_web_stomp \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RUN XREF rabbitmq_web_stomp_examples
- run: |
- docker run \
- --env project=rabbitmq_web_stomp_examples \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/xref.sh
- - name: RECORD STEP FINISH
- if: always()
- run: |
- docker run \
- --env project=xref \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ci/scripts:/workspace/rabbitmq/ci/scripts \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/collect.sh
- dialyze:
- name: dialyze
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD DIALYZE START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - name: CHECKOUT REPOSITORY
- uses: actions/checkout@v2
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-base:23.1
- - name: RUN DIALYZE rabbit_common
- run: |
- docker run \
- --env project=rabbit_common \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/dialyze.sh
- - name: RUN DIALYZE rabbit
- run: |
- docker run \
- --env project=rabbit \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/dialyze.sh
- - name: RUN DIALYZE amqp10_client
- run: |
- docker run \
- --env project=amqp10_client \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/dialyze.sh
- - name: RUN DIALYZE amqp10_common
- run: |
- docker run \
- --env project=amqp10_common \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/dialyze.sh
- - name: RUN DIALYZE rabbitmq_auth_mechanism_ssl
- run: |
- docker run \
- --env project=rabbitmq_auth_mechanism_ssl \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/dialyze.sh
- - name: RUN DIALYZE rabbitmq_event_exchange
- run: |
- docker run \
- --env project=rabbitmq_event_exchange \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/dialyze.sh
- - name: RUN DIALYZE rabbitmq_federation_management
- run: |
- docker run \
- --env project=rabbitmq_federation_management \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/dialyze.sh
- - name: RUN DIALYZE rabbitmq_peer_discovery_common
- run: |
- docker run \
- --env project=rabbitmq_peer_discovery_common \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/dialyze.sh
- - name: RUN DIALYZE rabbitmq_random_exchange
- run: |
- docker run \
- --env project=rabbitmq_random_exchange \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/dialyze.sh
- - name: RUN DIALYZE rabbitmq_shovel_management
- run: |
- docker run \
- --env project=rabbitmq_shovel_management \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/dialyze.sh
- - name: RUN DIALYZE rabbitmq_top
- run: |
- docker run \
- --env project=rabbitmq_top \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/dialyze.sh
- - name: RUN DIALYZE rabbitmq_tracing
- run: |
- docker run \
- --env project=rabbitmq_tracing \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/dialyze.sh
- - name: RUN DIALYZE rabbitmq_web_mqtt_examples
- run: |
- docker run \
- --env project=rabbitmq_web_mqtt_examples \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/dialyze.sh
- - name: RUN DIALYZE rabbitmq_web_stomp_examples
- run: |
- docker run \
- --env project=rabbitmq_web_stomp_examples \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/dialyze.sh
- - name: RECORD STEP FINISH
- if: always()
- run: |
- docker run \
- --env project=dialyze \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ci/scripts:/workspace/rabbitmq/ci/scripts \
- --workdir /workspace/rabbitmq \
- eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
- ci/scripts/collect.sh
- rabbit_common:
- name: rabbit_common
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbit_common \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbit_common-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbit-checks:
- name: rabbit-checks
- needs:
- - prepare
- runs-on: ubuntu-18.04
- outputs:
- step_start: ${{ steps.buildevents.outputs.step_start }}
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - name: CHECKOUT REPOSITORY
- uses: actions/checkout@v2
- - name: VALIDATE KNOWN CT SUITES
- env:
- project: rabbit
- run: |
- ci/scripts/validate-workflow.sh amqqueue_backward_compatibility backing_queue channel_interceptor channel_operation_timeout cluster cluster_rename clustering_management config_schema confirms_rejects consumer_timeout crashing_queues dead_lettering definition_import disconnect_detected_during_alarm dynamic_ha dynamic_qq eager_sync feature_flags lazy_queue list_consumers_sanity_check list_queues_online_and_offline maintenance_mode many_node_ha message_size_limit metrics mirrored_supervisor msg_store peer_discovery_classic_config peer_discovery_dns per_user_connection_channel_limit per_user_connection_channel_limit_partitions per_user_connection_channel_tracking per_user_connection_tracking per_vhost_connection_limit per_vhost_connection_limit_partitions per_vhost_msg_store per_vhost_queue_limit policy priority_queue priority_queue_recovery product_info proxy_protocol publisher_confirms_parallel queue_length_limits queue_master_location queue_parallel queue_type quorum_queue rabbit_confirms rabbit_core_metrics_gc rabbit_fifo rabbit_fifo_int rabbit_fifo_prop rabbit_fifo_v0 rabbit_msg_record rabbit_stream_queue rabbitmq_queues_cli_integration rabbitmqctl_integration rabbitmqctl_shutdown signal_handling simple_ha single_active_consumer sync_detection term_to_binary_compat_prop topic_permission unit_access_control unit_access_control_authn_authz_context_propagation unit_access_control_credential_validation unit_amqp091_content_framing unit_amqp091_server_properties unit_app_management unit_cluster_formation_locking_mocks unit_collections unit_config_value_encryption unit_connection_tracking unit_credit_flow unit_disk_monitor unit_disk_monitor_mocks unit_file_handle_cache unit_gen_server2 unit_gm unit_log_config unit_log_management unit_operator_policy unit_pg_local unit_plugin_directories unit_plugin_versioning unit_policy_validators unit_priority_queue unit_queue_consumers unit_stats_and_metrics unit_supervisor2 unit_vm_memory_monitor upgrade_preparation vhost
- - name: RUN CHECKS
- uses: docker/build-push-action@v1
- with:
- username: _json_key
- password: ${{ secrets.GCR_JSON_KEY }}
- registry: eu.gcr.io
- repository: cf-rabbitmq-core/ci-rabbit
- dockerfile: ci/dockerfiles/ci-dep
- build_args: IMAGE_TAG=erlang-23.1-rabbitmq-${{ github.sha }},BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }},project=rabbit
- tags: erlang-23.1-rabbitmq-${{ github.sha }}
- rabbit-ct-amqqueue_backward_compatibility:
- name: rabbit-ct-amqqueue_backward_compatibility
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-amqqueue_backward_compatibility
- run: |
- mkdir ct-amqqueue_backward_compatibility-logs && chmod 777 ct-amqqueue_backward_compatibility-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=amqqueue_backward_compatibility \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-amqqueue_backward_compatibility-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-amqqueue_backward_compatibility-logs
- path: ct-amqqueue_backward_compatibility-logs/*-ct-logs-*.tar.xz
- rabbit-ct-backing_queue:
- name: rabbit-ct-backing_queue
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-backing_queue
- run: |
- mkdir ct-backing_queue-logs && chmod 777 ct-backing_queue-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=backing_queue \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-backing_queue-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-backing_queue-logs
- path: ct-backing_queue-logs/*-ct-logs-*.tar.xz
- rabbit-ct-channel_interceptor:
- name: rabbit-ct-channel_interceptor
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-channel_interceptor
- run: |
- mkdir ct-channel_interceptor-logs && chmod 777 ct-channel_interceptor-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=channel_interceptor \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-channel_interceptor-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-channel_interceptor-logs
- path: ct-channel_interceptor-logs/*-ct-logs-*.tar.xz
- rabbit-ct-channel_operation_timeout:
- name: rabbit-ct-channel_operation_timeout
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-channel_operation_timeout
- run: |
- mkdir ct-channel_operation_timeout-logs && chmod 777 ct-channel_operation_timeout-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=channel_operation_timeout \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-channel_operation_timeout-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-channel_operation_timeout-logs
- path: ct-channel_operation_timeout-logs/*-ct-logs-*.tar.xz
- rabbit-ct-cluster:
- name: rabbit-ct-cluster
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-cluster
- run: |
- mkdir ct-cluster-logs && chmod 777 ct-cluster-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=cluster \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-cluster-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-cluster-logs
- path: ct-cluster-logs/*-ct-logs-*.tar.xz
- rabbit-ct-cluster_rename:
- name: rabbit-ct-cluster_rename
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-cluster_rename
- run: |
- mkdir ct-cluster_rename-logs && chmod 777 ct-cluster_rename-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=cluster_rename \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-cluster_rename-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-cluster_rename-logs
- path: ct-cluster_rename-logs/*-ct-logs-*.tar.xz
- rabbit-ct-clustering_management:
- name: rabbit-ct-clustering_management
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-clustering_management
- run: |
- mkdir ct-clustering_management-logs && chmod 777 ct-clustering_management-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=clustering_management \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-clustering_management-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-clustering_management-logs
- path: ct-clustering_management-logs/*-ct-logs-*.tar.xz
- rabbit-ct-config_schema:
- name: rabbit-ct-config_schema
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-config_schema
- run: |
- mkdir ct-config_schema-logs && chmod 777 ct-config_schema-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=config_schema \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-config_schema-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-config_schema-logs
- path: ct-config_schema-logs/*-ct-logs-*.tar.xz
- rabbit-ct-confirms_rejects:
- name: rabbit-ct-confirms_rejects
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-confirms_rejects
- run: |
- mkdir ct-confirms_rejects-logs && chmod 777 ct-confirms_rejects-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=confirms_rejects \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-confirms_rejects-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-confirms_rejects-logs
- path: ct-confirms_rejects-logs/*-ct-logs-*.tar.xz
- rabbit-ct-consumer_timeout:
- name: rabbit-ct-consumer_timeout
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-consumer_timeout
- run: |
- mkdir ct-consumer_timeout-logs && chmod 777 ct-consumer_timeout-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=consumer_timeout \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-consumer_timeout-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-consumer_timeout-logs
- path: ct-consumer_timeout-logs/*-ct-logs-*.tar.xz
- rabbit-ct-crashing_queues:
- name: rabbit-ct-crashing_queues
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-crashing_queues
- run: |
- mkdir ct-crashing_queues-logs && chmod 777 ct-crashing_queues-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=crashing_queues \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-crashing_queues-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-crashing_queues-logs
- path: ct-crashing_queues-logs/*-ct-logs-*.tar.xz
- rabbit-ct-dead_lettering:
- name: rabbit-ct-dead_lettering
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-dead_lettering
- run: |
- mkdir ct-dead_lettering-logs && chmod 777 ct-dead_lettering-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=dead_lettering \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-dead_lettering-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-dead_lettering-logs
- path: ct-dead_lettering-logs/*-ct-logs-*.tar.xz
- rabbit-ct-definition_import:
- name: rabbit-ct-definition_import
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-definition_import
- run: |
- mkdir ct-definition_import-logs && chmod 777 ct-definition_import-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=definition_import \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-definition_import-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-definition_import-logs
- path: ct-definition_import-logs/*-ct-logs-*.tar.xz
- rabbit-ct-disconnect_detected_during_alarm:
- name: rabbit-ct-disconnect_detected_during_alarm
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-disconnect_detected_during_alarm
- run: |
- mkdir ct-disconnect_detected_during_alarm-logs && chmod 777 ct-disconnect_detected_during_alarm-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=disconnect_detected_during_alarm \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-disconnect_detected_during_alarm-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-disconnect_detected_during_alarm-logs
- path: ct-disconnect_detected_during_alarm-logs/*-ct-logs-*.tar.xz
- rabbit-ct-dynamic_ha:
- name: rabbit-ct-dynamic_ha
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-dynamic_ha
- run: |
- mkdir ct-dynamic_ha-logs && chmod 777 ct-dynamic_ha-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=dynamic_ha \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-dynamic_ha-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-dynamic_ha-logs
- path: ct-dynamic_ha-logs/*-ct-logs-*.tar.xz
- rabbit-ct-dynamic_qq:
- name: rabbit-ct-dynamic_qq
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-dynamic_qq
- run: |
- mkdir ct-dynamic_qq-logs && chmod 777 ct-dynamic_qq-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=dynamic_qq \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-dynamic_qq-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-dynamic_qq-logs
- path: ct-dynamic_qq-logs/*-ct-logs-*.tar.xz
- rabbit-ct-eager_sync:
- name: rabbit-ct-eager_sync
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-eager_sync
- run: |
- mkdir ct-eager_sync-logs && chmod 777 ct-eager_sync-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=eager_sync \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-eager_sync-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-eager_sync-logs
- path: ct-eager_sync-logs/*-ct-logs-*.tar.xz
- rabbit-ct-feature_flags:
- name: rabbit-ct-feature_flags
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-feature_flags
- run: |
- mkdir ct-feature_flags-logs && chmod 777 ct-feature_flags-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=feature_flags \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-feature_flags-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-feature_flags-logs
- path: ct-feature_flags-logs/*-ct-logs-*.tar.xz
- rabbit-ct-lazy_queue:
- name: rabbit-ct-lazy_queue
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-lazy_queue
- run: |
- mkdir ct-lazy_queue-logs && chmod 777 ct-lazy_queue-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=lazy_queue \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-lazy_queue-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-lazy_queue-logs
- path: ct-lazy_queue-logs/*-ct-logs-*.tar.xz
- rabbit-ct-list_consumers_sanity_check:
- name: rabbit-ct-list_consumers_sanity_check
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-list_consumers_sanity_check
- run: |
- mkdir ct-list_consumers_sanity_check-logs && chmod 777 ct-list_consumers_sanity_check-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=list_consumers_sanity_check \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-list_consumers_sanity_check-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-list_consumers_sanity_check-logs
- path: ct-list_consumers_sanity_check-logs/*-ct-logs-*.tar.xz
- rabbit-ct-list_queues_online_and_offline:
- name: rabbit-ct-list_queues_online_and_offline
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-list_queues_online_and_offline
- run: |
- mkdir ct-list_queues_online_and_offline-logs && chmod 777 ct-list_queues_online_and_offline-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=list_queues_online_and_offline \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-list_queues_online_and_offline-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-list_queues_online_and_offline-logs
- path: ct-list_queues_online_and_offline-logs/*-ct-logs-*.tar.xz
- rabbit-ct-maintenance_mode:
- name: rabbit-ct-maintenance_mode
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-maintenance_mode
- run: |
- mkdir ct-maintenance_mode-logs && chmod 777 ct-maintenance_mode-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=maintenance_mode \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-maintenance_mode-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-maintenance_mode-logs
- path: ct-maintenance_mode-logs/*-ct-logs-*.tar.xz
- rabbit-ct-many_node_ha:
- name: rabbit-ct-many_node_ha
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-many_node_ha
- run: |
- mkdir ct-many_node_ha-logs && chmod 777 ct-many_node_ha-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=many_node_ha \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-many_node_ha-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-many_node_ha-logs
- path: ct-many_node_ha-logs/*-ct-logs-*.tar.xz
- rabbit-ct-message_size_limit:
- name: rabbit-ct-message_size_limit
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-message_size_limit
- run: |
- mkdir ct-message_size_limit-logs && chmod 777 ct-message_size_limit-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=message_size_limit \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-message_size_limit-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-message_size_limit-logs
- path: ct-message_size_limit-logs/*-ct-logs-*.tar.xz
- rabbit-ct-metrics:
- name: rabbit-ct-metrics
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-metrics
- run: |
- mkdir ct-metrics-logs && chmod 777 ct-metrics-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=metrics \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-metrics-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-metrics-logs
- path: ct-metrics-logs/*-ct-logs-*.tar.xz
- rabbit-ct-mirrored_supervisor:
- name: rabbit-ct-mirrored_supervisor
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-mirrored_supervisor
- run: |
- mkdir ct-mirrored_supervisor-logs && chmod 777 ct-mirrored_supervisor-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=mirrored_supervisor \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-mirrored_supervisor-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-mirrored_supervisor-logs
- path: ct-mirrored_supervisor-logs/*-ct-logs-*.tar.xz
- rabbit-ct-msg_store:
- name: rabbit-ct-msg_store
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-msg_store
- run: |
- mkdir ct-msg_store-logs && chmod 777 ct-msg_store-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=msg_store \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-msg_store-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-msg_store-logs
- path: ct-msg_store-logs/*-ct-logs-*.tar.xz
- rabbit-ct-peer_discovery_classic_config:
- name: rabbit-ct-peer_discovery_classic_config
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-peer_discovery_classic_config
- run: |
- mkdir ct-peer_discovery_classic_config-logs && chmod 777 ct-peer_discovery_classic_config-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=peer_discovery_classic_config \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-peer_discovery_classic_config-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-peer_discovery_classic_config-logs
- path: ct-peer_discovery_classic_config-logs/*-ct-logs-*.tar.xz
- rabbit-ct-peer_discovery_dns:
- name: rabbit-ct-peer_discovery_dns
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-peer_discovery_dns
- run: |
- mkdir ct-peer_discovery_dns-logs && chmod 777 ct-peer_discovery_dns-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=peer_discovery_dns \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-peer_discovery_dns-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-peer_discovery_dns-logs
- path: ct-peer_discovery_dns-logs/*-ct-logs-*.tar.xz
- rabbit-ct-per_user_connection_channel_limit:
- name: rabbit-ct-per_user_connection_channel_limit
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-per_user_connection_channel_limit
- run: |
- mkdir ct-per_user_connection_channel_limit-logs && chmod 777 ct-per_user_connection_channel_limit-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=per_user_connection_channel_limit \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-per_user_connection_channel_limit-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-per_user_connection_channel_limit-logs
- path: ct-per_user_connection_channel_limit-logs/*-ct-logs-*.tar.xz
- rabbit-ct-per_user_connection_channel_limit_partitions:
- name: rabbit-ct-per_user_connection_channel_limit_partitions
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-per_user_connection_channel_limit_partitions
- run: |
- mkdir ct-per_user_connection_channel_limit_partitions-logs && chmod 777 ct-per_user_connection_channel_limit_partitions-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=per_user_connection_channel_limit_partitions \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-per_user_connection_channel_limit_partitions-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-per_user_connection_channel_limit_partitions-logs
- path: ct-per_user_connection_channel_limit_partitions-logs/*-ct-logs-*.tar.xz
- rabbit-ct-per_user_connection_channel_tracking:
- name: rabbit-ct-per_user_connection_channel_tracking
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-per_user_connection_channel_tracking
- run: |
- mkdir ct-per_user_connection_channel_tracking-logs && chmod 777 ct-per_user_connection_channel_tracking-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=per_user_connection_channel_tracking \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-per_user_connection_channel_tracking-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-per_user_connection_channel_tracking-logs
- path: ct-per_user_connection_channel_tracking-logs/*-ct-logs-*.tar.xz
- rabbit-ct-per_user_connection_tracking:
- name: rabbit-ct-per_user_connection_tracking
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-per_user_connection_tracking
- run: |
- mkdir ct-per_user_connection_tracking-logs && chmod 777 ct-per_user_connection_tracking-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=per_user_connection_tracking \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-per_user_connection_tracking-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-per_user_connection_tracking-logs
- path: ct-per_user_connection_tracking-logs/*-ct-logs-*.tar.xz
- rabbit-ct-per_vhost_connection_limit:
- name: rabbit-ct-per_vhost_connection_limit
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-per_vhost_connection_limit
- run: |
- mkdir ct-per_vhost_connection_limit-logs && chmod 777 ct-per_vhost_connection_limit-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=per_vhost_connection_limit \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-per_vhost_connection_limit-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-per_vhost_connection_limit-logs
- path: ct-per_vhost_connection_limit-logs/*-ct-logs-*.tar.xz
- rabbit-ct-per_vhost_connection_limit_partitions:
- name: rabbit-ct-per_vhost_connection_limit_partitions
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-per_vhost_connection_limit_partitions
- run: |
- mkdir ct-per_vhost_connection_limit_partitions-logs && chmod 777 ct-per_vhost_connection_limit_partitions-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=per_vhost_connection_limit_partitions \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-per_vhost_connection_limit_partitions-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-per_vhost_connection_limit_partitions-logs
- path: ct-per_vhost_connection_limit_partitions-logs/*-ct-logs-*.tar.xz
- rabbit-ct-per_vhost_msg_store:
- name: rabbit-ct-per_vhost_msg_store
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-per_vhost_msg_store
- run: |
- mkdir ct-per_vhost_msg_store-logs && chmod 777 ct-per_vhost_msg_store-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=per_vhost_msg_store \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-per_vhost_msg_store-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-per_vhost_msg_store-logs
- path: ct-per_vhost_msg_store-logs/*-ct-logs-*.tar.xz
- rabbit-ct-per_vhost_queue_limit:
- name: rabbit-ct-per_vhost_queue_limit
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-per_vhost_queue_limit
- run: |
- mkdir ct-per_vhost_queue_limit-logs && chmod 777 ct-per_vhost_queue_limit-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=per_vhost_queue_limit \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-per_vhost_queue_limit-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-per_vhost_queue_limit-logs
- path: ct-per_vhost_queue_limit-logs/*-ct-logs-*.tar.xz
- rabbit-ct-policy:
- name: rabbit-ct-policy
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-policy
- run: |
- mkdir ct-policy-logs && chmod 777 ct-policy-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=policy \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-policy-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-policy-logs
- path: ct-policy-logs/*-ct-logs-*.tar.xz
- rabbit-ct-priority_queue:
- name: rabbit-ct-priority_queue
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-priority_queue
- run: |
- mkdir ct-priority_queue-logs && chmod 777 ct-priority_queue-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=priority_queue \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-priority_queue-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-priority_queue-logs
- path: ct-priority_queue-logs/*-ct-logs-*.tar.xz
- rabbit-ct-priority_queue_recovery:
- name: rabbit-ct-priority_queue_recovery
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-priority_queue_recovery
- run: |
- mkdir ct-priority_queue_recovery-logs && chmod 777 ct-priority_queue_recovery-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=priority_queue_recovery \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-priority_queue_recovery-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-priority_queue_recovery-logs
- path: ct-priority_queue_recovery-logs/*-ct-logs-*.tar.xz
- rabbit-ct-product_info:
- name: rabbit-ct-product_info
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-product_info
- run: |
- mkdir ct-product_info-logs && chmod 777 ct-product_info-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=product_info \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-product_info-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-product_info-logs
- path: ct-product_info-logs/*-ct-logs-*.tar.xz
- rabbit-ct-proxy_protocol:
- name: rabbit-ct-proxy_protocol
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-proxy_protocol
- run: |
- mkdir ct-proxy_protocol-logs && chmod 777 ct-proxy_protocol-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=proxy_protocol \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-proxy_protocol-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-proxy_protocol-logs
- path: ct-proxy_protocol-logs/*-ct-logs-*.tar.xz
- rabbit-ct-publisher_confirms_parallel:
- name: rabbit-ct-publisher_confirms_parallel
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-publisher_confirms_parallel
- run: |
- mkdir ct-publisher_confirms_parallel-logs && chmod 777 ct-publisher_confirms_parallel-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=publisher_confirms_parallel \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-publisher_confirms_parallel-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-publisher_confirms_parallel-logs
- path: ct-publisher_confirms_parallel-logs/*-ct-logs-*.tar.xz
- rabbit-ct-queue_length_limits:
- name: rabbit-ct-queue_length_limits
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-queue_length_limits
- run: |
- mkdir ct-queue_length_limits-logs && chmod 777 ct-queue_length_limits-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=queue_length_limits \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-queue_length_limits-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-queue_length_limits-logs
- path: ct-queue_length_limits-logs/*-ct-logs-*.tar.xz
- rabbit-ct-queue_master_location:
- name: rabbit-ct-queue_master_location
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-queue_master_location
- run: |
- mkdir ct-queue_master_location-logs && chmod 777 ct-queue_master_location-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=queue_master_location \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-queue_master_location-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-queue_master_location-logs
- path: ct-queue_master_location-logs/*-ct-logs-*.tar.xz
- rabbit-ct-queue_parallel:
- name: rabbit-ct-queue_parallel
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-queue_parallel
- run: |
- mkdir ct-queue_parallel-logs && chmod 777 ct-queue_parallel-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=queue_parallel \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-queue_parallel-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-queue_parallel-logs
- path: ct-queue_parallel-logs/*-ct-logs-*.tar.xz
- rabbit-ct-queue_type:
- name: rabbit-ct-queue_type
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-queue_type
- run: |
- mkdir ct-queue_type-logs && chmod 777 ct-queue_type-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=queue_type \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-queue_type-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-queue_type-logs
- path: ct-queue_type-logs/*-ct-logs-*.tar.xz
- rabbit-ct-quorum_queue:
- name: rabbit-ct-quorum_queue
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-quorum_queue
- run: |
- mkdir ct-quorum_queue-logs && chmod 777 ct-quorum_queue-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=quorum_queue \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-quorum_queue-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-quorum_queue-logs
- path: ct-quorum_queue-logs/*-ct-logs-*.tar.xz
- rabbit-ct-rabbit_confirms:
- name: rabbit-ct-rabbit_confirms
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-rabbit_confirms
- run: |
- mkdir ct-rabbit_confirms-logs && chmod 777 ct-rabbit_confirms-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbit_confirms \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-rabbit_confirms-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbit_confirms-logs
- path: ct-rabbit_confirms-logs/*-ct-logs-*.tar.xz
- rabbit-ct-rabbit_core_metrics_gc:
- name: rabbit-ct-rabbit_core_metrics_gc
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-rabbit_core_metrics_gc
- run: |
- mkdir ct-rabbit_core_metrics_gc-logs && chmod 777 ct-rabbit_core_metrics_gc-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbit_core_metrics_gc \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-rabbit_core_metrics_gc-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbit_core_metrics_gc-logs
- path: ct-rabbit_core_metrics_gc-logs/*-ct-logs-*.tar.xz
- rabbit-ct-rabbit_fifo:
- name: rabbit-ct-rabbit_fifo
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-rabbit_fifo
- run: |
- mkdir ct-rabbit_fifo-logs && chmod 777 ct-rabbit_fifo-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbit_fifo \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-rabbit_fifo-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbit_fifo-logs
- path: ct-rabbit_fifo-logs/*-ct-logs-*.tar.xz
- rabbit-ct-rabbit_fifo_int:
- name: rabbit-ct-rabbit_fifo_int
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-rabbit_fifo_int
- run: |
- mkdir ct-rabbit_fifo_int-logs && chmod 777 ct-rabbit_fifo_int-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbit_fifo_int \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-rabbit_fifo_int-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbit_fifo_int-logs
- path: ct-rabbit_fifo_int-logs/*-ct-logs-*.tar.xz
- rabbit-ct-rabbit_fifo_prop:
- name: rabbit-ct-rabbit_fifo_prop
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-rabbit_fifo_prop
- run: |
- mkdir ct-rabbit_fifo_prop-logs && chmod 777 ct-rabbit_fifo_prop-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbit_fifo_prop \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-rabbit_fifo_prop-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbit_fifo_prop-logs
- path: ct-rabbit_fifo_prop-logs/*-ct-logs-*.tar.xz
- rabbit-ct-rabbit_fifo_v0:
- name: rabbit-ct-rabbit_fifo_v0
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-rabbit_fifo_v0
- run: |
- mkdir ct-rabbit_fifo_v0-logs && chmod 777 ct-rabbit_fifo_v0-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbit_fifo_v0 \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-rabbit_fifo_v0-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbit_fifo_v0-logs
- path: ct-rabbit_fifo_v0-logs/*-ct-logs-*.tar.xz
- rabbit-ct-rabbit_msg_record:
- name: rabbit-ct-rabbit_msg_record
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-rabbit_msg_record
- run: |
- mkdir ct-rabbit_msg_record-logs && chmod 777 ct-rabbit_msg_record-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbit_msg_record \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-rabbit_msg_record-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbit_msg_record-logs
- path: ct-rabbit_msg_record-logs/*-ct-logs-*.tar.xz
- rabbit-ct-rabbit_stream_queue:
- name: rabbit-ct-rabbit_stream_queue
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-rabbit_stream_queue
- run: |
- mkdir ct-rabbit_stream_queue-logs && chmod 777 ct-rabbit_stream_queue-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbit_stream_queue \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-rabbit_stream_queue-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbit_stream_queue-logs
- path: ct-rabbit_stream_queue-logs/*-ct-logs-*.tar.xz
- rabbit-ct-rabbitmq_queues_cli_integration:
- name: rabbit-ct-rabbitmq_queues_cli_integration
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-rabbitmq_queues_cli_integration
- run: |
- mkdir ct-rabbitmq_queues_cli_integration-logs && chmod 777 ct-rabbitmq_queues_cli_integration-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbitmq_queues_cli_integration \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-rabbitmq_queues_cli_integration-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbitmq_queues_cli_integration-logs
- path: ct-rabbitmq_queues_cli_integration-logs/*-ct-logs-*.tar.xz
- rabbit-ct-rabbitmqctl_integration:
- name: rabbit-ct-rabbitmqctl_integration
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-rabbitmqctl_integration
- run: |
- mkdir ct-rabbitmqctl_integration-logs && chmod 777 ct-rabbitmqctl_integration-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbitmqctl_integration \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-rabbitmqctl_integration-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbitmqctl_integration-logs
- path: ct-rabbitmqctl_integration-logs/*-ct-logs-*.tar.xz
- rabbit-ct-rabbitmqctl_shutdown:
- name: rabbit-ct-rabbitmqctl_shutdown
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-rabbitmqctl_shutdown
- run: |
- mkdir ct-rabbitmqctl_shutdown-logs && chmod 777 ct-rabbitmqctl_shutdown-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=rabbitmqctl_shutdown \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-rabbitmqctl_shutdown-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-rabbitmqctl_shutdown-logs
- path: ct-rabbitmqctl_shutdown-logs/*-ct-logs-*.tar.xz
- rabbit-ct-signal_handling:
- name: rabbit-ct-signal_handling
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-signal_handling
- run: |
- mkdir ct-signal_handling-logs && chmod 777 ct-signal_handling-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=signal_handling \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-signal_handling-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-signal_handling-logs
- path: ct-signal_handling-logs/*-ct-logs-*.tar.xz
- rabbit-ct-simple_ha:
- name: rabbit-ct-simple_ha
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-simple_ha
- run: |
- mkdir ct-simple_ha-logs && chmod 777 ct-simple_ha-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=simple_ha \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-simple_ha-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-simple_ha-logs
- path: ct-simple_ha-logs/*-ct-logs-*.tar.xz
- rabbit-ct-single_active_consumer:
- name: rabbit-ct-single_active_consumer
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-single_active_consumer
- run: |
- mkdir ct-single_active_consumer-logs && chmod 777 ct-single_active_consumer-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=single_active_consumer \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-single_active_consumer-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-single_active_consumer-logs
- path: ct-single_active_consumer-logs/*-ct-logs-*.tar.xz
- rabbit-ct-sync_detection:
- name: rabbit-ct-sync_detection
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-sync_detection
- run: |
- mkdir ct-sync_detection-logs && chmod 777 ct-sync_detection-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=sync_detection \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-sync_detection-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-sync_detection-logs
- path: ct-sync_detection-logs/*-ct-logs-*.tar.xz
- rabbit-ct-term_to_binary_compat_prop:
- name: rabbit-ct-term_to_binary_compat_prop
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-term_to_binary_compat_prop
- run: |
- mkdir ct-term_to_binary_compat_prop-logs && chmod 777 ct-term_to_binary_compat_prop-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=term_to_binary_compat_prop \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-term_to_binary_compat_prop-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-term_to_binary_compat_prop-logs
- path: ct-term_to_binary_compat_prop-logs/*-ct-logs-*.tar.xz
- rabbit-ct-topic_permission:
- name: rabbit-ct-topic_permission
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-topic_permission
- run: |
- mkdir ct-topic_permission-logs && chmod 777 ct-topic_permission-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=topic_permission \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-topic_permission-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-topic_permission-logs
- path: ct-topic_permission-logs/*-ct-logs-*.tar.xz
- rabbit-ct-unit_access_control:
- name: rabbit-ct-unit_access_control
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_access_control
- run: |
- mkdir ct-unit_access_control-logs && chmod 777 ct-unit_access_control-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_access_control \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_access_control-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_access_control-logs
- path: ct-unit_access_control-logs/*-ct-logs-*.tar.xz
- rabbit-ct-unit_access_control_authn_authz_context_propagation:
- name: rabbit-ct-unit_access_control_authn_authz_context_propagation
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_access_control_authn_authz_context_propagation
- run: |
- mkdir ct-unit_access_control_authn_authz_context_propagation-logs && chmod 777 ct-unit_access_control_authn_authz_context_propagation-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_access_control_authn_authz_context_propagation \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_access_control_authn_authz_context_propagation-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_access_control_authn_authz_context_propagation-logs
- path: ct-unit_access_control_authn_authz_context_propagation-logs/*-ct-logs-*.tar.xz
- rabbit-ct-unit_access_control_credential_validation:
- name: rabbit-ct-unit_access_control_credential_validation
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_access_control_credential_validation
- run: |
- mkdir ct-unit_access_control_credential_validation-logs && chmod 777 ct-unit_access_control_credential_validation-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_access_control_credential_validation \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_access_control_credential_validation-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_access_control_credential_validation-logs
- path: ct-unit_access_control_credential_validation-logs/*-ct-logs-*.tar.xz
- rabbit-ct-unit_amqp091_content_framing:
- name: rabbit-ct-unit_amqp091_content_framing
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_amqp091_content_framing
- run: |
- mkdir ct-unit_amqp091_content_framing-logs && chmod 777 ct-unit_amqp091_content_framing-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_amqp091_content_framing \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_amqp091_content_framing-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_amqp091_content_framing-logs
- path: ct-unit_amqp091_content_framing-logs/*-ct-logs-*.tar.xz
- rabbit-ct-unit_amqp091_server_properties:
- name: rabbit-ct-unit_amqp091_server_properties
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_amqp091_server_properties
- run: |
- mkdir ct-unit_amqp091_server_properties-logs && chmod 777 ct-unit_amqp091_server_properties-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_amqp091_server_properties \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_amqp091_server_properties-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_amqp091_server_properties-logs
- path: ct-unit_amqp091_server_properties-logs/*-ct-logs-*.tar.xz
- rabbit-ct-unit_app_management:
- name: rabbit-ct-unit_app_management
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_app_management
- run: |
- mkdir ct-unit_app_management-logs && chmod 777 ct-unit_app_management-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_app_management \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_app_management-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_app_management-logs
- path: ct-unit_app_management-logs/*-ct-logs-*.tar.xz
- rabbit-ct-unit_cluster_formation_locking_mocks:
- name: rabbit-ct-unit_cluster_formation_locking_mocks
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_cluster_formation_locking_mocks
- run: |
- mkdir ct-unit_cluster_formation_locking_mocks-logs && chmod 777 ct-unit_cluster_formation_locking_mocks-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_cluster_formation_locking_mocks \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_cluster_formation_locking_mocks-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_cluster_formation_locking_mocks-logs
- path: ct-unit_cluster_formation_locking_mocks-logs/*-ct-logs-*.tar.xz
- rabbit-ct-unit_collections:
- name: rabbit-ct-unit_collections
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_collections
- run: |
- mkdir ct-unit_collections-logs && chmod 777 ct-unit_collections-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_collections \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_collections-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_collections-logs
- path: ct-unit_collections-logs/*-ct-logs-*.tar.xz
- rabbit-ct-unit_config_value_encryption:
- name: rabbit-ct-unit_config_value_encryption
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_config_value_encryption
- run: |
- mkdir ct-unit_config_value_encryption-logs && chmod 777 ct-unit_config_value_encryption-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_config_value_encryption \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_config_value_encryption-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_config_value_encryption-logs
- path: ct-unit_config_value_encryption-logs/*-ct-logs-*.tar.xz
- rabbit-ct-unit_connection_tracking:
- name: rabbit-ct-unit_connection_tracking
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_connection_tracking
- run: |
- mkdir ct-unit_connection_tracking-logs && chmod 777 ct-unit_connection_tracking-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_connection_tracking \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_connection_tracking-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_connection_tracking-logs
- path: ct-unit_connection_tracking-logs/*-ct-logs-*.tar.xz
- rabbit-ct-unit_credit_flow:
- name: rabbit-ct-unit_credit_flow
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_credit_flow
- run: |
- mkdir ct-unit_credit_flow-logs && chmod 777 ct-unit_credit_flow-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_credit_flow \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_credit_flow-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_credit_flow-logs
- path: ct-unit_credit_flow-logs/*-ct-logs-*.tar.xz
- rabbit-ct-unit_disk_monitor:
- name: rabbit-ct-unit_disk_monitor
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_disk_monitor
- run: |
- mkdir ct-unit_disk_monitor-logs && chmod 777 ct-unit_disk_monitor-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_disk_monitor \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_disk_monitor-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_disk_monitor-logs
- path: ct-unit_disk_monitor-logs/*-ct-logs-*.tar.xz
- rabbit-ct-unit_disk_monitor_mocks:
- name: rabbit-ct-unit_disk_monitor_mocks
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_disk_monitor_mocks
- run: |
- mkdir ct-unit_disk_monitor_mocks-logs && chmod 777 ct-unit_disk_monitor_mocks-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_disk_monitor_mocks \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_disk_monitor_mocks-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_disk_monitor_mocks-logs
- path: ct-unit_disk_monitor_mocks-logs/*-ct-logs-*.tar.xz
- rabbit-ct-unit_file_handle_cache:
- name: rabbit-ct-unit_file_handle_cache
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_file_handle_cache
- run: |
- mkdir ct-unit_file_handle_cache-logs && chmod 777 ct-unit_file_handle_cache-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_file_handle_cache \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_file_handle_cache-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_file_handle_cache-logs
- path: ct-unit_file_handle_cache-logs/*-ct-logs-*.tar.xz
- rabbit-ct-unit_gen_server2:
- name: rabbit-ct-unit_gen_server2
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_gen_server2
- run: |
- mkdir ct-unit_gen_server2-logs && chmod 777 ct-unit_gen_server2-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_gen_server2 \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_gen_server2-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_gen_server2-logs
- path: ct-unit_gen_server2-logs/*-ct-logs-*.tar.xz
- rabbit-ct-unit_gm:
- name: rabbit-ct-unit_gm
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_gm
- run: |
- mkdir ct-unit_gm-logs && chmod 777 ct-unit_gm-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_gm \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_gm-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_gm-logs
- path: ct-unit_gm-logs/*-ct-logs-*.tar.xz
- rabbit-ct-unit_log_config:
- name: rabbit-ct-unit_log_config
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_log_config
- run: |
- mkdir ct-unit_log_config-logs && chmod 777 ct-unit_log_config-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_log_config \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_log_config-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_log_config-logs
- path: ct-unit_log_config-logs/*-ct-logs-*.tar.xz
- rabbit-ct-unit_log_management:
- name: rabbit-ct-unit_log_management
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_log_management
- run: |
- mkdir ct-unit_log_management-logs && chmod 777 ct-unit_log_management-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_log_management \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_log_management-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_log_management-logs
- path: ct-unit_log_management-logs/*-ct-logs-*.tar.xz
- rabbit-ct-unit_operator_policy:
- name: rabbit-ct-unit_operator_policy
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_operator_policy
- run: |
- mkdir ct-unit_operator_policy-logs && chmod 777 ct-unit_operator_policy-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_operator_policy \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_operator_policy-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_operator_policy-logs
- path: ct-unit_operator_policy-logs/*-ct-logs-*.tar.xz
- rabbit-ct-unit_pg_local:
- name: rabbit-ct-unit_pg_local
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_pg_local
- run: |
- mkdir ct-unit_pg_local-logs && chmod 777 ct-unit_pg_local-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_pg_local \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_pg_local-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_pg_local-logs
- path: ct-unit_pg_local-logs/*-ct-logs-*.tar.xz
- rabbit-ct-unit_plugin_directories:
- name: rabbit-ct-unit_plugin_directories
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_plugin_directories
- run: |
- mkdir ct-unit_plugin_directories-logs && chmod 777 ct-unit_plugin_directories-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_plugin_directories \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_plugin_directories-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_plugin_directories-logs
- path: ct-unit_plugin_directories-logs/*-ct-logs-*.tar.xz
- rabbit-ct-unit_plugin_versioning:
- name: rabbit-ct-unit_plugin_versioning
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_plugin_versioning
- run: |
- mkdir ct-unit_plugin_versioning-logs && chmod 777 ct-unit_plugin_versioning-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_plugin_versioning \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_plugin_versioning-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_plugin_versioning-logs
- path: ct-unit_plugin_versioning-logs/*-ct-logs-*.tar.xz
- rabbit-ct-unit_policy_validators:
- name: rabbit-ct-unit_policy_validators
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_policy_validators
- run: |
- mkdir ct-unit_policy_validators-logs && chmod 777 ct-unit_policy_validators-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_policy_validators \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_policy_validators-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_policy_validators-logs
- path: ct-unit_policy_validators-logs/*-ct-logs-*.tar.xz
- rabbit-ct-unit_priority_queue:
- name: rabbit-ct-unit_priority_queue
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_priority_queue
- run: |
- mkdir ct-unit_priority_queue-logs && chmod 777 ct-unit_priority_queue-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_priority_queue \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_priority_queue-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_priority_queue-logs
- path: ct-unit_priority_queue-logs/*-ct-logs-*.tar.xz
- rabbit-ct-unit_queue_consumers:
- name: rabbit-ct-unit_queue_consumers
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_queue_consumers
- run: |
- mkdir ct-unit_queue_consumers-logs && chmod 777 ct-unit_queue_consumers-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_queue_consumers \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_queue_consumers-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_queue_consumers-logs
- path: ct-unit_queue_consumers-logs/*-ct-logs-*.tar.xz
- rabbit-ct-unit_stats_and_metrics:
- name: rabbit-ct-unit_stats_and_metrics
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_stats_and_metrics
- run: |
- mkdir ct-unit_stats_and_metrics-logs && chmod 777 ct-unit_stats_and_metrics-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_stats_and_metrics \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_stats_and_metrics-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_stats_and_metrics-logs
- path: ct-unit_stats_and_metrics-logs/*-ct-logs-*.tar.xz
- rabbit-ct-unit_supervisor2:
- name: rabbit-ct-unit_supervisor2
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_supervisor2
- run: |
- mkdir ct-unit_supervisor2-logs && chmod 777 ct-unit_supervisor2-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_supervisor2 \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_supervisor2-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_supervisor2-logs
- path: ct-unit_supervisor2-logs/*-ct-logs-*.tar.xz
- rabbit-ct-unit_vm_memory_monitor:
- name: rabbit-ct-unit_vm_memory_monitor
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-unit_vm_memory_monitor
- run: |
- mkdir ct-unit_vm_memory_monitor-logs && chmod 777 ct-unit_vm_memory_monitor-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=unit_vm_memory_monitor \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-unit_vm_memory_monitor-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-unit_vm_memory_monitor-logs
- path: ct-unit_vm_memory_monitor-logs/*-ct-logs-*.tar.xz
- rabbit-ct-upgrade_preparation:
- name: rabbit-ct-upgrade_preparation
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-upgrade_preparation
- run: |
- mkdir ct-upgrade_preparation-logs && chmod 777 ct-upgrade_preparation-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=upgrade_preparation \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-upgrade_preparation-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-upgrade_preparation-logs
- path: ct-upgrade_preparation-logs/*-ct-logs-*.tar.xz
- rabbit-ct-vhost:
- name: rabbit-ct-vhost
- needs:
- - prepare
- - rabbit-checks
- runs-on: ubuntu-18.04
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- - name: RUN ct-vhost
- run: |
- mkdir ct-vhost-logs && chmod 777 ct-vhost-logs
- docker run \
- --env project=rabbit \
- --env CT_SUITE=vhost \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-vhost-logs:/workspace/ct-logs \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: always()
- with:
- name: rabbit-ct-vhost-logs
- path: ct-vhost-logs/*-ct-logs-*.tar.xz
- rabbit:
- name: rabbit
- needs:
- - rabbit-checks
- - rabbit-ct-amqqueue_backward_compatibility
- - rabbit-ct-backing_queue
- - rabbit-ct-channel_interceptor
- - rabbit-ct-channel_operation_timeout
- - rabbit-ct-cluster
- - rabbit-ct-cluster_rename
- - rabbit-ct-clustering_management
- - rabbit-ct-config_schema
- - rabbit-ct-confirms_rejects
- - rabbit-ct-consumer_timeout
- - rabbit-ct-crashing_queues
- - rabbit-ct-dead_lettering
- - rabbit-ct-definition_import
- - rabbit-ct-disconnect_detected_during_alarm
- - rabbit-ct-dynamic_ha
- - rabbit-ct-dynamic_qq
- - rabbit-ct-eager_sync
- - rabbit-ct-feature_flags
- - rabbit-ct-lazy_queue
- - rabbit-ct-list_consumers_sanity_check
- - rabbit-ct-list_queues_online_and_offline
- - rabbit-ct-maintenance_mode
- - rabbit-ct-many_node_ha
- - rabbit-ct-message_size_limit
- - rabbit-ct-metrics
- - rabbit-ct-mirrored_supervisor
- - rabbit-ct-msg_store
- - rabbit-ct-peer_discovery_classic_config
- - rabbit-ct-peer_discovery_dns
- - rabbit-ct-per_user_connection_channel_limit
- - rabbit-ct-per_user_connection_channel_limit_partitions
- - rabbit-ct-per_user_connection_channel_tracking
- - rabbit-ct-per_user_connection_tracking
- - rabbit-ct-per_vhost_connection_limit
- - rabbit-ct-per_vhost_connection_limit_partitions
- - rabbit-ct-per_vhost_msg_store
- - rabbit-ct-per_vhost_queue_limit
- - rabbit-ct-policy
- - rabbit-ct-priority_queue
- - rabbit-ct-priority_queue_recovery
- - rabbit-ct-product_info
- - rabbit-ct-proxy_protocol
- - rabbit-ct-publisher_confirms_parallel
- - rabbit-ct-queue_length_limits
- - rabbit-ct-queue_master_location
- - rabbit-ct-queue_parallel
- - rabbit-ct-queue_type
- - rabbit-ct-quorum_queue
- - rabbit-ct-rabbit_confirms
- - rabbit-ct-rabbit_core_metrics_gc
- - rabbit-ct-rabbit_fifo
- - rabbit-ct-rabbit_fifo_int
- - rabbit-ct-rabbit_fifo_prop
- - rabbit-ct-rabbit_fifo_v0
- - rabbit-ct-rabbit_msg_record
- - rabbit-ct-rabbit_stream_queue
- - rabbit-ct-rabbitmq_queues_cli_integration
- - rabbit-ct-rabbitmqctl_integration
- - rabbit-ct-rabbitmqctl_shutdown
- - rabbit-ct-signal_handling
- - rabbit-ct-simple_ha
- - rabbit-ct-single_active_consumer
- - rabbit-ct-sync_detection
- - rabbit-ct-term_to_binary_compat_prop
- - rabbit-ct-topic_permission
- - rabbit-ct-unit_access_control
- - rabbit-ct-unit_access_control_authn_authz_context_propagation
- - rabbit-ct-unit_access_control_credential_validation
- - rabbit-ct-unit_amqp091_content_framing
- - rabbit-ct-unit_amqp091_server_properties
- - rabbit-ct-unit_app_management
- - rabbit-ct-unit_cluster_formation_locking_mocks
- - rabbit-ct-unit_collections
- - rabbit-ct-unit_config_value_encryption
- - rabbit-ct-unit_connection_tracking
- - rabbit-ct-unit_credit_flow
- - rabbit-ct-unit_disk_monitor
- - rabbit-ct-unit_disk_monitor_mocks
- - rabbit-ct-unit_file_handle_cache
- - rabbit-ct-unit_gen_server2
- - rabbit-ct-unit_gm
- - rabbit-ct-unit_log_config
- - rabbit-ct-unit_log_management
- - rabbit-ct-unit_operator_policy
- - rabbit-ct-unit_pg_local
- - rabbit-ct-unit_plugin_directories
- - rabbit-ct-unit_plugin_versioning
- - rabbit-ct-unit_policy_validators
- - rabbit-ct-unit_priority_queue
- - rabbit-ct-unit_queue_consumers
- - rabbit-ct-unit_stats_and_metrics
- - rabbit-ct-unit_supervisor2
- - rabbit-ct-unit_vm_memory_monitor
- - rabbit-ct-upgrade_preparation
- - rabbit-ct-vhost
- runs-on: ubuntu-18.04
- if: always()
- steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RECORD STEP FINISH
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }}
- docker run \
- --env project=rabbit \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ needs.rabbit-checks.outputs.step_start }} \
- eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.1-rabbitmq-${{ github.sha }} \
- /workspace/rabbitmq/ci/scripts/collect.sh
- amqp_client:
- name: amqp_client
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=amqp_client \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: amqp_client-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- amqp10_common:
- name: amqp10_common
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=amqp10_common \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: amqp10_common-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_auth_backend_cache:
- name: rabbitmq_auth_backend_cache
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_auth_backend_cache \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_auth_backend_cache-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_auth_backend_http:
- name: rabbitmq_auth_backend_http
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_auth_backend_http \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_auth_backend_http-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_auth_backend_oauth2:
- name: rabbitmq_auth_backend_oauth2
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_auth_backend_oauth2 \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_auth_backend_oauth2-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_auth_mechanism_ssl:
- name: rabbitmq_auth_mechanism_ssl
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_auth_mechanism_ssl \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_auth_mechanism_ssl-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_aws:
- name: rabbitmq_aws
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_aws \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_aws-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_cli:
- name: rabbitmq_cli
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }}
- mkdir broker-logs && chmod 777 broker-logs
- docker run \
- --env project=rabbitmq_cli \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/broker-logs:/workspace/broker-logs \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }} \
- ci/scripts/rabbitmq_cli.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_cli-broker-logs
- path: broker-logs/broker-logs.tar.xz
- rabbitmq_consistent_hash_exchange:
- name: rabbitmq_consistent_hash_exchange
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_consistent_hash_exchange \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_consistent_hash_exchange-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_event_exchange:
- name: rabbitmq_event_exchange
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_event_exchange \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_event_exchange-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_federation:
- name: rabbitmq_federation
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_federation \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_federation-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_federation_management:
- name: rabbitmq_federation_management
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_federation_management \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_federation_management-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_jms_topic_exchange:
- name: rabbitmq_jms_topic_exchange
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_jms_topic_exchange \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_jms_topic_exchange-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_management:
- name: rabbitmq_management
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_management \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_management-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_management_agent:
- name: rabbitmq_management_agent
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_management_agent \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_management_agent-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_peer_discovery_common:
- name: rabbitmq_peer_discovery_common
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_peer_discovery_common \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_peer_discovery_common-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_peer_discovery_k8s:
- name: rabbitmq_peer_discovery_k8s
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_peer_discovery_k8s \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_peer_discovery_k8s-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_peer_discovery_consul:
- name: rabbitmq_peer_discovery_consul
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_peer_discovery_consul \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_peer_discovery_consul-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_prometheus:
- name: rabbitmq_prometheus
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_prometheus \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_prometheus-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_random_exchange:
- name: rabbitmq_random_exchange
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_random_exchange \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_random_exchange-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_recent_history_exchange:
- name: rabbitmq_recent_history_exchange
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_recent_history_exchange \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_recent_history_exchange-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_sharding:
- name: rabbitmq_sharding
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_sharding \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_sharding-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_shovel:
- name: rabbitmq_shovel
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_shovel \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_shovel-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_shovel_management:
- name: rabbitmq_shovel_management
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_shovel_management \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_shovel_management-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_stomp:
- name: rabbitmq_stomp
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_stomp \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_stomp-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_top:
- name: rabbitmq_top
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_top \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_top-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_tracing:
- name: rabbitmq_tracing
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_tracing \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_tracing-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_trust_store:
- name: rabbitmq_trust_store
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_trust_store \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_trust_store-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_web_dispatch:
- name: rabbitmq_web_dispatch
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_web_dispatch \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_web_dispatch-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_web_mqtt:
- name: rabbitmq_web_mqtt
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_web_mqtt \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_web_mqtt-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_web_mqtt_examples:
- name: rabbitmq_web_mqtt_examples
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_web_mqtt_examples \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_web_mqtt_examples-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_web_stomp:
- name: rabbitmq_web_stomp
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_web_stomp \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_web_stomp-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- rabbitmq_web_stomp_examples:
- name: rabbitmq_web_stomp_examples
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }}
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=rabbitmq_web_stomp_examples \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }} \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: rabbitmq_web_stomp_examples-ct-logs
- path: ct-logs/*-ct-logs-*.tar.xz
- package-generic-unix:
- name: package-generic-unix
- needs:
- - prepare
- runs-on: ubuntu-18.04
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PACKAGE GENERIC UNIX
- run: |
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }}
- mkdir PACKAGES && chmod 777 PACKAGES
- docker run \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/PACKAGES:/workspace/rabbitmq/PACKAGES \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }} \
- ci/scripts/package_generic_unix.sh
- - name: UPLOAD GENERIC UNIX PACKAGE ARTIFACT
- uses: actions/upload-artifact@v2-preview
- with:
- name: rabbitmq-server-generic-unix.tar.xz
- path: PACKAGES/rabbitmq-server-generic-unix-*.tar.xz
- - name: RECORD STEP FINISH
- if: always()
- run: |
- docker run \
- --env project=packaging \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }} \
- ci/scripts/collect.sh
- finish:
- name: finish
- needs:
- - prepare
- - xref
- - dialyze
- - rabbit_common
- - rabbit
- - amqp_client
- - amqp10_common
- - rabbitmq_auth_backend_cache
- - rabbitmq_auth_backend_http
- - rabbitmq_auth_backend_oauth2
- - rabbitmq_auth_mechanism_ssl
- - rabbitmq_aws
- - rabbitmq_cli
- - rabbitmq_consistent_hash_exchange
- - rabbitmq_event_exchange
- - rabbitmq_federation
- - rabbitmq_federation_management
- - rabbitmq_jms_topic_exchange
- - rabbitmq_management
- - rabbitmq_management_agent
- - rabbitmq_peer_discovery_common
- - rabbitmq_peer_discovery_k8s
- - rabbitmq_peer_discovery_consul
- - rabbitmq_prometheus
- - rabbitmq_random_exchange
- - rabbitmq_recent_history_exchange
- - rabbitmq_sharding
- - rabbitmq_shovel
- - rabbitmq_shovel_management
- - rabbitmq_stomp
- - rabbitmq_top
- - rabbitmq_tracing
- - rabbitmq_trust_store
- - rabbitmq_web_dispatch
- - rabbitmq_web_mqtt
- - rabbitmq_web_mqtt_examples
- - rabbitmq_web_stomp
- - rabbitmq_web_stomp_examples
- - package-generic-unix
- runs-on: ubuntu-18.04
- if: always()
- steps:
- - uses: technote-space/workflow-conclusion-action@v1
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RECORD BUILD FINISH
- run: |
- echo -n "${{ env.WORKFLOW_CONCLUSION }}" > conclusion
-
- gsutil cp conclusion \
- 'gs://monorepo_github_actions_conclusions/${{ github.sha }}/${{ github.workflow }}'
-
- gcloud auth configure-docker
- docker pull eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }}
- docker run \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env BUILD_START=${{ needs.prepare.outputs.build_start }} \
- --env BUILD_RESULT=${{ env.WORKFLOW_CONCLUSION }} \
- eu.gcr.io/cf-rabbitmq-core/ci:erlang-23.1-rabbitmq-${{ github.sha }} \
- ci/scripts/finish.sh
diff --git a/.github/workflows/test-mixed-versions.yaml b/.github/workflows/test-mixed-versions.yaml
new file mode 100644
index 0000000000..e14abfee58
--- /dev/null
+++ b/.github/workflows/test-mixed-versions.yaml
@@ -0,0 +1,115 @@
+name: Test Mixed Version Clusters
+on:
+ push:
+ branches:
+ - master
+ - v3.9.x
+ - v3.8.x
+ - bump-rbe-image-*
+ - bump-otp-for-oci
+ - bump-rabbitmq-ct-helpers
+ paths:
+ - 'deps/**'
+ - 'scripts/**'
+ - Makefile
+ - plugins.mk
+ - rabbitmq-components.mk
+ - .bazelrc
+ - .bazelversion
+ - BUILD.*
+ - '*.bzl'
+ - '*.bazel'
+ - .github/workflows/test-mixed-versions.yaml
+ workflow_dispatch:
+jobs:
+ test-mixed-versions:
+ name: Test (Mixed Version Cluster)
+ runs-on: ubuntu-latest
+ strategy:
+ fail-fast: false
+ matrix:
+ erlang_version:
+ - "23"
+ - "24"
+ timeout-minutes: 120
+ steps:
+ - name: CHECKOUT REPOSITORY
+ uses: actions/checkout@v2.4.0
+ - name: MOUNT BAZEL CACHE
+ uses: actions/cache@v1
+ with:
+ path: "/home/runner/repo-cache/"
+ key: repo-cache
+ - name: CONFIGURE BAZEL
+ run: |
+ cat << EOF >> user.bazelrc
+ build:buildbuddy --remote_header=x-buildbuddy-api-key=${{ secrets.BUILDBUDDY_API_KEY }}
+
+ build:buildbuddy --build_metadata=ROLE=CI
+ build:buildbuddy --build_metadata=VISIBILITY=PUBLIC
+ build:buildbuddy --remote_instance_name=buildbuddy-io/buildbuddy/ci-${{ matrix.erlang_version }}
+ build:buildbuddy --repository_cache=/home/runner/repo-cache/
+ build:buildbuddy --color=yes
+ build:buildbuddy --disk_cache=
+ EOF
+ #! - name: Setup tmate session
+ #! uses: mxschmitt/action-tmate@v3
+ - name: RUN TESTS
+ run: |
+ sudo sysctl -w net.ipv4.tcp_keepalive_time=60
+ sudo ethtool -K eth0 tso off gso off gro off tx off rx off lro off
+ bazelisk test //... \
+ --config=rbe-${{ matrix.erlang_version }} \
+ --test_tag_filters=mixed-version-cluster,-exclusive,-aws \
+ --verbose_failures
+ test-exclusive-mixed-versions:
+ name: Test (Exclusive Tests with Mixed Version Cluster)
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ erlang_version:
+ - "23"
+ - "24"
+ timeout-minutes: 60
+ steps:
+ - name: CHECKOUT REPOSITORY
+ uses: actions/checkout@v2.4.0
+ - name: CONFIGURE OTP & ELIXIR
+ uses: erlef/setup-beam@v1.9
+ with:
+ otp-version: ${{ matrix.erlang_version }}
+ elixir-version: 1.11.4
+ - name: MOUNT BAZEL CACHE
+ uses: actions/cache@v1
+ with:
+ path: "/home/runner/repo-cache/"
+ key: repo-cache
+ - name: CONFIGURE BAZEL
+ run: |
+ ERLANG_HOME="$(dirname $(dirname $(which erl)))"
+ ELIXIR_HOME="$(dirname $(dirname $(which iex)))"
+ cat << EOF >> user.bazelrc
+ build:buildbuddy --remote_header=x-buildbuddy-api-key=${{ secrets.BUILDBUDDY_API_KEY }}
+
+ build:buildbuddy --build_metadata=ROLE=CI
+ build:buildbuddy --build_metadata=VISIBILITY=PRIVATE
+ build:buildbuddy --remote_instance_name=buildbuddy-io/buildbuddy/ci-exclusive-${{ matrix.erlang_version }}
+ build:buildbuddy --repository_cache=/home/runner/repo-cache/
+ build:buildbuddy --color=yes
+ build:buildbuddy --disk_cache=
+
+ build --@bazel-erlang//:erlang_version=${{ matrix.erlang_version }}
+ build --@bazel-erlang//:erlang_home=${ERLANG_HOME}
+ build --//:elixir_home=${ELIXIR_HOME}
+ EOF
+ #! - name: Setup tmate session
+ #! uses: mxschmitt/action-tmate@v3
+ - name: RUN EXCLUSIVE TESTS
+ run: |
+ MIXED_EXCLUSIVE_TESTS=$(bazel query 'attr(tags, "mixed-version-cluster", attr(tags, "exclusive", tests(//...)))')
+ bazelisk test $MIXED_EXCLUSIVE_TESTS \
+ --config=buildbuddy \
+ --test_tag_filters=-aws \
+ --build_tests_only \
+ --test_env RABBITMQ_CT_HELPERS_DELETE_UNUSED_NODES=true \
+ --verbose_failures
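The exclusive mixed-version job above first materialises its target list with a `bazel query` over test tags and only then invokes `bazelisk test`. As a rough local sketch of that selection step (dropping the BuildBuddy remote config, and assuming `bazel`, `bazelisk` and a configured `user.bazelrc` as described in BAZEL.md later in this diff):

    # Select tests tagged both "mixed-version-cluster" and "exclusive" (same query as the workflow)
    MIXED_EXCLUSIVE_TESTS=$(bazel query 'attr(tags, "mixed-version-cluster", attr(tags, "exclusive", tests(//...)))')

    # Run only those targets, skipping AWS-only suites
    bazelisk test $MIXED_EXCLUSIVE_TESTS \
      --test_tag_filters=-aws \
      --build_tests_only \
      --test_env RABBITMQ_CT_HELPERS_DELETE_UNUSED_NODES=true \
      --verbose_failures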
diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml
new file mode 100644
index 0000000000..ce66e26c73
--- /dev/null
+++ b/.github/workflows/test.yaml
@@ -0,0 +1,109 @@
+name: Test
+on:
+ push:
+ paths:
+ - 'deps/**'
+ - 'scripts/**'
+ - Makefile
+ - plugins.mk
+ - rabbitmq-components.mk
+ - .bazelrc
+ - .bazelversion
+ - BUILD.*
+ - '*.bzl'
+ - '*.bazel'
+ - .github/workflows/test.yaml
+ pull_request:
+jobs:
+ test:
+ name: Test
+ runs-on: ubuntu-latest
+ strategy:
+ fail-fast: false
+ matrix:
+ erlang_version:
+ - "23"
+ - "24"
+ timeout-minutes: 120
+ steps:
+ - name: CHECKOUT REPOSITORY
+ uses: actions/checkout@v2.4.0
+ - name: MOUNT BAZEL CACHE
+ uses: actions/cache@v1
+ with:
+ path: "/home/runner/repo-cache/"
+ key: repo-cache
+ - name: CONFIGURE BAZEL
+ run: |
+ cat << EOF >> user.bazelrc
+ build:buildbuddy --remote_header=x-buildbuddy-api-key=${{ secrets.BUILDBUDDY_API_KEY }}
+
+ build:buildbuddy --build_metadata=ROLE=CI
+ build:buildbuddy --build_metadata=VISIBILITY=PUBLIC
+ build:buildbuddy --remote_instance_name=buildbuddy-io/buildbuddy/ci-${{ matrix.erlang_version }}
+ build:buildbuddy --repository_cache=/home/runner/repo-cache/
+ build:buildbuddy --color=yes
+ build:buildbuddy --disk_cache=
+ EOF
+
+ bazelisk info release
+ #! - name: Setup tmate session
+ #! uses: mxschmitt/action-tmate@v3
+ - name: RUN TESTS
+ run: |
+ sudo sysctl -w net.ipv4.tcp_keepalive_time=60
+ sudo ethtool -K eth0 tso off gso off gro off tx off rx off lro off
+ bazelisk test //... \
+ --config=rbe-${{ matrix.erlang_version }} \
+ --test_tag_filters=-exclusive,-aws,-mixed-version-cluster \
+ --verbose_failures
+ test-exclusive:
+ name: Test (Exclusive Tests)
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ erlang_version:
+ - "23"
+ - "24"
+ timeout-minutes: 60
+ steps:
+ - name: CHECKOUT REPOSITORY
+ uses: actions/checkout@v2.4.0
+ - name: CONFIGURE OTP & ELIXIR
+ uses: erlef/setup-beam@v1.9
+ with:
+ otp-version: ${{ matrix.erlang_version }}
+ elixir-version: 1.11.4
+ - name: MOUNT BAZEL CACHE
+ uses: actions/cache@v1
+ with:
+ path: "/home/runner/repo-cache/"
+ key: repo-cache
+ - name: CONFIGURE BAZEL
+ run: |
+ ERLANG_HOME="$(dirname $(dirname $(which erl)))"
+ ELIXIR_HOME="$(dirname $(dirname $(which iex)))"
+ cat << EOF >> user.bazelrc
+ build:buildbuddy --remote_header=x-buildbuddy-api-key=${{ secrets.BUILDBUDDY_API_KEY }}
+
+ build:buildbuddy --build_metadata=ROLE=CI
+ build:buildbuddy --build_metadata=VISIBILITY=PRIVATE
+ build:buildbuddy --remote_instance_name=buildbuddy-io/buildbuddy/ci-exclusive-${{ matrix.erlang_version }}
+ build:buildbuddy --repository_cache=/home/runner/repo-cache/
+ build:buildbuddy --color=yes
+ build:buildbuddy --disk_cache=
+
+ build --@bazel-erlang//:erlang_version=${{ matrix.erlang_version }}
+ build --@bazel-erlang//:erlang_home=${ERLANG_HOME}
+ build --//:elixir_home=${ELIXIR_HOME}
+ EOF
+ #! - name: Setup tmate session
+ #! uses: mxschmitt/action-tmate@v3
+ - name: RUN EXCLUSIVE TESTS
+ run: |
+ bazelisk test //... \
+ --config=buildbuddy \
+ --test_tag_filters=exclusive,-aws,-mixed-version-cluster \
+ --build_tests_only \
+ --test_env RABBITMQ_CT_HELPERS_DELETE_UNUSED_NODES=true \
+ --verbose_failures
diff --git a/.github/workflows/update-bazel-erlang.yaml b/.github/workflows/update-bazel-erlang.yaml
new file mode 100644
index 0000000000..6014da9d32
--- /dev/null
+++ b/.github/workflows/update-bazel-erlang.yaml
@@ -0,0 +1,45 @@
+name: Update bazel-erlang
+on:
+ schedule:
+ - cron: '0 3 * * *'
+ workflow_dispatch:
+jobs:
+ update-bazel-erlang:
+ name: Update bazel-erlang
+ runs-on: ubuntu-latest
+ timeout-minutes: 10
+ steps:
+ - name: CHECKOUT REPOSITORY
+ uses: actions/checkout@v2.4.0
+ with:
+ path: rabbitmq-server
+ - name: CHECKOUT bazel-erlang
+ uses: actions/checkout@v2.4.0
+ with:
+ repository: rabbitmq/bazel-erlang
+ path: bazel-erlang
+ - name: DETERMINE LATEST COMMIT
+ id: find-commit
+ working-directory: bazel-erlang
+ run: |
+ echo "::set-output name=SHA::$(git rev-parse HEAD)"
+ - name: UPDATE bazel-erlang COMMIT
+ working-directory: rabbitmq-server
+ run: |
+ sudo npm install --global --silent @bazel/buildozer
+ echo "$(cat WORKSPACE.bazel | npx buildozer 'set commit "${{ steps.find-commit.outputs.SHA }}"' -:bazel-erlang)" > WORKSPACE.bazel
+ git diff
+ - name: CREATE PULL REQUEST
+ uses: peter-evans/create-pull-request@v3
+ with:
+ token: ${{ secrets.REPO_SCOPED_TOKEN }}
+ committer: GitHub <noreply@github.com>
+ author: GitHub <noreply@github.com>
+ path: rabbitmq-server
+ title: Adopt latest bazel-erlang
+ commit-message: |
+ Adopt latest bazel-erlang
+
+ - bazel-erlang@${{ steps.find-commit.outputs.SHA }}
+ branch: bump-bazel-erlang
+ delete-branch: true \ No newline at end of file
diff --git a/.github/workflows/update-otp-for-oci.yaml b/.github/workflows/update-otp-for-oci.yaml
new file mode 100644
index 0000000000..d7acd89cd4
--- /dev/null
+++ b/.github/workflows/update-otp-for-oci.yaml
@@ -0,0 +1,67 @@
+name: Update OTP Versions for OCI Workflow
+on:
+ schedule:
+ - cron: '0 3 * * *'
+ workflow_dispatch:
+jobs:
+ update-rbe-images:
+ name: Update OTP Versions
+ runs-on: ubuntu-latest
+ strategy:
+ max-parallel: 1
+ matrix:
+ erlang_version:
+ - "24.1"
+ include:
+ - erlang_version: "24.1"
+ image_tag_suffix: 'otp-max'
+ timeout-minutes: 10
+ steps:
+ - name: CHECKOUT REPOSITORY
+ uses: actions/checkout@v2.4.0
+ - name: SKIP IF THE PR ALREADY EXISTS
+ id: check-for-branch
+ run: |
+ set +e
+ git ls-remote --exit-code --heads origin bump-otp-for-oci
+ echo "::set-output name=c::$?"
+ - name: DETERMINE LATEST PATCH & SHA
+ if: steps.check-for-branch.outputs.c != 0
+ id: fetch-version
+ run: |
+ TAG_NAME=$(curl -s GET https://api.github.com/repos/erlang/otp/tags \
+ | jq -r 'map(select(.name | contains("OTP-${{ matrix.erlang_version }}"))) | first | .name')
+
+ wget --continue --quiet --output-document="/tmp/OTP.tar.gz" "https://github.com/erlang/otp/archive/${TAG_NAME}.tar.gz" && \
+ SHA=$(shasum -a 256 "/tmp/OTP.tar.gz" | awk '{print $1}')
+
+ echo "::set-output name=VERSION::${TAG_NAME#OTP-}"
+ echo "::set-output name=SHA::${SHA}"
+ - name: MODIFY VERSION FILE
+ if: steps.check-for-branch.outputs.c != 0
+ run: |
+ echo "Updating packaging/docker-image/${{ matrix.image_tag_suffix }}.yaml with:"
+ echo " otp -> ${{ steps.fetch-version.outputs.VERSION }}"
+ echo " otp_sha256 -> ${{ steps.fetch-version.outputs.SHA }}"
+
+ FILE=packaging/docker-image/otp-versions/${{ matrix.image_tag_suffix }}.yaml
+ sed -i "s/otp: .*/otp: '${{ steps.fetch-version.outputs.VERSION }}'/" $FILE
+ sed -i "s/otp_sha256: .*/otp_sha256: ${{ steps.fetch-version.outputs.SHA }}/" $FILE
+
+ set -x
+ git diff
+ - name: CREATE PULL REQUEST
+ if: steps.check-for-branch.outputs.c != 0
+ uses: peter-evans/create-pull-request@v3
+ with:
+ token: ${{ secrets.REPO_SCOPED_TOKEN }}
+ committer: GitHub <noreply@github.com>
+ author: GitHub <noreply@github.com>
+ title: Adopt otp ${{ steps.fetch-version.outputs.VERSION }} for OCI workflow
+ commit-message: |
+ Adopt otp ${{ steps.fetch-version.outputs.VERSION }} for OCI workflow
+ labels: |
+ backport-v3.9.x
+ backport-v3.8.x
+ branch: bump-otp-for-oci
+ delete-branch: true
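Outside of Actions, the version-discovery step in the workflow above boils down to a tag lookup plus a checksum. A standalone sketch of that step, assuming the `24.1` series from this workflow's matrix (the stray `GET` argument to `curl` in the original is dropped here):

    # Find the newest OTP tag in the 24.1 series (same jq filter as the workflow)
    TAG_NAME=$(curl -s https://api.github.com/repos/erlang/otp/tags \
      | jq -r 'map(select(.name | contains("OTP-24.1"))) | first | .name')

    # Download the matching source tarball and compute the sha256 recorded in the image YAML
    wget --continue --quiet --output-document=/tmp/OTP.tar.gz \
      "https://github.com/erlang/otp/archive/${TAG_NAME}.tar.gz"
    SHA=$(shasum -a 256 /tmp/OTP.tar.gz | awk '{print $1}')

    echo "otp: ${TAG_NAME#OTP-}"
    echo "otp_sha256: ${SHA}"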
diff --git a/.github/workflows/update-rbe-images.yaml b/.github/workflows/update-rbe-images.yaml
new file mode 100644
index 0000000000..a988b66a2f
--- /dev/null
+++ b/.github/workflows/update-rbe-images.yaml
@@ -0,0 +1,60 @@
+name: Update Bazel RBE Images
+on:
+ schedule:
+ - cron: '0 3 * * *'
+ workflow_dispatch:
+jobs:
+ update-rbe-images:
+ name: Update Bazel RBE Images
+ runs-on: ubuntu-latest
+ strategy:
+ max-parallel: 1
+ matrix:
+ erlang_version:
+ - "23.3"
+ - "24.1"
+ include:
+ - erlang_version: "23.3"
+ short_version: "23"
+ - erlang_version: "24.1"
+ short_version: "24"
+ timeout-minutes: 10
+ steps:
+ - name: CHECKOUT REPOSITORY
+ uses: actions/checkout@v2.4.0
+ with:
+ path: rabbitmq-server
+ - name: CHECKOUT rbe-erlang-platform REPOSITORY
+ uses: actions/checkout@v2.4.0
+ with:
+ path: rbe-erlang-platform
+ repository: rabbitmq/rbe-erlang-platform
+ ref: linux-erlang-${{ matrix.erlang_version }}
+ - name: DETERMINE LATEST COMMIT
+ id: find-commit
+ working-directory: rbe-erlang-platform
+ run: |
+ echo "::set-output name=SHA::$(git rev-parse HEAD)"
+ - name: UPDATE rbe-erlang-platform COMMIT
+ working-directory: rabbitmq-server
+ run: |
+ sudo npm install --global --silent @bazel/buildozer
+ echo "$(cat WORKSPACE.bazel | npx buildozer 'set commit "${{ steps.find-commit.outputs.SHA }}"' -:rbe_${{ matrix.short_version }})" > WORKSPACE.bazel
+ git diff
+ - name: CREATE PULL REQUEST
+ uses: peter-evans/create-pull-request@v3
+ with:
+ token: ${{ secrets.REPO_SCOPED_TOKEN }}
+ committer: GitHub <noreply@github.com>
+ author: GitHub <noreply@github.com>
+ title: Adopt latest rabbitmq-server-buildenv:linux-erlang-${{ matrix.erlang_version }} for RBE
+ path: rabbitmq-server
+ commit-message: |
+ Use latest rbe-erlang-platform/linux-erlang-${{ matrix.erlang_version }}
+
+ for remote build execution (RBE) with BuildBuddy
+ labels: |
+ backport-v3.9.x
+ backport-v3.8.x
+ branch: bump-rbe-${{ matrix.short_version }}
+ delete-branch: true
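Both update workflows above pin a repository rule in WORKSPACE.bazel to a specific commit by piping the file through buildozer. A minimal local sketch of the same edit (the all-zero SHA is a placeholder, and `rbe_24` is one of the rule names from this workflow's matrix):

    # Rewrite the "commit" attribute of the rbe_24 rule in WORKSPACE.bazel.
    # buildozer reads the file from stdin ("-") and prints the edited file to stdout.
    sudo npm install --global --silent @bazel/buildozer
    echo "$(cat WORKSPACE.bazel | npx buildozer 'set commit "0000000000000000000000000000000000000000"' -:rbe_24)" > WORKSPACE.bazel
    git diff WORKSPACE.bazel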
diff --git a/.gitignore b/.gitignore
index a39bfd51e5..f71d98b114 100644
--- a/.gitignore
+++ b/.gitignore
@@ -40,6 +40,9 @@
!/deps/rabbitmq_shovel_management/
!/deps/rabbitmq_stomp/
!/deps/rabbitmq_stream/
+!/deps/rabbitmq_stream_common/
+!/deps/rabbitmq_stream_management/
+!/deps/rabbitmq_stream_prometheus/
!/deps/rabbitmq_top/
!/deps/rabbitmq_tracing/
!/deps/rabbitmq_trust_store/
@@ -54,7 +57,10 @@
/plugins.lock
/sbin/
/sbin.lock
+erl_crash.dump
.envrc
+*.plt
+*.lock
/topic-branch-scratch/
@@ -66,3 +72,9 @@ rabbitmq-server-*.tar.gz
rabbitmq-server-*.tar.bz2
rabbitmq-server-*.tar.xz
rabbitmq-server-*.zip
+
+traces*
+callgrand*
+
+/user.bazelrc
+/bazel-*
diff --git a/BAZEL.md b/BAZEL.md
new file mode 100644
index 0000000000..9ab05a12cf
--- /dev/null
+++ b/BAZEL.md
@@ -0,0 +1,70 @@
+# [Bazel](https://www.bazel.build/) build
+
+From https://docs.bazel.build/versions/master/bazel-overview.html
+> Bazel is an open-source build and test tool similar to Make, Maven, and Gradle. It uses a human-readable, high-level build language. Bazel supports projects in multiple languages and builds outputs for multiple platforms. Bazel supports large codebases across multiple repositories, and large numbers of users.
+
+## Why RabbitMQ + Bazel?
+
+RabbitMQ, Tier1 plugins included, is a large codebase. The developer experience benefits from fast incremental compilation.
+
+More importantly, RabbitMQ's test suite is large and takes hours if run on a single machine. Bazel allows tests to be run in parallel on a large number of remote workers if needed, and furthermore uses cached test results when branches of the codebase remain unchanged.
+
+Bazel does not provide built-in Erlang or Elixir support, nor is there an available library of Bazel rules. Therefore, we have defined our own rules in https://github.com/rabbitmq/bazel-erlang. Elixir compilation is handled as a special case within this repository. To use these rules, the location of your Erlang and Elixir installations must be indicated to the build (see below).
+
+While most of the work of running tests happens in Bazel, the suite still makes use of some external command-line tools, notably GNU `make` and `openssl`. Ideally we could bring all of these tools under Bazel, so that the only tool needed would be `bazel` or `bazelisk`, but that will take some time.
+
+## Running Tests
+
+### Install Bazelisk
+
+On **macOS**:
+
+`brew install bazelisk`
+
+Otherwise:
+
+https://docs.bazel.build/versions/master/install-bazelisk.html
+
+### Create `user.bazelrc`
+
+Create a `user.bazelrc` by making a copy of `user-template.bazelrc` and updating the paths in the first few lines.
+
+### Run the broker
+
+`bazel run broker`
+
+### Running tests
+
+Many rabbit tests spawn single or clustered rabbit nodes, so it's best to run test suites sequentially on a single machine; hence the `build --local_test_jobs=1` flag used in `.bazelrc`. When running tests locally, it may also be reasonable to disable test sharding and stream test output by passing `--test_output=streamed` as an additional argument (to disable sharding without streaming output, use `--test_sharding_strategy=disabled` instead). Naturally, that restriction does not hold when utilizing remote execution (as is the case for RabbitMQ's CI pipelines).
+
+Erlang Common Test logs will not be placed in the logs directory when run with bazel. They can be found under `bazel-testlogs`. For instance, those of the rabbit application's backing_queue suite will be under `bazel-testlogs/deps/rabbit/backing_queue_SUITE/test.outputs/`.
+
+### Run all tests
+
+Note: This takes quite some time on a single machine.
+
+`bazel test //...`
+
+### Run tests in a 'package' and its 'subpackages'
+
+**rabbit** is an appropriate example because it encloses the **rabbitmq_prelaunch** application.
+
+`bazel test deps/rabbit/...`
+
+### Run tests for a specific 'package'
+
+`bazel test deps/rabbit_common:all`
+
+### Run an individual common test suite
+
+`bazel test //deps/rabbit:lazy_queue_SUITE`
+
+## Additional Useful Commands
+
+- Format all bazel files consistently (requires [buildifier](https://github.com/bazelbuild/buildtools/blob/master/buildifier/README.md)):
+
+ `buildifier -r .`
+
+- Remove unused load statements from BUILD.bazel files (requires [buildozer](https://github.com/bazelbuild/buildtools/blob/master/buildozer/README.md)):
+
+ `buildozer 'fix unusedLoads' //...:__pkg__`
diff --git a/BUILD.bats b/BUILD.bats
new file mode 100644
index 0000000000..ed50b3c253
--- /dev/null
+++ b/BUILD.bats
@@ -0,0 +1,5 @@
+filegroup(
+ name = "bin_dir",
+ srcs = glob(["bin/**/*", "libexec/**/*"]),
+ visibility = ["//visibility:public"],
+)
diff --git a/BUILD.bazel b/BUILD.bazel
new file mode 100644
index 0000000000..771f0f259c
--- /dev/null
+++ b/BUILD.bazel
@@ -0,0 +1,166 @@
+load("@rules_pkg//:pkg.bzl", "pkg_tar")
+load("@bazel-erlang//:dialyze.bzl", "plt")
+load("@bazel-erlang//:shell.bzl", "shell")
+load("elixir_home.bzl", "elixir_home")
+load(":rabbitmq_home.bzl", "rabbitmq_home")
+load(":rabbitmq_run.bzl", "rabbitmq_run", "rabbitmq_run_command")
+load(":rabbitmqctl.bzl", "rabbitmqctl")
+load(":rabbitmq.bzl", "ALL_PLUGINS", "APP_VERSION")
+load(":dist.bzl", "collect_licenses", "versioned_rabbitmq_home")
+
+exports_files([
+ "scripts/bazel/rabbitmq-run.sh",
+])
+
+# This allows us to
+# `bazel build //my/target \
+# --//:elixir_home=/path/to/elixir/installation`
+elixir_home(
+ name = "elixir_home",
+ build_setting_default = "~/.kiex/elixirs/elixir-1.10.4",
+ visibility = ["//visibility:public"],
+)
+
+platform(
+ name = "erlang_git_platform",
+ constraint_values = [
+ "@platforms//os:linux",
+ "@platforms//cpu:x86_64",
+ "@bazel_tools//tools/cpp:clang",
+ ],
+ exec_properties = {
+ "OSFamily": "Linux",
+ # linux-erlang-git-master
+ "container-image": "docker://pivotalrabbitmq/rabbitmq-server-buildenv@sha256:5d4fa38f723186668a497a8ee57e22010187e9347add5d4a7cc199dc13f1493f",
+ },
+)
+
+plt(
+ name = "base_plt",
+ visibility = ["//visibility:public"],
+)
+
+rabbitmq_home(
+ name = "broker-home",
+ plugins = ALL_PLUGINS,
+)
+
+rabbitmq_home(
+ name = "broker-for-cli-tests-home",
+ testonly = True,
+ plugins = [
+ "//deps/rabbit:bazel_erlang_lib",
+ "//deps/rabbitmq_federation:bazel_erlang_lib",
+ "//deps/rabbitmq_stomp:bazel_erlang_lib",
+ "//deps/amqp_client:bazel_erlang_lib",
+ ],
+)
+
+rabbitmq_run(
+ name = "rabbitmq-run",
+ home = ":broker-home",
+ visibility = ["//visibility:public"],
+)
+
+rabbitmq_run(
+ name = "rabbitmq-for-cli-tests-run",
+ testonly = True,
+ home = ":broker-for-cli-tests-home",
+ visibility = ["//visibility:public"],
+)
+
+# Allow us to `bazel run broker`
+# for the equivalent of `make run-broker`
+# (though it as of yet includes no plugins)
+rabbitmq_run_command(
+ name = "broker",
+ rabbitmq_run = ":rabbitmq-run",
+ subcommand = "run-broker",
+)
+
+# `bazel run rabbitmqctl`
+rabbitmqctl(
+ name = "rabbitmqctl",
+ home = ":broker-home",
+ visibility = ["//visibility:public"],
+)
+
+rabbitmqctl(
+ name = "rabbitmq-diagnostics",
+ home = ":broker-home",
+)
+
+rabbitmqctl(
+ name = "rabbitmq-plugins",
+ home = ":broker-home",
+)
+
+shell(
+ name = "repl",
+ deps = ALL_PLUGINS,
+)
+
+collect_licenses(
+ name = "licenses",
+ srcs = glob(
+ ["LICENSE*"],
+ exclude = [
+ "LICENSE.md",
+ "LICENSE.txt",
+ ],
+ ),
+ deps = ALL_PLUGINS,
+)
+
+versioned_rabbitmq_home(
+ name = "dist-home",
+ plugins = ALL_PLUGINS,
+)
+
+pkg_tar(
+ name = "license-files",
+ srcs = [
+ ":licenses",
+ "//deps/rabbit:INSTALL",
+ ],
+ visibility = ["//visibility:public"],
+)
+
+pkg_tar(
+ name = "scripts",
+ srcs = [
+ "scripts/bash_autocomplete.sh",
+ "scripts/rabbitmq-script-wrapper",
+ "scripts/rabbitmqctl-autocomplete.sh",
+ "scripts/zsh_autocomplete.sh",
+ ],
+ package_dir = "scripts",
+ visibility = ["//visibility:public"],
+)
+
+pkg_tar(
+ name = "release-notes",
+ srcs = glob([
+ "release-notes/*.md",
+ "release-notes/*.txt",
+ ]),
+ package_dir = "release-notes",
+ visibility = ["//visibility:public"],
+)
+
+pkg_tar(
+ name = "package-generic-unix",
+ srcs = [
+ ":dist-home",
+ ],
+ extension = "tar.xz",
+ package_dir = "rabbitmq_server-{}".format(APP_VERSION),
+ strip_prefix = "dist-home",
+ visibility = ["//visibility:public"],
+ deps = [
+ ":license-files",
+ ":release-notes",
+ ":scripts",
+ "//deps/rabbit:manpages-dir",
+ ],
+)
diff --git a/BUILD.inet_tcp_proxy b/BUILD.inet_tcp_proxy
new file mode 100644
index 0000000000..303e530984
--- /dev/null
+++ b/BUILD.inet_tcp_proxy
@@ -0,0 +1,8 @@
+load("@bazel-erlang//:bazel_erlang_lib.bzl", "erlang_lib")
+
+erlang_lib(
+ app_name = "inet_tcp_proxy_dist",
+ app_version = "0.1.0",
+ app_description = "Erlang distribution proxy to simulate network failures",
+ app_module = "inet_tcp_proxy_dist_app",
+)
diff --git a/BUILD.package_generic_unix b/BUILD.package_generic_unix
new file mode 100644
index 0000000000..4cc8056e7a
--- /dev/null
+++ b/BUILD.package_generic_unix
@@ -0,0 +1,46 @@
+load("@//:rabbitmq_package_generic_unix.bzl", "rabbitmq_package_generic_unix")
+load("@//:rabbitmq_run.bzl", "rabbitmq_run", "rabbitmq_run_command")
+load("@//:rabbitmqctl.bzl", "rabbitmqctl")
+
+rabbitmq_package_generic_unix(
+ name = "broker-home",
+ additional_files =
+ glob(
+ [
+ "sbin/*",
+ "escript/*",
+ ],
+ exclude = ["sbin/rabbitmqctl"],
+ ) + [
+ "//plugins:standard_plugins",
+ "//plugins:inet_tcp_proxy_ez",
+ ],
+ rabbitmqctl = "sbin/rabbitmqctl",
+)
+
+rabbitmq_run(
+ name = "rabbitmq-run",
+ home = ":broker-home",
+ visibility = ["//visibility:public"],
+)
+
+rabbitmq_run_command(
+ name = "broker",
+ rabbitmq_run = ":rabbitmq-run",
+ subcommand = "run-broker",
+)
+
+rabbitmqctl(
+ name = "rabbitmqctl",
+ home = ":broker-home",
+)
+
+rabbitmqctl(
+ name = "rabbitmq-diagnostics",
+ home = ":broker-home",
+)
+
+rabbitmqctl(
+ name = "rabbitmq-plugins",
+ home = ":broker-home",
+)
diff --git a/BUILD.ranch b/BUILD.ranch
new file mode 100644
index 0000000000..7a89e08841
--- /dev/null
+++ b/BUILD.ranch
@@ -0,0 +1,46 @@
+load("@bazel-erlang//:bazel_erlang_lib.bzl", "app_file", "bazel_erlang_lib", "erlc",
+"DEFAULT_ERLC_OPTS")
+
+FIRST_SRCS = [
+ "src/ranch_transport.erl",
+]
+
+erlc(
+ name = "first_beam_files",
+ hdrs = glob(["include/**/*.hrl", "src/**/*.hrl"]),
+ srcs = glob(FIRST_SRCS),
+ erlc_opts = DEFAULT_ERLC_OPTS,
+ dest = "ebin",
+)
+
+erlc(
+ name = "beam_files",
+ hdrs = glob(["include/**/*.hrl", "src/**/*.hrl"]),
+ srcs = glob(["src/**/*.erl"], exclude = FIRST_SRCS),
+ beam = [":first_beam_files"],
+ erlc_opts = DEFAULT_ERLC_OPTS,
+ dest = "ebin",
+)
+
+genrule(
+ name = "appup",
+ srcs = ["src/ranch.appup"],
+ outs = ["ebin/ranch.appup"],
+ cmd = "cp $< $@",
+)
+
+app_file(
+ name = "app_file",
+ app_name = "ranch",
+ app_version = "2.1.0",
+ modules = [":first_beam_files", ":beam_files"],
+)
+
+bazel_erlang_lib(
+ name = "bazel_erlang_lib",
+ app_name = "ranch",
+ app = ":app_file",
+ hdrs = glob(["include/**/*.hrl"]),
+ beam = [":first_beam_files", ":beam_files", ":appup"],
+ visibility = ["//visibility:public"],
+)
diff --git a/BUILD.trust_store_http b/BUILD.trust_store_http
new file mode 100644
index 0000000000..9fceaaff5c
--- /dev/null
+++ b/BUILD.trust_store_http
@@ -0,0 +1,15 @@
+load("@bazel-erlang//:bazel_erlang_lib.bzl", "erlang_lib")
+
+erlang_lib(
+ app_name = "trust_store_http",
+ app_description = "Trust store HTTP server",
+ app_module = "trust_store_http_app",
+ app_version = "1.0.0",
+ extra_apps = [
+ "ssl",
+ ],
+ deps = [
+ "@cowboy//:bazel_erlang_lib",
+ "@jsx//:bazel_erlang_lib",
+ ],
+)
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 23a92fef9c..44de22c86c 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -8,31 +8,52 @@ Pull requests is the primary place of discussing code changes.
The process is fairly standard:
* Fork the repository or repositories you plan on contributing to
- * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
- * `cd umbrella`, `make co`
+ * Run `make`
* Create a branch with a descriptive name in the relevant repositories
* Make your changes, run tests, commit with a [descriptive message](https://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
* Submit pull requests with an explanation what has been changed and **why**
- * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Submit a filled out and signed [Contributor Agreement](https://cla.pivotal.io/) if needed (see below)
* Be patient. We will get to your pull request eventually
If what you are going to work on is a substantial change, please first ask the core team
of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+## Running Tests
+
+To run all tests in a particular project:
+
+```
+cd deps/rabbit
+make tests
+```
+
+To run a specific suite:
+
+```
+cd deps/rabbit
+make ct-cluster_rename
+```
+
+You can also run specific test groups and tests:
+
+```
+cd deps/rabbit
+make ct-cluster_rename t=cluster_size_3:partial_one_by_one
+```
+
+Test output is in the `logs/` subdirectory.
## Code of Conduct
See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
-
## Contributor Agreement
If you want to contribute a non-trivial change, please submit a signed copy of our
-[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+[Contributor Agreement](https://cla.pivotal.io/) around the time
you submit your pull request. This will make it much easier (in some cases, possible)
for the RabbitMQ team at Pivotal to merge your contribution.
-
## Where to Ask Questions
If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/Makefile b/Makefile
index d1ac26543d..1629fd31a8 100644
--- a/Makefile
+++ b/Makefile
@@ -13,7 +13,15 @@ PACKAGES_DIR ?= $(abspath PACKAGES)
# List of plugins to include in a RabbitMQ release.
include plugins.mk
-DEPS = rabbit_common rabbit $(PLUGINS)
+# An additional list of plugins to include in a RabbitMQ release,
+# on top of the standard plugins. For example, looking_glass.
+#
+# Note: When including NIFs in a release make sure to build
+# them on the appropriate platform for the target environment.
+# For example build looking_glass on Linux when targeting Docker.
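+#
+# Usage sketch (looking_glass is illustrative; any plugin name works):
+#   make package-generic-unix ADDITIONAL_PLUGINS="looking_glass"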
+ADDITIONAL_PLUGINS ?=
+
+DEPS = rabbit_common rabbit $(PLUGINS) $(ADDITIONAL_PLUGINS)
DEP_PLUGINS = rabbit_common/mk/rabbitmq-dist.mk \
rabbit_common/mk/rabbitmq-run.mk \
@@ -33,8 +41,8 @@ endif
include rabbitmq-components.mk
include erlang.mk
-include mk/stats.mk
include mk/github-actions.mk
+include mk/bazel.mk
include mk/topic-branches.mk
# --------------------------------------------------------------------
@@ -205,8 +213,8 @@ $(SOURCE_DIST): $(ERLANG_MK_RECURSIVE_DEPS_LIST)
mix_exs=$@/deps/$$(basename $$dep)/mix.exs; \
if test -f $$mix_exs; then \
(cd $$(dirname "$$mix_exs") && \
- env DEPS_DIR=$@/deps HOME=$@/deps MIX_ENV=prod FILL_HEX_CACHE=yes mix local.hex --force && \
- env DEPS_DIR=$@/deps HOME=$@/deps MIX_ENV=prod FILL_HEX_CACHE=yes mix deps.get --only prod && \
+ (test -d $@/deps/.hex || env DEPS_DIR=$@/deps MIX_HOME=$@/deps/.mix HEX_HOME=$@/deps/.hex MIX_ENV=prod FILL_HEX_CACHE=yes mix local.hex --force) && \
+ env DEPS_DIR=$@/deps MIX_HOME=$@/deps/.mix HEX_HOME=$@/deps/.hex MIX_ENV=prod FILL_HEX_CACHE=yes mix deps.get --only prod && \
cp $(CURDIR)/mk/rabbitmq-mix.mk . && \
rm -rf _build deps); \
fi; \
@@ -248,7 +256,7 @@ $(SOURCE_DIST): $(ERLANG_MK_RECURSIVE_DEPS_LIST)
#
# The ETS file must be recreated before compiling RabbitMQ. See the
# `restore-hex-cache-ets-file` Make target.
- $(verbose) $(call erlang,$(call dump_hex_cache_to_erl_term,$@,$@.git-time.txt))
+ $(verbose) $(call erlang,$(call dump_hex_cache_to_erl_term,$(call core_native_path,$@),$(call core_native_path,$@.git-time.txt)))
# Fix file timestamps to have reproducible source archives.
$(verbose) find $@ -print0 | xargs -0 touch -t "$$(cat "$@.git-time.txt")"
$(verbose) rm "$@.git-times.txt" "$@.git-time.txt"
@@ -339,6 +347,9 @@ clean-unpacked-source-dist:
fi; \
done
+clean-deps:
+ git clean -xfffd deps
+
# --------------------------------------------------------------------
# Packaging.
# --------------------------------------------------------------------
@@ -353,15 +364,28 @@ clean-unpacked-source-dist:
# archive.
PACKAGES_SOURCE_DIST_FILE ?= $(firstword $(SOURCE_DIST_FILES))
-packages package-deb package-rpm \
+RABBITMQ_PACKAGING_TARGETS = package-deb package-rpm \
package-rpm-redhat package-rpm-fedora package-rpm-rhel6 package-rpm-rhel7 \
package-rpm-rhel8 package-rpm-suse package-rpm-opensuse package-rpm-sles11 \
-package-windows \
+package-windows
+
+ifneq ($(filter $(RABBITMQ_PACKAGING_TARGETS),$(MAKECMDGOALS)),)
+ifeq ($(RABBITMQ_PACKAGING_REPO),)
+$(error Cannot find rabbitmq-packaging repository dir; please clone from rabbitmq/rabbitmq-packaging and specify RABBITMQ_PACKAGING_REPO)
+endif
+endif
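+
+# Usage sketch (the path is a placeholder for your local clone):
+#   make package-deb RABBITMQ_PACKAGING_REPO=/path/to/rabbitmq-packaging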
+
+$(RABBITMQ_PACKAGING_TARGETS): $(PACKAGES_SOURCE_DIST_FILE)
+ $(verbose) $(MAKE) -C $(RABBITMQ_PACKAGING_REPO) $@ \
+ SOURCE_DIST_FILE=$(abspath $(PACKAGES_SOURCE_DIST_FILE))
+
package-generic-unix \
docker-image: $(PACKAGES_SOURCE_DIST_FILE)
$(verbose) $(MAKE) -C packaging $@ \
SOURCE_DIST_FILE=$(abspath $(PACKAGES_SOURCE_DIST_FILE))
+packages: package-deb package-rpm package-windows package-generic-unix
+
# --------------------------------------------------------------------
# Installation.
# --------------------------------------------------------------------
@@ -394,7 +418,9 @@ SCRIPTS = rabbitmq-defaults \
rabbitmq-plugins \
rabbitmq-diagnostics \
rabbitmq-queues \
- rabbitmq-upgrade
+ rabbitmq-upgrade \
+ rabbitmq-streams \
+ rabbitmq-tanzu
AUTOCOMPLETE_SCRIPTS = bash_autocomplete.sh zsh_autocomplete.sh
@@ -407,6 +433,8 @@ WINDOWS_SCRIPTS = rabbitmq-defaults.bat \
rabbitmq-server.bat \
rabbitmq-service.bat \
rabbitmq-upgrade.bat \
+ rabbitmq-streams.bat \
+ rabbitmq-tanzu.bat \
rabbitmqctl.bat
UNIX_TO_DOS ?= todos
@@ -475,8 +503,6 @@ install-windows-erlapp: dist
$(verbose) mkdir -p $(DESTDIR)$(WINDOWS_PREFIX)
$(inst_verbose) cp -r \
LICENSE* \
- $(DEPS_DIR)/rabbit/ebin \
- $(DEPS_DIR)/rabbit/priv \
$(DEPS_DIR)/rabbit/INSTALL \
$(DIST_DIR) \
$(DESTDIR)$(WINDOWS_PREFIX)
@@ -484,16 +510,6 @@ install-windows-erlapp: dist
> $(DESTDIR)$(WINDOWS_PREFIX)/$(notdir $(DIST_DIR))/README.txt
$(verbose) $(UNIX_TO_DOS) $(DESTDIR)$(WINDOWS_PREFIX)/plugins/README.txt
- @# FIXME: Why do we copy headers?
- $(verbose) cp -r \
- $(DEPS_DIR)/rabbit/include \
- $(DESTDIR)$(WINDOWS_PREFIX)
- @# rabbitmq-common provides headers too: copy them to
- @# rabbitmq_server/include.
- $(verbose) cp -r \
- $(DEPS_DIR)/rabbit_common/include \
- $(DESTDIR)$(WINDOWS_PREFIX)
-
install-windows-escripts:
$(verbose) $(MAKE) -C $(DEPS_DIR)/rabbitmq_cli install \
PREFIX="$(WINDOWS_PREFIX)/$(CLI_ESCRIPTS_DIR)"
diff --git a/README.md b/README.md
index 28bb2699fd..b4bdb265a4 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,4 @@
-[![OTP v22.3](https://img.shields.io/github/workflow/status/rabbitmq/rabbitmq-server/Test%20-%20Erlang%2022.3/master?label=Erlang%2022.3)](https://github.com/rabbitmq/rabbitmq-server/actions?query=workflow%3A%22Test+-+Erlang+22.3%22+branch%3A%22master%22)
-[![OTP v23](https://img.shields.io/github/workflow/status/rabbitmq/rabbitmq-server/Test%20-%20Erlang%2023.1/master?label=Erlang%2023.1)](https://github.com/rabbitmq/rabbitmq-server/actions?query=workflow%3A%22Test+-+Erlang+23.1%22+branch%3Amaster)
+[![Test](https://github.com/rabbitmq/rabbitmq-server/actions/workflows/test.yaml/badge.svg)](https://github.com/rabbitmq/rabbitmq-server/actions/workflows/test.yaml)
# RabbitMQ Server
@@ -25,19 +24,28 @@
* [RabbitMQ tutorials](https://rabbitmq.com/getstarted.html)
* [All documentation guides](https://rabbitmq.com/documentation.html)
+
+Some key doc guides include:
+
* [CLI tools guide](https://rabbitmq.com/cli.html)
+ * [Clustering](https://www.rabbitmq.com/clustering.html) and [Cluster Formation](https://www.rabbitmq.com/cluster-formation.html) guides
* [Configuration guide](https://rabbitmq.com/configure.html)
* [Client libraries and tools](https://rabbitmq.com/devtools.html)
- * [Monitoring guide](https://rabbitmq.com/monitoring.html)
+ * [Monitoring](https://rabbitmq.com/monitoring.html) and [Prometheus/Grafana](https://www.rabbitmq.com/prometheus.html) guides
* [Production checklist](https://rabbitmq.com/production-checklist.html)
+ * [Quorum queues](https://rabbitmq.com/quorum-queues.html): a replicated, data safety- and consistency-oriented queue type
* [Runnable tutorials](https://github.com/rabbitmq/rabbitmq-tutorials/)
* [Documentation source](https://github.com/rabbitmq/rabbitmq-website/)
-## Getting Help
+## Commercial Support
+
+* [Commercial support](https://rabbitmq.com/services.html) from [VMware](https://vmware.com) for open source RabbitMQ
+
+## Getting Help from the Community
+ * [GitHub Discussions](https://github.com/rabbitmq/rabbitmq-server/discussions/)
* [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users)
- * [Commercial support](https://rabbitmq.com/services.html) from [Pivotal](https://pivotal.io) for open source RabbitMQ
* [Community Slack](https://rabbitmq-slack.herokuapp.com/)
* `#rabbitmq` on Freenode
@@ -62,4 +70,4 @@ RabbitMQ server is [licensed under the MPL 2.0](LICENSE-MPL-RabbitMQ).
## Copyright
-(c) 2007-2020 VMware, Inc. or its affiliates.
+(c) 2007-2021 VMware, Inc. or its affiliates.
diff --git a/WORKSPACE.bazel b/WORKSPACE.bazel
new file mode 100644
index 0000000000..00e4d4bebd
--- /dev/null
+++ b/WORKSPACE.bazel
@@ -0,0 +1,107 @@
+load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
+load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository")
+
+http_archive(
+ name = "rules_pkg",
+ sha256 = "038f1caa773a7e35b3663865ffb003169c6a71dc995e39bf4815792f385d837d",
+ urls = [
+ "https://mirror.bazel.build/github.com/bazelbuild/rules_pkg/releases/download/0.4.0/rules_pkg-0.4.0.tar.gz",
+ "https://github.com/bazelbuild/rules_pkg/releases/download/0.4.0/rules_pkg-0.4.0.tar.gz",
+ ],
+)
+
+load("@rules_pkg//:deps.bzl", "rules_pkg_dependencies")
+
+rules_pkg_dependencies()
+
+http_archive(
+ name = "io_buildbuddy_buildbuddy_toolchain",
+ sha256 = "48546946879b1fd2dcba327ba15776c822f2ce9a9ef1077be9bf3ecadcc1564a",
+ strip_prefix = "buildbuddy-toolchain-b2f5e7e3b126c6d7cf243227147478c0959bfc95",
+ urls = ["https://github.com/buildbuddy-io/buildbuddy-toolchain/archive/b2f5e7e3b126c6d7cf243227147478c0959bfc95.zip"],
+)
+
+load("@io_buildbuddy_buildbuddy_toolchain//:deps.bzl", "buildbuddy_deps")
+
+buildbuddy_deps()
+
+load("@io_buildbuddy_buildbuddy_toolchain//:rules.bzl", "buildbuddy")
+
+buildbuddy(
+ name = "buildbuddy_toolchain",
+ llvm = True,
+)
+
+git_repository(
+ name = "rbe_23",
+ commit = "b21c066e426de48e526cc0f8c5158b7024d04e85",
+ remote = "https://github.com/rabbitmq/rbe-erlang-platform.git",
+)
+
+git_repository(
+ name = "rbe_24",
+ commit = "c8cbf65e2facbe464ebbcee7b6cf6f7a2d422ded",
+ remote = "https://github.com/rabbitmq/rbe-erlang-platform.git",
+)
+
+http_archive(
+ name = "rules_pkg",
+ sha256 = "038f1caa773a7e35b3663865ffb003169c6a71dc995e39bf4815792f385d837d",
+ urls = [
+ "https://mirror.bazel.build/github.com/bazelbuild/rules_pkg/releases/download/0.4.0/rules_pkg-0.4.0.tar.gz",
+ "https://github.com/bazelbuild/rules_pkg/releases/download/0.4.0/rules_pkg-0.4.0.tar.gz",
+ ],
+)
+
+load("@rules_pkg//:deps.bzl", "rules_pkg_dependencies")
+
+rules_pkg_dependencies()
+
+git_repository(
+ name = "bazel-erlang",
+ commit = "050faedb2a3422a60d6b98678c714ed1a61ec71d",
+ remote = "https://github.com/rabbitmq/bazel-erlang.git",
+)
+
+load("@bazel-erlang//:bazel_erlang.bzl", "bazel_erlang_deps")
+
+bazel_erlang_deps()
+
+load("//:workspace_helpers.bzl", "rabbitmq_external_deps")
+
+rabbitmq_external_deps(rabbitmq_workspace = "@")
+
+load("//deps/amqp10_client:activemq.bzl", "activemq_archive")
+
+activemq_archive()
+
+ADD_PLUGINS_DIR_BUILD_FILE = """set -euo pipefail
+
+cat << EOF > plugins/BUILD.bazel
+load("@rules_pkg//:pkg.bzl", "pkg_zip")
+
+pkg_zip(
+ name = "inet_tcp_proxy_ez",
+ package_dir = "inet_tcp_proxy/ebin",
+ srcs = [
+ "@inet_tcp_proxy//:bazel_erlang_lib",
+ ],
+ package_file_name = "inet_tcp_proxy.ez",
+ visibility = ["//visibility:public"],
+)
+
+filegroup(
+ name = "standard_plugins",
+ srcs = glob(["*.ez"]),
+ visibility = ["//visibility:public"],
+)
+EOF
+"""
+
+http_archive(
+ name = "rabbitmq-server-generic-unix-3.8.22",
+ build_file = "@//:BUILD.package_generic_unix",
+ patch_cmds = [ADD_PLUGINS_DIR_BUILD_FILE],
+ strip_prefix = "rabbitmq_server-3.8.22",
+ urls = ["https://github.com/rabbitmq/rabbitmq-server/releases/download/v3.8.22/rabbitmq-server-generic-unix-3.8.22.tar.xz"],
+)
diff --git a/ci/dockerfiles/22.3/erlang_elixir b/ci/dockerfiles/22.3/erlang_elixir
deleted file mode 100644
index 524d9af685..0000000000
--- a/ci/dockerfiles/22.3/erlang_elixir
+++ /dev/null
@@ -1,18 +0,0 @@
-FROM erlang:22.3
-
-ENV ERLANG_VERSION 22.3
-
-# elixir expects utf8.
-ENV ELIXIR_VERSION="v1.10.4" \
- LANG=C.UTF-8
-
-RUN set -xe \
- && ELIXIR_DOWNLOAD_URL="https://github.com/elixir-lang/elixir/archive/${ELIXIR_VERSION}.tar.gz" \
- && ELIXIR_DOWNLOAD_SHA256="8518c78f43fe36315dbe0d623823c2c1b7a025c114f3f4adbb48e04ef63f1d9f" \
- && curl -fSL -o elixir-src.tar.gz $ELIXIR_DOWNLOAD_URL \
- && echo "$ELIXIR_DOWNLOAD_SHA256 elixir-src.tar.gz" | sha256sum -c - \
- && mkdir -p /usr/local/src/elixir \
- && tar -xzC /usr/local/src/elixir --strip-components=1 -f elixir-src.tar.gz \
- && rm elixir-src.tar.gz \
- && cd /usr/local/src/elixir \
- && make install clean
diff --git a/ci/dockerfiles/23.1/erlang_elixir b/ci/dockerfiles/23.1/erlang_elixir
deleted file mode 100644
index 8fbefe953b..0000000000
--- a/ci/dockerfiles/23.1/erlang_elixir
+++ /dev/null
@@ -1,18 +0,0 @@
-FROM erlang:23.1
-
-ENV ERLANG_VERSION 23.1
-
-# elixir expects utf8.
-ENV ELIXIR_VERSION="v1.10.4" \
- LANG=C.UTF-8
-
-RUN set -xe \
- && ELIXIR_DOWNLOAD_URL="https://github.com/elixir-lang/elixir/archive/${ELIXIR_VERSION}.tar.gz" \
- && ELIXIR_DOWNLOAD_SHA256="8518c78f43fe36315dbe0d623823c2c1b7a025c114f3f4adbb48e04ef63f1d9f" \
- && curl -fSL -o elixir-src.tar.gz $ELIXIR_DOWNLOAD_URL \
- && echo "$ELIXIR_DOWNLOAD_SHA256 elixir-src.tar.gz" | sha256sum -c - \
- && mkdir -p /usr/local/src/elixir \
- && tar -xzC /usr/local/src/elixir --strip-components=1 -f elixir-src.tar.gz \
- && rm elixir-src.tar.gz \
- && cd /usr/local/src/elixir \
- && make install clean
diff --git a/ci/dockerfiles/ci b/ci/dockerfiles/ci
deleted file mode 100644
index fda59d27d8..0000000000
--- a/ci/dockerfiles/ci
+++ /dev/null
@@ -1,42 +0,0 @@
-ARG ERLANG_VERSION
-
-FROM eu.gcr.io/cf-rabbitmq-core/ci-base:${ERLANG_VERSION}
-
-ARG GITHUB_RUN_ID
-ARG BUILDEVENT_APIKEY
-
-ARG GITHUB_SHA
-
-ARG base_rmq_ref
-ARG current_rmq_ref
-
-ARG RABBITMQ_VERSION
-
-ENV GITHUB_RUN_ID $GITHUB_RUN_ID
-ENV GITHUB_SHA $GITHUB_SHA
-
-ENV base_rmq_ref $base_rmq_ref
-ENV current_rmq_ref $current_rmq_ref
-
-ENV RABBITMQ_VERSION $RABBITMQ_VERSION
-
-ENV BUILDEVENT_CIPROVIDER GitHubActions
-
-WORKDIR /workspace/rabbitmq
-
-COPY . .
-
-ENV UNPRIVILEGED_USER=rabbitmq
-RUN useradd \
- --create-home \
- --comment 'CI unprivileged user' \
- ${UNPRIVILEGED_USER}
-
-RUN chown --recursive ${UNPRIVILEGED_USER} /workspace
-
-USER ${UNPRIVILEGED_USER}
-
-RUN BUILDEVENT_APIKEY=${BUILDEVENT_APIKEY} \
- buildevents cmd ${GITHUB_RUN_ID} ${GITHUB_RUN_ID}-prepare deps -- \
- make deps test-deps \
- RABBITMQ_VERSION=${RABBITMQ_VERSION}
diff --git a/ci/dockerfiles/ci-base b/ci/dockerfiles/ci-base
deleted file mode 100644
index 0eb4624030..0000000000
--- a/ci/dockerfiles/ci-base
+++ /dev/null
@@ -1,20 +0,0 @@
-ARG ERLANG_VERSION
-
-FROM eu.gcr.io/cf-rabbitmq-core/erlang_elixir:${ERLANG_VERSION}
-
-RUN apt-get update && apt-get install -y rsync zip
-
-RUN curl -L -o buildevents https://github.com/honeycombio/buildevents/releases/latest/download/buildevents-linux-amd64
-RUN chmod 755 buildevents
-RUN mv buildevents /usr/bin/
-
-WORKDIR /workspace
-
-COPY ci/scripts/fetch_secondary_umbrellas.sh .
-
-# If we clone the monorepo at a ref when the monorepo was still rabbitmq-server,
-# then we just get rabbitmq-server (not the monorepo as it would have looked, had
-# it existed at that time). So for the time being, secondary umbrellas will derive
-# from rabbitmq-public-umbrella (as they always have)
-ARG SECONDARY_UMBRELLA_GITREFS
-RUN bash fetch_secondary_umbrellas.sh ${SECONDARY_UMBRELLA_GITREFS}
\ No newline at end of file
diff --git a/ci/dockerfiles/ci-dep b/ci/dockerfiles/ci-dep
deleted file mode 100644
index 602f7e3cdd..0000000000
--- a/ci/dockerfiles/ci-dep
+++ /dev/null
@@ -1,19 +0,0 @@
-ARG IMAGE_TAG
-
-FROM eu.gcr.io/cf-rabbitmq-core/ci:${IMAGE_TAG}
-
-ARG BUILDEVENT_APIKEY
-ARG project
-
-WORKDIR /workspace/rabbitmq/deps/${project}
-
-RUN BUILDEVENT_APIKEY=${BUILDEVENT_APIKEY} \
- buildevents cmd ${GITHUB_RUN_ID} ${GITHUB_RUN_ID}-${project} test-build -- \
- make test-build
-
-RUN BUILDEVENT_APIKEY=${BUILDEVENT_APIKEY} \
- buildevents cmd ${GITHUB_RUN_ID} ${GITHUB_RUN_ID}-${project} tests -- \
- make eunit \
- FULL= \
- FAIL_FAST=1 \
- SKIP_AS_ERROR=1
diff --git a/ci/scripts/collect.sh b/ci/scripts/collect.sh
deleted file mode 100755
index 8ebbe0e8b4..0000000000
--- a/ci/scripts/collect.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/bash
-
-set -euo pipefail
-
-echo "Recording buildevents step finish for ${project} started at ${STEP_START}..."
-buildevents step ${GITHUB_RUN_ID} ${GITHUB_RUN_ID}-${project} ${STEP_START} ${project}
-echo "done."
diff --git a/ci/scripts/ct-suite.sh b/ci/scripts/ct-suite.sh
deleted file mode 100755
index a8e47c995c..0000000000
--- a/ci/scripts/ct-suite.sh
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/bin/bash
-
-set -euo pipefail
-
-cd /workspace/rabbitmq/deps/$project
-
-! test -d ebin || touch ebin/*
-
-trap 'catch $?' EXIT
-
-catch() {
- if [ "$1" != "0" ]; then
- make ct-logs-archive && mv *-ct-logs-*.tar.xz /workspace/ct-logs/
- fi
-}
-
-CMD=ct-${CT_SUITE}
-SECONDARY_UMBRELLA_ARGS=""
-if [[ "${SECONDARY_UMBRELLA_VERSION:-}" != "" ]]; then
- CMD=ct-${CT_SUITE}-mixed-${SECONDARY_UMBRELLA_VERSION}
- SECONDARY_UMBRELLA_ARGS="SECONDARY_UMBRELLA=/workspace/rabbitmq-${SECONDARY_UMBRELLA_VERSION} RABBITMQ_FEATURE_FLAGS="
-fi
-
-buildevents cmd ${GITHUB_RUN_ID} ${GITHUB_RUN_ID}-${project} ${CMD} -- \
- make ct-${CT_SUITE} \
- FULL= \
- FAIL_FAST=1 \
- SKIP_AS_ERROR=1 ${SECONDARY_UMBRELLA_ARGS}
diff --git a/ci/scripts/dialyze.sh b/ci/scripts/dialyze.sh
deleted file mode 100755
index fbb8e6dc54..0000000000
--- a/ci/scripts/dialyze.sh
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/bin/bash
-
-set -euo pipefail
-
-cd /workspace/rabbitmq/deps/$project
-
-buildevents cmd ${GITHUB_RUN_ID} ${GITHUB_RUN_ID}-dialyze ${project} -- \
- make dialyze
diff --git a/ci/scripts/fetch_secondary_umbrellas.sh b/ci/scripts/fetch_secondary_umbrellas.sh
deleted file mode 100755
index 466ef4a765..0000000000
--- a/ci/scripts/fetch_secondary_umbrellas.sh
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/bin/bash
-
-set -euo pipefail
-
-refs="$@"
-
-for version in ${refs}; do
- umbrella="umbrellas/$version"
- if ! test -d "$umbrella" ||
- ! make -C "$umbrella/deps/rabbit" test-dist; then
- rm -rf "$umbrella"
-
- # Fetch the master Umbrella; the final umbrellas are created from
- # the master copy.
- if ! test -d umbrellas/master; then
- git config --global advice.detachedHead false
- git clone \
- https://github.com/rabbitmq/rabbitmq-public-umbrella.git \
- umbrellas/master
- make -C umbrellas/master co # To get RabbitMQ components.
- fi
-
- # We copy the master Umbrella and checkout the appropriate tag.
- cp -a umbrellas/master "$umbrella"
- git -C "$umbrella" checkout "master"
- make -C "$umbrella" up BRANCH="$version"
- # To remove third-party deps which were checked out when the
- # projects were on the `master` branch. Thus, possibly not the
- # version pinning we expect. We update the Umbrella one last time
- # to fetch the correct third-party deps.
- make -C "$umbrella" clean-3rd-party-repos
- make -C "$umbrella" up
- make -C "$umbrella/deps/rabbit" test-dist
- rm -rf "$umbrella"/deps/rabbitmq_website
- rm -rf "$umbrella"/deps/rabbitmq_prometheus/docker
- rm -rf "$umbrella"/deps/*/{.git,test} "$umbrella"/.git
- fi
-done
-
-for version in ${refs}; do
- umbrella="umbrellas/$version"
- mv ${umbrella} rabbitmq-${version}
-done
-
-rm -fr umbrellas
diff --git a/ci/scripts/finish.sh b/ci/scripts/finish.sh
deleted file mode 100755
index c6047e1681..0000000000
--- a/ci/scripts/finish.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/bash
-
-set -euo pipefail
-
-buildevents build ${GITHUB_RUN_ID} ${BUILD_START} ${BUILD_RESULT}
diff --git a/ci/scripts/package_generic_unix.sh b/ci/scripts/package_generic_unix.sh
deleted file mode 100755
index 820821ab30..0000000000
--- a/ci/scripts/package_generic_unix.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/bash
-
-set -euo pipefail
-
-cd /workspace/rabbitmq
-
-trap 'catch $?' EXIT
-
-catch() {
- buildevents step ${GITHUB_RUN_ID} ${GITHUB_RUN_ID}-packaging ${STEP_START} packaging
-}
-
-buildevents cmd ${GITHUB_RUN_ID} ${GITHUB_RUN_ID}-packaging package-generic-unix -- \
- make package-generic-unix
diff --git a/ci/scripts/rabbitmq_cli.sh b/ci/scripts/rabbitmq_cli.sh
deleted file mode 100755
index d21608df22..0000000000
--- a/ci/scripts/rabbitmq_cli.sh
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/bin/bash
-
-set -euo pipefail
-
-cd /workspace/rabbitmq/deps/$project
-
-trap 'catch $?' EXIT
-
-SPAN_ID=${GITHUB_RUN_ID}-${project}
-
-catch() {
- buildevents cmd ${GITHUB_RUN_ID} ${SPAN_ID} stop-node -- \
- make stop-node -C ../.. \
- DEPS_DIR=/workspace/rabbitmq/deps \
- PLUGINS='rabbitmq_federation rabbitmq_stomp'
-
- if [ "$1" != "0" ]; then
- tar -c -f - /tmp/rabbitmq-test-instances/*/log | \
- xz > /workspace/broker-logs/broker-logs.tar.xz
- fi
-
- buildevents step ${GITHUB_RUN_ID} ${SPAN_ID} ${STEP_START} ${project}
-}
-
-buildevents cmd ${GITHUB_RUN_ID} ${SPAN_ID} make -- \
- make DEPS_DIR=/workspace/rabbitmq/deps
-
-buildevents cmd ${GITHUB_RUN_ID} ${SPAN_ID} start-background-broker -- \
- make start-background-broker \
- -C ../.. \
- DEPS_DIR=/workspace/rabbitmq/deps \
- PLUGINS='rabbitmq_federation rabbitmq_stomp'
-
-buildevents cmd ${GITHUB_RUN_ID} ${SPAN_ID} rebar -- \
- mix local.rebar --force
-
-# due to https://github.com/elixir-lang/elixir/issues/7699 we
-# "run" the tests, but skip them all, in order to trigger
-# compilation of all *_test.exs files before we actually run themq
-buildevents cmd ${GITHUB_RUN_ID} ${SPAN_ID} compile-tests -- \
- make tests \
- MIX_TEST_OPTS="--exclude test" \
- DEPS_DIR=/workspace/rabbitmq/deps
-
-# rabbitmq-diagnostics erlang-cookie-sources reads USER from then env
-export USER=$(whoami)
-buildevents cmd ${GITHUB_RUN_ID} ${SPAN_ID} tests -- \
- make tests \
- DEPS_DIR=/workspace/rabbitmq/deps
diff --git a/ci/scripts/tests.sh b/ci/scripts/tests.sh
deleted file mode 100755
index abf4bc865d..0000000000
--- a/ci/scripts/tests.sh
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/bin/bash
-
-set -euo pipefail
-
-cd /workspace/rabbitmq/deps/$project
-
-trap 'catch $?' EXIT
-
-SPAN_ID=${GITHUB_RUN_ID}-${project}
-
-catch() {
- if [ "$1" != "0" ]; then
- make ct-logs-archive && mv *-ct-logs-*.tar.xz /workspace/ct-logs/
- fi
-
- buildevents step ${GITHUB_RUN_ID} ${SPAN_ID} ${STEP_START} ${project}
-}
-
-buildevents cmd ${GITHUB_RUN_ID} ${SPAN_ID} test-build -- \
- make test-build
-
-buildevents cmd ${GITHUB_RUN_ID} ${SPAN_ID} tests -- \
- make tests \
- FULL= \
- FAIL_FAST=1 \
- SKIP_AS_ERROR=1
diff --git a/ci/scripts/validate-workflow.sh b/ci/scripts/validate-workflow.sh
deleted file mode 100755
index 3614953ee3..0000000000
--- a/ci/scripts/validate-workflow.sh
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/bin/bash
-
-set -euo pipefail
-
-cd deps/${project}
-
-trap 'catch $?' EXIT
-
-catch() {
- rm expected_suites.txt actual_suites.txt
-}
-
-touch expected_suites.txt
-for arg in "$@"; do
- echo "test/${arg}_SUITE.erl" >> expected_suites.txt
-done
-sort -o expected_suites.txt expected_suites.txt
-
-touch actual_suites.txt
-for f in test/*_SUITE.erl; do
- echo "$f" >> actual_suites.txt
-done
-sort -o actual_suites.txt actual_suites.txt
-
-set -x
-diff actual_suites.txt expected_suites.txt
diff --git a/ci/scripts/xref.sh b/ci/scripts/xref.sh
deleted file mode 100755
index 5e37ca833d..0000000000
--- a/ci/scripts/xref.sh
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/bin/bash
-
-set -euo pipefail
-
-cd /workspace/rabbitmq/deps/$project
-
-buildevents cmd ${GITHUB_RUN_ID} ${GITHUB_RUN_ID}-xref ${project} -- \
- make xref
diff --git a/deps/amqp10_client/BUILD.bazel b/deps/amqp10_client/BUILD.bazel
new file mode 100644
index 0000000000..609e5da555
--- /dev/null
+++ b/deps/amqp10_client/BUILD.bazel
@@ -0,0 +1,102 @@
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze", "plt")
+load("//:rabbitmq_home.bzl", "rabbitmq_home")
+load("//:rabbitmq_run.bzl", "rabbitmq_run")
+load(
+ "//:rabbitmq.bzl",
+ "assert_suites",
+ "rabbitmq_integration_suite",
+ "rabbitmq_lib",
+ "rabbitmq_suite",
+)
+
+APP_NAME = "amqp10_client"
+
+APP_DESCRIPTION = "AMQP 1.0 client from the RabbitMQ Project"
+
+APP_MODULE = "amqp10_client_app"
+
+EXTRA_APPS = [
+ "ssl",
+ "inets",
+ "crypto",
+]
+
+BUILD_DEPS = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+]
+
+DEPS = [
+ "//deps/amqp10_common:bazel_erlang_lib",
+]
+
+rabbitmq_lib(
+ app_description = APP_DESCRIPTION,
+ app_module = APP_MODULE,
+ app_name = APP_NAME,
+ build_deps = BUILD_DEPS,
+ extra_apps = EXTRA_APPS,
+ deps = DEPS,
+)
+
+xref(tags = ["xref"])
+
+plt(
+ name = "base_plt",
+ apps = EXTRA_APPS,
+ plt = "//:base_plt",
+)
+
+dialyze(
+ plt = ":base_plt",
+ tags = ["dialyze"],
+)
+
+rabbitmq_home(
+ name = "broker-for-tests-home",
+ plugins = [
+ "//deps/rabbit:bazel_erlang_lib",
+ "//deps/rabbitmq_amqp1_0:bazel_erlang_lib",
+ ],
+)
+
+rabbitmq_run(
+ name = "rabbitmq-for-tests-run",
+ home = ":broker-for-tests-home",
+)
+
+PACKAGE = "deps/amqp10_client"
+
+suites = [
+ rabbitmq_suite(
+ name = "msg_SUITE",
+ deps = DEPS,
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "system_SUITE",
+ size = "medium",
+ additional_hdrs = [
+ "src/amqp10_client.hrl",
+ ],
+ additional_srcs = [
+ "test/activemq_ct_helpers.erl",
+ "test/mock_server.erl",
+ ],
+ data = [
+ "@activemq//:exec_dir",
+ ],
+ erlc_opts = [
+ "-I deps/amqp10_client",
+ ],
+ test_env = {
+ "ACTIVEMQ": "$TEST_SRCDIR/$TEST_WORKSPACE/external/activemq/bin/activemq",
+ },
+ deps = DEPS,
+ ),
+]
+
+assert_suites(
+ suites,
+ glob(["test/**/*_SUITE.erl"]),
+)
diff --git a/deps/amqp10_client/Makefile b/deps/amqp10_client/Makefile
index 6f84e2cfee..59131678c9 100644
--- a/deps/amqp10_client/Makefile
+++ b/deps/amqp10_client/Makefile
@@ -24,8 +24,8 @@ dep_elvis_mk = git https://github.com/inaka/elvis.mk.git master
ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
ERLANG_MK_COMMIT = rabbitmq-tmp
-include rabbitmq-components.mk
-include erlang.mk
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
# --------------------------------------------------------------------
# Compiler flags.
diff --git a/deps/amqp10_client/activemq.bzl b/deps/amqp10_client/activemq.bzl
new file mode 100644
index 0000000000..7ad2f5cb4d
--- /dev/null
+++ b/deps/amqp10_client/activemq.bzl
@@ -0,0 +1,19 @@
+load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
+
+ACTIVEMQ_VERSION = "5.14.4"
+ACTIVEMQ_URL = "https://archive.apache.org/dist/activemq/{version}/apache-activemq-{version}-bin.tar.gz".format(version = ACTIVEMQ_VERSION)
+SHA_256 = "16ec52bece0a4759f9d70f4132d7d8da67d662e4af029081c492e65510a695c1"
+
+def activemq_archive():
+ http_archive(
+ name = "activemq",
+ urls = [ACTIVEMQ_URL],
+ sha256 = SHA_256,
+ strip_prefix = "apache-activemq-{}".format(ACTIVEMQ_VERSION),
+ build_file_content = """filegroup(
+ name = "exec_dir",
+ srcs = glob(["bin/**/*", "lib/**/*", "conf/**/*", "activemq-all-*.jar"]),
+ visibility = ["//visibility:public"],
+)
+""",
+ )
diff --git a/deps/amqp10_client/src/amqp10_client.erl b/deps/amqp10_client/src/amqp10_client.erl
index 4420cbd2b3..1d77bf8304 100644
--- a/deps/amqp10_client/src/amqp10_client.erl
+++ b/deps/amqp10_client/src/amqp10_client.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(amqp10_client).
@@ -434,7 +434,9 @@ parse_usertoken("") ->
none;
parse_usertoken(U) ->
[User, Pass] = string:tokens(U, ":"),
- {plain, to_binary(http_uri:decode(User)), to_binary(http_uri:decode(Pass))}.
+ {plain,
+ to_binary(uri_string:percent_decode(User)),
+ to_binary(uri_string:percent_decode(Pass))}.
parse_tls_opts(M) ->
lists:sort(maps:fold(fun parse_tls_opt/3, [], M)).
diff --git a/deps/amqp10_client/src/amqp10_client.hrl b/deps/amqp10_client/src/amqp10_client.hrl
index 0f2b6917cb..65cad53ce3 100644
--- a/deps/amqp10_client/src/amqp10_client.hrl
+++ b/deps/amqp10_client/src/amqp10_client.hrl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-define(AMQP_PROTOCOL_HEADER, <<"AMQP", 0, 1, 0, 0>>).
diff --git a/deps/amqp10_client/src/amqp10_client_app.erl b/deps/amqp10_client/src/amqp10_client_app.erl
index 7b5271335b..413b9e1cd7 100644
--- a/deps/amqp10_client/src/amqp10_client_app.erl
+++ b/deps/amqp10_client/src/amqp10_client_app.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(amqp10_client_app).
diff --git a/deps/amqp10_client/src/amqp10_client_connection.erl b/deps/amqp10_client/src/amqp10_client_connection.erl
index 5858e36c5a..3a168e3f9b 100644
--- a/deps/amqp10_client/src/amqp10_client_connection.erl
+++ b/deps/amqp10_client/src/amqp10_client_connection.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(amqp10_client_connection).
@@ -92,8 +92,6 @@
-export_type([connection_config/0,
amqp10_socket/0]).
--define(DEFAULT_TIMEOUT, 5000).
-
%% -------------------------------------------------------------------
%% Public API.
%% -------------------------------------------------------------------
@@ -228,7 +226,7 @@ hdr_sent(_EvtType, {protocol_header_received, 0, 1, 0, 0}, State) ->
end;
hdr_sent(_EvtType, {protocol_header_received, Protocol, Maj, Min,
Rev}, State) ->
- error_logger:warning_msg("Unsupported protocol version: ~b ~b.~b.~b~n",
+ logger:warning("Unsupported protocol version: ~b ~b.~b.~b",
[Protocol, Maj, Min, Rev]),
{stop, normal, State};
hdr_sent({call, From}, begin_session,
@@ -257,7 +255,7 @@ open_sent(_EvtType, #'v1_0.open'{max_frame_size = MFSz,
S2
end, State1, PendingSessionReqs),
ok = notify_opened(Config),
- {next_state, opened, State2};
+ {next_state, opened, State2#state{pending_session_reqs = []}};
open_sent({call, From}, begin_session,
#state{pending_session_reqs = PendingSessionReqs} = State) ->
State1 = State#state{pending_session_reqs = [From | PendingSessionReqs]},
@@ -291,12 +289,19 @@ opened(info, {'DOWN', MRef, _, _, _Info},
ok = notify_closed(Config, shutdown),
{stop, normal, State};
opened(_EvtType, Frame, State) ->
- error_logger:warning_msg("Unexpected connection frame ~p when in state ~p ~n",
+ logger:warning("Unexpected connection frame ~p when in state ~p ",
[Frame, State]),
{keep_state, State}.
close_sent(_EvtType, heartbeat, State) ->
{next_state, close_sent, State};
+close_sent(_EvtType, {'EXIT', _Pid, shutdown}, State) ->
+ %% monitored processes may exit during closure
+ {next_state, close_sent, State};
+close_sent(_EvtType, {'DOWN', _Ref, process, ReaderPid, _},
+ #state{reader = ReaderPid} = State) ->
+    %% if the reader exits we probably won't receive a close frame
+ {stop, normal, State};
close_sent(_EvtType, #'v1_0.close'{}, State) ->
%% TODO: we should probably set up a timer before this to ensure
%% we close down event if no reply is received
@@ -367,7 +372,7 @@ send_open(#state{socket = Socket, config = Config}) ->
end,
Encoded = amqp10_framing:encode_bin(Open),
Frame = amqp10_binary_generator:build_frame(0, Encoded),
- ?DBG("CONN <- ~p~n", [Open]),
+ ?DBG("CONN <- ~p", [Open]),
socket_send(Socket, Frame).
@@ -375,7 +380,7 @@ send_close(#state{socket = Socket}, _Reason) ->
Close = #'v1_0.close'{},
Encoded = amqp10_framing:encode_bin(Close),
Frame = amqp10_binary_generator:build_frame(0, Encoded),
- ?DBG("CONN <- ~p~n", [Close]),
+ ?DBG("CONN <- ~p", [Close]),
Ret = socket_send(Socket, Frame),
case Ret of
ok -> _ =
@@ -397,7 +402,7 @@ send_sasl_init(State, {plain, User, Pass}) ->
send(Record, FrameType, #state{socket = Socket}) ->
Encoded = amqp10_framing:encode_bin(Record),
Frame = amqp10_binary_generator:build_frame(0, FrameType, Encoded),
- ?DBG("CONN <- ~p~n", [Record]),
+ ?DBG("CONN <- ~p", [Record]),
socket_send(Socket, Frame).
send_heartbeat(#state{socket = Socket}) ->
diff --git a/deps/amqp10_client/src/amqp10_client_connection_sup.erl b/deps/amqp10_client/src/amqp10_client_connection_sup.erl
index c20a7b86ce..6695a648cd 100644
--- a/deps/amqp10_client/src/amqp10_client_connection_sup.erl
+++ b/deps/amqp10_client/src/amqp10_client_connection_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(amqp10_client_connection_sup).
diff --git a/deps/amqp10_client/src/amqp10_client_connections_sup.erl b/deps/amqp10_client/src/amqp10_client_connections_sup.erl
index 4200c64551..451dbbe272 100644
--- a/deps/amqp10_client/src/amqp10_client_connections_sup.erl
+++ b/deps/amqp10_client/src/amqp10_client_connections_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(amqp10_client_connections_sup).
diff --git a/deps/amqp10_client/src/amqp10_client_frame_reader.erl b/deps/amqp10_client/src/amqp10_client_frame_reader.erl
index 524ead07ee..5a0b83d0cf 100644
--- a/deps/amqp10_client/src/amqp10_client_frame_reader.erl
+++ b/deps/amqp10_client/src/amqp10_client_frame_reader.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(amqp10_client_frame_reader).
@@ -158,16 +158,16 @@ handle_event(info, {Tcp, _, Packet}, StateName, #state{buffer = Buffer} = State)
handle_event(info, {TcpError, _, Reason}, StateName, State)
when TcpError == tcp_error orelse TcpError == ssl_error ->
- error_logger:warning_msg("AMQP 1.0 connection socket errored, connection state: '~s', reason: '~p'~n",
- [StateName, Reason]),
+ logger:warning("AMQP 1.0 connection socket errored, connection state: '~s', reason: '~p'",
+ [StateName, Reason]),
State1 = State#state{socket = undefined,
buffer = <<>>,
frame_state = undefined},
{stop, {error, Reason}, State1};
handle_event(info, {TcpClosed, _}, StateName, State)
when TcpClosed == tcp_closed orelse TcpClosed == ssl_closed ->
- error_logger:warning_msg("AMQP 1.0 connection socket was closed, connection state: '~s'~n",
- [StateName]),
+ logger:warning("AMQP 1.0 connection socket was closed, connection state: '~s'",
+ [StateName]),
State1 = State#state{socket = undefined,
buffer = <<>>,
frame_state = undefined},
@@ -279,7 +279,7 @@ defer_heartbeat_timer(State) -> State.
route_frame(Channel, FrameType, {Performative, Payload} = Frame, State0) ->
{DestinationPid, State} = find_destination(Channel, FrameType, Performative,
State0),
- ?DBG("FRAME -> ~p ~p~n ~p~n", [Channel, DestinationPid, Performative]),
+ ?DBG("FRAME -> ~p ~p~n ~p", [Channel, DestinationPid, Performative]),
case Payload of
<<>> -> ok = gen_statem:cast(DestinationPid, Performative);
_ -> ok = gen_statem:cast(DestinationPid, Frame)
diff --git a/deps/amqp10_client/src/amqp10_client_session.erl b/deps/amqp10_client/src/amqp10_client_session.erl
index e758a8acbb..83567b74d6 100644
--- a/deps/amqp10_client/src/amqp10_client_session.erl
+++ b/deps/amqp10_client/src/amqp10_client_session.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(amqp10_client_session).
@@ -372,7 +372,7 @@ mapped(cast, {#'v1_0.transfer'{handle = {uint, InHandle},
ok = notify_link(Link, credit_exhausted),
{next_state, mapped, State};
{transfer_limit_exceeded, State} ->
- error_logger:info_msg("transfer_limit_exceeded for link ~p~n", [Link]),
+ logger:warning("transfer_limit_exceeded for link ~p", [Link]),
Link1 = detach_with_error_cond(Link, State,
?V_1_0_LINK_ERROR_TRANSFER_LIMIT_EXCEEDED),
{next_state, mapped, update_link(Link1, State)}
@@ -403,7 +403,7 @@ mapped(cast, #'v1_0.disposition'{role = true, settled = true, first = {uint, Fir
{next_state, mapped, State#state{unsettled = Unsettled}};
mapped(cast, Frame, State) ->
- error_logger:warning_msg("Unhandled session frame ~p in state ~p~n",
+ logger:warning("Unhandled session frame ~p in state ~p",
[Frame, State]),
{next_state, mapped, State};
mapped({call, From},
@@ -490,7 +490,7 @@ mapped({call, From}, Msg, State) ->
{keep_state, State1, [{reply, From, Reply}]};
mapped(_EvtType, Msg, _State) ->
- error_logger:info_msg("amqp10_session: unhandled msg in mapped state ~W",
+ logger:warning("amqp10_session: unhandled msg in mapped state ~W",
[Msg, 10]),
keep_state_and_data.
@@ -957,7 +957,8 @@ amqp10_session_event(Evt) ->
socket_send(Sock, Data) ->
case socket_send0(Sock, Data) of
ok -> ok;
- {error, Reason} -> exit({socket_closed, Reason})
+ {error, _Reason} ->
+ throw({stop, normal})
end.
-dialyzer({no_fail_call, socket_send0/2}).
diff --git a/deps/amqp10_client/src/amqp10_client_sessions_sup.erl b/deps/amqp10_client/src/amqp10_client_sessions_sup.erl
index cbe8499819..560dd0f198 100644
--- a/deps/amqp10_client/src/amqp10_client_sessions_sup.erl
+++ b/deps/amqp10_client/src/amqp10_client_sessions_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(amqp10_client_sessions_sup).
diff --git a/deps/amqp10_client/src/amqp10_client_sup.erl b/deps/amqp10_client/src/amqp10_client_sup.erl
index 345a51b141..dc38471909 100644
--- a/deps/amqp10_client/src/amqp10_client_sup.erl
+++ b/deps/amqp10_client/src/amqp10_client_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(amqp10_client_sup).
diff --git a/deps/amqp10_client/src/amqp10_client_types.erl b/deps/amqp10_client/src/amqp10_client_types.erl
index 2f0ab2a413..a39f5eb2a9 100644
--- a/deps/amqp10_client/src/amqp10_client_types.erl
+++ b/deps/amqp10_client/src/amqp10_client_types.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(amqp10_client_types).
diff --git a/deps/amqp10_client/src/amqp10_msg.erl b/deps/amqp10_client/src/amqp10_msg.erl
index fdd198e125..9cf2f7372e 100644
--- a/deps/amqp10_client/src/amqp10_msg.erl
+++ b/deps/amqp10_client/src/amqp10_msg.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(amqp10_msg).
diff --git a/deps/amqp10_client/test/activemq_ct_helpers.erl b/deps/amqp10_client/test/activemq_ct_helpers.erl
index 89dbd5fc8e..01c3e26cb7 100644
--- a/deps/amqp10_client/test/activemq_ct_helpers.erl
+++ b/deps/amqp10_client/test/activemq_ct_helpers.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2017-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2017-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(activemq_ct_helpers).
diff --git a/deps/amqp10_client/test/mock_server.erl b/deps/amqp10_client/test/mock_server.erl
index c2098efb7c..77dae3f7e6 100644
--- a/deps/amqp10_client/test/mock_server.erl
+++ b/deps/amqp10_client/test/mock_server.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(mock_server).
diff --git a/deps/amqp10_client/test/msg_SUITE.erl b/deps/amqp10_client/test/msg_SUITE.erl
index c7c200c664..e559e8b415 100644
--- a/deps/amqp10_client/test/msg_SUITE.erl
+++ b/deps/amqp10_client/test/msg_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2017-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2017-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(msg_SUITE).
diff --git a/deps/amqp10_client/test/system_SUITE.erl b/deps/amqp10_client/test/system_SUITE.erl
index d81d48c4e3..ae2343926a 100644
--- a/deps/amqp10_client/test/system_SUITE.erl
+++ b/deps/amqp10_client/test/system_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2017-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2017-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(system_SUITE).
diff --git a/deps/amqp10_common/BUILD.bazel b/deps/amqp10_common/BUILD.bazel
new file mode 100644
index 0000000000..1995ddfffe
--- /dev/null
+++ b/deps/amqp10_common/BUILD.bazel
@@ -0,0 +1,114 @@
+load(
+ "@bazel-erlang//:bazel_erlang_lib.bzl",
+ "app_file",
+ "bazel_erlang_lib",
+ "erlc",
+)
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze")
+load(
+ "//:rabbitmq.bzl",
+ "APP_VERSION",
+ "RABBITMQ_ERLC_OPTS",
+ "RABBITMQ_TEST_ERLC_OPTS",
+ "assert_suites",
+ "rabbitmq_suite",
+)
+
+py_binary(
+ name = "codegen",
+ srcs = [
+ "codegen.py",
+ ],
+ imports = ["../../deps/rabbitmq_codegen"],
+ deps = [
+ "//deps/rabbitmq_codegen:amqp_codegen",
+ ],
+)
+
+AMQP_SPEC_1_0 = [
+ "//deps/rabbitmq_codegen:amqp-1.0/messaging.xml",
+ "//deps/rabbitmq_codegen:amqp-1.0/security.xml",
+ "//deps/rabbitmq_codegen:amqp-1.0/transport.xml",
+ "//deps/rabbitmq_codegen:amqp-1.0/transactions.xml",
+]
+
+genrule(
+ name = "generated_headers",
+ srcs = AMQP_SPEC_1_0,
+ outs = ["include/amqp10_framing.hrl"],
+ cmd = "$(location :codegen) hrl $(SRCS) > $@",
+ tools = [":codegen"],
+)
+
+genrule(
+ name = "generated_sources",
+ srcs = AMQP_SPEC_1_0,
+ outs = ["src/amqp10_framing0.erl"],
+ cmd = "$(location :codegen) erl $(SRCS) > $@",
+ tools = [":codegen"],
+)
+
+app_file(
+ name = "app_file",
+ app_description = "Modules shared by rabbitmq-amqp1.0 and rabbitmq-amqp1.0-client",
+ app_name = "amqp10_common",
+ app_version = APP_VERSION,
+ modules = [":beam_files"],
+)
+
+erlc(
+ name = "beam_files",
+ srcs = ["src/amqp10_framing0.erl"] + glob(["src/*.erl"]),
+ hdrs = ["include/amqp10_framing.hrl"] + glob(["include/*.hrl"]),
+ dest = "ebin",
+ erlc_opts = RABBITMQ_ERLC_OPTS,
+)
+
+bazel_erlang_lib(
+ name = "bazel_erlang_lib",
+ hdrs = ["include/amqp10_framing.hrl"] + glob(["include/*.hrl"]),
+ app = ":app_file",
+ app_name = "amqp10_common",
+ beam = [":beam_files"],
+ visibility = ["//visibility:public"],
+)
+
+erlc(
+ name = "test_beam_files",
+ srcs = ["src/amqp10_framing0.erl"] + glob(["src/*.erl"]),
+ hdrs = ["include/amqp10_framing.hrl"] + glob(["include/*.hrl"]),
+ dest = "test",
+ erlc_opts = RABBITMQ_TEST_ERLC_OPTS,
+)
+
+bazel_erlang_lib(
+ name = "test_bazel_erlang_lib",
+ testonly = True,
+ hdrs = ["include/amqp10_framing.hrl"] + glob(["include/*.hrl"]),
+ app = ":app_file",
+ app_name = "amqp10_common",
+ beam = [":test_beam_files"],
+ visibility = ["//visibility:public"],
+)
+
+xref(tags = ["xref"])
+
+dialyze(
+ plt = "//:base_plt",
+ tags = ["dialyze"],
+)
+
+suites = [
+ rabbitmq_suite(
+ name = "binary_generator_SUITE",
+ ),
+ rabbitmq_suite(
+ name = "binary_parser_SUITE",
+ ),
+]
+
+assert_suites(
+ suites,
+ glob(["test/**/*_SUITE.erl"]),
+)
diff --git a/deps/amqp10_common/CONTRIBUTING.md b/deps/amqp10_common/CONTRIBUTING.md
index 45bbcbe62e..339d097deb 100644
--- a/deps/amqp10_common/CONTRIBUTING.md
+++ b/deps/amqp10_common/CONTRIBUTING.md
@@ -13,7 +13,7 @@ The process is fairly standard:
* Create a branch with a descriptive name in the relevant repositories
* Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
* Submit pull requests with an explanation what has been changed and **why**
- * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Submit a filled out and signed [Contributor Agreement](https://cla.pivotal.io/) if needed (see below)
* Be patient. We will get to your pull request eventually
If what you are going to work on is a substantial change, please first ask the core team
@@ -28,7 +28,7 @@ See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
## Contributor Agreement
If you want to contribute a non-trivial change, please submit a signed copy of our
-[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+[Contributor Agreement](https://cla.pivotal.io/) around the time
you submit your pull request. This will make it much easier (in some cases, possible)
for the RabbitMQ team at Pivotal to merge your contribution.
diff --git a/deps/amqp10_common/Makefile b/deps/amqp10_common/Makefile
index 291a129f88..71ef0025e9 100644
--- a/deps/amqp10_common/Makefile
+++ b/deps/amqp10_common/Makefile
@@ -28,7 +28,7 @@ ERLANG_MK_COMMIT = rabbitmq-tmp
-include development.pre.mk
-include rabbitmq-components.mk
-include erlang.mk
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
-include development.post.mk
diff --git a/deps/amqp10_common/src/amqp10_binary_generator.erl b/deps/amqp10_common/src/amqp10_binary_generator.erl
index 7585c48d12..ff8b1c7348 100644
--- a/deps/amqp10_common/src/amqp10_binary_generator.erl
+++ b/deps/amqp10_common/src/amqp10_binary_generator.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(amqp10_binary_generator).
diff --git a/deps/amqp10_common/src/amqp10_binary_parser.erl b/deps/amqp10_common/src/amqp10_binary_parser.erl
index 87f78934a0..376ac47aed 100644
--- a/deps/amqp10_common/src/amqp10_binary_parser.erl
+++ b/deps/amqp10_common/src/amqp10_binary_parser.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(amqp10_binary_parser).
@@ -31,15 +31,15 @@ parse_described(Bin) ->
{Value, Rest2} = parse(Rest1),
{{described, Descriptor, Value}, Rest2}.
-parse_primitive0(<<Type,Rest/binary>>) ->
+parse_primitive0(<<Type, Rest/binary>>) ->
parse_primitive(Type, Rest).
%% Constants
-parse_primitive(16#40, Rest) -> {null, Rest};
-parse_primitive(16#41, Rest) -> {true, Rest};
-parse_primitive(16#42, Rest) -> {false, Rest};
-parse_primitive(16#43, Rest) -> {{uint, 0}, Rest};
-parse_primitive(16#44, Rest) -> {{ulong, 0}, Rest};
+parse_primitive(16#40, R) -> {null, R};
+parse_primitive(16#41, R) -> {true, R};
+parse_primitive(16#42, R) -> {false, R};
+parse_primitive(16#43, R) -> {{uint, 0}, R};
+parse_primitive(16#44, R) -> {{ulong, 0}, R};
%% Fixed-widths. Most integral types have a compact encoding as a byte.
parse_primitive(16#50, <<V:8/unsigned, R/binary>>) -> {{ubyte, V}, R};
@@ -122,6 +122,14 @@ parse_compound1(Count, Bin, Acc) ->
{Value, Rest} = parse(Bin),
parse_compound1(Count - 1, Rest, [Value | Acc]).
+parse_array_primitive(16#40, <<_:8/unsigned, R/binary>>) -> {null, R};
+parse_array_primitive(16#41, <<_:8/unsigned, R/binary>>) -> {true, R};
+parse_array_primitive(16#42, <<_:8/unsigned, R/binary>>) -> {false, R};
+parse_array_primitive(16#43, <<_:8/unsigned, R/binary>>) -> {{uint, 0}, R};
+parse_array_primitive(16#44, <<_:8/unsigned, R/binary>>) -> {{ulong, 0}, R};
+parse_array_primitive(ElementType, Data) ->
+ parse_primitive(ElementType, Data).
+
%% array structure is {array, Ctor, [Data]}
%% e.g. {array, symbol, [<<"amqp:accepted:list">>]}
parse_array(UnitSize, Bin) ->
@@ -141,8 +149,12 @@ parse_array1(Count, <<Type, ArrayBin/binary>>) ->
parse_array2(0, Type, <<>>, Acc) ->
{array, parse_constructor(Type), lists:reverse(Acc)};
+parse_array2(0, Type, Bin, Acc) ->
+ exit({failed_to_parse_array_extra_input_remaining, Type, Bin, Acc});
+parse_array2(Count, Type, <<>>, Acc) when Count > 0 ->
+ exit({failed_to_parse_array_insufficient_input, Type, Count, Acc});
parse_array2(Count, Type, Bin, Acc) ->
- {Value, Rest} = parse_primitive(Type, Bin),
+ {Value, Rest} = parse_array_primitive(Type, Bin),
parse_array2(Count - 1, Type, Rest, [Value | Acc]).
parse_constructor(16#a3) -> symbol;
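
The two new parse_array2/4 clauses turn malformed arrays (leftover bytes after the declared element count, or a body that runs out before the count is exhausted) into descriptive exits instead of a silent mis-parse, and array elements are now dispatched through parse_array_primitive/2 so the constant constructors 16#40-16#44 consume their one-byte element slot inside an array. A caller that prefers an error tuple over an exit could wrap the entry point along these lines (a hedged sketch; parse_all/1 is the function the new test suite below exercises):

    %% Hedged sketch: convert the new array-parsing exits into error tuples.
    safe_parse_all(Bin) ->
        try
            {ok, amqp10_binary_parser:parse_all(Bin)}
        catch
            exit:{failed_to_parse_array_extra_input_remaining, _Type, _Rest, _Acc} = Reason ->
                {error, Reason};
            exit:{failed_to_parse_array_insufficient_input, _Type, _Count, _Acc} = Reason ->
                {error, Reason}
        end.
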
diff --git a/deps/amqp10_common/src/amqp10_framing.erl b/deps/amqp10_common/src/amqp10_framing.erl
index f96cf8b602..fc89e6ef76 100644
--- a/deps/amqp10_common/src/amqp10_framing.erl
+++ b/deps/amqp10_common/src/amqp10_framing.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(amqp10_framing).
diff --git a/deps/amqp10_common/test/binary_parser_SUITE.erl b/deps/amqp10_common/test/binary_parser_SUITE.erl
new file mode 100644
index 0000000000..7c1b2322e3
--- /dev/null
+++ b/deps/amqp10_common/test/binary_parser_SUITE.erl
@@ -0,0 +1,59 @@
+-module(binary_parser_SUITE).
+
+-compile(export_all).
+
+-export([
+ ]).
+
+-include_lib("common_test/include/ct.hrl").
+-include_lib("eunit/include/eunit.hrl").
+
+%%%===================================================================
+%%% Common Test callbacks
+%%%===================================================================
+
+all() ->
+ [
+ {group, tests}
+ ].
+
+
+all_tests() ->
+ [
+ array_with_extra_input
+ ].
+
+groups() ->
+ [
+ {tests, [], all_tests()}
+ ].
+
+init_per_suite(Config) ->
+ Config.
+
+end_per_suite(_Config) ->
+ ok.
+
+init_per_group(_Group, Config) ->
+ Config.
+
+end_per_group(_Group, _Config) ->
+ ok.
+
+init_per_testcase(_TestCase, Config) ->
+ Config.
+
+end_per_testcase(_TestCase, _Config) ->
+ ok.
+
+%%%===================================================================
+%%% Test cases
+%%%===================================================================
+
+array_with_extra_input(_Config) ->
+ Bin = <<83,16,192,85,10,177,0,0,0,1,48,161,12,114,97,98,98,105,116, 109,113,45,98,111,120,112,255,255,0,0,96,0,50,112,0,0,19,136,163,5,101,110,45,85,83,224,14,2,65,5,102,105,45,70,73,5,101,110,45,85,83,64,64,193,24,2,163,20,68,69,70,69,78,83,73,67,83,46,84,69,83,84,46,83,85,73,84,69,65>>,
+ ?assertExit({failed_to_parse_array_extra_input_remaining,
+ %% element type, input, accumulated result
+ 65, <<105,45,70,73,5,101,110,45,85,83>>, [true,true]},
+ amqp10_binary_parser:parse_all(Bin)),
+ ok.
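
For a quick local check of the new suite outside the Bazel or erlang.mk wrappers, Common Test can run it directly. A hedged one-liner from an Erlang shell, assuming amqp10_common is already compiled and on the code path and the working directory is the repository root:

    %% Hedged: run the new suite directly via Common Test.
    ct:run_test([{dir, "deps/amqp10_common/test"},
                 {suite, binary_parser_SUITE}]).
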
diff --git a/deps/amqp_client/BUILD.bazel b/deps/amqp_client/BUILD.bazel
new file mode 100644
index 0000000000..5fec64c732
--- /dev/null
+++ b/deps/amqp_client/BUILD.bazel
@@ -0,0 +1,97 @@
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze", "plt")
+load("//:rabbitmq_home.bzl", "rabbitmq_home")
+load("//:rabbitmq_run.bzl", "rabbitmq_run")
+load(
+ "//:rabbitmq.bzl",
+ "assert_suites",
+ "rabbitmq_integration_suite",
+ "rabbitmq_lib",
+ "rabbitmq_suite",
+)
+
+APP_ENV = """[
+ {prefer_ipv6, false},
+ {ssl_options, []},
+ {writer_gc_threshold, 1000000000}
+ ]"""
+
+EXTRA_APPS = [
+ "xmerl",
+]
+
+DEPS = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+]
+
+rabbitmq_lib(
+ app_description = "RabbitMQ AMQP Client",
+ app_env = APP_ENV,
+ app_module = "amqp_client",
+ app_name = "amqp_client",
+ app_registered = [
+ "amqp_sup",
+ ],
+ extra_apps = EXTRA_APPS,
+ first_srcs = [
+ "src/amqp_gen_connection.erl",
+ "src/amqp_gen_consumer.erl",
+ ],
+ deps = DEPS,
+)
+
+xref(
+ additional_libs = [
+ "@ranch//:bazel_erlang_lib",
+ ],
+ tags = ["xref"],
+)
+
+plt(
+ name = "base_plt",
+ apps = EXTRA_APPS,
+ plt = "//:base_plt",
+)
+
+dialyze(
+ plt = ":base_plt",
+ tags = ["dialyze"],
+)
+
+rabbitmq_home(
+ name = "broker-for-tests-home",
+ plugins = [
+ "//deps/rabbit:bazel_erlang_lib",
+ ],
+)
+
+rabbitmq_run(
+ name = "rabbitmq-for-tests-run",
+ home = ":broker-for-tests-home",
+)
+
+suites = [
+ rabbitmq_integration_suite(
+ "deps/amqp_client",
+ name = "system_SUITE",
+ size = "large",
+ runtime_deps = [
+ "@meck//:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_suite(
+ name = "unit_SUITE",
+ size = "small",
+ runtime_deps = [
+ "//deps/rabbit:bazel_erlang_lib",
+ ],
+ deps = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+ ],
+ ),
+]
+
+assert_suites(
+ suites,
+ glob(["test/**/*_SUITE.erl"]),
+)
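
The amqp_client targets above build the RabbitMQ Erlang AMQP client and stand up a broker for its integration suite. For orientation, minimal use of the client looks roughly like the sketch below; the records and functions come from the client's public API rather than from this diff, so treat the details as assumptions.

    %% Hedged sketch of basic amqp_client usage; API names are assumed from
    %% the client's documented interface, not taken from this diff.
    -include_lib("amqp_client/include/amqp_client.hrl").

    publish_demo() ->
        {ok, Conn} = amqp_connection:start(#amqp_params_network{host = "localhost"}),
        {ok, Ch} = amqp_connection:open_channel(Conn),
        #'queue.declare_ok'{queue = Q} =
            amqp_channel:call(Ch, #'queue.declare'{queue = <<"demo">>}),
        ok = amqp_channel:cast(Ch, #'basic.publish'{routing_key = Q},
                               #amqp_msg{payload = <<"hello">>}),
        ok = amqp_channel:close(Ch),
        ok = amqp_connection:close(Conn).
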
diff --git a/deps/amqp_client/CONTRIBUTING.md b/deps/amqp_client/CONTRIBUTING.md
index 23a92fef9c..9722e973fb 100644
--- a/deps/amqp_client/CONTRIBUTING.md
+++ b/deps/amqp_client/CONTRIBUTING.md
@@ -13,7 +13,7 @@ The process is fairly standard:
* Create a branch with a descriptive name in the relevant repositories
* Make your changes, run tests, commit with a [descriptive message](https://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
* Submit pull requests with an explanation of what has been changed and **why**
- * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Submit a filled out and signed [Contributor Agreement](https://cla.pivotal.io/) if needed (see below)
* Be patient. We will get to your pull request eventually
If what you are going to work on is a substantial change, please first ask the core team
@@ -28,7 +28,7 @@ See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
## Contributor Agreement
If you want to contribute a non-trivial change, please submit a signed copy of our
-[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+[Contributor Agreement](https://cla.pivotal.io/) around the time
you submit your pull request. This will make it much easier (in some cases, possible)
for the RabbitMQ team at Pivotal to merge your contribution.
diff --git a/deps/amqp_client/Makefile b/deps/amqp_client/Makefile
index b15fd918f0..6b29ba950d 100644
--- a/deps/amqp_client/Makefile
+++ b/deps/amqp_client/Makefile
@@ -30,7 +30,7 @@ PACKAGES_DIR ?= $(abspath PACKAGES)
LOCAL_DEPS = xmerl
DEPS = rabbit_common
-TEST_DEPS = rabbitmq_ct_helpers rabbit
+TEST_DEPS = rabbitmq_ct_helpers rabbit meck
DEP_EARLY_PLUGINS = rabbit_common/mk/rabbitmq-early-test.mk
DEP_PLUGINS = rabbit_common/mk/rabbitmq-build.mk \
@@ -50,8 +50,8 @@ ERLANG_MK_COMMIT = rabbitmq-tmp
WITHOUT = plugins/proper
-include rabbitmq-components.mk
-include erlang.mk
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
# --------------------------------------------------------------------
# Distribution.
diff --git a/deps/amqp_client/erlang.mk b/deps/amqp_client/erlang.mk
deleted file mode 100644
index defddc4865..0000000000
--- a/deps/amqp_client/erlang.mk
+++ /dev/null
@@ -1,7746 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
-ERLANG_MK_WITHOUT = plugins/proper
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
-
-# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
-
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
-
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simplify writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating Github-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = erlang library for JSON Web Token
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple, non-intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = redis erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += esh_mk
-pkg_esh_mk_name = esh_mk
-pkg_esh_mk_description = esh template engine plugin for erlang.mk
-pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
-pkg_esh_mk_fetch = git
-pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
-pkg_esh_mk_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = TOML language erlang parser
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = http://fixprotocol.org/ implementation.
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - an Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = The guards and for Erlang parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
-pkg_gun_homepage = http://ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang irc client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library for efficiently decoding and encoding JSON, written in C.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = erlang driver for rethinkdb
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = library to handle mimetypes
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang OAuth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transform for using new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between record and proplist
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = automatic Erlang node discovery via redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = pipelined erlang redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang Rest Client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy as nif for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an ID generator for message services.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elastic Search
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX style broken HTML parser in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = transit format for erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU based collation Erlang module
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = a simple erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler svn repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired by rebar xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based yaml loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = Zab protocol implemented in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang.
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
-
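For illustration, the search target above can be driven from any project that includes erlang.mk; q is matched case-insensitively against each package's name and description, and matching entries are printed with pkg_print. A minimal usage sketch:

    make search           # print every entry in the PACKAGES index
    make search q=pool    # matches e.g. poolboy, pooler and riakpool above
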
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
-
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
-
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# Both kinds are loaded through the core_dep_plugin macro below; a usage sketch follows the loading loop.
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
-
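As a sketch of how the early-plugin hook above is typically consumed (the dependency name is illustrative, not defined in this file): an entry without a slash loads early-plugins.mk from that dependency's directory under $(DEPS_DIR), while an entry containing a slash is included as an explicit path inside the dependency.

    # Hypothetical project Makefile fragment:
    BUILD_DEPS += my_build_tools
    DEP_EARLY_PLUGINS = my_build_tools                 # includes $(DEPS_DIR)/my_build_tools/early-plugins.mk
    #DEP_EARLY_PLUGINS = my_build_tools/mk/early.mk    # includes $(DEPS_DIR)/my_build_tools/mk/early.mk
    include erlang.mk
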
-# Query functions.
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
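As a concrete illustration of the macros above (the cowboy declaration is hypothetical and not taken from this repository), a standard dep_* entry resolves as follows:

    dep_cowboy = git https://github.com/ninenines/cowboy 2.9.0
    # $(call query_fetch_method,cowboy)  -> git
    # $(call query_repo,cowboy)          -> https://github.com/ninenines/cowboy
    # $(call query_version,cowboy)       -> 2.9.0
    # $(call query_absolute_path,cowboy) -> $(DEPS_DIR)/cowboy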
-
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
-
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly we don't want to include it here,
-# otherwise it'll be treated both as an app and as the top-level project.
-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
-
-export NO_AUTOPATCH
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
-
-# Optimization: don't recompile deps unless truly necessary.
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create ebin directory for all apps to make sure Erlang recognizes them
-# as proper OTP applications when using -include_lib. This is a temporary
-# fix; a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
-
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies after they were compiled
-# once. If a developer is working on the top-level project and some
-# dependencies at the same time, they may want to change this behavior.
-# There are two solutions:
-# 1. Set `FULL=1` so that all dependencies are visited and
-# recursively recompiled if necessary.
-# 2. Set `FORCE_REBUILD=` to the specific list of dependencies that
-# should be recompiled (instead of the whole set).
-
-FORCE_REBUILD ?=
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
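A hedged usage sketch of the two options described above; the dependency names are only examples:

    $ make FULL=1                         # visit every dependency and rebuild as needed
    $ make FORCE_REBUILD="cowlib ranch"   # always rebuild just these dependencies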
-
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
-
-# Deps related targets.
-
-# @todo rename GNUmakefile and makefile into Makefile first, if they exist
-# While the Makefile could be named GNUmakefile or makefile,
-# in practice only Makefile is needed so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
-# if given. Do it for all 3 possible Makefile file names.
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
-
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-# Hex only has a package version. No need to look in the Erlang.mk packages.
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
-
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility purposes with older Erlang.mk configuration.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
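For reference, a few hypothetical declarations showing how the first word of a dep_* variable selects one of the fetch methods defined above (names, URLs and versions are illustrative only):

    DEPS += cowlib jsx
    dep_cowlib = git https://github.com/ninenines/cowlib 2.11.0
    dep_jsx    = hex 3.1.0
    # The third word of a hex dep names the Hex package when it differs from the app name:
    # dep_my_app = hex 1.0.0 my_hex_package
    # git-subfolder: the fourth word is the sub-directory symlinked into $(DEPS_DIR):
    # dep_my_app = git-subfolder https://example.com/monorepo.git main apps/my_app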
-
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
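A hedged example of using these knobs in a project Makefile (module names are invented): COMPILE_FIRST lists modules under src/ that must be compiled before the rest, typically behaviours or parse transforms, while ERLC_EXCLUDE drops the named modules from the regular erlc invocation:

    COMPILE_FIRST = my_behaviour my_parse_transform
    ERLC_EXCLUDE  = my_generated_stub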
-
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- Output0 = [
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ],
- Output = case "é" of
- [233] -> unicode:characters_to_binary(Output0);
- _ -> Output0
- end,
- ok = file:write_file("$(1)", Output),
- halt()
-endef
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
- echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
-
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
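A hedged sketch of a typical test configuration in a project Makefile; the dependency name is illustrative and would still need to resolve to a known package or dep_* entry:

    TEST_DEPS = meck
    # TEST_DIR defaults to $(CURDIR)/test; override it to keep suites elsewhere, e.g.:
    # TEST_DIR = $(CURDIR)/ct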
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
- @:
-
-test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
-test_erlc_verbose_2 = set -x;
-test_erlc_verbose = $(test_erlc_verbose_$(V))
-
-define compile_test_erl
- $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(1)
-endef
-
-ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
-$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
- $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want to fail due to
-# warnings when used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
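Usage note, hedged: invoking the target regenerates rebar.config in the project root from DEPS and ERLC_OPTS, which can be handy when the project is consumed by rebar/rebar3 users:

    $ make rebar.config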
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
- " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
- " list-templates List available templates"
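A hedged example session for the targets listed above; project and module names are invented, and template names match the tpl_* definitions further down:

    $ make -f erlang.mk bootstrap          # one common way to start a fresh project
    $ make new t=gen_server n=my_server    # renders tpl_gen_server into src/my_server.erl
    $ make new t=supervisor n=my_sup in=my_app
    $ make list-templates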
-
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
-
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
-
-list-templates:
- $(verbose) @echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
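A hedged override example: the default C_SRC_TYPE builds a shared object (a NIF or driver) at priv/$(PROJECT) plus the platform extension, while any other value, conventionally "executable", links a port program instead; the output name below is illustrative:

    C_SRC_TYPE   = executable
    C_SRC_OUTPUT = $(CURDIR)/priv/my_port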
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code. The "gcc" MSYS2 package also doesn't.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
-
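A hedged usage sketch of the new-nif target above (my_nif and my_app are hypothetical names); it renders the bs_c_nif and bs_erl_nif templates into $(C_SRC_DIR) and src/ respectively:

    # Generate c_src/my_nif.c and src/my_nif.erl stubs for a NIF.
    make new-nif n=my_nif
    # Or generate the same stubs inside an application under $(APPS_DIR).
    make new-nif n=my_nif in=my_app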
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
- "The CI_OTP variable must be defined with the Erlang versions" \
- "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
-
-# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifdef CONCUERROR_TESTS
-
-.PHONY: concuerror distclean-concuerror
-
-# Configuration
-
-CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
-CONCUERROR_OPTS ?=
-
-# Core targets.
-
-check:: concuerror
-
-ifndef KEEP_LOGS
-distclean:: distclean-concuerror
-endif
-
-# Plugin-specific targets.
-
-$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
- $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
- $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
-
-$(CONCUERROR_LOGS_DIR):
- $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
-
-define concuerror_html_report
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="utf-8">
-<title>Concuerror HTML report</title>
-</head>
-<body>
-<h1>Concuerror HTML report</h1>
-<p>Generated on $(concuerror_date)</p>
-<ul>
-$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
-</ul>
-</body>
-</html>
-endef
-
-concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
- $(eval concuerror_date := $(shell date))
- $(eval concuerror_targets := $^)
- $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
-
-define concuerror_target
-.PHONY: concuerror-$1-$2
-
-concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
- $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
- --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
- -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
- $$(CONCUERROR_OPTS) -m $1 -t $2
-endef
-
-$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
-
-distclean-concuerror:
- $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
-
-endif
-
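A sketch of how the Concuerror plugin above is typically driven; the module and test names are hypothetical. Each CONCUERROR_TESTS entry is a module:test pair, matching the -m/-t arguments passed by concuerror_target:

    # In the project Makefile, before including erlang.mk:
    CONCUERROR_TESTS = my_module:my_test other_module:another_test
    # Run the analysis; per-test logs land in $(CONCUERROR_LOGS_DIR),
    # and the concuerror target writes an HTML index next to them.
    make concuerror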
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ct apps-ct distclean-ct
-
-# Configuration.
-
-CT_OPTS ?=
-
-ifneq ($(wildcard $(TEST_DIR)),)
-ifndef CT_SUITES
-CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
-endif
-endif
-CT_SUITES ?=
-CT_LOGS_DIR ?= $(CURDIR)/logs
-
-# Core targets.
-
-tests:: ct
-
-ifndef KEEP_LOGS
-distclean:: distclean-ct
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Common_test targets:" \
- " ct Run all the common_test suites for this project" \
- "" \
- "All your common_test suites have their associated targets." \
-	"A suite named http_SUITE can be run using the ct-http target."
-
-# Plugin-specific targets.
-
-CT_RUN = ct_run \
- -no_auto_compile \
- -noinput \
- -pa $(CURDIR)/ebin $(TEST_DIR) \
- -dir $(TEST_DIR) \
- -logdir $(CT_LOGS_DIR)
-
-ifeq ($(CT_SUITES),)
-ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
-else
-# We do not run tests if we are in an apps/* application with no test directory.
-ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
-ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
-endif
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-define ct_app_target
-apps-ct-$1: test-build
- $$(MAKE) -C $1 ct IS_APP=1
-endef
-
-$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
-
-apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
-endif
-
-ifdef t
-ifeq (,$(findstring :,$t))
-CT_EXTRA = -group $t
-else
-t_words = $(subst :, ,$t)
-CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
-endif
-else
-ifdef c
-CT_EXTRA = -case $c
-else
-CT_EXTRA =
-endif
-endif
-
-define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
-endef
-
-$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
-
-distclean-ct:
- $(gen_verbose) rm -rf $(CT_LOGS_DIR)
-
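A usage sketch for the Common Test targets above, assuming a suite named http_SUITE as in the help text; the group and case names are hypothetical:

    # Run every *_SUITE found in $(TEST_DIR).
    make ct
    # Run a single suite via its generated ct-<suite> target.
    make ct-http
    # Restrict the run to a group, or to a group and a case (t=group[:case]).
    make ct-http t=admin_users
    make ct-http t=admin_users:login
    # Restrict to a single case only (c=case).
    make ct-http c=login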
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: plt distclean-plt dialyze
-
-# Configuration.
-
-DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
-export DIALYZER_PLT
-
-PLT_APPS ?=
-DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-DIALYZER_PLT_OPTS ?=
-
-# Core targets.
-
-check:: dialyze
-
-distclean:: distclean-plt
-
-help::
- $(verbose) printf "%s\n" "" \
- "Dialyzer targets:" \
- " plt Build a PLT file for this project" \
- " dialyze Analyze the project using Dialyzer"
-
-# Plugin-specific targets.
-
-define filter_opts.erl
- Opts = init:get_plain_arguments(),
- {Filtered, _} = lists:foldl(fun
- (O, {Os, true}) -> {[O|Os], false};
- (O = "-D", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-I", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-pa", {Os, _}) -> {[O|Os], true};
- (_, Acc) -> Acc
- end, {[], false}, Opts),
- io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
- halt().
-endef
-
-# DIALYZER_PLT is a variable understood directly by Dialyzer.
-#
-# We append the path to erts at the end of the PLT. This works
-# because the PLT file is in the external term format and the
-# function binary_to_term/1 ignores any trailing data.
-$(DIALYZER_PLT): deps app
- $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
- while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
- $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
- erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
- $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
-
-plt: $(DIALYZER_PLT)
-
-distclean-plt:
- $(gen_verbose) rm -f $(DIALYZER_PLT)
-
-ifneq ($(wildcard $(DIALYZER_PLT)),)
-dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
- $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
- grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
- rm $(DIALYZER_PLT); \
- $(MAKE) plt; \
- fi
-else
-dialyze: $(DIALYZER_PLT)
-endif
- $(verbose) dialyzer --no_native `$(ERL) \
- -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
- -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
-
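A minimal sketch of how the Dialyzer plugin above is normally configured and invoked; the extra PLT applications listed are only an example:

    # In the project Makefile, before including erlang.mk:
    PLT_APPS = crypto public_key ssl
    # Build the PLT once (cached as .$(PROJECT).plt in the project root)...
    make plt
    # ...then analyse; the PLT is rebuilt automatically if the erts path
    # recorded at its end no longer matches the current Erlang install.
    make dialyze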
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
- " escript Build an executable escript archive" \
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
- $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: eunit apps-eunit
-
-# Configuration
-
-EUNIT_OPTS ?=
-EUNIT_ERL_OPTS ?=
-
-# Core targets.
-
-tests:: eunit
-
-help::
- $(verbose) printf "%s\n" "" \
- "EUnit targets:" \
- " eunit Run all the EUnit tests for this project"
-
-# Plugin-specific targets.
-
-define eunit.erl
- $(call cover.erl)
- CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
- ok -> ok;
- error -> halt(2)
- end,
- CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
- halt()
-endef
-
-EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
-else
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
-endif
-else
-EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
-EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
-
-EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
- $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
-
-eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
-ifneq ($(wildcard src/ $(TEST_DIR)),)
- $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-apps-eunit: test-build
- $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
- [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
- exit $$eunit_retcode
-endif
-endif
-
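A usage sketch for the EUnit targets above; module and test names are hypothetical. A t value without a colon is treated as a module, while module:function runs that zero-arity test function:

    # Run the EUnit tests of every module in ebin/ and $(TEST_DIR).
    make eunit
    # Run the tests of a single module.
    make eunit t=my_module
    # Run a single zero-arity test function (invoked as fun my_module:my_test/0).
    make eunit t=my_module:my_test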
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Verbosity.
-
-proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
-proto_verbose = $(proto_verbose_$(V))
-
-# Core targets.
-
-ifneq ($(wildcard src/),)
-ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
-PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
-ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
-
-ifeq ($(PROTO_FILES),)
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
- $(verbose) :
-else
-# Rebuild proto files when the Makefile changes.
-# We exclude $(PROJECT).d to avoid a circular dependency.
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(PROTO_FILES); \
- fi
- $(verbose) touch $@
-
-$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
-endif
-
-ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
-define compile_proto.erl
- [begin
- protobuffs_compile:generate_source(F, [
- {output_include_dir, "./include"},
- {output_src_dir, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-else
-define compile_proto.erl
- [begin
- gpb_compile:file(F, [
- {include_as_lib, true},
- {module_name_suffix, "_pb"},
- {o_hrl, "./include"},
- {o_erl, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-endif
-
-ifneq ($(PROTO_FILES),)
-$(PROJECT).d:: $(PROTO_FILES)
- $(verbose) mkdir -p ebin/ include/
- $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
-endif
-endif
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
-
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
-	"  shell      Run an Erlang shell with SHELL_OPTS or reasonable defaults"
-
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
-
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
-	"ReST sources and the 'conf.py' file are expected in the directory pointed to by" \
- "SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists formats to build (only" \
- "'html' format is generated by default); target directory can be specified by" \
- 'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
- "Additional Sphinx options can be set in SPHINX_OPTS."
-
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
-
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
-.PHONY: triq
-
-# Targets.
-
-tests:: triq
-
-define triq_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
- module -> triq:check($(2));
- function -> triq:check($(2))
- end,
- CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
-	catch error:undef:Stacktrace ->
-		io:format("Undefined property or module?~n~p~n", [Stacktrace]),
-		halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-triq: test-build cover-data-dir
- $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
-else
-triq: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
-endif
-else
-triq: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
- ' xref Run Xrefr using $$XREF_CONFIG as config file if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-COVER_REPORT_DIR ?= cover
-COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
-
-ifdef COVER
-COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
-COVER_DEPS ?=
-endif
-
-# Code coverage for Common Test.
-
-ifdef COVER
-ifdef CT_RUN
-ifneq ($(wildcard $(TEST_DIR)),)
-test-build:: $(TEST_DIR)/ct.cover.spec
-
-$(TEST_DIR)/ct.cover.spec: cover-data-dir
- $(gen_verbose) printf "%s\n" \
- "{incl_app, '$(PROJECT)', details}." \
- "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
- $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
- $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
-
-CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
-endif
-endif
-endif
-
-# Code coverage for other tools.
-
-ifdef COVER
-define cover.erl
- CoverSetup = fun() ->
- Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
- $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
- $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
- end
- end || Dir <- Dirs]
- end,
- CoverExport = fun(Filename) -> cover:export(Filename) end,
-endef
-else
-define cover.erl
- CoverSetup = fun() -> ok end,
- CoverExport = fun(_) -> ok end,
-endef
-endif
-
-# Core targets
-
-ifdef COVER
-ifneq ($(COVER_REPORT_DIR),)
-tests::
- $(verbose) $(MAKE) --no-print-directory cover-report
-endif
-
-cover-data-dir: | $(COVER_DATA_DIR)
-
-$(COVER_DATA_DIR):
- $(verbose) mkdir -p $(COVER_DATA_DIR)
-else
-cover-data-dir:
-endif
-
-clean:: coverdata-clean
-
-ifneq ($(COVER_REPORT_DIR),)
-distclean:: cover-report-clean
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Cover targets:" \
-	"  cover-report  Generate an HTML coverage report from previously collected" \
- " cover data." \
- " all.coverdata Merge all coverdata files into all.coverdata." \
- "" \
- "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
-	"target tests additionally generates an HTML coverage report from the combined" \
- "coverdata files from each of these testing tools. HTML reports can be disabled" \
- "by setting COVER_REPORT_DIR to empty."
-
-# Plugin specific targets
-
-COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
-
-.PHONY: coverdata-clean
-coverdata-clean:
- $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
-
-# Merge all coverdata files into one.
-define cover_export.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
-endef
-
-all.coverdata: $(COVERDATA) cover-data-dir
- $(gen_verbose) $(call erlang,$(cover_export.erl))
-
-# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
-# empty if you want the coverdata files but not the HTML report.
-ifneq ($(COVER_REPORT_DIR),)
-
-.PHONY: cover-report-clean cover-report
-
-cover-report-clean:
- $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
-ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
- $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
-endif
-
-ifeq ($(COVERDATA),)
-cover-report:
-else
-
-# Modules which include eunit.hrl always contain one line without coverage
-# because eunit defines test/0 which is never called. We compensate for this.
-EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
- grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
- | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
-
-define cover_report.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- Ms = cover:imported_modules(),
- [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
- ++ ".COVER.html", [html]) || M <- Ms],
- Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
- EunitHrlMods = [$(EUNIT_HRL_MODS)],
- Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
- true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
- TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
- TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
- Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
- TotalPerc = Perc(TotalY, TotalN),
- {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
- io:format(F, "<!DOCTYPE html><html>~n"
- "<head><meta charset=\"UTF-8\">~n"
- "<title>Coverage report</title></head>~n"
- "<body>~n", []),
- io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
- io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
- [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
- "<td>~p%</td></tr>~n",
- [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
- How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
- Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
- io:format(F, "</table>~n"
- "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
- "</body></html>", [How, Date]),
- halt().
-endef
-
-cover-report:
- $(verbose) mkdir -p $(COVER_REPORT_DIR)
- $(gen_verbose) $(call erlang,$(cover_report.erl))
-
-endif
-endif # ifneq ($(COVER_REPORT_DIR),)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
-
-endif
-endif
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
-
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
-
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are always included,
-# no matter which type of dependencies is requested.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
-
-# Allow fetch-deps to be combined with $(DEP_TYPES) to fetch multiple types
-# of dependencies with a single target.
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
-
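A short sketch of the DEP_TYPES mechanism above; the values shown select which of the dependency classes handled by this block are fetched alongside the normal ones:

    # Fetch the normal dependencies only.
    make fetch-deps
    # Fetch normal, documentation and test dependencies in a single pass.
    make fetch-deps DEP_TYPES="doc test"
    # The resulting list can then be printed with the list-deps targets below.
    make list-deps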
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
diff --git a/deps/amqp_client/include/amqp_client.hrl b/deps/amqp_client/include/amqp_client.hrl
index 382525177d..5206305fd9 100644
--- a/deps/amqp_client/include/amqp_client.hrl
+++ b/deps/amqp_client/include/amqp_client.hrl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-ifndef(AMQP_CLIENT_HRL).
diff --git a/deps/amqp_client/include/amqp_client_internal.hrl b/deps/amqp_client/include/amqp_client_internal.hrl
index 01e099097e..64aca19122 100644
--- a/deps/amqp_client/include/amqp_client_internal.hrl
+++ b/deps/amqp_client/include/amqp_client_internal.hrl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-include("amqp_client.hrl").
@@ -28,3 +28,6 @@
{<<"authentication_failure_close">>, bool, true}]).
-define(WAIT_FOR_CONFIRMS_TIMEOUT, {60000, millisecond}).
+
+-define(DIRECT_OPERATION_TIMEOUT, 120000).
+-define(CALL_TIMEOUT_DEVIATION, 10000).
diff --git a/deps/amqp_client/rabbitmq-components.mk b/deps/amqp_client/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/amqp_client/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Set the default goal to `all` because this file defines some targets
-# before the inclusion of erlang.mk, which would otherwise lead to the
-# wrong target becoming the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
-
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to check out branches which match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch, or fall back to `stable` or `master`, whichever was the
-# base of the topic branch.
-
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
-
-# Third-party dependencies version pinning.
-#
-# We do that in this file, which is copied in all projects, to ensure
-# all projects use the same versions. It avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# By default, Erlang.mk does not rebuild dependencies once they have been
-# compiled, except for those listed in the `$(FORCE_REBUILD)`
-# variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this eases
-# the work on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project
-# repository URL, if it's a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name are replaced by the
-# target's properties:
-# eg. rabbitmq-common is replaced by rabbitmq-codegen
-# eg. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fall back to the
-# RabbitMQ upstream on GitHub.
-
-# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following pattern:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
-
-# Macro to replace both the project's name (eg. "rabbit_common") and
-# repository name (eg. "rabbitmq-common") by the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespaces in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level's one.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is a coincidence that we found a
-# `rabbitmq-components.mk` in upper directories.
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
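
As a concrete illustration of the URL inference performed by the removed rabbitmq-components.mk above (the fork URL is hypothetical): with PROJECT = rabbit_common and a clone whose origin is https://github.com/me/rabbitmq-common.git, fetching the rabbitmq_codegen dependency substitutes the current repository name with the dependency's (rabbitmq-common -> rabbitmq-codegen), yielding https://github.com/me/rabbitmq-codegen.git; only if cloning that URL fails does dep_fetch_git_rmq fall back to the upstream https://github.com/rabbitmq/rabbitmq-codegen.git.
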
diff --git a/deps/amqp_client/src/amqp_auth_mechanisms.erl b/deps/amqp_client/src/amqp_auth_mechanisms.erl
index 549cd17376..96396d97b5 100644
--- a/deps/amqp_client/src/amqp_auth_mechanisms.erl
+++ b/deps/amqp_client/src/amqp_auth_mechanisms.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% @private
diff --git a/deps/amqp_client/src/amqp_channel.erl b/deps/amqp_client/src/amqp_channel.erl
index 9e95df4fe3..b00b43565d 100644
--- a/deps/amqp_client/src/amqp_channel.erl
+++ b/deps/amqp_client/src/amqp_channel.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% @type close_reason(Type) = {shutdown, amqp_reason(Type)}.
@@ -511,7 +511,7 @@ handle_info({bump_credit, Msg}, State) ->
%% @private
handle_info(timed_out_flushing_channel, State) ->
?LOG_WARN("Channel (~p) closing: timed out flushing while "
- "connection closing~n", [self()]),
+ "connection closing", [self()]),
{stop, timed_out_flushing_channel, State};
%% @private
handle_info({'DOWN', _, process, ReturnHandler, shutdown},
@@ -520,7 +520,7 @@ handle_info({'DOWN', _, process, ReturnHandler, shutdown},
handle_info({'DOWN', _, process, ReturnHandler, Reason},
State = #state{return_handler = {ReturnHandler, _Ref}}) ->
?LOG_WARN("Channel (~p): Unregistering return handler ~p because it died. "
- "Reason: ~p~n", [self(), ReturnHandler, Reason]),
+ "Reason: ~p", [self(), ReturnHandler, Reason]),
{noreply, State#state{return_handler = none}};
%% @private
handle_info({'DOWN', _, process, ConfirmHandler, shutdown},
@@ -529,7 +529,7 @@ handle_info({'DOWN', _, process, ConfirmHandler, shutdown},
handle_info({'DOWN', _, process, ConfirmHandler, Reason},
State = #state{confirm_handler = {ConfirmHandler, _Ref}}) ->
?LOG_WARN("Channel (~p): Unregistering confirm handler ~p because it died. "
- "Reason: ~p~n", [self(), ConfirmHandler, Reason]),
+ "Reason: ~p", [self(), ConfirmHandler, Reason]),
{noreply, State#state{confirm_handler = none}};
%% @private
handle_info({'DOWN', _, process, FlowHandler, shutdown},
@@ -538,7 +538,7 @@ handle_info({'DOWN', _, process, FlowHandler, shutdown},
handle_info({'DOWN', _, process, FlowHandler, Reason},
State = #state{flow_handler = {FlowHandler, _Ref}}) ->
?LOG_WARN("Channel (~p): Unregistering flow handler ~p because it died. "
- "Reason: ~p~n", [self(), FlowHandler, Reason]),
+ "Reason: ~p", [self(), FlowHandler, Reason]),
{noreply, State#state{flow_handler = none}};
handle_info({'DOWN', _, process, QPid, _Reason}, State) ->
rabbit_amqqueue_common:notify_sent_queue_down(QPid),
@@ -588,13 +588,13 @@ handle_method_to_server(Method, AmqpMsg, From, Sender, Flow,
From, Sender, Flow, State1)};
{ok, none, BlockReply} ->
?LOG_WARN("Channel (~p): discarding method ~p in cast.~n"
- "Reason: ~p~n", [self(), Method, BlockReply]),
+ "Reason: ~p", [self(), Method, BlockReply]),
{noreply, State};
{ok, _, BlockReply} ->
{reply, BlockReply, State};
{{_, InvalidMethodMessage}, none, _} ->
?LOG_WARN("Channel (~p): ignoring cast of ~p method. " ++
- InvalidMethodMessage ++ "~n", [self(), Method]),
+ InvalidMethodMessage ++ "", [self(), Method]),
{noreply, State};
{{InvalidMethodReply, _}, _, _} ->
{reply, {error, InvalidMethodReply}, State}
@@ -695,7 +695,7 @@ safely_handle_method_from_server(Method, Content,
_ -> false
end,
if Drop -> ?LOG_INFO("Channel (~p): dropping method ~p from "
- "server because channel is closing~n",
+ "server because channel is closing",
[self(), {Method, Content}]),
{noreply, State};
true ->
@@ -776,7 +776,7 @@ handle_method_from_server1(
State = #state{return_handler = ReturnHandler}) ->
case ReturnHandler of
none -> ?LOG_WARN("Channel (~p): received {~p, ~p} but there is "
- "no return handler registered~n",
+ "no return handler registered",
[self(), BasicReturn, AmqpMsg]);
{Pid, _Ref} -> Pid ! {BasicReturn, AmqpMsg}
end,
@@ -791,7 +791,7 @@ handle_method_from_server1(#'basic.ack'{} = BasicAck, none,
handle_method_from_server1(#'basic.nack'{} = BasicNack, none,
#state{confirm_handler = none} = State) ->
?LOG_WARN("Channel (~p): received ~p but there is no "
- "confirm handler registered~n", [self(), BasicNack]),
+ "confirm handler registered", [self(), BasicNack]),
{noreply, update_confirm_set(BasicNack, State)};
handle_method_from_server1(#'basic.nack'{} = BasicNack, none,
#state{confirm_handler = {CH, _Ref}} = State) ->
@@ -835,7 +835,7 @@ handle_connection_closing(CloseType, Reason,
handle_channel_exit(Reason = #amqp_error{name = ErrorName, explanation = Expl},
State = #state{connection = Connection, number = Number}) ->
%% Sent by rabbit_channel for hard errors in the direct case
- ?LOG_ERR("connection ~p, channel ~p - error:~n~p~n",
+ ?LOG_ERR("connection ~p, channel ~p - error:~n~p",
[Connection, Number, Reason]),
{true, Code, _} = ?PROTOCOL:lookup_amqp_exception(ErrorName),
ReportedReason = {server_initiated_close, Code, Expl},
@@ -928,7 +928,7 @@ server_misbehaved(#amqp_error{} = AmqpError, State = #state{number = Number}) ->
handle_shutdown({server_misbehaved, AmqpError}, State);
{_, Close} ->
?LOG_WARN("Channel (~p) flushing and closing due to soft "
- "error caused by the server ~p~n", [self(), AmqpError]),
+ "error caused by the server ~p", [self(), AmqpError]),
Self = self(),
spawn(fun () -> call(Self, Close) end),
{noreply, State}
diff --git a/deps/amqp_client/src/amqp_channel_sup.erl b/deps/amqp_client/src/amqp_channel_sup.erl
index 9bd85ce946..e1a3b6e8ad 100644
--- a/deps/amqp_client/src/amqp_channel_sup.erl
+++ b/deps/amqp_client/src/amqp_channel_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% @private
@@ -47,9 +47,16 @@ start_link(Type, Connection, ConnName, InfraArgs, ChNumber,
start_writer(_Sup, direct, [ConnPid, Node, User, VHost, Collector, AmqpParams],
ConnName, ChNumber, ChPid) ->
- rpc:call(Node, rabbit_direct, start_channel,
- [ChNumber, ChPid, ConnPid, ConnName, ?PROTOCOL, User,
- VHost, ?CLIENT_CAPABILITIES, Collector, AmqpParams]);
+ case rpc:call(Node, rabbit_direct, start_channel,
+ [ChNumber, ChPid, ConnPid, ConnName, ?PROTOCOL, User,
+ VHost, ?CLIENT_CAPABILITIES, Collector, AmqpParams], ?DIRECT_OPERATION_TIMEOUT) of
+ {ok, _Writer} = Reply ->
+ Reply;
+ {badrpc, Reason} ->
+ {error, {Reason, Node}};
+ Error ->
+ Error
+ end;
start_writer(Sup, network, [Sock, FrameMax], ConnName, ChNumber, ChPid) ->
GCThreshold = application:get_env(amqp_client, writer_gc_threshold, ?DEFAULT_GC_THRESHOLD),
supervisor2:start_child(
diff --git a/deps/amqp_client/src/amqp_channel_sup_sup.erl b/deps/amqp_client/src/amqp_channel_sup_sup.erl
index 720b0e5726..314abb49da 100644
--- a/deps/amqp_client/src/amqp_channel_sup_sup.erl
+++ b/deps/amqp_client/src/amqp_channel_sup_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% @private
diff --git a/deps/amqp_client/src/amqp_channels_manager.erl b/deps/amqp_client/src/amqp_channels_manager.erl
index 2a8d427dc4..c435ba6502 100644
--- a/deps/amqp_client/src/amqp_channels_manager.erl
+++ b/deps/amqp_client/src/amqp_channels_manager.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% @private
@@ -203,7 +203,7 @@ internal_pass_frame(Number, Frame, State) ->
case internal_lookup_npa(Number, State) of
undefined ->
?LOG_INFO("Dropping frame ~p for invalid or closed "
- "channel number ~p~n", [Frame, Number]),
+ "channel number ~p", [Frame, Number]),
State;
{ChPid, AState} ->
NewAState = process_channel_frame(Frame, Number, ChPid, AState),
diff --git a/deps/amqp_client/src/amqp_client.erl b/deps/amqp_client/src/amqp_client.erl
index cf85c1b04c..7989ca4a23 100644
--- a/deps/amqp_client/src/amqp_client.erl
+++ b/deps/amqp_client/src/amqp_client.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% @private
diff --git a/deps/amqp_client/src/amqp_connection.erl b/deps/amqp_client/src/amqp_connection.erl
index 6800a44a3e..67cb43b6a4 100644
--- a/deps/amqp_client/src/amqp_connection.erl
+++ b/deps/amqp_client/src/amqp_connection.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% @type close_reason(Type) = {shutdown, amqp_reason(Type)}.
@@ -170,6 +170,7 @@ start(AmqpParams, ConnName) when ConnName == undefined; is_binary(ConnName) ->
end,
AmqpParams2 = set_connection_name(ConnName, AmqpParams1),
AmqpParams3 = amqp_ssl:maybe_enhance_ssl_options(AmqpParams2),
+ ok = ensure_safe_call_timeout(AmqpParams3, amqp_util:call_timeout()),
{ok, _Sup, Connection} = amqp_sup:start_connection_sup(AmqpParams3),
amqp_gen_connection:connect(Connection).
@@ -393,3 +394,30 @@ connection_name(ConnectionPid) ->
{<<"connection_name">>, _, ConnName} -> ConnName;
false -> undefined
end.
+
+ensure_safe_call_timeout(#amqp_params_network{connection_timeout = ConnTimeout}, CallTimeout) ->
+ maybe_update_call_timeout(ConnTimeout, CallTimeout);
+ensure_safe_call_timeout(#amqp_params_direct{}, CallTimeout) ->
+ case net_kernel:get_net_ticktime() of
+ NetTicktime when is_integer(NetTicktime) ->
+ maybe_update_call_timeout(tick_or_direct_timeout(NetTicktime * 1000),
+ CallTimeout);
+ {ongoing_change_to, NetTicktime} ->
+ maybe_update_call_timeout(tick_or_direct_timeout(NetTicktime * 1000),
+ CallTimeout);
+ ignored ->
+ maybe_update_call_timeout(?DIRECT_OPERATION_TIMEOUT, CallTimeout)
+ end.
+
+maybe_update_call_timeout(BaseTimeout, CallTimeout)
+ when is_integer(BaseTimeout), CallTimeout > BaseTimeout ->
+ ok;
+maybe_update_call_timeout(BaseTimeout, CallTimeout) ->
+ EffectiveSafeCallTimeout = amqp_util:safe_call_timeout(BaseTimeout),
+ ?LOG_WARN("AMQP 0-9-1 client call timeout was ~p ms, is updated to a safe effective "
+ "value of ~p ms", [CallTimeout, EffectiveSafeCallTimeout]),
+ amqp_util:update_call_timeout(EffectiveSafeCallTimeout),
+ ok.
+
+tick_or_direct_timeout(Timeout) when Timeout >= ?DIRECT_OPERATION_TIMEOUT -> Timeout;
+tick_or_direct_timeout(_Timeout) -> ?DIRECT_OPERATION_TIMEOUT.
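
The ensure_safe_call_timeout/2 logic added above widens the client's gen_server call timeout whenever it is not already larger than the relevant base: the configured connection timeout for network connections, or the net tick time in milliseconds (floored at ?DIRECT_OPERATION_TIMEOUT) for direct connections. A minimal sketch of that rule, using 120000 ms for ?DIRECT_OPERATION_TIMEOUT (the 120 s figure quoted in the safe_call_timeouts test below) and a purely illustrative placeholder for ?CALL_TIMEOUT_DEVIATION:

    %% Sketch only; macro values are illustrative, not the library's actual ones.
    -module(call_timeout_sketch).
    -export([effective_call_timeout/2]).

    -define(DIRECT_OPERATION_TIMEOUT, 120000).
    -define(CALL_TIMEOUT_DEVIATION, 10000).

    %% Base is the connection timeout (network) or the net tick time in ms,
    %% floored at ?DIRECT_OPERATION_TIMEOUT (direct). The current call
    %% timeout is only ever widened, never narrowed.
    effective_call_timeout(Base, CurrentCallTimeout)
      when is_integer(Base), CurrentCallTimeout > Base ->
        CurrentCallTimeout;
    effective_call_timeout(Base, _CurrentCallTimeout) ->
        Base + ?CALL_TIMEOUT_DEVIATION.
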
diff --git a/deps/amqp_client/src/amqp_connection_sup.erl b/deps/amqp_client/src/amqp_connection_sup.erl
index b71fb54fd4..44848f42ea 100644
--- a/deps/amqp_client/src/amqp_connection_sup.erl
+++ b/deps/amqp_client/src/amqp_connection_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% @private
diff --git a/deps/amqp_client/src/amqp_connection_type_sup.erl b/deps/amqp_client/src/amqp_connection_type_sup.erl
index f67dc56836..19f63a0512 100644
--- a/deps/amqp_client/src/amqp_connection_type_sup.erl
+++ b/deps/amqp_client/src/amqp_connection_type_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% @private
diff --git a/deps/amqp_client/src/amqp_direct_connection.erl b/deps/amqp_client/src/amqp_direct_connection.erl
index a07c67074e..938662a5f8 100644
--- a/deps/amqp_client/src/amqp_direct_connection.erl
+++ b/deps/amqp_client/src/amqp_direct_connection.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% @private
@@ -143,7 +143,7 @@ connect(Params = #amqp_params_direct{username = Username,
DecryptedPassword = credentials_obfuscation:decrypt(Password),
case rpc:call(Node, rabbit_direct, connect,
[{Username, DecryptedPassword}, VHost, ?PROTOCOL, self(),
- connection_info(State1)]) of
+ connection_info(State1)], ?DIRECT_OPERATION_TIMEOUT) of
{ok, {User, ServerProperties}} ->
{ok, ChMgr, Collector} = SIF(i(name, State1)),
State2 = State1#state{user = User,
@@ -158,8 +158,8 @@ connect(Params = #amqp_params_direct{username = Username,
{ok, {ServerProperties, 0, ChMgr, State2}};
{error, _} = E ->
E;
- {badrpc, nodedown} ->
- {error, {nodedown, Node}}
+ {badrpc, Reason} ->
+ {error, {Reason, Node}}
end.
ensure_adapter_info(none) ->
@@ -195,14 +195,14 @@ socket_adapter_info(Sock, Protocol) ->
maybe_ssl_info(Sock) ->
RealSocket = rabbit_net:unwrap_socket(Sock),
- case rabbit_net:is_ssl(RealSocket) of
- true -> [{ssl, true}] ++ ssl_info(RealSocket) ++ ssl_cert_info(RealSocket);
- false -> [{ssl, false}]
+ case rabbit_net:proxy_ssl_info(RealSocket, rabbit_net:maybe_get_proxy_socket(Sock)) of
+ nossl -> [{ssl, false}];
+ Info -> [{ssl, true}] ++ ssl_info(Info) ++ ssl_cert_info(RealSocket)
end.
-ssl_info(Sock) ->
+ssl_info(Info) ->
{Protocol, KeyExchange, Cipher, Hash} =
- case rabbit_net:ssl_info(Sock) of
+ case Info of
{ok, Infos} ->
{_, P} = lists:keyfind(protocol, 1, Infos),
#{cipher := C,
diff --git a/deps/amqp_client/src/amqp_direct_consumer.erl b/deps/amqp_client/src/amqp_direct_consumer.erl
index 74517a03c8..f2bded5a83 100644
--- a/deps/amqp_client/src/amqp_direct_consumer.erl
+++ b/deps/amqp_client/src/amqp_direct_consumer.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% @doc This module is an implementation of the amqp_gen_consumer
diff --git a/deps/amqp_client/src/amqp_gen_connection.erl b/deps/amqp_client/src/amqp_gen_connection.erl
index 5c826a5b5f..3bd63996f1 100644
--- a/deps/amqp_client/src/amqp_gen_connection.erl
+++ b/deps/amqp_client/src/amqp_gen_connection.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% @private
@@ -49,12 +49,14 @@ connect(Pid) ->
gen_server:call(Pid, connect, amqp_util:call_timeout()).
open_channel(Pid, ProposedNumber, Consumer) ->
- case gen_server:call(Pid,
+ try gen_server:call(Pid,
{command, {open_channel, ProposedNumber, Consumer}},
amqp_util:call_timeout()) of
{ok, ChannelPid} -> ok = amqp_channel:open(ChannelPid),
{ok, ChannelPid};
Error -> Error
+ catch
+ _:Reason -> {error, Reason}
end.
hard_error_in_channel(Pid, ChannelPid, Reason) ->
@@ -201,7 +203,7 @@ handle_cast(channels_terminated, State) ->
handle_cast({hard_error_in_channel, _Pid, Reason}, State) ->
server_initiated_close(Reason, State);
handle_cast({channel_internal_error, Pid, Reason}, State) ->
- ?LOG_WARN("Connection (~p) closing: internal error in channel (~p): ~p~n",
+ ?LOG_WARN("Connection (~p) closing: internal error in channel (~p): ~p",
[self(), Pid, Reason]),
internal_error(Pid, Reason, State);
handle_cast({server_misbehaved, AmqpError}, State) ->
@@ -216,12 +218,12 @@ handle_cast({register_blocked_handler, HandlerPid}, State) ->
handle_info({'DOWN', _, process, BlockHandler, Reason},
State = #state{block_handler = {BlockHandler, _Ref}}) ->
?LOG_WARN("Connection (~p): Unregistering connection.{blocked,unblocked} handler ~p because it died. "
- "Reason: ~p~n", [self(), BlockHandler, Reason]),
+ "Reason: ~p", [self(), BlockHandler, Reason]),
{noreply, State#state{block_handler = none}};
handle_info({'EXIT', BlockHandler, Reason},
State = #state{block_handler = {BlockHandler, Ref}}) ->
?LOG_WARN("Connection (~p): Unregistering connection.{blocked,unblocked} handler ~p because it died. "
- "Reason: ~p~n", [self(), BlockHandler, Reason]),
+ "Reason: ~p", [self(), BlockHandler, Reason]),
erlang:demonitor(Ref, [flush]),
{noreply, State#state{block_handler = none}};
%% propagate the exit to the module that will stop with a sensible reason logged
@@ -327,12 +329,12 @@ internal_error(Pid, Reason, State) ->
server_initiated_close(Close, State) ->
?LOG_WARN("Connection (~p) closing: received hard error ~p "
- "from server~n", [self(), Close]),
+ "from server", [self(), Close]),
set_closing_state(abrupt, #closing{reason = server_initiated_close,
close = Close}, State).
server_misbehaved_close(AmqpError, State) ->
- ?LOG_WARN("Connection (~p) closing: server misbehaved: ~p~n",
+ ?LOG_WARN("Connection (~p) closing: server misbehaved: ~p",
[self(), AmqpError]),
{0, Close} = rabbit_binary_generator:map_exception(0, AmqpError, ?PROTOCOL),
set_closing_state(abrupt, #closing{reason = server_misbehaved,
diff --git a/deps/amqp_client/src/amqp_gen_consumer.erl b/deps/amqp_client/src/amqp_gen_consumer.erl
index 9caea78d8a..c24cbb819a 100644
--- a/deps/amqp_client/src/amqp_gen_consumer.erl
+++ b/deps/amqp_client/src/amqp_gen_consumer.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% @doc A behaviour module for implementing consumers for
diff --git a/deps/amqp_client/src/amqp_main_reader.erl b/deps/amqp_client/src/amqp_main_reader.erl
index 60cd93d03b..d71b3aff21 100644
--- a/deps/amqp_client/src/amqp_main_reader.erl
+++ b/deps/amqp_client/src/amqp_main_reader.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% @private
diff --git a/deps/amqp_client/src/amqp_network_connection.erl b/deps/amqp_client/src/amqp_network_connection.erl
index 975ea591da..8a798659fc 100644
--- a/deps/amqp_client/src/amqp_network_connection.erl
+++ b/deps/amqp_client/src/amqp_network_connection.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% @private
@@ -108,9 +108,8 @@ info_keys() ->
connect(AmqpParams = #amqp_params_network{host = Host}, SIF, TypeSup, State) ->
case gethostaddr(Host) of
- [] -> {error, unknown_host};
- [AF|_] -> do_connect(
- AF, AmqpParams, SIF, State#state{type_sup = TypeSup})
+ {error, Reason} -> {error, Reason};
+ AF -> do_connect(AF, AmqpParams, SIF, State#state{type_sup = TypeSup})
end.
do_connect({Addr, Family},
@@ -163,9 +162,15 @@ inet_address_preference() ->
end.
gethostaddr(Host) ->
- Lookups = [{Family, inet:getaddr(Host, Family)}
- || Family <- inet_address_preference()],
- [{IP, Family} || {Family, {ok, IP}} <- Lookups].
+ resolve_address(Host, inet_address_preference()).
+
+resolve_address(Host, [Family | Remaining]) ->
+ case inet:getaddr(Host, Family) of
+ {ok, IP} -> {IP, Family};
+ _ -> resolve_address(Host, Remaining)
+ end;
+resolve_address(_Host, []) ->
+ {error, unknown_host}.
try_handshake(AmqpParams, SIF, State = #state{sock = Sock}) ->
Name = case rabbit_net:connection_string(Sock, outbound) of
@@ -179,12 +184,16 @@ try_handshake(AmqpParams, SIF, State = #state{sock = Sock}) ->
end.
handshake(AmqpParams, SIF, State0 = #state{sock = Sock}) ->
- ok = rabbit_net:send(Sock, ?PROTOCOL_HEADER),
- case start_infrastructure(SIF, State0) of
- {ok, ChMgr, State1} ->
- network_handshake(AmqpParams, {ChMgr, State1});
- {error, Reason} ->
- {error, Reason}
+ case rabbit_net:send(Sock, ?PROTOCOL_HEADER) of
+ ok ->
+ case start_infrastructure(SIF, State0) of
+ {ok, ChMgr, State1} ->
+ network_handshake(AmqpParams, {ChMgr, State1});
+ {error, Reason} ->
+ {error, Reason}
+ end;
+ {error, Reason} ->
+ {error, Reason}
end.
start_infrastructure(SIF, State = #state{sock = Sock, name = Name}) ->
@@ -308,7 +317,7 @@ client_properties(UserProperties) ->
{<<"version">>, longstr, list_to_binary(Vsn)},
{<<"platform">>, longstr, <<"Erlang">>},
{<<"copyright">>, longstr,
- <<"Copyright (c) 2007-2020 VMware, Inc. or its affiliates.">>},
+ <<"Copyright (c) 2007-2021 VMware, Inc. or its affiliates.">>},
{<<"information">>, longstr,
<<"Licensed under the MPL. "
"See https://www.rabbitmq.com/">>},
diff --git a/deps/amqp_client/src/amqp_rpc_client.erl b/deps/amqp_client/src/amqp_rpc_client.erl
index 3fd9a34650..e8387ab4ba 100644
--- a/deps/amqp_client/src/amqp_rpc_client.erl
+++ b/deps/amqp_client/src/amqp_rpc_client.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% @doc This module allows the simple execution of an asynchronous RPC over
diff --git a/deps/amqp_client/src/amqp_rpc_server.erl b/deps/amqp_client/src/amqp_rpc_server.erl
index 44e5113a94..929a4c0ef9 100644
--- a/deps/amqp_client/src/amqp_rpc_server.erl
+++ b/deps/amqp_client/src/amqp_rpc_server.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% @doc This is a utility module that is used to expose an arbitrary function
diff --git a/deps/amqp_client/src/amqp_selective_consumer.erl b/deps/amqp_client/src/amqp_selective_consumer.erl
index da1138654e..de3c39de57 100644
--- a/deps/amqp_client/src/amqp_selective_consumer.erl
+++ b/deps/amqp_client/src/amqp_selective_consumer.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% @doc This module is an implementation of the amqp_gen_consumer
diff --git a/deps/amqp_client/src/amqp_ssl.erl b/deps/amqp_client/src/amqp_ssl.erl
index ff2bddd55a..319b7f9647 100644
--- a/deps/amqp_client/src/amqp_ssl.erl
+++ b/deps/amqp_client/src/amqp_ssl.erl
@@ -5,7 +5,6 @@
-include_lib("public_key/include/public_key.hrl").
-export([maybe_enhance_ssl_options/1,
- add_verify_fun_to_opts/2,
verify_fun/3]).
maybe_enhance_ssl_options(Params = #amqp_params_network{ssl_options = none}) ->
@@ -26,27 +25,16 @@ maybe_add_sni_0(false, Host, Options) ->
% server_name_indication at all. If Host is a DNS host name,
% we will specify server_name_indication via code
maybe_add_sni_1(inet_parse:domain(Host), Host, Options);
-maybe_add_sni_0({server_name_indication, disable}, _Host, Options) ->
+maybe_add_sni_0({server_name_indication, _DisableOrSniHost}, _Host, Options) ->
% NB: this is the case where the user explicitly disabled
- % server_name_indication
- Options;
-maybe_add_sni_0({server_name_indication, SniHost}, _Host, Options) ->
- % NB: this is the case where the user explicitly specified
- % an SNI host name. We may need to add verify_fun for OTP 19
- maybe_add_verify_fun(lists:keymember(verify_fun, 1, Options), SniHost, Options).
+ % server_name_indication or explicitly specified an SNI host name.
+ Options.
maybe_add_sni_1(false, _Host, Options) ->
% NB: host is not a DNS host name, so nothing to add
Options;
maybe_add_sni_1(true, Host, Options) ->
- Opts1 = [{server_name_indication, Host} | Options],
- maybe_add_verify_fun(lists:keymember(verify_fun, 1, Opts1), Host, Opts1).
-
-maybe_add_verify_fun(true, _Host, Options) ->
- % NB: verify_fun already present, don't add twice
- Options;
-maybe_add_verify_fun(false, Host, Options) ->
- add_verify_fun_to_opts(lists:keyfind(verify, 1, Options), Host, Options).
+ [{server_name_indication, Host} | Options].
maybe_add_verify(Options) ->
case lists:keymember(verify, 1, Options) of
@@ -54,33 +42,10 @@ maybe_add_verify(Options) ->
% NB: user has explicitly set 'verify'
Options;
_ ->
- ?LOG_WARN("Connection (~p): Certificate chain verification is not enabled for this TLS connection. "
- "Please see https://rabbitmq.com/ssl.html for more information.~n", [self()]),
+ ?LOG_WARN("Connection (~p): certificate chain verification is not enabled for this TLS connection. "
+ "Please see https://rabbitmq.com/ssl.html for more information.", [self()]),
Options
end.
- % TODO FUTURE 3.8.x
- % verify_peer will become the default in RabbitMQ 3.8.0
- % false ->
- % [{verify, verify_peer} | Options]
- % end.
-
-add_verify_fun_to_opts(Host, Options) ->
- add_verify_fun_to_opts(false, Host, Options).
-
-add_verify_fun_to_opts({verify, verify_none}, _Host, Options) ->
- % NB: this is the case where the user explicitly disabled
- % certificate chain verification so there's not much sense
- % in adding verify_fun
- Options;
-add_verify_fun_to_opts(_, Host, Options) ->
- % NB: this is the case where the user either did not
- % set the verify option or set it to verify_peer
- case erlang:system_info(otp_release) of
- "19" ->
- F = fun ?MODULE:verify_fun/3,
- [{verify_fun, {F, Host}} | Options];
- _ -> Options
- end.
-type hostname() :: nonempty_string() | binary().
diff --git a/deps/amqp_client/src/amqp_sup.erl b/deps/amqp_client/src/amqp_sup.erl
index 05bc8e4185..a038aa48d0 100644
--- a/deps/amqp_client/src/amqp_sup.erl
+++ b/deps/amqp_client/src/amqp_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% @private
diff --git a/deps/amqp_client/src/amqp_uri.erl b/deps/amqp_client/src/amqp_uri.erl
index ff545a48b2..408d52fdeb 100644
--- a/deps/amqp_client/src/amqp_uri.erl
+++ b/deps/amqp_client/src/amqp_uri.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(amqp_uri).
diff --git a/deps/amqp_client/src/amqp_util.erl b/deps/amqp_client/src/amqp_util.erl
index df7ce30662..996fbcd7f4 100644
--- a/deps/amqp_client/src/amqp_util.erl
+++ b/deps/amqp_client/src/amqp_util.erl
@@ -2,16 +2,24 @@
-include("amqp_client_internal.hrl").
--export([call_timeout/0]).
+-export([call_timeout/0, update_call_timeout/1, safe_call_timeout/1]).
call_timeout() ->
case get(gen_server_call_timeout) of
undefined ->
Timeout = rabbit_misc:get_env(amqp_client,
gen_server_call_timeout,
- 60000),
+ safe_call_timeout(60000)),
put(gen_server_call_timeout, Timeout),
Timeout;
Timeout ->
Timeout
end.
+
+update_call_timeout(Timeout) ->
+ application:set_env(amqp_client, gen_server_call_timeout, Timeout),
+ put(gen_server_call_timeout, Timeout),
+ ok.
+
+safe_call_timeout(Threshold) ->
+ Threshold + ?CALL_TIMEOUT_DEVIATION.
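
These amqp_util additions let the connection code widen the cached timeout at runtime: update_call_timeout/1 stores the new value both in the application environment (picked up by processes that have not yet cached a value) and in the calling process's dictionary, which is the cache consulted by call_timeout/0. A small usage sketch, with an illustrative 60000 ms threshold:

    %% Illustrative only: widen the cached call timeout by the configured deviation.
    ok = amqp_util:update_call_timeout(amqp_util:safe_call_timeout(60000)),
    Timeout = amqp_util:call_timeout().
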
diff --git a/deps/amqp_client/src/rabbit_routing_util.erl b/deps/amqp_client/src/rabbit_routing_util.erl
index 9d64a1468e..d978230b65 100644
--- a/deps/amqp_client/src/rabbit_routing_util.erl
+++ b/deps/amqp_client/src/rabbit_routing_util.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2013-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2013-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_routing_util).
diff --git a/deps/amqp_client/src/uri_parser.erl b/deps/amqp_client/src/uri_parser.erl
index a2bb98dad7..cf73637681 100644
--- a/deps/amqp_client/src/uri_parser.erl
+++ b/deps/amqp_client/src/uri_parser.erl
@@ -1,7 +1,7 @@
%% This file is a copy of http_uri.erl from the R13B-1 Erlang/OTP
%% distribution with several modifications.
-%% All modifications are Copyright (c) 2009-2020 VMware, Inc. or its affiliates.
+%% All modifications are Copyright (c) 2009-2021 VMware, Inc. or its affiliates.
%% ``The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
diff --git a/deps/amqp_client/test/system_SUITE.erl b/deps/amqp_client/test/system_SUITE.erl
index 9e39e468b7..6e6bc3f93a 100644
--- a/deps/amqp_client/test/system_SUITE.erl
+++ b/deps/amqp_client/test/system_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(system_SUITE).
@@ -77,9 +77,10 @@ all() ->
{hard_error_loop, [{repeat, 100}, parallel], [hard_error]}
]).
-define(COMMON_NON_PARALLEL_TEST_CASES, [
- basic_qos, %% Not parallel because it's time-based.
+ basic_qos, %% Not parallel because it's time-based, or has mocks
connection_failure,
- channel_death
+ channel_death,
+ safe_call_timeouts
]).
groups() ->
@@ -128,8 +129,12 @@ end_per_suite(Config) ->
] ++ rabbit_ct_broker_helpers:teardown_steps()).
ensure_amqp_client_srcdir(Config) ->
- rabbit_ct_helpers:ensure_application_srcdir(Config,
- amqp_client, amqp_client).
+ case rabbit_ct_helpers:get_config(Config, rabbitmq_run_cmd) of
+ undefined ->
+ rabbit_ct_helpers:ensure_application_srcdir(Config,
+ amqp_client, amqp_client);
+ _ -> Config
+ end.
create_unauthorized_user(Config) ->
Cmd = ["add_user", ?UNAUTHORIZED_USER, ?UNAUTHORIZED_USER],
@@ -294,6 +299,111 @@ named_connection(Config) ->
%% -------------------------------------------------------------------
+safe_call_timeouts(Config) ->
+ Params = ?config(amqp_client_conn_params, Config),
+ safe_call_timeouts_test(Params).
+
+safe_call_timeouts_test(Params = #amqp_params_network{}) ->
+ TestConnTimeout = 2000,
+ TestCallTimeout = 1000,
+
+ Params1 = Params#amqp_params_network{connection_timeout = TestConnTimeout},
+
+ %% Normal connection
+ amqp_util:update_call_timeout(TestCallTimeout),
+
+ {ok, Connection1} = amqp_connection:start(Params1),
+ ?assertEqual(TestConnTimeout + ?CALL_TIMEOUT_DEVIATION, amqp_util:call_timeout()),
+
+ ?assertEqual(ok, amqp_connection:close(Connection1)),
+ wait_for_death(Connection1),
+
+ %% Failing connection
+ amqp_util:update_call_timeout(TestCallTimeout),
+
+ ok = meck:new(amqp_network_connection, [passthrough]),
+ ok = meck:expect(amqp_network_connection, connect,
+ fun(_AmqpParams, _SIF, _TypeSup, _State) ->
+ timer:sleep(TestConnTimeout),
+ {error, test_connection_timeout}
+ end),
+
+ ?assertEqual({error, test_connection_timeout}, amqp_connection:start(Params1)),
+
+ ?assertEqual(TestConnTimeout + ?CALL_TIMEOUT_DEVIATION, amqp_util:call_timeout()),
+
+ meck:unload(amqp_network_connection);
+
+safe_call_timeouts_test(Params = #amqp_params_direct{}) ->
+ TestCallTimeout = 30000,
+ NetTicktime0 = net_kernel:get_net_ticktime(),
+ amqp_util:update_call_timeout(TestCallTimeout),
+
+ %% 1. NetTicktime >= DIRECT_OPERATION_TIMEOUT (120s)
+ NetTicktime1 = 140,
+ net_kernel:set_net_ticktime(NetTicktime1, 1),
+ wait_until_net_ticktime(NetTicktime1),
+
+ {ok, Connection1} = amqp_connection:start(Params),
+ ?assertEqual((NetTicktime1 * 1000) + ?CALL_TIMEOUT_DEVIATION,
+ amqp_util:call_timeout()),
+
+ ?assertEqual(ok, amqp_connection:close(Connection1)),
+ wait_for_death(Connection1),
+
+ %% Reset call timeout
+ amqp_util:update_call_timeout(TestCallTimeout),
+
+ %% 2. Transitioning NetTicktime >= DIRECT_OPERATION_TIMEOUT (120s)
+ NetTicktime2 = 120,
+ net_kernel:set_net_ticktime(NetTicktime2, 1),
+ ?assertEqual({ongoing_change_to, NetTicktime2}, net_kernel:get_net_ticktime()),
+
+ {ok, Connection2} = amqp_connection:start(Params),
+ ?assertEqual((NetTicktime2 * 1000) + ?CALL_TIMEOUT_DEVIATION,
+ amqp_util:call_timeout()),
+
+ wait_until_net_ticktime(NetTicktime2),
+
+ ?assertEqual(ok, amqp_connection:close(Connection2)),
+ wait_for_death(Connection2),
+
+ %% Reset call timeout
+ amqp_util:update_call_timeout(TestCallTimeout),
+
+ %% 3. NetTicktime < DIRECT_OPERATION_TIMEOUT (120s)
+ NetTicktime3 = 60,
+ net_kernel:set_net_ticktime(NetTicktime3, 1),
+ wait_until_net_ticktime(NetTicktime3),
+
+ {ok, Connection3} = amqp_connection:start(Params),
+ ?assertEqual((?DIRECT_OPERATION_TIMEOUT + ?CALL_TIMEOUT_DEVIATION),
+ amqp_util:call_timeout()),
+
+ net_kernel:set_net_ticktime(NetTicktime0, 1),
+ wait_until_net_ticktime(NetTicktime0),
+ ?assertEqual(ok, amqp_connection:close(Connection3)),
+ wait_for_death(Connection3),
+
+ %% Failing direct connection
+ amqp_util:update_call_timeout(_LowCallTimeout = 1000),
+
+ ok = meck:new(amqp_direct_connection, [passthrough]),
+ ok = meck:expect(amqp_direct_connection, connect,
+ fun(_AmqpParams, _SIF, _TypeSup, _State) ->
+ timer:sleep(2000),
+ {error, test_connection_timeout}
+ end),
+
+ ?assertEqual({error, test_connection_timeout}, amqp_connection:start(Params)),
+
+ ?assertEqual((?DIRECT_OPERATION_TIMEOUT + ?CALL_TIMEOUT_DEVIATION),
+ amqp_util:call_timeout()),
+
+ meck:unload(amqp_direct_connection).
+
+%% -------------------------------------------------------------------
+
simultaneous_close(Config) ->
{ok, Connection} = new_connection(Config),
%% We pick a high channel number, to avoid any conflict with other
@@ -1298,14 +1408,10 @@ channel_death(Config) ->
{ok, Connection} = new_connection(Config),
{ok, Channel} = amqp_connection:open_channel(Connection),
try
- Ret = amqp_channel:call(Channel, bogus_message),
+ Ret = amqp_channel:call(Channel, {bogus_message, 123}),
throw({unexpected_success, Ret})
catch
- exit:{{badarg,
- [{amqp_channel, is_connection_method, 1, _} | _]}, _} -> ok;
- exit:{{badarg,
- [{erlang, element, [1, bogus_message], []},
- {amqp_channel, is_connection_method, 1, _} | _]}, _} -> ok
+ exit:{{unknown_method_name, bogus_message}, _} -> ok
end,
wait_for_death(Channel),
wait_for_death(Connection).
@@ -1456,30 +1562,38 @@ assert_down_with_error(MonitorRef, CodeAtom) ->
exit(did_not_die)
end.
-set_resource_alarm(memory, Config) ->
- SrcDir = ?config(amqp_client_srcdir, Config),
- Nodename = rabbit_ct_broker_helpers:get_node_config(Config, 0, nodename),
- {ok, _} = rabbit_ct_helpers:make(Config, SrcDir, [
- {"RABBITMQ_NODENAME=~s", [Nodename]},
- "set-resource-alarm", "SOURCE=memory"]);
-set_resource_alarm(disk, Config) ->
- SrcDir = ?config(amqp_client_srcdir, Config),
- Nodename = rabbit_ct_broker_helpers:get_node_config(Config, 0, nodename),
- {ok, _} = rabbit_ct_helpers:make(Config, SrcDir, [
- {"RABBITMQ_NODENAME=~s", [Nodename]},
- "set-resource-alarm", "SOURCE=disk"]).
+wait_until_net_ticktime(NetTicktime) ->
+ case net_kernel:get_net_ticktime() of
+ NetTicktime -> ok;
+ {ongoing_change_to, NetTicktime} ->
+ timer:sleep(1000),
+ wait_until_net_ticktime(NetTicktime);
+ _ ->
+ throw({error, {net_ticktime_not_set, NetTicktime}})
+ end.
-clear_resource_alarm(memory, Config) ->
+set_resource_alarm(Resource, Config)
+ when Resource =:= memory orelse Resource =:= disk ->
SrcDir = ?config(amqp_client_srcdir, Config),
Nodename = rabbit_ct_broker_helpers:get_node_config(Config, 0, nodename),
- {ok, _}= rabbit_ct_helpers:make(Config, SrcDir, [
- {"RABBITMQ_NODENAME=~s", [Nodename]},
- "clear-resource-alarm", "SOURCE=memory"]);
-clear_resource_alarm(disk, Config) ->
+ Cmd = [{"RABBITMQ_NODENAME=~s", [Nodename]},
+ "set-resource-alarm",
+ {"SOURCE=~s", [Resource]}],
+ {ok, _} = case os:getenv("RABBITMQ_RUN") of
+ false -> rabbit_ct_helpers:make(Config, SrcDir, Cmd);
+ Run -> rabbit_ct_helpers:exec([Run | Cmd])
+ end.
+
+clear_resource_alarm(Resource, Config)
+ when Resource =:= memory orelse Resource =:= disk ->
SrcDir = ?config(amqp_client_srcdir, Config),
Nodename = rabbit_ct_broker_helpers:get_node_config(Config, 0, nodename),
- {ok, _}= rabbit_ct_helpers:make(Config, SrcDir, [
- {"RABBITMQ_NODENAME=~s", [Nodename]},
- "clear-resource-alarm", "SOURCE=disk"]).
+ Cmd = [{"RABBITMQ_NODENAME=~s", [Nodename]},
+ "clear-resource-alarm",
+ {"SOURCE=~s", [Resource]}],
+ {ok, _} = case os:getenv("RABBITMQ_RUN") of
+ false -> rabbit_ct_helpers:make(Config, SrcDir, Cmd);
+ Run -> rabbit_ct_helpers:exec([Run | Cmd])
+ end.
fmt(Fmt, Args) -> list_to_binary(rabbit_misc:format(Fmt, Args)).
diff --git a/deps/amqp_client/test/unit_SUITE.erl b/deps/amqp_client/test/unit_SUITE.erl
index 48b4b9de13..6d7466f420 100644
--- a/deps/amqp_client/test/unit_SUITE.erl
+++ b/deps/amqp_client/test/unit_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(unit_SUITE).
@@ -139,18 +139,6 @@ amqp_uri_parsing(_Config) ->
],
?assertEqual(lists:usort(Exp1), lists:usort(TLSOpts1)),
- {ok, #amqp_params_network{host = "host2", ssl_options = TLSOpts2}} =
- amqp_uri:parse("amqps://host2/%2F?verify=verify_peer"
- "&server_name_indication=host2"
- "&cacertfile=/path/to/cacertfile.pem"
- "&certfile=/path/to/certfile.pem"),
- Opts2 = [{certfile, "/path/to/certfile.pem"},
- {cacertfile,"/path/to/cacertfile.pem"},
- {server_name_indication, "host2"},
- {verify, verify_peer}],
- Exp2 = amqp_ssl:add_verify_fun_to_opts("host2", Opts2),
- ?assertEqual(lists:usort(Exp2), lists:usort(TLSOpts2)),
-
{ok, #amqp_params_network{host = "host3", ssl_options = TLSOpts3}} =
amqp_uri:parse("amqps://host3/%2f?verify=verify_peer"
"&fail_if_no_peer_cert=true"),
@@ -171,22 +159,6 @@ amqp_uri_parsing(_Config) ->
{server_name_indication,"host4"}],
?assertEqual(lists:usort(Exp4), lists:usort(TLSOpts4)),
- {ok, #amqp_params_network{host = "host5", ssl_options = TLSOpts5}} =
- amqp_uri:parse("amqps://host5/%2f?server_name_indication=foobar"
- "&verify=verify_peer"),
- Opts5 = [{server_name_indication, "foobar"},
- {verify, verify_peer}],
- Exp5 = amqp_ssl:add_verify_fun_to_opts("foobar", Opts5),
- ?assertEqual(lists:usort(Exp5), lists:usort(TLSOpts5)),
-
- {ok, #amqp_params_network{host = "127.0.0.1", ssl_options = TLSOpts6}} =
- amqp_uri:parse("amqps://127.0.0.1/%2f?server_name_indication=barbaz"
- "&verify=verify_peer"),
- Opts6 = [{server_name_indication, "barbaz"},
- {verify, verify_peer}],
- Exp6 = amqp_ssl:add_verify_fun_to_opts("barbaz", Opts6),
- ?assertEqual(lists:usort(Exp6), lists:usort(TLSOpts6)),
-
{ok, #amqp_params_network{host = "host7", ssl_options = TLSOpts7}} =
amqp_uri:parse("amqps://host7/%2f?server_name_indication=disable"),
?assertEqual(lists:usort([{server_name_indication, disable}]),
diff --git a/deps/rabbit/BUILD.bazel b/deps/rabbit/BUILD.bazel
new file mode 100644
index 0000000000..c7d8687498
--- /dev/null
+++ b/deps/rabbit/BUILD.bazel
@@ -0,0 +1,1080 @@
+load("@bazel-erlang//:bazel_erlang_lib.bzl", "erlc")
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze", "plt")
+load("//:rabbitmq_home.bzl", "rabbitmq_home")
+load("//:rabbitmq_run.bzl", "rabbitmq_run")
+load(
+ "//:rabbitmq.bzl",
+ "RABBITMQ_DIALYZER_OPTS",
+ "RABBITMQ_ERLC_OPTS",
+ "RABBITMQ_TEST_ERLC_OPTS",
+ "assert_suites",
+ "rabbitmq_integration_suite",
+ "rabbitmq_lib",
+ "rabbitmq_suite",
+)
+load(":bats.bzl", "bats")
+
+exports_files([
+ "scripts/rabbitmq-defaults",
+ "scripts/rabbitmq-diagnostics",
+ "scripts/rabbitmq-env",
+ "scripts/rabbitmq-plugins",
+ "scripts/rabbitmq-queues",
+ "scripts/rabbitmq-server",
+ "scripts/rabbitmqctl",
+ "INSTALL",
+])
+
+_APP_ENV = """[
+ {tcp_listeners, [5672]},
+ {num_tcp_acceptors, 10},
+ {ssl_listeners, []},
+ {num_ssl_acceptors, 10},
+ {ssl_options, []},
+ {vm_memory_high_watermark, 0.4},
+ {vm_memory_high_watermark_paging_ratio, 0.5},
+ {vm_memory_calculation_strategy, rss},
+ {memory_monitor_interval, 2500},
+ {disk_free_limit, 50000000}, %% 50MB
+ {msg_store_index_module, rabbit_msg_store_ets_index},
+ {backing_queue_module, rabbit_variable_queue},
+ %% 0 ("no limit") would make a better default, but that
+ %% breaks the QPid Java client
+ {frame_max, 131072},
+ %% see rabbitmq-server#1593
+ {channel_max, 2047},
+ {connection_max, infinity},
+ {heartbeat, 60},
+ {msg_store_file_size_limit, 16777216},
+ {msg_store_shutdown_timeout, 600000},
+ {fhc_write_buffering, true},
+ {fhc_read_buffering, false},
+ {queue_index_max_journal_entries, 32768},
+ {queue_index_embed_msgs_below, 4096},
+ {default_user, <<"guest">>},
+ {default_pass, <<"guest">>},
+ {default_user_tags, [administrator]},
+ {default_vhost, <<"/">>},
+ {default_permissions, [<<".*">>, <<".*">>, <<".*">>]},
+ {loopback_users, [<<"guest">>]},
+ {password_hashing_module, rabbit_password_hashing_sha256},
+ {server_properties, []},
+ {collect_statistics, none},
+ {collect_statistics_interval, 5000},
+ {mnesia_table_loading_retry_timeout, 30000},
+ {mnesia_table_loading_retry_limit, 10},
+ {auth_mechanisms, ['PLAIN', 'AMQPLAIN']},
+ {auth_backends, [rabbit_auth_backend_internal]},
+ {delegate_count, 16},
+ {trace_vhosts, []},
+ {ssl_cert_login_from, distinguished_name},
+ {ssl_handshake_timeout, 5000},
+ {ssl_allow_poodle_attack, false},
+ {handshake_timeout, 10000},
+ {reverse_dns_lookups, false},
+ {cluster_partition_handling, ignore},
+ {cluster_keepalive_interval, 10000},
+ {autoheal_state_transition_timeout, 60000},
+ {tcp_listen_options, [{backlog, 128},
+ {nodelay, true},
+ {linger, {true, 0}},
+ {exit_on_close, false}
+ ]},
+ {halt_on_upgrade_failure, true},
+ {ssl_apps, [asn1, crypto, public_key, ssl]},
+ %% see rabbitmq-server#114
+ {mirroring_flow_control, true},
+ {mirroring_sync_batch_size, 4096},
+ %% see rabbitmq-server#227 and related tickets.
+ %% msg_store_credit_disc_bound only takes effect when
+ %% messages are persisted to the message store. If messages
+ %% are embedded on the queue index, then modifying this
+ %% setting has no effect because credit_flow is not used when
+ %% writing to the queue index. See the setting
+ %% queue_index_embed_msgs_below above.
+ {msg_store_credit_disc_bound, {4000, 800}},
+ {msg_store_io_batch_size, 4096},
+ %% see rabbitmq-server#143,
+ %% rabbitmq-server#949, rabbitmq-server#1098
+ {credit_flow_default_credit, {400, 200}},
+ {quorum_commands_soft_limit, 32},
+ {quorum_cluster_size, 3},
+ %% see rabbitmq-server#248
+ %% and rabbitmq-server#667
+ {channel_operation_timeout, 15000},
+
+ %% see rabbitmq-server#486
+ {autocluster,
+ [{peer_discovery_backend, rabbit_peer_discovery_classic_config}]
+ },
+ %% used by rabbit_peer_discovery_classic_config
+ {cluster_nodes, {[], disc}},
+
+ {config_entry_decoder, [{passphrase, undefined}]},
+
+ %% rabbitmq-server#973
+ {queue_explicit_gc_run_operation_threshold, 1000},
+ {lazy_queue_explicit_gc_run_operation_threshold, 1000},
+ {background_gc_enabled, false},
+ {background_gc_target_interval, 60000},
+ %% rabbitmq-server#589
+ {proxy_protocol, false},
+ {disk_monitor_failure_retries, 10},
+ {disk_monitor_failure_retry_interval, 120000},
+ %% either "stop_node" or "continue".
+ %% by default we choose to not terminate the entire node if one
+ %% vhost had to shut down, see server#1158 and server#1280
+ {vhost_restart_strategy, continue},
+ %% {global, prefetch count}
+ {default_consumer_prefetch, {false, 0}},
+ %% interval at which the channel can perform periodic actions
+ {channel_tick_interval, 60000},
+ %% Default max message size is 128 MB
+ {max_message_size, 134217728},
+ %% Socket writer will run GC every 1 GB of outgoing data
+ {writer_gc_threshold, 1000000000},
+ %% interval at which connection/channel tracking executes post operations
+ {tracking_execution_timeout, 15000},
+ {stream_messages_soft_limit, 256},
+ {track_auth_attempt_source, false}
+ ]
+"""
+
+DEPS = [
+ "//deps/amqp10_common:bazel_erlang_lib",
+ "//deps/rabbit_common:bazel_erlang_lib",
+ "@ra//:bazel_erlang_lib",
+ "@ranch//:bazel_erlang_lib",
+ "@stdout_formatter//:bazel_erlang_lib",
+ "@syslog//:bazel_erlang_lib",
+]
+
+RUNTIME_DEPS = [
+ "//deps/rabbit/apps/rabbitmq_prelaunch:bazel_erlang_lib",
+ "@cuttlefish//:bazel_erlang_lib",
+ "@observer_cli//:bazel_erlang_lib",
+ "@osiris//:bazel_erlang_lib",
+ "@recon//:bazel_erlang_lib",
+ "@seshat//:bazel_erlang_lib",
+ "@sysmon_handler//:bazel_erlang_lib",
+ "@systemd//:bazel_erlang_lib",
+]
+
+APP_MODULE = "rabbit"
+
+APP_REGISTERED = [
+ "rabbit_amqqueue_sup",
+ "rabbit_direct_client_sup",
+ "rabbit_log",
+ "rabbit_node_monitor",
+ "rabbit_router",
+]
+
+EXTRA_APPS = [
+ "sasl",
+ "rabbitmq_prelaunch",
+ "os_mon",
+ "inets",
+ "compiler",
+ "public_key",
+ "crypto",
+ "ssl",
+ "syntax_tools",
+ "xmerl",
+]
+
+FIRST_SRCS = [
+ "src/rabbit_tracking.erl",
+ "src/rabbit_queue_type.erl",
+ "src/rabbit_credential_validator.erl",
+ "src/rabbit_policy_merge_strategy.erl",
+ "src/rabbit_queue_master_locator.erl",
+]
+
+EXTRA_ERLC_OPTS = [
+ "-DINSTR_MOD=gm",
+]
+
+rabbitmq_lib(
+ app_description = "RabbitMQ",
+ app_env = _APP_ENV,
+ app_module = APP_MODULE,
+ app_name = "rabbit",
+ app_registered = APP_REGISTERED,
+ erlc_opts = RABBITMQ_ERLC_OPTS + EXTRA_ERLC_OPTS,
+ extra_apps = EXTRA_APPS,
+ first_srcs = FIRST_SRCS,
+ test_erlc_opts = RABBITMQ_TEST_ERLC_OPTS + EXTRA_ERLC_OPTS,
+ runtime_deps = RUNTIME_DEPS,
+ deps = DEPS,
+)
+
+xref(tags = ["xref"])
+
+plt_apps = [
+ "mnesia",
+] + EXTRA_APPS
+
+plt_apps.remove("rabbitmq_prelaunch")
+
+plt(
+ name = "base_plt",
+ apps = plt_apps,
+ plt = "//:base_plt",
+)
+
+dialyze(
+ dialyzer_opts = RABBITMQ_DIALYZER_OPTS,
+ plt = ":base_plt",
+ tags = ["dialyze"],
+)
+
+bats(
+ srcs = glob(["test/**/*.bats"]),
+ data = glob(
+ ["scripts/*"],
+ exclude = ["scripts/*.bat"],
+ ),
+)
+
+rabbitmq_home(
+ name = "broker-for-tests-home",
+ testonly = True,
+ plugins = [
+ ":test_bazel_erlang_lib",
+ "//deps/rabbitmq_ct_client_helpers:bazel_erlang_lib",
+ "@inet_tcp_proxy//:bazel_erlang_lib",
+ "@meck//:bazel_erlang_lib",
+ ],
+)
+
+rabbitmq_run(
+ name = "rabbitmq-for-tests-run",
+ testonly = True,
+ home = ":broker-for-tests-home",
+)
+
+erlc(
+ name = "quorum_queue_utils",
+ testonly = True,
+ srcs = [
+ "test/quorum_queue_utils.erl",
+ ],
+ dest = "test",
+ erlc_opts = RABBITMQ_TEST_ERLC_OPTS,
+)
+
+erlc(
+ name = "rabbit_ha_test_consumer",
+ testonly = True,
+ srcs = [
+ "test/rabbit_ha_test_consumer.erl",
+ ],
+ dest = "test",
+ erlc_opts = RABBITMQ_TEST_ERLC_OPTS,
+ deps = [
+ "//deps/amqp_client:bazel_erlang_lib",
+ "//deps/rabbit_common:bazel_erlang_lib",
+ ],
+)
+
+erlc(
+ name = "rabbit_ha_test_producer",
+ testonly = True,
+ srcs = [
+ "test/rabbit_ha_test_producer.erl",
+ ],
+ dest = "test",
+ erlc_opts = RABBITMQ_TEST_ERLC_OPTS,
+ deps = [
+ "//deps/amqp_client:bazel_erlang_lib",
+ "//deps/rabbit_common:bazel_erlang_lib",
+ ],
+)
+
+erlc(
+ name = "test_util",
+ testonly = True,
+ srcs = [
+ "test/test_util.erl",
+ ],
+ dest = "test",
+ erlc_opts = RABBITMQ_TEST_ERLC_OPTS,
+)
+
+PACKAGE = "deps/rabbit"
+
+suites = [
+ rabbitmq_suite(
+ name = "amqqueue_backward_compatibility_SUITE",
+ size = "small",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "backing_queue_SUITE",
+ size = "large",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "channel_interceptor_SUITE",
+ size = "medium",
+ additional_srcs = [
+ "test/dummy_interceptor.erl",
+ "test/failing_dummy_interceptor.erl",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "channel_operation_timeout_SUITE",
+ size = "medium",
+ additional_srcs = [
+ "test/channel_operation_timeout_test_queue.erl",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "cluster_rename_SUITE",
+ size = "large",
+ flaky = True,
+ shard_count = 2,
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "cluster_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "clustering_management_SUITE",
+ size = "large",
+ flaky = True,
+ shard_count = 19,
+ sharding_method = "case",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "config_schema_SUITE",
+ size = "medium",
+ data = [
+ "test/definition_import_SUITE_data/case1.json",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "confirms_rejects_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "consumer_timeout_SUITE",
+ size = "medium",
+ additional_beam = [
+ ":quorum_queue_utils",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "crashing_queues_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "dead_lettering_SUITE",
+ size = "large",
+ additional_beam = [
+ ":quorum_queue_utils",
+ ],
+ flaky = True,
+ shard_count = 3,
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "definition_import_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "disconnect_detected_during_alarm_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "dynamic_ha_SUITE",
+ size = "large",
+ flaky = True,
+ shard_count = 20,
+ sharding_method = "case",
+ deps = [
+ "@proper//:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "dynamic_qq_SUITE",
+ size = "large",
+ additional_beam = [
+ ":quorum_queue_utils",
+ ],
+ flaky = True,
+ deps = [
+ "@proper//:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "eager_sync_SUITE",
+ size = "large",
+ additional_beam = [
+ ":sync_detection_SUITE_beam_files",
+ ],
+ flaky = True,
+ shard_count = 5,
+ sharding_method = "case",
+ tags = ["classic-queue"],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "feature_flags_SUITE",
+ size = "large",
+ flaky = True,
+ shard_count = 5,
+ runtime_deps = [
+ "//deps/rabbit/test/feature_flags_SUITE_data/my_plugin:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "feature_flags_with_unpriveleged_user_SUITE",
+ size = "large",
+ additional_beam = [
+ ":feature_flags_SUITE_beam_files",
+ ],
+ flaky = True,
+ shard_count = 2,
+ # The enabling_* tests chmod files and then expect writes to be blocked.
+ # This probably doesn't work because we are root in the remote docker image.
+ tags = ["exclusive"],
+ runtime_deps = [
+ "//deps/rabbit/test/feature_flags_SUITE_data/my_plugin:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "lazy_queue_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "list_consumers_sanity_check_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "list_queues_online_and_offline_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "logging_SUITE",
+ runtime_deps = [
+ "@syslog//:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "maintenance_mode_SUITE",
+ size = "medium",
+ additional_beam = [
+ ":quorum_queue_utils",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "many_node_ha_SUITE",
+ size = "medium",
+ additional_beam = [
+ ":rabbit_ha_test_consumer",
+ ":rabbit_ha_test_producer",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "message_size_limit_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "metrics_SUITE",
+ size = "medium",
+ deps = [
+ "@proper//:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_suite(
+ name = "mirrored_supervisor_SUITE",
+ size = "small",
+ additional_srcs = [
+ "test/mirrored_supervisor_SUITE_gs.erl",
+ ],
+ deps = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_suite(
+ name = "msg_store_SUITE",
+ size = "small",
+ deps = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "peer_discovery_classic_config_SUITE",
+ size = "medium",
+ flaky = True,
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "peer_discovery_dns_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "per_user_connection_channel_limit_partitions_SUITE",
+ size = "large",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "per_user_connection_channel_limit_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "per_user_connection_channel_tracking_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "per_user_connection_tracking_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "per_vhost_connection_limit_partitions_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "per_vhost_connection_limit_SUITE",
+ size = "medium",
+ flaky = True,
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "per_vhost_msg_store_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "per_vhost_queue_limit_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "policy_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "priority_queue_recovery_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "priority_queue_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "product_info_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "proxy_protocol_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "publisher_confirms_parallel_SUITE",
+ size = "medium",
+ additional_beam = [
+ ":quorum_queue_utils",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "queue_length_limits_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "queue_master_location_SUITE",
+ size = "large",
+ shard_count = 2,
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "queue_parallel_SUITE",
+ size = "large",
+ additional_beam = [
+ ":quorum_queue_utils",
+ ],
+ flaky = True,
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "queue_type_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "quorum_queue_SUITE",
+ size = "large",
+ additional_beam = [
+ ":quorum_queue_utils",
+ ],
+ flaky = True,
+ shard_count = 6,
+ ),
+ rabbitmq_suite(
+ name = "rabbit_confirms_SUITE",
+ size = "small",
+ deps = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "rabbit_core_metrics_gc_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_suite(
+ name = "rabbit_fifo_int_SUITE",
+ size = "medium",
+ runtime_deps = [
+ "@aten//:bazel_erlang_lib",
+ "@gen_batch_server//:bazel_erlang_lib",
+ "@meck//:bazel_erlang_lib",
+ "@ra//:bazel_erlang_lib",
+ ],
+ deps = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_suite(
+ name = "rabbit_fifo_prop_SUITE",
+ size = "medium",
+ additional_beam = [
+ ":test_util",
+ ],
+ additional_hdrs = [
+ "src/rabbit_fifo.hrl",
+ ],
+ erlc_opts = [
+ "-I deps/rabbit", # allow rabbit_fifo.hrl to be included at src/rabbit_fifo.hrl
+ ],
+ runtime_deps = [
+ "@ra//:bazel_erlang_lib",
+ ],
+ deps = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+ "@proper//:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_suite(
+ name = "rabbit_fifo_SUITE",
+ size = "medium",
+ additional_beam = [
+ ":test_util",
+ ":rabbit_fifo_v0_SUITE_beam_files",
+ ],
+ additional_hdrs = [
+ "src/rabbit_fifo.hrl",
+ ],
+ runtime_deps = [
+ "@meck//:bazel_erlang_lib",
+ "@ra//:bazel_erlang_lib",
+ ],
+ deps = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_suite(
+ name = "rabbit_fifo_v0_SUITE",
+ size = "medium",
+ additional_beam = [
+ ":test_util",
+ ],
+ additional_hdrs = [
+ "src/rabbit_fifo_v0.hrl",
+ ],
+ erlc_opts = [
+            "-I deps/rabbit", # allow rabbit_fifo_v0.hrl to be included at src/rabbit_fifo_v0.hrl
+ ],
+ runtime_deps = [
+ "@meck//:bazel_erlang_lib",
+ "@ra//:bazel_erlang_lib",
+ ],
+ deps = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_suite(
+ name = "rabbit_msg_record_SUITE",
+ size = "medium",
+ deps = [
+ "//deps/amqp10_common:bazel_erlang_lib",
+ "//deps/rabbit_common:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_suite(
+ name = "rabbit_stream_coordinator_SUITE",
+ deps = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "rabbit_stream_queue_SUITE",
+ size = "large",
+ additional_beam = [
+ ":quorum_queue_utils",
+ ],
+ flaky = True,
+ shard_count = 12,
+ deps = [
+ "@proper//:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "rabbitmq_queues_cli_integration_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "rabbitmqctl_integration_SUITE",
+ size = "medium",
+ flaky = True,
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "rabbitmqctl_shutdown_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "signal_handling_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "simple_ha_SUITE",
+ size = "large",
+ additional_beam = [
+ ":rabbit_ha_test_consumer",
+ ":rabbit_ha_test_producer",
+ ],
+ shard_count = 4,
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "single_active_consumer_SUITE",
+ size = "medium",
+ additional_beam = [
+ ":quorum_queue_utils",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "sync_detection_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "term_to_binary_compat_prop_SUITE",
+ deps = [
+ "@proper//:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "topic_permission_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "unit_access_control_authn_authz_context_propagation_SUITE",
+ size = "medium",
+ additional_srcs = [
+ "test/rabbit_auth_backend_context_propagation_mock.erl",
+ "test/rabbit_foo_protocol_connection_info.erl",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "unit_access_control_credential_validation_SUITE",
+ size = "medium",
+ deps = [
+ "@proper//:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "unit_access_control_SUITE",
+ size = "medium",
+ additional_srcs = [
+ "test/rabbit_dummy_protocol_connection_info.erl",
+ ],
+ ),
+ rabbitmq_suite(
+ name = "unit_amqp091_content_framing_SUITE",
+ size = "small",
+ deps = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "unit_amqp091_server_properties_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "unit_app_management_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_suite(
+ name = "unit_cluster_formation_locking_mocks_SUITE",
+ size = "small",
+ runtime_deps = [
+ "@meck//:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_suite(
+ name = "unit_collections_SUITE",
+ size = "small",
+ runtime_deps = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_suite(
+ name = "unit_config_value_encryption_SUITE",
+ size = "medium",
+ runtime_deps = [
+ "//deps/rabbit/apps/rabbitmq_prelaunch:test_bazel_erlang_lib",
+ "//deps/rabbit_common:test_bazel_erlang_lib",
+ "@credentials_obfuscation//:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "unit_connection_tracking_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "unit_credit_flow_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "unit_disk_monitor_mocks_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "unit_disk_monitor_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "unit_file_handle_cache_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "unit_gen_server2_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_suite(
+ name = "unit_gm_SUITE",
+ size = "small",
+ runtime_deps = [
+ "//deps/rabbitmq_ct_helpers:bazel_erlang_lib",
+ "@meck//:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "unit_log_management_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_suite(
+ name = "unit_operator_policy_SUITE",
+ size = "small",
+ deps = [
+ "//deps/rabbit_common:test_bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_suite(
+ name = "unit_pg_local_SUITE",
+ size = "small",
+ ),
+ rabbitmq_suite(
+ name = "unit_plugin_directories_SUITE",
+ size = "small",
+ deps = [
+ "//deps/rabbit_common:test_bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "unit_plugin_versioning_SUITE",
+ size = "small",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "unit_policy_validators_SUITE",
+ size = "small",
+ ),
+ rabbitmq_suite(
+ name = "unit_priority_queue_SUITE",
+ size = "small",
+ ),
+ rabbitmq_suite(
+ name = "unit_queue_consumers_SUITE",
+ size = "small",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "unit_stats_and_metrics_SUITE",
+ size = "medium",
+ additional_srcs = [
+ "test/dummy_event_receiver.erl",
+ ],
+ ),
+ rabbitmq_suite(
+ name = "unit_supervisor2_SUITE",
+ size = "small",
+ additional_srcs = [
+ "test/dummy_supervisor2.erl",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "unit_vm_memory_monitor_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "upgrade_preparation_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "vhost_SUITE",
+ size = "medium",
+ flaky = True,
+ ),
+ rabbitmq_suite(
+ name = "unit_classic_mirrored_queue_sync_throttling_SUITE",
+ size = "small",
+ deps = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+ ]
+ ),
+ rabbitmq_suite(
+ name = "unit_classic_mirrored_queue_throughput_SUITE",
+ size = "small",
+ deps = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+ ]
+ ),
+]
+
+assert_suites(
+ suites,
+ glob(["test/**/*_SUITE.erl"]),
+)
+
+filegroup(
+ name = "manpages",
+ srcs = glob([
+ "docs/*.1",
+ "docs/*.2",
+ "docs/*.3",
+ "docs/*.4",
+ "docs/*.5",
+ "docs/*.6",
+ "docs/*.7",
+ "docs/*.8",
+ "docs/*.9",
+ ]),
+)
+
+genrule(
+ name = "manpages-dir",
+ srcs = [":manpages"],
+ outs = ["manpages.tar"],
+ cmd = """set -euo pipefail
+
+DESTDIR=manpages-tmp/share/man
+mkdir -p $${DESTDIR}
+for mp in $(SRCS); do
+ section=$${mp##*.}
+ mkdir -p $${DESTDIR}/man$$section
+ gzip < $$mp \\
+ > $${DESTDIR}/man$$section/$$(basename $$mp).gz
+done
+tar --strip-components 1 -cf $@ manpages-tmp/*
+rm -dr manpages-tmp
+""",
+ visibility = ["//visibility:public"],
+)
+
+genrule(
+ name = "web-manpages",
+ srcs = [":manpages"],
+ outs = ["web-manpages.tar"],
+ cmd = """set -euo pipefail
+
+mkdir web-manpages-tmp
+for mp in $(SRCS); do
+ d=web-manpages-tmp/$$(basename $${mp}).html
+ echo "Converting $$mp to $$d..."
+ mandoc -T html -O 'fragment,man=%N.%S.html' "$$mp" | \\
+ awk '\\
+ /^<table class="head">$$/ { remove_table=1; next; } \\
+ /^<table class="foot">$$/ { remove_table=1; next; } \\
+ /^<\\/table>$$/ { if (remove_table) { remove_table=0; next; } } \\
+ { if (!remove_table) { \\
+ line=$$0; \\
+ gsub(/<h2/, "<h3", line); \\
+ gsub(/<\\/h2>/, "</h3>", line); \\
+ gsub(/<h1/, "<h2", line); \\
+ gsub(/<\\/h1>/, "</h2>", line); \\
+ gsub(/class="D1"/, "class=\"D1 lang-bash\"", line); \\
+ gsub(/class="Bd Bd-indent"/, "class=\"Bd Bd-indent lang-bash\"", line); \\
+ gsub(/&#[xX]201[cCdD];/, "\\&quot;", line); \\
+ print line; \\
+ } } \\
+ ' > "$$d"
+done
+tar --strip-components 1 -cf $@ web-manpages-tmp/*
+rm -dr web-manpages-tmp
+""",
+ visibility = ["//visibility:public"],
+)
diff --git a/deps/rabbit/Makefile b/deps/rabbit/Makefile
index 7d2fae2ea5..df7a72dca5 100644
--- a/deps/rabbit/Makefile
+++ b/deps/rabbit/Makefile
@@ -81,10 +81,13 @@ define PROJECT_ENV
%% rabbitmq-server#949, rabbitmq-server#1098
{credit_flow_default_credit, {400, 200}},
{quorum_commands_soft_limit, 32},
- {quorum_cluster_size, 5},
+ {quorum_cluster_size, 3},
%% see rabbitmq-server#248
%% and rabbitmq-server#667
{channel_operation_timeout, 15000},
+ %% See https://www.rabbitmq.com/consumers.html#acknowledgement-timeout
+ %% 30 minutes
+ {consumer_timeout, 1800000},
%% see rabbitmq-server#486
{autocluster,
@@ -131,15 +134,17 @@ endef
APPS_DIR := $(CURDIR)/apps
LOCAL_DEPS = sasl rabbitmq_prelaunch os_mon inets compiler public_key crypto ssl syntax_tools xmerl
-BUILD_DEPS = rabbitmq_cli syslog
-DEPS = cuttlefish ranch lager rabbit_common ra sysmon_handler stdout_formatter recon observer_cli osiris amqp10_common
+
+BUILD_DEPS = rabbitmq_cli
+DEPS = ranch rabbit_common ra sysmon_handler stdout_formatter recon observer_cli osiris amqp10_common syslog systemd seshat
TEST_DEPS = rabbitmq_ct_helpers rabbitmq_ct_client_helpers amqp_client meck proper
PLT_APPS += mnesia
-dep_cuttlefish = hex 2.4.1
-dep_syslog = git https://github.com/schlagert/syslog 3.4.5
-dep_osiris = git https://github.com/rabbitmq/osiris master
+dep_syslog = git https://github.com/schlagert/syslog 4.0.0
+dep_osiris = git https://github.com/rabbitmq/osiris main
+dep_systemd = hex 0.6.1
+dep_seshat = git https://github.com/rabbitmq/seshat main
define usage_xml_to_erl
$(subst __,_,$(patsubst $(DOCS_DIR)/rabbitmq%.1.xml, src/rabbit_%_usage.erl, $(subst -,_,$(1))))
@@ -162,8 +167,8 @@ DEP_PLUGINS = rabbit_common/mk/rabbitmq-build.mk \
ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
ERLANG_MK_COMMIT = rabbitmq-tmp
-include rabbitmq-components.mk
-include erlang.mk
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
# See above why we mess with `$(APPS_DIR)`.
unexport APPS_DIR
@@ -236,11 +241,14 @@ ct-slow: CT_SUITES = $(SLOW_CT_SUITES)
# --------------------------------------------------------------------
RMQ_ERLC_OPTS += -I $(DEPS_DIR)/rabbit_common/include
+EDOC_OPTS += {preprocess,true}
ifdef INSTRUMENT_FOR_QC
RMQ_ERLC_OPTS += -DINSTR_MOD=gm_qc
+EDOC_OPTS += ,{macros,[{'INSTR_MOD',gm_qc}]}
else
RMQ_ERLC_OPTS += -DINSTR_MOD=gm
+EDOC_OPTS += ,{macros,[{'INSTR_MOD',gm}]}
endif
ifdef CREDIT_FLOW_TRACING
diff --git a/deps/rabbit/apps/rabbitmq_prelaunch/.gitignore b/deps/rabbit/apps/rabbitmq_prelaunch/.gitignore
index adca0d7655..aaf249068c 100644
--- a/deps/rabbit/apps/rabbitmq_prelaunch/.gitignore
+++ b/deps/rabbit/apps/rabbitmq_prelaunch/.gitignore
@@ -5,6 +5,7 @@
*.coverdata
/ebin/
/.erlang.mk/
+/logs/
/rabbitmq_prelaunch.d
/xrefr
diff --git a/deps/rabbit/apps/rabbitmq_prelaunch/BUILD.bazel b/deps/rabbit/apps/rabbitmq_prelaunch/BUILD.bazel
new file mode 100644
index 0000000000..a7cc377e56
--- /dev/null
+++ b/deps/rabbit/apps/rabbitmq_prelaunch/BUILD.bazel
@@ -0,0 +1,45 @@
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze")
+load("//:rabbitmq.bzl", "APP_VERSION", "rabbitmq_lib", "rabbitmq_suite")
+
+APP_NAME = "rabbitmq_prelaunch"
+
+APP_DESCRIPTION = "RabbitMQ prelaunch setup"
+
+APP_MODULE = "rabbit_prelaunch_app"
+
+RUNTIME_DEPS = [
+ "@cuttlefish//:bazel_erlang_lib",
+ "@jsx//:bazel_erlang_lib",
+]
+
+DEPS = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+]
+
+rabbitmq_lib(
+ app_description = APP_DESCRIPTION,
+ app_module = APP_MODULE,
+ app_name = APP_NAME,
+ app_version = APP_VERSION,
+ runtime_deps = RUNTIME_DEPS,
+ deps = DEPS,
+)
+
+xref(
+ additional_libs = [
+ "@ranch//:bazel_erlang_lib",
+ "@systemd//:bazel_erlang_lib",
+ "@osiris//:bazel_erlang_lib",
+ ],
+ tags = ["xref"],
+)
+
+dialyze(
+ plt = "//:base_plt",
+ tags = ["dialyze"],
+)
+
+rabbitmq_suite(
+ name = "rabbit_logger_std_h_SUITE",
+)
diff --git a/deps/rabbit/apps/rabbitmq_prelaunch/Makefile b/deps/rabbit/apps/rabbitmq_prelaunch/Makefile
index 572f7703d4..c95d097907 100644
--- a/deps/rabbit/apps/rabbitmq_prelaunch/Makefile
+++ b/deps/rabbit/apps/rabbitmq_prelaunch/Makefile
@@ -3,9 +3,10 @@ PROJECT_DESCRIPTION = RabbitMQ prelaunch setup
PROJECT_VERSION = 1.0.0
PROJECT_MOD = rabbit_prelaunch_app
-DEPS = rabbit_common lager
+DEPS = rabbit_common cuttlefish jsx
+dep_cuttlefish = hex 3.0.1
DEP_PLUGINS = rabbit_common/mk/rabbitmq-build.mk
-include ../../rabbitmq-components.mk
-include ../../erlang.mk
+include ../../../../rabbitmq-components.mk
+include ../../../../erlang.mk
diff --git a/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_boot_state.erl b/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_boot_state.erl
index c76824e7be..7aeffec830 100644
--- a/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_boot_state.erl
+++ b/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_boot_state.erl
@@ -3,20 +3,29 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2019-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2019-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_boot_state).
+-include_lib("kernel/include/logger.hrl").
-include_lib("eunit/include/eunit.hrl").
+-include_lib("rabbit_common/include/logging.hrl").
+
-export([get/0,
set/1,
- wait_for/2]).
+ wait_for/2,
+ has_reached/1,
+ has_reached_and_is_active/1]).
--define(PT_KEY_BOOT_STATE, {?MODULE, boot_state}).
+-define(PT_KEY_BOOT_STATE, {?MODULE, boot_state}).
--type boot_state() :: 'stopped' | 'booting' | 'ready' | 'stopping'.
+-type boot_state() :: stopped |
+ booting |
+ core_started |
+ ready |
+ stopping.
-export_type([boot_state/0]).
@@ -26,7 +35,8 @@ get() ->
-spec set(boot_state()) -> ok.
set(BootState) ->
- rabbit_log_prelaunch:debug("Change boot state to `~s`", [BootState]),
+ ?LOG_DEBUG("Change boot state to `~s`", [BootState],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
?assert(is_valid(BootState)),
case BootState of
stopped -> persistent_term:erase(?PT_KEY_BOOT_STATE);
@@ -36,7 +46,7 @@ set(BootState) ->
-spec wait_for(boot_state(), timeout()) -> ok | {error, timeout}.
wait_for(BootState, infinity) ->
- case is_reached(BootState) of
+ case has_reached(BootState) of
true -> ok;
false -> Wait = 200,
timer:sleep(Wait),
@@ -44,7 +54,7 @@ wait_for(BootState, infinity) ->
end;
wait_for(BootState, Timeout)
when is_integer(Timeout) andalso Timeout >= 0 ->
- case is_reached(BootState) of
+ case has_reached(BootState) of
true -> ok;
false -> Wait = 200,
timer:sleep(Wait),
@@ -53,24 +63,35 @@ wait_for(BootState, Timeout)
wait_for(_, _) ->
{error, timeout}.
-boot_state_idx(stopped) -> 0;
-boot_state_idx(booting) -> 1;
-boot_state_idx(ready) -> 2;
-boot_state_idx(stopping) -> 3.
+boot_state_idx(stopped) -> 0;
+boot_state_idx(booting) -> 1;
+boot_state_idx(core_started) -> 2;
+boot_state_idx(ready) -> 3;
+boot_state_idx(stopping) -> 4.
is_valid(BootState) ->
is_integer(boot_state_idx(BootState)).
-is_reached(TargetBootState) ->
- is_reached(?MODULE:get(), TargetBootState).
+has_reached(TargetBootState) ->
+ has_reached(?MODULE:get(), TargetBootState).
-is_reached(CurrentBootState, CurrentBootState) ->
+has_reached(CurrentBootState, CurrentBootState) ->
true;
-is_reached(stopping, stopped) ->
+has_reached(stopping, stopped) ->
false;
-is_reached(_CurrentBootState, stopped) ->
+has_reached(_CurrentBootState, stopped) ->
true;
-is_reached(stopped, _TargetBootState) ->
+has_reached(stopped, _TargetBootState) ->
true;
-is_reached(CurrentBootState, TargetBootState) ->
+has_reached(CurrentBootState, TargetBootState) ->
boot_state_idx(TargetBootState) =< boot_state_idx(CurrentBootState).
+
+has_reached_and_is_active(TargetBootState) ->
+ case ?MODULE:get() of
+ stopped ->
+ false;
+ CurrentBootState ->
+ has_reached(CurrentBootState, TargetBootState)
+ andalso
+ not has_reached(CurrentBootState, stopping)
+ end.
diff --git a/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_boot_state_sup.erl b/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_boot_state_sup.erl
index fbdc5781fc..066e5417de 100644
--- a/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_boot_state_sup.erl
+++ b/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_boot_state_sup.erl
@@ -3,7 +3,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_boot_state_sup).
@@ -18,13 +18,17 @@ start_link() ->
supervisor:start_link({local, ?MODULE}, ?MODULE, []).
init([]) ->
- SystemdSpec = #{id => rabbit_boot_state_systemd,
+ SystemdSpec = #{id => systemd,
start => {rabbit_boot_state_systemd, start_link, []},
restart => transient},
+ XtermTitlebarSpec = #{id => xterm_titlebar,
+ start => {rabbit_boot_state_xterm_titlebar,
+ start_link, []},
+ restart => transient},
{ok, {#{strategy => one_for_one,
intensity => 1,
period => 5},
- [SystemdSpec]}}.
+ [SystemdSpec, XtermTitlebarSpec]}}.
-spec notify_boot_state_listeners(rabbit_boot_state:boot_state()) -> ok.
notify_boot_state_listeners(BootState) ->
diff --git a/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_boot_state_systemd.erl b/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_boot_state_systemd.erl
index f838535b6a..40cd1ec804 100644
--- a/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_boot_state_systemd.erl
+++ b/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_boot_state_systemd.erl
@@ -2,13 +2,17 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2015-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2015-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_boot_state_systemd).
-behaviour(gen_server).
+-include_lib("kernel/include/logger.hrl").
+
+-include_lib("rabbit_common/include/logging.hrl").
+
-export([start_link/0]).
-export([init/1,
@@ -17,42 +21,20 @@
terminate/2,
code_change/3]).
--record(state, {mechanism,
- sd_notify_module,
- socket}).
-
-define(LOG_PREFIX, "Boot state/systemd: ").
start_link() ->
gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
init([]) ->
- case os:type() of
- {unix, _} ->
- case code:load_file(sd_notify) of
- {module, sd_notify} ->
- {ok, #state{mechanism = legacy,
- sd_notify_module = sd_notify}};
- {error, _} ->
- case os:getenv("NOTIFY_SOCKET") of
- false ->
- ignore;
- "" ->
- ignore;
- Socket ->
- {ok, #state{mechanism = socat,
- socket = Socket}}
- end
- end;
- _ ->
- ignore
- end.
+ {ok, _} = application:ensure_all_started(systemd),
+ {ok, #{}}.
handle_call(_Request, _From, State) ->
{noreply, State}.
handle_cast({notify_boot_state, BootState}, State) ->
- notify_boot_state(BootState, State),
+ _ = notify_boot_state(BootState),
{noreply, State}.
terminate(normal, _State) ->
@@ -61,114 +43,33 @@ terminate(normal, _State) ->
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
-%%% Private
-
-notify_boot_state(ready = BootState,
- #state{mechanism = legacy, sd_notify_module = SDNotify}) ->
- rabbit_log_prelaunch:debug(
- ?LOG_PREFIX "notifying of state `~s` (via native module)",
- [BootState]),
- sd_notify_legacy(SDNotify);
-notify_boot_state(ready = BootState,
- #state{mechanism = socat, socket = Socket}) ->
- rabbit_log_prelaunch:debug(
- ?LOG_PREFIX "notifying of state `~s` (via socat(1))",
- [BootState]),
- sd_notify_socat(Socket);
-notify_boot_state(BootState, _) ->
- rabbit_log_prelaunch:debug(
- ?LOG_PREFIX "ignoring state `~s`",
- [BootState]),
- ok.
-
-sd_notify_message() ->
- "READY=1\nSTATUS=Initialized\nMAINPID=" ++ os:getpid() ++ "\n".
-
-sd_notify_legacy(SDNotify) ->
- SDNotify:sd_notify(0, sd_notify_message()).
-
-%% socat(1) is the most portable way the sd_notify could be
-%% implemented in erlang, without introducing some NIF. Currently the
-%% following issues prevent us from implementing it in a more
-%% reasonable way:
-%% - systemd-notify(1) is unstable for non-root users
-%% - erlang doesn't support unix domain sockets.
-%%
-%% Some details on how we ended with such a solution:
-%% https://github.com/rabbitmq/rabbitmq-server/issues/664
-sd_notify_socat(Socket) ->
- case sd_current_unit() of
- {ok, Unit} ->
- rabbit_log_prelaunch:debug(
- ?LOG_PREFIX "systemd unit for activation check: \"~s\"~n",
- [Unit]),
- sd_notify_socat(Socket, Unit);
- _ ->
- ok
- end.
-
-sd_notify_socat(Socket, Unit) ->
- try sd_open_port(Socket) of
- Port ->
- Port ! {self(), {command, sd_notify_message()}},
- Result = sd_wait_activation(Port, Unit),
- port_close(Port),
- Result
- catch
- Class:Reason ->
- rabbit_log_prelaunch:debug(
- ?LOG_PREFIX "Failed to start socat(1): ~p:~p~n",
- [Class, Reason]),
- false
- end.
-
-sd_current_unit() ->
- CmdOut = os:cmd("ps -o unit= -p " ++ os:getpid()),
- Ret = (catch re:run(CmdOut,
- "([-.@0-9a-zA-Z]+)",
- [unicode, {capture, all_but_first, list}])),
- case Ret of
- {'EXIT', _} -> error;
- {match, [Unit]} -> {ok, Unit};
- _ -> error
- end.
-
-socat_socket_arg("@" ++ AbstractUnixSocket) ->
- "abstract-sendto:" ++ AbstractUnixSocket;
-socat_socket_arg(UnixSocket) ->
- "unix-sendto:" ++ UnixSocket.
-
-sd_open_port(Socket) ->
- open_port(
- {spawn_executable, os:find_executable("socat")},
- [{args, [socat_socket_arg(Socket), "STDIO"]},
- use_stdio, out]).
-
-sd_wait_activation(Port, Unit) ->
- case os:find_executable("systemctl") of
- false ->
- rabbit_log_prelaunch:debug(
- ?LOG_PREFIX "systemctl(1) unavailable, falling back to sleep~n"),
- timer:sleep(5000),
- ok;
- _ ->
- sd_wait_activation(Port, Unit, 10)
- end.
-
-sd_wait_activation(_, _, 0) ->
- rabbit_log_prelaunch:debug(
- ?LOG_PREFIX "service still in 'activating' state, bailing out~n"),
- ok;
-sd_wait_activation(Port, Unit, AttemptsLeft) ->
- Ret = os:cmd("systemctl show --property=ActiveState -- '" ++ Unit ++ "'"),
- case Ret of
- "ActiveState=activating\n" ->
- timer:sleep(1000),
- sd_wait_activation(Port, Unit, AttemptsLeft - 1);
- "ActiveState=" ++ _ ->
- ok;
- _ = Err ->
- rabbit_log_prelaunch:debug(
- ?LOG_PREFIX "unexpected status from systemd: ~p~n", [Err]),
- ok
- end.
+%% Private
+
+notify_boot_state(ready = BootState) ->
+ Status = boot_state_to_desc(BootState),
+ ?LOG_DEBUG(
+ ?LOG_PREFIX "notifying of state `~s`",
+ [BootState],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
+ systemd:notify([BootState, {status, Status}]);
+notify_boot_state(BootState) ->
+ Status = boot_state_to_desc(BootState),
+ ?LOG_DEBUG(
+ ?LOG_PREFIX "sending non-systemd state (~s) as status description: "
+ "\"~s\"",
+ [BootState, Status],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
+ systemd:notify({status, Status}).
+
+boot_state_to_desc(stopped) ->
+ "Standing by";
+boot_state_to_desc(booting) ->
+ "Startup in progress";
+boot_state_to_desc(core_started) ->
+ "Startup in progress (core ready, starting plugins)";
+boot_state_to_desc(ready) ->
+ "";
+boot_state_to_desc(stopping) ->
+ "";
+boot_state_to_desc(BootState) ->
+ atom_to_list(BootState).
diff --git a/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_boot_state_xterm_titlebar.erl b/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_boot_state_xterm_titlebar.erl
new file mode 100644
index 0000000000..a7adfeca44
--- /dev/null
+++ b/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_boot_state_xterm_titlebar.erl
@@ -0,0 +1,99 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+%% @doc
+%% This module updates the titlebar of an Xterm-compatible terminal emulator
+%% to show some details of the running RabbitMQ node. It is useful for
+%% developers when running multiple RabbitMQ node in multiple terminal
+%% windows.
+-module(rabbit_boot_state_xterm_titlebar).
+
+-behaviour(gen_server).
+
+-include_lib("kernel/include/logger.hrl").
+
+-include_lib("rabbit_common/include/logging.hrl").
+
+-export([start_link/0]).
+
+-export([init/1,
+ handle_call/3,
+ handle_cast/2,
+ terminate/2,
+ code_change/3]).
+
+-record(?MODULE, {raw_stdio_port}).
+
+-define(LOG_PREFIX, "Boot state/xterm: ").
+
+start_link() ->
+ gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
+
+init([]) ->
+ %% We assume that if the OS is Unix, then the terminal emulator must be
+ %% compatible with Xterm escape sequences.
+ RunsOnUnix = case os:type() of
+ {unix, _} -> true;
+ _ -> false
+ end,
+ %% We don't know if the output is a terminal (we don't have access to
+ %% isatty(3)). Let's assume that if input is enabled, we are talking to a
+ %% terminal.
+ AcceptsInput = case init:get_argument(noinput) of
+ {ok, _} -> false;
+ error -> true
+ end,
+ case RunsOnUnix andalso AcceptsInput of
+ true ->
+ RawStdio = erlang:open_port({fd, 0, 1}, [out]),
+ State = #?MODULE{raw_stdio_port = RawStdio},
+ {ok, State};
+ false ->
+ ignore
+ end.
+
+handle_call(_Request, _From, State) ->
+ {noreply, State}.
+
+handle_cast({notify_boot_state, BootState}, State) ->
+ _ = set_xterm_titlebar(State, BootState),
+ {noreply, State}.
+
+terminate(normal, #?MODULE{raw_stdio_port = RawStdio}) ->
+ erlang:port_close(RawStdio),
+ ok.
+
+code_change(_OldVsn, State, _Extra) ->
+ {ok, State}.
+
+%% Private
+
+set_xterm_titlebar(#?MODULE{raw_stdio_port = RawStdio}, BootState) ->
+ Title = format_title(BootState),
+ Binary = unicode:characters_to_binary(Title),
+ %% Read the "Operating System Controls" section in the Xterm Control
+ %% Sequences documentation:
+ %% https://www.xfree86.org/current/ctlseqs.html
+ erlang:port_command(RawStdio, ["\033]2;", Binary, "\007"]).
+
+format_title(BootState) ->
+ %% We use rabbitmq_prelaunch's version here because `rabbit` may not be
+ %% loaded yet.
+ %% FIXME: Move product info to prelaunch and use it here?
+ {ok, Vsn} = application:get_key(rabbitmq_prelaunch, vsn),
+ BootStateSuffix = case BootState of
+ ready -> "";
+ _ -> io_lib:format(": ~ts", [BootState])
+ end,
+ case node() of
+ nonode@nohost ->
+ rabbit_misc:format(
+ "RabbitMQ ~ts~ts", [Vsn, BootStateSuffix]);
+ Node ->
+ rabbit_misc:format(
+ "~s — RabbitMQ ~ts~ts", [Node, Vsn, BootStateSuffix])
+ end.
diff --git a/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_logger_fmt_helpers.erl b/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_logger_fmt_helpers.erl
new file mode 100644
index 0000000000..a9b380bce1
--- /dev/null
+++ b/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_logger_fmt_helpers.erl
@@ -0,0 +1,194 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(rabbit_logger_fmt_helpers).
+
+-export([format_time/2,
+ format_level/2,
+ format_msg/3]).
+
+format_time(Timestamp, #{time_format := Format}) ->
+ format_time1(Timestamp, Format);
+format_time(Timestamp, _) ->
+ format_time1(Timestamp, {rfc3339, $\s, ""}).
+
+format_time1(Timestamp, {rfc3339, Sep, Offset}) ->
+ Options = [{unit, microsecond},
+ {offset, Offset},
+ {time_designator, Sep}],
+ calendar:system_time_to_rfc3339(Timestamp, Options);
+format_time1(Timestamp, {epoch, secs, int}) ->
+ Timestamp div 1000000;
+format_time1(Timestamp, {epoch, usecs, int}) ->
+ Timestamp;
+format_time1(Timestamp, {epoch, secs, binary}) ->
+ io_lib:format("~.6.0f", [Timestamp / 1000000]);
+format_time1(Timestamp, {epoch, usecs, binary}) ->
+ io_lib:format("~b", [Timestamp]);
+format_time1(Timestamp, {LocalOrUniversal, Format, Args}) ->
+    %% The format string and the args list are prepared by
+ %% `rabbit_prelaunch_early_logging:translate_generic_conf()'.
+ {{Year, Month, Day},
+ {Hour, Minute, Second}} = case LocalOrUniversal of
+ local ->
+ calendar:system_time_to_local_time(
+ Timestamp, microsecond);
+ universal ->
+ calendar:system_time_to_universal_time(
+ Timestamp, microsecond)
+ end,
+ Args1 = lists:map(
+ fun
+ (year) -> Year;
+ (month) -> Month;
+ (day) -> Day;
+ (hour) -> Hour;
+ (minute) -> Minute;
+ (second) -> Second;
+ ({second_fractional,
+ Decimals}) -> second_fractional(Timestamp, Decimals)
+ end, Args),
+ io_lib:format(Format, Args1).
+
+second_fractional(Timestamp, Decimals) ->
+ (Timestamp rem 1000000) div (1000000 div round(math:pow(10, Decimals))).
+
+format_level(Level, Config) ->
+ format_level1(Level, Config).
+
+format_level1(Level, #{level_format := lc}) ->
+ level_lc_name(Level);
+format_level1(Level, #{level_format := uc}) ->
+ level_uc_name(Level);
+format_level1(Level, #{level_format := lc3}) ->
+ level_3letter_lc_name(Level);
+format_level1(Level, #{level_format := uc3}) ->
+ level_3letter_uc_name(Level);
+format_level1(Level, #{level_format := lc4}) ->
+ level_4letter_lc_name(Level);
+format_level1(Level, #{level_format := uc4}) ->
+ level_4letter_uc_name(Level);
+format_level1(Level, _) ->
+ level_4letter_lc_name(Level).
+
+level_lc_name(debug) -> "debug";
+level_lc_name(info) -> "info";
+level_lc_name(notice) -> "notice";
+level_lc_name(warning) -> "warning";
+level_lc_name(error) -> "error";
+level_lc_name(critical) -> "critical";
+level_lc_name(alert) -> "alert";
+level_lc_name(emergency) -> "emergency".
+
+level_uc_name(debug) -> "DEBUG";
+level_uc_name(info) -> "INFO";
+level_uc_name(notice) -> "NOTICE";
+level_uc_name(warning) -> "WARNING";
+level_uc_name(error) -> "ERROR";
+level_uc_name(critical) -> "CRITICAL";
+level_uc_name(alert) -> "ALERT";
+level_uc_name(emergency) -> "EMERGENCY".
+
+level_3letter_lc_name(debug) -> "dbg";
+level_3letter_lc_name(info) -> "inf";
+level_3letter_lc_name(notice) -> "ntc";
+level_3letter_lc_name(warning) -> "wrn";
+level_3letter_lc_name(error) -> "err";
+level_3letter_lc_name(critical) -> "crt";
+level_3letter_lc_name(alert) -> "alt";
+level_3letter_lc_name(emergency) -> "emg".
+
+level_3letter_uc_name(debug) -> "DBG";
+level_3letter_uc_name(info) -> "INF";
+level_3letter_uc_name(notice) -> "NTC";
+level_3letter_uc_name(warning) -> "WRN";
+level_3letter_uc_name(error) -> "ERR";
+level_3letter_uc_name(critical) -> "CRT";
+level_3letter_uc_name(alert) -> "ALT";
+level_3letter_uc_name(emergency) -> "EMG".
+
+level_4letter_lc_name(debug) -> "dbug";
+level_4letter_lc_name(info) -> "info";
+level_4letter_lc_name(notice) -> "noti";
+level_4letter_lc_name(warning) -> "warn";
+level_4letter_lc_name(error) -> "erro";
+level_4letter_lc_name(critical) -> "crit";
+level_4letter_lc_name(alert) -> "alrt";
+level_4letter_lc_name(emergency) -> "emgc".
+
+level_4letter_uc_name(debug) -> "DBUG";
+level_4letter_uc_name(info) -> "INFO";
+level_4letter_uc_name(notice) -> "NOTI";
+level_4letter_uc_name(warning) -> "WARN";
+level_4letter_uc_name(error) -> "ERRO";
+level_4letter_uc_name(critical) -> "CRIT";
+level_4letter_uc_name(alert) -> "ALRT";
+level_4letter_uc_name(emergency) -> "EMGC".
+
+format_msg(Msg, Meta, #{single_line := true} = Config) ->
+ FormattedMsg = format_msg1(Msg, Meta, Config),
+ %% The following behavior is the same as the one in the official
+ %% `logger_formatter'; the code is taken from that module (as of
+ %% c5ed910098e9c2787e2c3f9f462c84322064e00d in the master branch).
+ FormattedMsg1 = string:strip(FormattedMsg, both),
+ re:replace(
+ FormattedMsg1,
+ ",?\r?\n\s*",
+ ", ",
+ [{return, list}, global, unicode]);
+format_msg(Msg, Meta, Config) ->
+ format_msg1(Msg, Meta, Config).
+
+format_msg1({string, Chardata}, Meta, Config) ->
+ format_msg1({"~ts", [Chardata]}, Meta, Config);
+format_msg1({report, Report}, Meta, Config) ->
+ FormattedReport = format_report(Report, Meta, Config),
+ format_msg1(FormattedReport, Meta, Config);
+format_msg1({Format, Args}, _, _) ->
+ io_lib:format(Format, Args).
+
+format_report(
+ #{label := {application_controller, _}} = Report, Meta, Config) ->
+ format_application_progress(Report, Meta, Config);
+format_report(
+ #{label := {supervisor, progress}} = Report, Meta, Config) ->
+ format_supervisor_progress(Report, Meta, Config);
+format_report(
+ Report, #{report_cb := Cb} = Meta, Config) ->
+ try
+ case erlang:fun_info(Cb, arity) of
+ {arity, 1} -> Cb(Report);
+ {arity, 2} -> {"~ts", [Cb(Report, #{})]}
+ end
+ catch
+ _:_:_ ->
+ format_report(Report, maps:remove(report_cb, Meta), Config)
+ end;
+format_report(Report, _, _) ->
+ logger:format_report(Report).
+
+format_application_progress(#{label := {_, progress},
+ report := InternalReport}, _, _) ->
+ Application = proplists:get_value(application, InternalReport),
+ StartedAt = proplists:get_value(started_at, InternalReport),
+ {"Application ~w started on ~0p",
+ [Application, StartedAt]};
+format_application_progress(#{label := {_, exit},
+ report := InternalReport}, _, _) ->
+ Application = proplists:get_value(application, InternalReport),
+ Exited = proplists:get_value(exited, InternalReport),
+ {"Application ~w exited with reason: ~0p",
+ [Application, Exited]}.
+
+format_supervisor_progress(#{report := InternalReport}, _, _) ->
+ Supervisor = proplists:get_value(supervisor, InternalReport),
+ Started = proplists:get_value(started, InternalReport),
+ Id = proplists:get_value(id, Started),
+ Pid = proplists:get_value(pid, Started),
+ Mfa = proplists:get_value(mfargs, Started),
+ {"Supervisor ~w: child ~w started (~w): ~0p",
+ [Supervisor, Id, Pid, Mfa]}.
diff --git a/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_logger_json_fmt.erl b/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_logger_json_fmt.erl
new file mode 100644
index 0000000000..8473340653
--- /dev/null
+++ b/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_logger_json_fmt.erl
@@ -0,0 +1,119 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(rabbit_logger_json_fmt).
+
+-export([format/2]).
+
+format(
+ #{msg := Msg,
+ level := Level,
+ meta := Meta},
+ Config) ->
+ FormattedLevel = unicode:characters_to_binary(
+ rabbit_logger_fmt_helpers:format_level(Level, Config)),
+ FormattedMeta = format_meta(Meta, Config),
+ %% We need to call `unicode:characters_to_binary()' here and several other
+ %% places because `jsx:encode()' will format a string as a list of
+ %% integers (we don't blame it for that, it makes sense).
+ FormattedMsg = unicode:characters_to_binary(
+ rabbit_logger_fmt_helpers:format_msg(Msg, Meta, Config)),
+ InitialDoc0 = FormattedMeta#{level => FormattedLevel,
+ msg => FormattedMsg},
+ InitialDoc = case level_to_verbosity(Level, Config) of
+ undefined -> InitialDoc0;
+ Verbosity -> InitialDoc0#{verbosity => Verbosity}
+ end,
+ DocAfterMapping = apply_mapping_and_ordering(InitialDoc, Config),
+ Json = jsx:encode(DocAfterMapping),
+ [Json, $\n].
+
+level_to_verbosity(Level, #{verbosity_map := Mapping}) ->
+ case maps:is_key(Level, Mapping) of
+ true -> maps:get(Level, Mapping);
+ false -> undefined
+ end;
+level_to_verbosity(_, _) ->
+ undefined.
+
+format_meta(Meta, Config) ->
+ maps:fold(
+ fun
+ (time, Timestamp, Acc) ->
+ FormattedTime0 = rabbit_logger_fmt_helpers:format_time(
+ Timestamp, Config),
+ FormattedTime1 = case is_number(FormattedTime0) of
+ true -> FormattedTime0;
+ false -> unicode:characters_to_binary(
+ FormattedTime0)
+ end,
+ Acc#{time => FormattedTime1};
+ (domain = Key, Components, Acc) ->
+ Term = unicode:characters_to_binary(
+ string:join(
+ [atom_to_list(Cmp) || Cmp <- Components],
+ ".")),
+ Acc#{Key => Term};
+ (Key, Value, Acc) ->
+ case convert_to_types_accepted_by_jsx(Value) of
+ false -> Acc;
+ Term -> Acc#{Key => Term}
+ end
+ end, #{}, Meta).
+
+convert_to_types_accepted_by_jsx(Term) when is_map(Term) ->
+ maps:map(
+ fun(_, Value) -> convert_to_types_accepted_by_jsx(Value) end,
+ Term);
+convert_to_types_accepted_by_jsx(Term) when is_list(Term) ->
+ case io_lib:deep_char_list(Term) of
+ true ->
+ unicode:characters_to_binary(Term);
+ false ->
+ [convert_to_types_accepted_by_jsx(E) || E <- Term]
+ end;
+convert_to_types_accepted_by_jsx(Term) when is_tuple(Term) ->
+ convert_to_types_accepted_by_jsx(erlang:tuple_to_list(Term));
+convert_to_types_accepted_by_jsx(Term) when is_function(Term) ->
+ String = erlang:fun_to_list(Term),
+ unicode:characters_to_binary(String);
+convert_to_types_accepted_by_jsx(Term) when is_pid(Term) ->
+ String = erlang:pid_to_list(Term),
+ unicode:characters_to_binary(String);
+convert_to_types_accepted_by_jsx(Term) when is_port(Term) ->
+ String = erlang:port_to_list(Term),
+ unicode:characters_to_binary(String);
+convert_to_types_accepted_by_jsx(Term) when is_reference(Term) ->
+ String = erlang:ref_to_list(Term),
+ unicode:characters_to_binary(String);
+convert_to_types_accepted_by_jsx(Term) ->
+ Term.
+
+apply_mapping_and_ordering(Doc, #{field_map := Mapping}) ->
+ apply_mapping_and_ordering(Mapping, Doc, []);
+apply_mapping_and_ordering(Doc, _) ->
+ maps:to_list(Doc).
+
+apply_mapping_and_ordering([{'$REST', false} | Rest], _, Result) ->
+ apply_mapping_and_ordering(Rest, #{}, Result);
+apply_mapping_and_ordering([{Old, false} | Rest], Doc, Result)
+ when is_atom(Old) ->
+ Doc1 = maps:remove(Old, Doc),
+ apply_mapping_and_ordering(Rest, Doc1, Result);
+apply_mapping_and_ordering([{Old, New} | Rest], Doc, Result)
+ when is_atom(Old) andalso is_atom(New) ->
+ case maps:is_key(Old, Doc) of
+ true ->
+ Value = maps:get(Old, Doc),
+ Doc1 = maps:remove(Old, Doc),
+ Result1 = [{New, Value} | Result],
+ apply_mapping_and_ordering(Rest, Doc1, Result1);
+ false ->
+ apply_mapping_and_ordering(Rest, Doc, Result)
+ end;
+apply_mapping_and_ordering([], Doc, Result) ->
+ lists:reverse(Result) ++ maps:to_list(Doc).
diff --git a/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_logger_std_h.erl b/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_logger_std_h.erl
new file mode 100644
index 0000000000..abb1d2ff73
--- /dev/null
+++ b/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_logger_std_h.erl
@@ -0,0 +1,905 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2017-2020. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% %CopyrightEnd%
+%%
+-module(rabbit_logger_std_h).
+
+-ifdef(TEST).
+-define(io_put_chars(DEVICE, DATA), begin
+ %% We log to Common Test log as well.
+ %% This is the file we use to check
+ %% the message made it to
+ %% stdout/stderr.
+ ct:log("~ts", [DATA]),
+ io:put_chars(DEVICE, DATA)
+ end).
+
+-export([parse_date_spec/1, parse_day_of_week/2, parse_day_of_month/2, parse_hour/2, parse_minute/2]).
+-else.
+-define(io_put_chars(DEVICE, DATA), io:put_chars(DEVICE, DATA)).
+-endif.
+-define(file_write(DEVICE, DATA), file:write(DEVICE, DATA)).
+-define(file_datasync(DEVICE), file:datasync(DEVICE)).
+
+-include_lib("kernel/include/file.hrl").
+
+%% API
+-export([filesync/1]).
+-export([is_date_based_rotation_needed/3]).
+
+%% logger_h_common callbacks
+-export([init/2, check_config/4, config_changed/3, reset_state/2,
+ filesync/3, write/4, handle_info/3, terminate/3]).
+
+%% logger callbacks
+-export([log/2, adding_handler/1, removing_handler/1, changing_config/3,
+ filter_config/1]).
+
+-define(DEFAULT_CALL_TIMEOUT, 5000).
+
+%%%===================================================================
+%%% API
+%%%===================================================================
+
+%%%-----------------------------------------------------------------
+%%%
+-spec filesync(Name) -> ok | {error,Reason} when
+ Name :: atom(),
+ Reason :: handler_busy | {badarg,term()}.
+
+filesync(Name) ->
+ logger_h_common:filesync(?MODULE,Name).
+
+%%%===================================================================
+%%% logger callbacks - just forward to logger_h_common
+%%%===================================================================
+
+%%%-----------------------------------------------------------------
+%%% Handler being added
+-spec adding_handler(Config) -> {ok,Config} | {error,Reason} when
+ Config :: logger:handler_config(),
+ Reason :: term().
+
+adding_handler(Config) ->
+ logger_h_common:adding_handler(Config).
+
+%%%-----------------------------------------------------------------
+%%% Updating handler config
+-spec changing_config(SetOrUpdate, OldConfig, NewConfig) ->
+ {ok,Config} | {error,Reason} when
+ SetOrUpdate :: set | update,
+ OldConfig :: logger:handler_config(),
+ NewConfig :: logger:handler_config(),
+ Config :: logger:handler_config(),
+ Reason :: term().
+
+changing_config(SetOrUpdate, OldConfig, NewConfig) ->
+ logger_h_common:changing_config(SetOrUpdate, OldConfig, NewConfig).
+
+%%%-----------------------------------------------------------------
+%%% Handler being removed
+-spec removing_handler(Config) -> ok when
+ Config :: logger:handler_config().
+
+removing_handler(Config) ->
+ logger_h_common:removing_handler(Config).
+
+%%%-----------------------------------------------------------------
+%%% Log a string or report
+-spec log(LogEvent, Config) -> ok when
+ LogEvent :: logger:log_event(),
+ Config :: logger:handler_config().
+
+log(LogEvent, Config) ->
+ logger_h_common:log(LogEvent, Config).
+
+%%%-----------------------------------------------------------------
+%%% Remove internal fields from configuration
+-spec filter_config(Config) -> Config when
+ Config :: logger:handler_config().
+
+filter_config(Config) ->
+ logger_h_common:filter_config(Config).
+
+%%%===================================================================
+%%% logger_h_common callbacks
+%%%===================================================================
+init(Name, Config) ->
+ MyConfig = maps:with([type,file,modes,file_check,max_no_bytes,
+ rotate_on_date,max_no_files,compress_on_rotate],
+ Config),
+ case file_ctrl_start(Name, MyConfig) of
+ {ok,FileCtrlPid} ->
+ {ok,MyConfig#{file_ctrl_pid=>FileCtrlPid}};
+ Error ->
+ Error
+ end.
+
+check_config(Name,set,undefined,NewHConfig) ->
+ check_h_config(merge_default_config(Name,normalize_config(NewHConfig)));
+check_config(Name,SetOrUpdate,OldHConfig,NewHConfig0) ->
+ WriteOnce = maps:with([type,file,modes],OldHConfig),
+ Default =
+ case SetOrUpdate of
+ set ->
+ %% Do not reset write-once fields to defaults
+ merge_default_config(Name,WriteOnce);
+ update ->
+ OldHConfig
+ end,
+
+ NewHConfig = maps:merge(Default, normalize_config(NewHConfig0)),
+
+ %% Fail if write-once fields are changed
+ case maps:with([type,file,modes],NewHConfig) of
+ WriteOnce ->
+ check_h_config(NewHConfig);
+ Other ->
+ {error,{illegal_config_change,?MODULE,WriteOnce,Other}}
+ end.
+
+check_h_config(HConfig) ->
+ case check_h_config(maps:get(type,HConfig),maps:to_list(HConfig)) of
+ ok ->
+ {ok,fix_file_opts(HConfig)};
+ {error,{Key,Value}} ->
+ {error,{invalid_config,?MODULE,#{Key=>Value}}}
+ end.
+
+check_h_config(Type,[{type,Type} | Config]) when Type =:= standard_io;
+ Type =:= standard_error;
+ Type =:= file ->
+ check_h_config(Type,Config);
+check_h_config({device,Device},[{type,{device,Device}} | Config]) ->
+ check_h_config({device,Device},Config);
+check_h_config(file,[{file,File} | Config]) when is_list(File) ->
+ check_h_config(file,Config);
+check_h_config(file,[{modes,Modes} | Config]) when is_list(Modes) ->
+ check_h_config(file,Config);
+check_h_config(file,[{max_no_bytes,Size} | Config])
+ when (is_integer(Size) andalso Size>0) orelse Size=:=infinity ->
+ check_h_config(file,Config);
+check_h_config(file,[{rotate_on_date,DateSpec}=Param | Config])
+ when is_list(DateSpec) orelse DateSpec=:=false ->
+ case parse_date_spec(DateSpec) of
+ error -> {error,Param};
+ _ -> check_h_config(file,Config)
+ end;
+check_h_config(file,[{max_no_files,Num} | Config]) when is_integer(Num), Num>=0 ->
+ check_h_config(file,Config);
+check_h_config(file,[{compress_on_rotate,Bool} | Config]) when is_boolean(Bool) ->
+ check_h_config(file,Config);
+check_h_config(file,[{file_check,FileCheck} | Config])
+ when is_integer(FileCheck), FileCheck>=0 ->
+ check_h_config(file,Config);
+check_h_config(_Type,[Other | _]) ->
+ {error,Other};
+check_h_config(_Type,[]) ->
+ ok.
+
+normalize_config(#{type:={file,File}}=HConfig) ->
+ normalize_config(HConfig#{type=>file,file=>File});
+normalize_config(#{type:={file,File,Modes}}=HConfig) ->
+ normalize_config(HConfig#{type=>file,file=>File,modes=>Modes});
+normalize_config(#{file:=File}=HConfig) ->
+ HConfig#{file=>filename:absname(File)};
+normalize_config(HConfig) ->
+ HConfig.
+
+merge_default_config(Name,#{type:=Type}=HConfig) ->
+ merge_default_config(Name,Type,HConfig);
+merge_default_config(Name,#{file:=_}=HConfig) ->
+ merge_default_config(Name,file,HConfig);
+merge_default_config(Name,HConfig) ->
+ merge_default_config(Name,standard_io,HConfig).
+
+merge_default_config(Name,Type,HConfig) ->
+ maps:merge(get_default_config(Name,Type),HConfig).
+
+get_default_config(Name,file) ->
+ #{type => file,
+ file => filename:absname(atom_to_list(Name)),
+ modes => [raw,append],
+ file_check => 0,
+ max_no_bytes => infinity,
+ rotate_on_date => false,
+ max_no_files => 0,
+ compress_on_rotate => false};
+get_default_config(_Name,Type) ->
+ #{type => Type}.
+
+fix_file_opts(#{modes:=Modes}=HConfig) ->
+ HConfig#{modes=>fix_modes(Modes)};
+fix_file_opts(HConfig) ->
+ HConfig#{filesync_repeat_interval=>no_repeat}.
+
+fix_modes(Modes) ->
+ %% Ensure write|append|exclusive
+ Modes1 =
+ case [M || M <- Modes,
+ lists:member(M,[write,append,exclusive])] of
+ [] -> [append|Modes];
+ _ -> Modes
+ end,
+ %% Ensure raw
+ Modes2 =
+ case lists:member(raw,Modes) of
+ false -> [raw|Modes1];
+ true -> Modes1
+ end,
+ %% Ensure delayed_write
+ case lists:partition(fun(delayed_write) -> true;
+ ({delayed_write,_,_}) -> true;
+ (_) -> false
+ end, Modes2) of
+ {[],_} ->
+ [delayed_write|Modes2];
+ _ ->
+ Modes2
+ end.
+
+config_changed(_Name,
+ #{file_check:=FileCheck,
+ max_no_bytes:=Size,
+ rotate_on_date:=DateSpec,
+ max_no_files:=Count,
+ compress_on_rotate:=Compress},
+ #{file_check:=FileCheck,
+ max_no_bytes:=Size,
+ rotate_on_date:=DateSpec,
+ max_no_files:=Count,
+ compress_on_rotate:=Compress}=State) ->
+ State;
+config_changed(_Name,
+ #{file_check:=FileCheck,
+ max_no_bytes:=Size,
+ rotate_on_date:=DateSpec,
+ max_no_files:=Count,
+ compress_on_rotate:=Compress},
+ #{file_ctrl_pid := FileCtrlPid} = State) ->
+ FileCtrlPid ! {update_config,#{file_check=>FileCheck,
+ max_no_bytes=>Size,
+ rotate_on_date=>DateSpec,
+ max_no_files=>Count,
+ compress_on_rotate=>Compress}},
+ State#{file_check:=FileCheck,
+ max_no_bytes:=Size,
+ rotate_on_date:=DateSpec,
+ max_no_files:=Count,
+ compress_on_rotate:=Compress};
+config_changed(_Name,_NewHConfig,State) ->
+ State.
+
+filesync(_Name, SyncAsync, #{file_ctrl_pid := FileCtrlPid} = State) ->
+ Result = file_ctrl_filesync(SyncAsync, FileCtrlPid),
+ {Result,State}.
+
+write(_Name, SyncAsync, Bin, #{file_ctrl_pid:=FileCtrlPid} = State) ->
+ Result = file_write(SyncAsync, FileCtrlPid, Bin),
+ {Result,State}.
+
+reset_state(_Name, State) ->
+ State.
+
+handle_info(_Name, {'EXIT',Pid,Why}, #{file_ctrl_pid := Pid}=State) ->
+ %% file_ctrl_pid died, file error, terminate handler
+ exit({error,{write_failed,maps:with([type,file,modes],State),Why}});
+handle_info(_, _, State) ->
+ State.
+
+terminate(_Name, _Reason, #{file_ctrl_pid:=FWPid}) ->
+ case is_process_alive(FWPid) of
+ true ->
+ unlink(FWPid),
+ _ = file_ctrl_stop(FWPid),
+ MRef = erlang:monitor(process, FWPid),
+ receive
+ {'DOWN',MRef,_,_,_} ->
+ ok
+ after
+ ?DEFAULT_CALL_TIMEOUT ->
+ exit(FWPid, kill),
+ ok
+ end;
+ false ->
+ ok
+ end.
+
+%%%===================================================================
+%%% Internal functions
+%%%===================================================================
+
+%%%-----------------------------------------------------------------
+%%%
+open_log_file(HandlerName,#{type:=file,
+ file:=FileName,
+ modes:=Modes,
+ file_check:=FileCheck}) ->
+ try
+ case filelib:ensure_dir(FileName) of
+ ok ->
+ case file:open(FileName, Modes) of
+ {ok, Fd} ->
+ {ok,#file_info{inode=INode}} =
+ file:read_file_info(FileName,[raw]),
+ UpdateModes = [append | Modes--[write,append,exclusive]],
+ {ok,#{handler_name=>HandlerName,
+ file_name=>FileName,
+ modes=>UpdateModes,
+ file_check=>FileCheck,
+ fd=>Fd,
+ inode=>INode,
+ last_check=>timestamp(),
+ synced=>false,
+ write_res=>ok,
+ sync_res=>ok}};
+ Error ->
+ Error
+ end;
+ Error ->
+ Error
+ end
+ catch
+ _:Reason -> {error,Reason}
+ end.
+
+close_log_file(#{fd:=Fd}) ->
+    _ = file:datasync(Fd), %% file:datasync may return an error as it flushes the delayed_write buffer
+ _ = file:close(Fd),
+ ok;
+close_log_file(_) ->
+ ok.
+
+%% A special close that closes the FD properly when the delayed write close failed
+delayed_write_close(#{fd:=Fd}) ->
+ case file:close(Fd) of
+ %% We got an error while closing, could be a delayed write failing
+ %% So we close again in order to make sure the file is closed.
+ {error, _} ->
+ file:close(Fd);
+ Res ->
+ Res
+ end.
+
+%%%-----------------------------------------------------------------
+%%% File control process
+
+file_ctrl_start(HandlerName, HConfig) ->
+ Starter = self(),
+ FileCtrlPid =
+ spawn_link(fun() ->
+ file_ctrl_init(HandlerName, HConfig, Starter)
+ end),
+ receive
+ {FileCtrlPid,ok} ->
+ {ok,FileCtrlPid};
+ {FileCtrlPid,Error} ->
+ Error
+ after
+ ?DEFAULT_CALL_TIMEOUT ->
+ {error,file_ctrl_process_not_started}
+ end.
+
+file_ctrl_stop(Pid) ->
+ Pid ! stop.
+
+file_write(async, Pid, Bin) ->
+ Pid ! {log,Bin},
+ ok;
+file_write(sync, Pid, Bin) ->
+ file_ctrl_call(Pid, {log,Bin}).
+
+file_ctrl_filesync(async, Pid) ->
+ Pid ! filesync,
+ ok;
+file_ctrl_filesync(sync, Pid) ->
+ file_ctrl_call(Pid, filesync).
+
+file_ctrl_call(Pid, Msg) ->
+ MRef = monitor(process, Pid),
+ Pid ! {Msg,{self(),MRef}},
+ receive
+ {MRef,Result} ->
+ demonitor(MRef, [flush]),
+ Result;
+ {'DOWN',MRef,_Type,_Object,Reason} ->
+ {error,Reason}
+ after
+ ?DEFAULT_CALL_TIMEOUT ->
+ %% If this timeout triggers we will get a stray
+ %% reply message in our mailbox eventually.
+ %% That does not really matter though as it will
+ %% end up in this module's handle_info and be ignored
+ demonitor(MRef, [flush]),
+ {error,{no_response,Pid}}
+ end.
+
+file_ctrl_init(HandlerName,
+ #{type:=file,
+ max_no_bytes:=Size,
+ rotate_on_date:=DateSpec,
+ max_no_files:=Count,
+ compress_on_rotate:=Compress,
+ file:=FileName} = HConfig,
+ Starter) ->
+ process_flag(message_queue_data, off_heap),
+ case open_log_file(HandlerName,HConfig) of
+ {ok,State} ->
+ Starter ! {self(),ok},
+            %% Do the initial rotation (if any) after we ack the starting
+            %% process, as otherwise system startup would be delayed or
+            %% could crash
+ case parse_date_spec(DateSpec) of
+ error ->
+ Starter ! {self(),{error,{invalid_date_spec,DateSpec}}};
+ ParsedDS ->
+ RotState = update_rotation({Size,ParsedDS,Count,Compress},State),
+ file_ctrl_loop(RotState)
+ end;
+ {error,Reason} ->
+ Starter ! {self(),{error,{open_failed,FileName,Reason}}}
+ end;
+file_ctrl_init(HandlerName, #{type:={device,Dev}}, Starter) ->
+ Starter ! {self(),ok},
+ file_ctrl_loop(#{handler_name=>HandlerName,dev=>Dev});
+file_ctrl_init(HandlerName, #{type:=StdDev}, Starter) ->
+ Starter ! {self(),ok},
+ file_ctrl_loop(#{handler_name=>HandlerName,dev=>StdDev}).
+
+file_ctrl_loop(State) ->
+ receive
+ %% asynchronous event
+ {log,Bin} ->
+ State1 = write_to_dev(Bin,State),
+ file_ctrl_loop(State1);
+
+ %% synchronous event
+ {{log,Bin},{From,MRef}} ->
+ State1 = ensure_file(State),
+ State2 = write_to_dev(Bin,State1),
+ From ! {MRef,ok},
+ file_ctrl_loop(State2);
+
+ filesync ->
+ State1 = sync_dev(State),
+ file_ctrl_loop(State1);
+
+ {filesync,{From,MRef}} ->
+ State1 = ensure_file(State),
+ State2 = sync_dev(State1),
+ From ! {MRef,ok},
+ file_ctrl_loop(State2);
+
+ {update_config,#{file_check:=FileCheck,
+ max_no_bytes:=Size,
+ rotate_on_date:=DateSpec,
+ max_no_files:=Count,
+ compress_on_rotate:=Compress}} ->
+ case parse_date_spec(DateSpec) of
+ error ->
+ %% FIXME: Report parsing error?
+ file_ctrl_loop(State#{file_check=>FileCheck});
+ ParsedDS ->
+ State1 = update_rotation({Size,ParsedDS,Count,Compress},State),
+ file_ctrl_loop(State1#{file_check=>FileCheck})
+ end;
+
+ stop ->
+ close_log_file(State),
+ stopped
+ end.
+
+maybe_ensure_file(#{file_check:=0}=State) ->
+ ensure_file(State);
+maybe_ensure_file(#{last_check:=T0,file_check:=CheckInt}=State)
+ when is_integer(CheckInt) ->
+ T = timestamp(),
+ if T-T0 > CheckInt -> ensure_file(State);
+ true -> State
+ end;
+maybe_ensure_file(State) ->
+ State.
+
+%% In order to play well with tools like logrotate, we need to be able
+%% to re-create the file if it has disappeared (e.g. if rotated by
+%% logrotate)
+ensure_file(#{inode:=INode0,file_name:=FileName,modes:=Modes}=State) ->
+ case file:read_file_info(FileName,[raw]) of
+ {ok,#file_info{inode=INode0}} ->
+ State#{last_check=>timestamp()};
+ _ ->
+ close_log_file(State),
+ case file:open(FileName,Modes) of
+ {ok,Fd} ->
+ {ok,#file_info{inode=INode}} =
+ file:read_file_info(FileName,[raw]),
+ State#{fd=>Fd,inode=>INode,
+ last_check=>timestamp(),
+ synced=>true,sync_res=>ok};
+ Error ->
+ exit({could_not_reopen_file,Error})
+ end
+ end;
+ensure_file(State) ->
+ State.
+
+write_to_dev(Bin,#{dev:=DevName}=State) ->
+ ?io_put_chars(DevName, Bin),
+ State;
+write_to_dev(Bin, State) ->
+ State1 = #{fd:=Fd} = maybe_ensure_file(State),
+ Result = ?file_write(Fd, Bin),
+ State2 = maybe_rotate_file(Bin,State1),
+ maybe_notify_error(write,Result,State2),
+ State2#{synced=>false,write_res=>Result}.
+
+sync_dev(#{synced:=false}=State) ->
+ State1 = #{fd:=Fd} = maybe_ensure_file(State),
+ Result = ?file_datasync(Fd),
+ maybe_notify_error(filesync,Result,State1),
+ State1#{synced=>true,sync_res=>Result};
+sync_dev(State) ->
+ State.
+
+update_rotation({infinity,false,_,_},State) ->
+ maybe_remove_archives(0,State),
+ maps:remove(rotation,State);
+update_rotation({Size,DateSpec,Count,Compress},#{file_name:=FileName}=State) ->
+ maybe_remove_archives(Count,State),
+ {ok,#file_info{size=CurrSize}} = file:read_file_info(FileName,[raw]),
+ State1 = State#{rotation=>#{size=>Size,
+ on_date=>DateSpec,
+ count=>Count,
+ compress=>Compress,
+ curr_size=>CurrSize}},
+ maybe_update_compress(0,State1),
+ maybe_rotate_file(0,State1).
+
+%%
+%% Date spec parser
+%%
+
+%% Some examples from Lager docs:
+%%
+%% $D0 rotate every night at midnight
+%% $D23 rotate every day at 23:00 hr
+%% $W0D23 rotate every week on Sunday at 23:00 hr
+%% $W5D16 rotate every week on Friday at 16:00 hr
+%% $M1D0 rotate on the first day of every month at
+%% midnight (i.e., the start of the day)
+%% $M5D6 rotate on every 5th day of the month at
+%% 6:00 hr
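+%%
+%% As a rough sketch of what the parser below produces for a few of the
+%% specs above (values derived from the clauses that follow):
+%%
+%% parse_date_spec("$D0") -> #{every => day, hour => 0}
+%% parse_date_spec("$W5D16") -> #{every => week, day_of_week => 5, hour => 16}
+%% parse_date_spec("$M1D0") -> #{every => month, day_of_month => 1, hour => 0}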
+
+parse_date_spec(false) ->
+ false;
+parse_date_spec("") ->
+ false;
+parse_date_spec(Input) ->
+ parse_date_spec(Input, #{}).
+
+parse_date_spec("", Acc) ->
+ Acc;
+%% $D23
+parse_date_spec([$$, $D, D1, D2 | Rest], Acc0) when D1 >= $0, D1 =< $9, D2 >= $0, D2 =< $9 ->
+ Acc = parse_hour([D1, D2], Acc0#{every => day, hour => 0}),
+ parse_date_spec(Rest, Acc);
+%% D23
+parse_date_spec([$D, D1, D2 | Rest], Acc0) when D1 >= $0, D1 =< $9, D2 >= $0, D2 =< $9 ->
+ Acc = parse_hour([D1, D2], Acc0#{hour => 0}),
+ parse_date_spec(Rest, Acc);
+%% $D0
+parse_date_spec([$$, $D, D1 | Rest], Acc0) when D1 >= $0, D1 =< $9 ->
+ Acc = parse_hour([D1], Acc0#{every => day, hour => 0}),
+ parse_date_spec(Rest, Acc);
+%% D0
+parse_date_spec([$D, D1 | Rest], Acc0) when D1 >= $0, D1 =< $9 ->
+ Acc = parse_hour([D1], Acc0#{hour => 0}),
+ parse_date_spec(Rest, Acc);
+%% $H23
+parse_date_spec([$$, $H, H1, H2 | Rest], Acc0) when H1 >= $0, H1 =< $9, H2 >= $0, H2 =< $9 ->
+ Acc = parse_minute([H1, H2], Acc0#{every => hour}),
+ parse_date_spec(Rest, Acc);
+%% H23
+parse_date_spec([$H, H1, H2 | Rest], Acc0) when H1 >= $0, H1 =< $9, H2 >= $0, H2 =< $9 ->
+ Acc = parse_minute([H1, H2], Acc0),
+ parse_date_spec(Rest, Acc);
+%% $H0
+parse_date_spec([$$, $H, H1 | Rest], Acc0) when H1 >= $0, H1 =< $9 ->
+ Acc = parse_minute([H1], Acc0#{every => hour}),
+ parse_date_spec(Rest, Acc);
+%% H0
+parse_date_spec([$H, H1 | Rest], Acc0) when H1 >= $0, H1 =< $9 ->
+ Acc = parse_minute([H1], Acc0),
+ parse_date_spec(Rest, Acc);
+%% $W0
+parse_date_spec([$$, $W, W | Rest], Acc0) when W >= $0, W =< $6 ->
+ Acc = parse_day_of_week([W], Acc0#{every => week, hour => 0}),
+ parse_date_spec(Rest, Acc);
+%% $M0
+parse_date_spec([$$, $M, M | Rest], Acc0) when M >= $0, M =< $6 ->
+ Acc = parse_day_of_month([M], Acc0#{every => month, hour => 0}),
+ parse_date_spec(Rest, Acc);
+%% all other inputs
+parse_date_spec(Input, _Acc) ->
+ io:format(standard_error, "Failed to parse rotation date spec: ~p (error)~n", [Input]),
+ error.
+
+parse_minute("", Acc) ->
+ Acc;
+parse_minute(Input, Acc) ->
+ case string_to_int_within_range(Input, 0, 59) of
+ {Val, _Rest} -> Acc#{minute => Val};
+ error -> error
+ end.
+
+parse_hour("", Acc) ->
+ Acc;
+parse_hour(Input, Acc) ->
+ case string_to_int_within_range(Input, 0, 23) of
+ {Val, _Rest} -> Acc#{hour => Val};
+ error -> error
+ end.
+
+parse_day_of_week("", Acc) ->
+ Acc;
+parse_day_of_week(Input, Acc) ->
+ case string_to_int_within_range(Input, 0, 6) of
+ {DayOfWeek, _Rest} -> Acc#{day_of_week => DayOfWeek};
+ error -> error
+ end.
+
+parse_day_of_month("", Acc) ->
+ Acc;
+parse_day_of_month([Last | _Rest], Acc)
+ when Last=:=$l orelse Last=:=$L ->
+ Acc#{day_of_month => last};
+parse_day_of_month(Input, Acc) ->
+ case string_to_int_within_range(Input, 1, 31) of
+ {DayOfMonth, _Rest} -> Acc#{day_of_month => DayOfMonth};
+ error -> error
+ end.
+
+string_to_int_within_range(String, Min, Max) ->
+ case string:to_integer(String) of
+ {Int, Rest} when is_integer(Int) andalso Int >= Min andalso Int =< Max ->
+ {Int, Rest};
+ _ ->
+ error
+ end.
+
+%%
+%% End of Date spec parser
+%%
+
+maybe_remove_archives(Count,#{file_name:=FileName}=State) ->
+ Archive = rot_file_name(FileName,Count,false),
+ CompressedArchive = rot_file_name(FileName,Count,true),
+ case {file:read_file_info(Archive,[raw]),
+ file:read_file_info(CompressedArchive,[raw])} of
+ {{error,enoent},{error,enoent}} ->
+ ok;
+ _ ->
+ _ = file:delete(Archive),
+ _ = file:delete(CompressedArchive),
+ maybe_remove_archives(Count+1,State)
+ end.
+
+maybe_update_compress(Count,#{rotation:=#{count:=Count}}) ->
+ ok;
+maybe_update_compress(N,#{file_name:=FileName,
+ rotation:=#{compress:=Compress}}=State) ->
+ Archive = rot_file_name(FileName,N,not Compress),
+ case file:read_file_info(Archive,[raw]) of
+ {ok,_} when Compress ->
+ compress_file(Archive);
+ {ok,_} ->
+ decompress_file(Archive);
+ _ ->
+ ok
+ end,
+ maybe_update_compress(N+1,State).
+
+maybe_rotate_file(Bin,#{rotation:=_}=State) when is_binary(Bin) ->
+ maybe_rotate_file(byte_size(Bin),State);
+maybe_rotate_file(AddSize,#{rotation:=#{size:=RotSize,
+ curr_size:=CurrSize}=Rotation}=State) ->
+ {DateBasedRotNeeded, Rotation1} = is_date_based_rotation_needed(Rotation),
+ NewSize = CurrSize + AddSize,
+ if NewSize>RotSize ->
+ rotate_file(State#{rotation=>Rotation1#{curr_size=>NewSize}});
+ DateBasedRotNeeded ->
+ rotate_file(State#{rotation=>Rotation1#{curr_size=>NewSize}});
+ true ->
+ State#{rotation=>Rotation1#{curr_size=>NewSize}}
+ end;
+maybe_rotate_file(_Bin,State) ->
+ State.
+
+is_date_based_rotation_needed(#{last_rotation_ts:=PrevTimestamp,
+ on_date:=DateSpec}=Rotation) ->
+ CurrTimestamp = rotation_timestamp(),
+ case is_date_based_rotation_needed(DateSpec,PrevTimestamp,CurrTimestamp) of
+ true -> {true,Rotation#{last_rotation_ts=>CurrTimestamp}};
+ false -> {false,Rotation}
+ end;
+is_date_based_rotation_needed(Rotation) ->
+ {false,Rotation#{last_rotation_ts=>rotation_timestamp()}}.
+
+is_date_based_rotation_needed(#{every:=day,hour:=Hour},
+ {Date1,Time1},{Date2,Time2})
+ when (Date1<Date2 orelse (Date1=:=Date2 andalso Time1<{Hour,0,0})) andalso
+ Time2>={Hour,0,0} ->
+ true;
+is_date_based_rotation_needed(#{every:=day,hour:=Hour},
+ {Date1,_}=DateTime1,{Date2,Time2}=DateTime2)
+ when Date1<Date2 andalso
+ Time2<{Hour,0,0} ->
+ GregDays2 = calendar:date_to_gregorian_days(Date2),
+ TargetDate = calendar:gregorian_days_to_date(GregDays2 - 1),
+ TargetDateTime = {TargetDate,{Hour,0,0}},
+ DateTime1<TargetDateTime andalso DateTime2>=TargetDateTime;
+is_date_based_rotation_needed(#{every:=week,day_of_week:=TargetDoW,hour:=Hour},
+ DateTime1,{Date2,_}=DateTime2) ->
+ DoW2 = calendar:day_of_the_week(Date2) rem 7,
+ DaysSinceTargetDoW = ((DoW2 - TargetDoW) + 7) rem 7,
+ GregDays2 = calendar:date_to_gregorian_days(Date2),
+ TargetGregDays = GregDays2 - DaysSinceTargetDoW,
+ TargetDate = calendar:gregorian_days_to_date(TargetGregDays),
+ TargetDateTime = {TargetDate,{Hour,0,0}},
+ DateTime1<TargetDateTime andalso DateTime2>=TargetDateTime;
+is_date_based_rotation_needed(#{every:=month,day_of_month:=last,hour:=Hour},
+ DateTime1,{{Year2,Month2,_}=Date2,_}=DateTime2) ->
+ DoMA = calendar:last_day_of_the_month(Year2, Month2),
+ DateA = {Year2,Month2,DoMA},
+ TargetDate = if
+ DateA>Date2 ->
+ case Month2 - 1 of
+ 0 ->
+ {Year2-1,12,31};
+ MonthB ->
+ {Year2,MonthB,
+ calendar:last_day_of_the_month(Year2,MonthB)}
+ end;
+ true ->
+ DateA
+ end,
+ TargetDateTime = {TargetDate,{Hour,0,0}},
+ DateTime1<TargetDateTime andalso DateTime2>=TargetDateTime;
+is_date_based_rotation_needed(#{every:=month,day_of_month:=DoM,hour:=Hour},
+ DateTime1,{{Year2,Month2,_}=Date2,_}=DateTime2) ->
+ DateA = {Year2,Month2,adapt_day_of_month(Year2,Month2,DoM)},
+ TargetDate = if
+ DateA>Date2 ->
+ case Month2 - 1 of
+ 0 ->
+ {Year2-1,12,31};
+ MonthB ->
+ {Year2,MonthB,
+ adapt_day_of_month(Year2,MonthB,DoM)}
+ end;
+ true ->
+ DateA
+ end,
+ TargetDateTime = {TargetDate,{Hour,0,0}},
+ DateTime1<TargetDateTime andalso DateTime2>=TargetDateTime;
+is_date_based_rotation_needed(_,_,_) ->
+ false.
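+
+%% Sketch of how the clauses above behave for a daily spec: with
+%% #{every => day, hour => 0}, a previous rotation timestamp of
+%% {{2021,1,1},{23,0,0}} and a current timestamp of {{2021,1,2},{0,5,0}},
+%% the first clause matches and a rotation is due (dates are illustrative).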
+
+adapt_day_of_month(Year,Month,Day) ->
+ LastDay = calendar:last_day_of_the_month(Year,Month),
+ erlang:min(Day,LastDay).
+
+rotate_file(#{file_name:=FileName,modes:=Modes,rotation:=Rotation}=State) ->
+ State1 = sync_dev(State),
+ _ = delayed_write_close(State),
+ rotate_files(FileName,maps:get(count,Rotation),maps:get(compress,Rotation)),
+ case file:open(FileName,Modes) of
+ {ok,Fd} ->
+ {ok,#file_info{inode=INode}} = file:read_file_info(FileName,[raw]),
+ CurrTimestamp = rotation_timestamp(),
+ State1#{fd=>Fd,inode=>INode,
+ rotation=>Rotation#{curr_size=>0,
+ last_rotation_ts=>CurrTimestamp}};
+ Error ->
+ exit({could_not_reopen_file,Error})
+ end.
+
+rotation_timestamp() ->
+ calendar:now_to_local_time(erlang:timestamp()).
+
+rotate_files(FileName,0,_Compress) ->
+ _ = file:delete(FileName),
+ ok;
+rotate_files(FileName,1,Compress) ->
+ FileName0 = FileName++".0",
+ _ = file:rename(FileName,FileName0),
+ if Compress -> compress_file(FileName0);
+ true -> ok
+ end,
+ ok;
+rotate_files(FileName,Count,Compress) ->
+ _ = file:rename(rot_file_name(FileName,Count-2,Compress),
+ rot_file_name(FileName,Count-1,Compress)),
+ rotate_files(FileName,Count-1,Compress).
+
+rot_file_name(FileName,Count,false) ->
+ FileName ++ "." ++ integer_to_list(Count);
+rot_file_name(FileName,Count,true) ->
+ rot_file_name(FileName,Count,false) ++ ".gz".
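+
+%% E.g. rot_file_name("rabbit.log", 0, false) -> "rabbit.log.0" and
+%% rot_file_name("rabbit.log", 0, true) -> "rabbit.log.0.gz"
+%% (file name shown for illustration only).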
+
+compress_file(FileName) ->
+ {ok,In} = file:open(FileName,[read,binary]),
+ {ok,Out} = file:open(FileName++".gz",[write]),
+ Z = zlib:open(),
+ zlib:deflateInit(Z, default, deflated, 31, 8, default),
+ compress_data(Z,In,Out),
+ zlib:deflateEnd(Z),
+ zlib:close(Z),
+ _ = file:close(In),
+ _ = file:close(Out),
+ _ = file:delete(FileName),
+ ok.
+
+compress_data(Z,In,Out) ->
+ case file:read(In,100000) of
+ {ok,Data} ->
+ Compressed = zlib:deflate(Z, Data),
+ _ = file:write(Out,Compressed),
+ compress_data(Z,In,Out);
+ eof ->
+ Compressed = zlib:deflate(Z, <<>>, finish),
+ _ = file:write(Out,Compressed),
+ ok
+ end.
+
+decompress_file(FileName) ->
+ {ok,In} = file:open(FileName,[read,binary]),
+ {ok,Out} = file:open(filename:rootname(FileName,".gz"),[write]),
+ Z = zlib:open(),
+ zlib:inflateInit(Z, 31),
+ decompress_data(Z,In,Out),
+ zlib:inflateEnd(Z),
+ zlib:close(Z),
+ _ = file:close(In),
+ _ = file:close(Out),
+ _ = file:delete(FileName),
+ ok.
+
+decompress_data(Z,In,Out) ->
+ case file:read(In,1000) of
+ {ok,Data} ->
+ Decompressed = zlib:inflate(Z, Data),
+ _ = file:write(Out,Decompressed),
+ decompress_data(Z,In,Out);
+ eof ->
+ ok
+ end.
+
+maybe_notify_error(_Op, ok, _State) ->
+ ok;
+maybe_notify_error(Op, Result, #{write_res:=WR,sync_res:=SR})
+ when (Op==write andalso Result==WR) orelse
+ (Op==filesync andalso Result==SR) ->
+ %% don't report same error twice
+ ok;
+maybe_notify_error(Op, Error, #{handler_name:=HandlerName,file_name:=FileName}) ->
+ logger_h_common:error_notify({HandlerName,Op,FileName,Error}),
+ ok.
+
+timestamp() ->
+ erlang:monotonic_time(millisecond).
diff --git a/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_logger_text_fmt.erl b/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_logger_text_fmt.erl
new file mode 100644
index 0000000000..5e703d2fc2
--- /dev/null
+++ b/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_logger_text_fmt.erl
@@ -0,0 +1,100 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(rabbit_logger_text_fmt).
+
+-export([format/2]).
+
+format(#{msg := Msg, meta := Meta} = LogEvent, Config) ->
+ Prefix = format_prefix(LogEvent, Config),
+ Color = pick_color(LogEvent, Config),
+ FormattedMsg = rabbit_logger_fmt_helpers:format_msg(Msg, Meta, Config),
+ prepend_prefix_to_msg_and_add_color(
+ Prefix, Color, FormattedMsg, LogEvent, Config).
+
+format_prefix(LogEvent, #{prefix_format := PrefixFormat} = Config) ->
+ format_prefix(PrefixFormat, LogEvent, Config, []);
+format_prefix(LogEvent, Config) ->
+ %% Default prefix format.
+ format_prefix([time, " [", level, "] ", pid, " "], LogEvent, Config, []).
+
+format_prefix([String | Rest], LogEvent, Config, Prefix)
+ when is_list(String) ->
+ format_prefix(Rest, LogEvent, Config, [String | Prefix]);
+format_prefix([Var | Rest], LogEvent, Config, Prefix)
+ when is_atom(Var) ->
+ String = format_var(Var, LogEvent, Config),
+ format_prefix(Rest, LogEvent, Config, [String | Prefix]);
+format_prefix([], _, _, Prefix) ->
+ lists:reverse(Prefix).
+
+format_var(level, #{level := Level}, Config) ->
+ rabbit_logger_fmt_helpers:format_level(Level, Config);
+format_var(time, #{meta := #{time := Timestamp}}, Config) ->
+ rabbit_logger_fmt_helpers:format_time(Timestamp, Config);
+format_var(Var, #{meta := Meta}, _) ->
+ case maps:get(Var, Meta, undefined) of
+ undefined ->
+ io_lib:format("<unknown ~s>", [Var]);
+ Value ->
+ case io_lib:char_list(Value) of
+ true -> io_lib:format("~s", [Value]);
+ false -> io_lib:format("~p", [Value])
+ end
+ end.
+
+pick_color(#{level := Level}, #{use_colors := true} = Config) ->
+ ColorStart = level_to_color(Level, Config),
+ ColorEnd = "\033[0m",
+ {ColorStart, ColorEnd};
+pick_color(_, _) ->
+ {"", ""}.
+
+level_to_color(Level, #{color_esc_seqs := ColorEscSeqs}) ->
+ maps:get(Level, ColorEscSeqs);
+level_to_color(debug, _) -> "\033[38;5;246m";
+level_to_color(info, _) -> "";
+level_to_color(notice, _) -> "\033[38;5;87m";
+level_to_color(warning, _) -> "\033[38;5;214m";
+level_to_color(error, _) -> "\033[38;5;160m";
+level_to_color(critical, _) -> "\033[1;37m\033[48;5;20m";
+level_to_color(alert, _) -> "\033[1;37m\033[48;5;93m";
+level_to_color(emergency, _) -> "\033[1;37m\033[48;5;196m".
+
+prepend_prefix_to_msg_and_add_color(
+ Prefix, {ColorStart, ColorEnd}, FormattedMsg, LogEvent, Config) ->
+ Lines = split_lines(FormattedMsg, Config),
+ [[ColorStart,
+ format_line(Prefix, Line, LogEvent, Config),
+ ColorEnd,
+ $\n]
+ || Line <- Lines].
+
+split_lines(FormattedMsg, _) ->
+ FlattenMsg = lists:flatten(FormattedMsg),
+ string:split(FlattenMsg, [$\n], all).
+
+format_line(Prefix, Msg, LogEvent, #{line_format := Format} = Config) ->
+ format_line(Format, Msg, LogEvent, Config, [Prefix]);
+format_line(Prefix, Msg, LogEvent, Config) ->
+ format_line([msg], Msg, LogEvent, Config, [Prefix]).
+
+format_line([msg | Rest], Msg, LogEvent, Config, Line) ->
+ format_line(Rest, Msg, LogEvent, Config, [Msg | Line]);
+format_line([String | Rest], Msg, LogEvent, Config, Line)
+ when is_list(String) ->
+ format_line(Rest, Msg, LogEvent, Config, [String | Line]);
+format_line([Var | Rest], Msg, LogEvent, Config, Line)
+ when is_atom(Var) ->
+ String = format_var(Var, LogEvent, Config),
+ format_line(Rest, Msg, LogEvent, Config, [String | Line]);
+format_line([], _, _, _, Line) ->
+ remove_trailing_whitespaces(Line).
+
+remove_trailing_whitespaces([Tail | Line]) ->
+ Tail1 = string:strip(Tail, right),
+ lists:reverse([Tail1 | Line]).
diff --git a/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_prelaunch.erl b/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_prelaunch.erl
index b6b29481c7..353403a8e1 100644
--- a/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_prelaunch.erl
+++ b/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_prelaunch.erl
@@ -1,7 +1,10 @@
-module(rabbit_prelaunch).
+-include_lib("kernel/include/logger.hrl").
-include_lib("eunit/include/eunit.hrl").
+-include_lib("rabbit_common/include/logging.hrl").
+
-export([run_prelaunch_first_phase/0,
assert_mnesia_is_stopped/0,
get_context/0,
@@ -24,6 +27,8 @@
run_prelaunch_first_phase() ->
try
+ ok = logger:set_process_metadata(
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
do_run()
catch
throw:{error, _} = Error ->
@@ -67,26 +72,25 @@ do_run() ->
?assertMatch(#{}, Context0),
%% Setup logging for the prelaunch phase.
- ok = rabbit_prelaunch_early_logging:setup_early_logging(Context0, true),
+ ok = rabbit_prelaunch_early_logging:setup_early_logging(Context0),
IsInitialPass = is_initial_pass(),
case IsInitialPass of
true ->
- rabbit_log_prelaunch:debug(""),
- rabbit_log_prelaunch:debug(
- "== Prelaunch phase [1/2] (initial pass) =="),
- rabbit_log_prelaunch:debug("");
+ ?LOG_DEBUG(""),
+ ?LOG_DEBUG("== Prelaunch phase [1/2] (initial pass) =="),
+ ?LOG_DEBUG("");
false ->
- rabbit_log_prelaunch:debug(""),
- rabbit_log_prelaunch:debug("== Prelaunch phase [1/2] =="),
- rabbit_log_prelaunch:debug("")
+ ?LOG_DEBUG(""),
+ ?LOG_DEBUG("== Prelaunch phase [1/2] =="),
+ ?LOG_DEBUG("")
end,
rabbit_env:log_process_env(),
%% Load rabbitmq-env.conf, redo logging setup and continue.
Context1 = rabbit_env:get_context_after_logging_init(Context0),
?assertMatch(#{}, Context1),
- ok = rabbit_prelaunch_early_logging:setup_early_logging(Context1, true),
+ ok = rabbit_prelaunch_early_logging:setup_early_logging(Context1),
rabbit_env:log_process_env(),
%% Complete context now that we have the final environment loaded.
@@ -111,8 +115,15 @@ do_run() ->
ok = rabbit_prelaunch_dist:setup(Context),
%% 4. Write PID file.
- rabbit_log_prelaunch:debug(""),
+ ?LOG_DEBUG(""),
_ = write_pid_file(Context),
+
+ %% Garbage collect before returning because we do not want
+ %% to keep memory around unnecessarily, even if only a few
+ %% MiBs, as it would pollute the output of tools like
+ %% Observer or observer_cli.
+ _ = erlang:garbage_collect(),
+
ignore.
assert_mnesia_is_stopped() ->
@@ -138,7 +149,7 @@ get_stop_reason() ->
set_stop_reason(Reason) ->
case get_stop_reason() of
undefined ->
- rabbit_log_prelaunch:debug("Set stop reason to: ~p", [Reason]),
+ ?LOG_DEBUG("Set stop reason to: ~p", [Reason]),
persistent_term:put(?PT_KEY_STOP_REASON, Reason);
_ ->
ok
@@ -161,7 +172,7 @@ setup_shutdown_func() ->
{ok, {ThisMod, ThisFunc}} ->
ok;
{ok, {ExistingMod, ExistingFunc}} ->
- rabbit_log_prelaunch:debug(
+ ?LOG_DEBUG(
"Setting up kernel shutdown function: ~s:~s/1 "
"(chained with ~s:~s/1)",
[ThisMod, ThisFunc, ExistingMod, ExistingFunc]),
@@ -170,7 +181,7 @@ setup_shutdown_func() ->
ExistingShutdownFunc),
ok = record_kernel_shutdown_func(ThisMod, ThisFunc);
_ ->
- rabbit_log_prelaunch:debug(
+ ?LOG_DEBUG(
"Setting up kernel shutdown function: ~s:~s/1",
[ThisMod, ThisFunc]),
ok = record_kernel_shutdown_func(ThisMod, ThisFunc)
@@ -182,7 +193,7 @@ record_kernel_shutdown_func(Mod, Func) ->
[{persistent, true}]).
shutdown_func(Reason) ->
- rabbit_log_prelaunch:debug(
+ ?LOG_DEBUG(
"Running ~s:shutdown_func() as part of `kernel` shutdown", [?MODULE]),
Context = get_context(),
remove_pid_file(Context),
@@ -195,7 +206,7 @@ shutdown_func(Reason) ->
end.
write_pid_file(#{pid_file := PidFile}) ->
- rabbit_log_prelaunch:debug("Writing PID file: ~s", [PidFile]),
+ ?LOG_DEBUG("Writing PID file: ~s", [PidFile]),
case filelib:ensure_dir(PidFile) of
ok ->
OSPid = os:getpid(),
@@ -203,13 +214,13 @@ write_pid_file(#{pid_file := PidFile}) ->
ok ->
ok;
{error, Reason} = Error ->
- rabbit_log_prelaunch:warning(
+ ?LOG_WARNING(
"Failed to write PID file \"~s\": ~s",
[PidFile, file:format_error(Reason)]),
Error
end;
{error, Reason} = Error ->
- rabbit_log_prelaunch:warning(
+ ?LOG_WARNING(
"Failed to create PID file \"~s\" directory: ~s",
[PidFile, file:format_error(Reason)]),
Error
@@ -218,10 +229,10 @@ write_pid_file(_) ->
ok.
remove_pid_file(#{pid_file := PidFile, keep_pid_file_on_exit := true}) ->
- rabbit_log_prelaunch:debug("Keeping PID file: ~s", [PidFile]),
+ ?LOG_DEBUG("Keeping PID file: ~s", [PidFile]),
ok;
remove_pid_file(#{pid_file := PidFile}) ->
- rabbit_log_prelaunch:debug("Deleting PID file: ~s", [PidFile]),
+ ?LOG_DEBUG("Deleting PID file: ~s", [PidFile]),
_ = file:delete(PidFile),
ok;
remove_pid_file(_) ->
diff --git a/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_prelaunch_conf.erl b/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_prelaunch_conf.erl
index fbbae7a185..0df414bad7 100644
--- a/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_prelaunch_conf.erl
+++ b/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_prelaunch_conf.erl
@@ -1,9 +1,11 @@
-module(rabbit_prelaunch_conf).
-include_lib("kernel/include/file.hrl").
+-include_lib("kernel/include/logger.hrl").
-include_lib("stdlib/include/zip.hrl").
-include_lib("rabbit_common/include/rabbit.hrl").
+-include_lib("rabbit_common/include/logging.hrl").
-export([setup/1,
get_config_state/0,
@@ -15,8 +17,9 @@
-endif.
setup(Context) ->
- rabbit_log_prelaunch:debug(""),
- rabbit_log_prelaunch:debug("== Configuration =="),
+ ?LOG_DEBUG(
+ "\n== Configuration ==",
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
%% TODO: Check if directories/files are inside Mnesia dir.
@@ -52,9 +55,10 @@ setup(Context) ->
#{config_files => ConfigFiles,
config_advanced_file => AdvancedConfigFile};
undefined when AdvancedConfigFile =/= undefined ->
- rabbit_log_prelaunch:warning(
+ ?LOG_WARNING(
"Using RABBITMQ_ADVANCED_CONFIG_FILE: ~s",
- [AdvancedConfigFile]),
+ [AdvancedConfigFile],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
Config = load_cuttlefish_config_file(Context,
AdditionalConfigFiles,
AdvancedConfigFile),
@@ -66,10 +70,10 @@ setup(Context) ->
#{config_files => [],
config_advanced_file => undefined}
end,
- ok = override_with_hard_coded_critical_config(),
ok = set_credentials_obfuscation_secret(),
- rabbit_log_prelaunch:debug(
- "Saving config state to application env: ~p", [State]),
+ ?LOG_DEBUG(
+ "Saving config state to application env: ~p", [State],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
store_config_state(State).
store_config_state(ConfigState) ->
@@ -83,7 +87,16 @@ get_config_state() ->
%% -------------------------------------------------------------------
set_default_config() ->
- rabbit_log_prelaunch:debug("Setting default config"),
+ ?LOG_DEBUG("Setting default config",
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
+ OsirisConfig =
+ case osiris_util:get_replication_configuration_from_tls_dist(
+ fun osiris_log/3) of
+ [] ->
+ [];
+ OsirisTlsReplicationConfig ->
+ [{osiris, OsirisTlsReplicationConfig}]
+ end,
Config = [
{ra,
[
@@ -99,6 +112,8 @@ set_default_config() ->
%% goes down it is still immediately detected
{poll_interval, 5000}
]},
+ {syslog,
+ [{app_name, "rabbitmq-server"}]},
{sysmon_handler,
[{process_limit, 100},
{port_limit, 100},
@@ -107,9 +122,20 @@ set_default_config() ->
{heap_word_limit, 0},
{busy_port, false},
{busy_dist_port, true}]}
+ | OsirisConfig
],
apply_erlang_term_based_config(Config).
+osiris_log(debug, Fmt, Args) ->
+ ?LOG_DEBUG(Fmt, Args,
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH});
+osiris_log(warn, Fmt, Args) ->
+ ?LOG_WARNING(Fmt, Args,
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH});
+osiris_log(_, Fmt, Args) ->
+ ?LOG_INFO(Fmt, Args,
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}).
+
find_actual_main_config_file(#{main_config_file := File}) ->
case filelib:is_regular(File) of
true ->
@@ -126,15 +152,18 @@ find_actual_main_config_file(#{main_config_file := File}) ->
true ->
case filelib:is_regular(NewFormatFile) of
true ->
- rabbit_log_prelaunch:warning(
+ ?LOG_WARNING(
"Both old (.config) and new (.conf) format "
- "config files exist."),
- rabbit_log_prelaunch:warning(
+ "config files exist.",
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
+ ?LOG_WARNING(
"Using the old format config file: ~s",
- [OldFormatFile]),
- rabbit_log_prelaunch:warning(
+ [OldFormatFile],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
+ ?LOG_WARNING(
"Please update your config files to the new "
- "format and remove the old file."),
+ "format and remove the old file.",
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
ok;
false ->
ok
@@ -193,15 +222,18 @@ generate_config_from_cuttlefish_files(Context,
SchemaFiles = find_cuttlefish_schemas(Context),
case SchemaFiles of
[] ->
- rabbit_log_prelaunch:error(
- "No configuration schema found~n", []),
+ ?LOG_ERROR(
+ "No configuration schema found", [],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
throw({error, no_configuration_schema_found});
_ ->
- rabbit_log_prelaunch:debug(
- "Configuration schemas found:~n", []),
+ ?LOG_DEBUG(
+ "Configuration schemas found:~n", [],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
lists:foreach(
fun(SchemaFile) ->
- rabbit_log_prelaunch:debug(" - ~ts", [SchemaFile])
+ ?LOG_DEBUG(" - ~ts", [SchemaFile],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH})
end,
SchemaFiles),
ok
@@ -209,37 +241,44 @@ generate_config_from_cuttlefish_files(Context,
Schema = cuttlefish_schema:files(SchemaFiles),
%% Load configuration.
- rabbit_log_prelaunch:debug(
- "Loading configuration files (Cuttlefish based):"),
+ ?LOG_DEBUG(
+ "Loading configuration files (Cuttlefish based):",
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
lists:foreach(
fun(ConfigFile) ->
- rabbit_log_prelaunch:debug(" - ~ts", [ConfigFile])
+ ?LOG_DEBUG(" - ~ts", [ConfigFile],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH})
end, ConfigFiles),
case cuttlefish_conf:files(ConfigFiles) of
{errorlist, Errors} ->
- rabbit_log_prelaunch:error("Error parsing configuration:"),
+ ?LOG_ERROR("Error parsing configuration:",
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
lists:foreach(
fun(Error) ->
- rabbit_log_prelaunch:error(
+ ?LOG_ERROR(
" - ~ts",
- [cuttlefish_error:xlate(Error)])
+ [cuttlefish_error:xlate(Error)],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH})
end, Errors),
- rabbit_log_prelaunch:error(
- "Are these files using the Cuttlefish format?"),
+ ?LOG_ERROR(
+ "Are these files using the Cuttlefish format?",
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
throw({error, failed_to_parse_configuration_file});
Config0 ->
%% Finalize configuration, based on the schema.
Config = case cuttlefish_generator:map(Schema, Config0) of
{error, Phase, {errorlist, Errors}} ->
%% TODO
- rabbit_log_prelaunch:error(
+ ?LOG_ERROR(
"Error preparing configuration in phase ~ts:",
- [Phase]),
+ [Phase],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
lists:foreach(
fun(Error) ->
- rabbit_log_prelaunch:error(
+ ?LOG_ERROR(
" - ~ts",
- [cuttlefish_error:xlate(Error)])
+ [cuttlefish_error:xlate(Error)],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH})
end, Errors),
throw(
{error, failed_to_prepare_configuration});
@@ -253,8 +292,9 @@ generate_config_from_cuttlefish_files(Context,
find_cuttlefish_schemas(Context) ->
Apps = list_apps(Context),
- rabbit_log_prelaunch:debug(
- "Looking up configuration schemas in the following applications:"),
+ ?LOG_DEBUG(
+ "Looking up configuration schemas in the following applications:",
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
find_cuttlefish_schemas(Apps, []).
find_cuttlefish_schemas([App | Rest], AllSchemas) ->
@@ -264,10 +304,10 @@ find_cuttlefish_schemas([], AllSchemas) ->
lists:sort(fun(A,B) -> A < B end, AllSchemas).
list_apps(#{os_type := {win32, _}, plugins_path := PluginsPath}) ->
- PluginsDirs = string:lexemes(PluginsPath, ";"),
+ PluginsDirs = lists:usort(string:lexemes(PluginsPath, ";")),
list_apps1(PluginsDirs, []);
list_apps(#{plugins_path := PluginsPath}) ->
- PluginsDirs = string:lexemes(PluginsPath, ":"),
+ PluginsDirs = lists:usort(string:lexemes(PluginsPath, ":")),
list_apps1(PluginsDirs, []).
@@ -281,9 +321,10 @@ list_apps1([Dir | Rest], Apps) ->
Apps1 = lists:umerge(Apps, lists:sort(NewApps)),
list_apps1(Rest, Apps1);
{error, Reason} ->
- rabbit_log_prelaunch:debug(
+ ?LOG_DEBUG(
"Failed to list directory \"~ts\" content: ~ts",
- [Dir, file:format_error(Reason)]),
+ [Dir, file:format_error(Reason)],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
list_apps1(Rest, Apps)
end;
list_apps1([], AppInfos) ->
@@ -299,17 +340,19 @@ list_schemas_in_app(App) ->
true ->
case code:priv_dir(App) of
{error, bad_name} ->
- rabbit_log_prelaunch:debug(
- " [ ] ~s (no readable priv dir)", [App]),
+ ?LOG_DEBUG(
+ " [ ] ~s (no readable priv dir)", [App],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
[];
PrivDir ->
SchemaDir = filename:join([PrivDir, "schema"]),
do_list_schemas_in_app(App, SchemaDir)
end;
Reason1 ->
- rabbit_log_prelaunch:debug(
+ ?LOG_DEBUG(
" [ ] ~s (failed to load application: ~p)",
- [App, Reason1]),
+ [App, Reason1],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
[]
end,
case Unload of
@@ -322,74 +365,95 @@ list_schemas_in_app(App) ->
do_list_schemas_in_app(App, SchemaDir) ->
case erl_prim_loader:list_dir(SchemaDir) of
{ok, Files} ->
- rabbit_log_prelaunch:debug(" [x] ~s", [App]),
+ ?LOG_DEBUG(" [x] ~s", [App],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
[filename:join(SchemaDir, File)
|| [C | _] = File <- Files,
C =/= $.];
error ->
- rabbit_log_prelaunch:debug(
- " [ ] ~s (no readable schema dir)", [App]),
+ ?LOG_DEBUG(
+ " [ ] ~s (no readable schema dir)", [App],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
[]
end.
override_with_advanced_config(Config, undefined) ->
Config;
override_with_advanced_config(Config, AdvancedConfigFile) ->
- rabbit_log_prelaunch:debug(
+ ?LOG_DEBUG(
"Override with advanced configuration file \"~ts\"",
- [AdvancedConfigFile]),
+ [AdvancedConfigFile],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
case file:consult(AdvancedConfigFile) of
{ok, [AdvancedConfig]} ->
cuttlefish_advanced:overlay(Config, AdvancedConfig);
{ok, OtherTerms} ->
- rabbit_log_prelaunch:error(
+ ?LOG_ERROR(
"Failed to load advanced configuration file \"~ts\", "
"incorrect format: ~p",
- [AdvancedConfigFile, OtherTerms]),
+ [AdvancedConfigFile, OtherTerms],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
throw({error, failed_to_parse_advanced_configuration_file});
{error, Reason} ->
- rabbit_log_prelaunch:error(
+ ?LOG_ERROR(
"Failed to load advanced configuration file \"~ts\": ~ts",
- [AdvancedConfigFile, file:format_error(Reason)]),
+ [AdvancedConfigFile, file:format_error(Reason)],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
throw({error, failed_to_read_advanced_configuration_file})
end.
-override_with_hard_coded_critical_config() ->
- rabbit_log_prelaunch:debug("Override with hard-coded critical config"),
- Config = [
- {ra,
- %% Make Ra use a custom logger that dispatches to lager
- %% instead of the default OTP logger
- [{logger_module, rabbit_log_ra_shim}]},
- {osiris,
- [{logger_module, rabbit_log_osiris_shim}]}
- ],
- apply_erlang_term_based_config(Config).
-
apply_erlang_term_based_config([{_, []} | Rest]) ->
apply_erlang_term_based_config(Rest);
apply_erlang_term_based_config([{App, Vars} | Rest]) ->
- rabbit_log_prelaunch:debug(" Applying configuration for '~s':", [App]),
+ ?LOG_DEBUG(" Applying configuration for '~s':", [App],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
ok = apply_app_env_vars(App, Vars),
apply_erlang_term_based_config(Rest);
apply_erlang_term_based_config([]) ->
ok.
apply_app_env_vars(App, [{Var, Value} | Rest]) ->
- rabbit_log_prelaunch:debug(" - ~s = ~p", [Var, Value]),
+ log_app_env_var(Var, Value),
ok = application:set_env(App, Var, Value, [{persistent, true}]),
apply_app_env_vars(App, Rest);
apply_app_env_vars(_, []) ->
ok.
+log_app_env_var(password = Var, _) ->
+ ?LOG_DEBUG(" - ~s = ********", [Var],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH});
+log_app_env_var(Var, Value) when is_list(Value) ->
+ %% To redact sensitive entries,
+ %% e.g. {password,"********"} for stream replication over TLS
+ Redacted = redact_env_var(Value),
+ ?LOG_DEBUG(" - ~s = ~p", [Var, Redacted],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH});
+log_app_env_var(Var, Value) ->
+ ?LOG_DEBUG(" - ~s = ~p", [Var, Value],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}).
+
+redact_env_var(Value) when is_list(Value) ->
+ redact_env_var(Value, []);
+redact_env_var(Value) ->
+ Value.
+
+redact_env_var([], Acc) ->
+ lists:reverse(Acc);
+redact_env_var([{password, _Value} | Rest], Acc) ->
+ redact_env_var(Rest, Acc ++ [{password, "********"}]);
+redact_env_var([AppVar | Rest], Acc) ->
+ redact_env_var(Rest, [AppVar | Acc]).
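+
+%% Illustration (made-up values):
+%% redact_env_var([{password, "s3cr3t"}, {port, 5552}])
+%% -> [{password, "********"}, {port, 5552}]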
+
set_credentials_obfuscation_secret() ->
- rabbit_log_prelaunch:debug(
+ ?LOG_DEBUG(
"Refreshing credentials obfuscation configuration from env: ~p",
- [application:get_all_env(credentials_obfuscation)]),
+ [application:get_all_env(credentials_obfuscation)],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
ok = credentials_obfuscation:refresh_config(),
CookieBin = rabbit_data_coercion:to_binary(erlang:get_cookie()),
- rabbit_log_prelaunch:debug(
- "Setting credentials obfuscation secret to '~s'", [CookieBin]),
+ ?LOG_DEBUG(
+ "Setting credentials obfuscation secret to '~s'", [CookieBin],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
ok = credentials_obfuscation:set_secret(CookieBin).
%% -------------------------------------------------------------------
@@ -397,7 +461,8 @@ set_credentials_obfuscation_secret() ->
%% -------------------------------------------------------------------
decrypt_config(Apps) ->
- rabbit_log_prelaunch:debug("Decoding encrypted config values (if any)"),
+ ?LOG_DEBUG("Decoding encrypted config values (if any)",
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
ConfigEntryDecoder = application:get_env(rabbit, config_entry_decoder, []),
decrypt_config(Apps, ConfigEntryDecoder).
@@ -415,8 +480,9 @@ decrypt_app(App, [{Key, Value} | Tail], Algo) ->
{Value, Algo1} ->
Algo1;
{NewValue, Algo1} ->
- rabbit_log_prelaunch:debug(
- "Value of `~s` decrypted", [Key]),
+ ?LOG_DEBUG(
+ "Value of `~s` decrypted", [Key],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
ok = application:set_env(App, Key, NewValue,
[{persistent, true}]),
Algo1
@@ -474,7 +540,8 @@ config_entry_decoder_to_algo(ConfigEntryDecoder) ->
end.
get_passphrase(ConfigEntryDecoder) ->
- rabbit_log_prelaunch:debug("Getting encrypted config passphrase"),
+ ?LOG_DEBUG("Getting encrypted config passphrase",
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
case proplists:get_value(passphrase, ConfigEntryDecoder) of
prompt ->
IoDevice = get_input_iodevice(),
diff --git a/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_prelaunch_dist.erl b/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_prelaunch_dist.erl
index 3d718438a7..2c5c356357 100644
--- a/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_prelaunch_dist.erl
+++ b/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_prelaunch_dist.erl
@@ -1,12 +1,20 @@
-module(rabbit_prelaunch_dist).
+-include_lib("eunit/include/eunit.hrl").
+-include_lib("kernel/include/logger.hrl").
+
+-include_lib("rabbit_common/include/logging.hrl").
+
-export([setup/1]).
setup(#{nodename := Node, nodename_type := NameType} = Context) ->
- rabbit_log_prelaunch:debug(""),
- rabbit_log_prelaunch:debug("== Erlang distribution =="),
- rabbit_log_prelaunch:debug("Rqeuested node name: ~s (type: ~s)",
- [Node, NameType]),
+ ?LOG_DEBUG(
+ "~n== Erlang distribution ==", [],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
+ ?LOG_DEBUG(
+ "Rqeuested node name: ~s (type: ~s)",
+ [Node, NameType],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
case node() of
nonode@nohost ->
ok = rabbit_nodes_common:ensure_epmd(),
@@ -16,8 +24,9 @@ setup(#{nodename := Node, nodename_type := NameType} = Context) ->
ok = do_setup(Context);
Node ->
- rabbit_log_prelaunch:debug(
- "Erlang distribution already running", []),
+ ?LOG_DEBUG(
+ "Erlang distribution already running", [],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
ok;
Unexpected ->
throw({error, {erlang_dist_running_with_unexpected_nodename,
@@ -25,8 +34,12 @@ setup(#{nodename := Node, nodename_type := NameType} = Context) ->
end,
ok.
-do_setup(#{nodename := Node, nodename_type := NameType}) ->
- rabbit_log_prelaunch:debug("Starting Erlang distribution", []),
+do_setup(#{nodename := Node,
+ nodename_type := NameType,
+ var_origins := Origins} = Config) ->
+ ?LOG_DEBUG(
+ "Starting Erlang distribution",
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
case application:get_env(kernel, net_ticktime) of
{ok, Ticktime} when is_integer(Ticktime) andalso Ticktime >= 1 ->
%% The value passed to net_kernel:start/1 is the
@@ -39,12 +52,26 @@ do_setup(#{nodename := Node, nodename_type := NameType}) ->
{ok, _} = net_kernel:start([Node, NameType]),
ok
end,
+
+ %% Override the Erlang cookie if one was set in the environment.
+ case maps:get(erlang_cookie, Origins, default) of
+ environment ->
+ ?LOG_WARNING(
+ "Overriding Erlang cookie using the value set in the environment",
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
+ Cookie = maps:get(erlang_cookie, Config),
+ ?assert(is_atom(Cookie)),
+ true = erlang:set_cookie(node(), Cookie);
+ _ ->
+ ok
+ end,
ok.
%% Check whether a node with the same name is already running
duplicate_node_check(#{split_nodename := {NodeName, NodeHost}}) ->
- rabbit_log_prelaunch:debug(
- "Checking if node name ~s is already used", [NodeName]),
+ ?LOG_DEBUG(
+ "Checking if node name ~s is already used", [NodeName],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
PrelaunchName = rabbit_nodes_common:make(
{NodeName ++ "_prelaunch_" ++ os:getpid(),
"localhost"}),
@@ -63,8 +90,9 @@ duplicate_node_check(#{split_nodename := {NodeName, NodeHost}}) ->
end.
dist_port_range_check(#{erlang_dist_tcp_port := DistTcpPort}) ->
- rabbit_log_prelaunch:debug(
- "Checking if TCP port ~b is valid", [DistTcpPort]),
+ ?LOG_DEBUG(
+ "Checking if TCP port ~b is valid", [DistTcpPort],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
case DistTcpPort of
_ when DistTcpPort < 1 orelse DistTcpPort > 65535 ->
throw({error, {invalid_dist_port_range, DistTcpPort}});
@@ -74,8 +102,9 @@ dist_port_range_check(#{erlang_dist_tcp_port := DistTcpPort}) ->
dist_port_use_check(#{split_nodename := {_, NodeHost},
erlang_dist_tcp_port := DistTcpPort}) ->
- rabbit_log_prelaunch:debug(
- "Checking if TCP port ~b is available", [DistTcpPort]),
+ ?LOG_DEBUG(
+ "Checking if TCP port ~b is available", [DistTcpPort],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
dist_port_use_check_ipv4(NodeHost, DistTcpPort).
dist_port_use_check_ipv4(NodeHost, Port) ->
diff --git a/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_prelaunch_early_logging.erl b/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_prelaunch_early_logging.erl
index 4e371c76ae..9568abab14 100644
--- a/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_prelaunch_early_logging.erl
+++ b/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_prelaunch_early_logging.erl
@@ -1,60 +1,141 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2019-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
-module(rabbit_prelaunch_early_logging).
--include_lib("rabbit_common/include/rabbit_log.hrl").
+-include_lib("kernel/include/logger.hrl").
+
+-include_lib("rabbit_common/include/logging.hrl").
--export([setup_early_logging/2,
+-export([setup_early_logging/1,
+ default_formatter/1,
+ default_console_formatter/1,
+ default_file_formatter/1,
+ default_journald_formatter/1,
+ default_syslog_formatter/1,
enable_quick_dbg/1,
use_colored_logging/0,
use_colored_logging/1,
- list_expected_sinks/0]).
-
-setup_early_logging(#{log_levels := undefined} = Context,
- LagerEventToStdout) ->
- setup_early_logging(Context#{log_levels => get_default_log_level()},
- LagerEventToStdout);
-setup_early_logging(Context, LagerEventToStdout) ->
- Configured = lists:member(
- lager_util:make_internal_sink_name(rabbit_log_prelaunch),
- lager:list_all_sinks()),
- case Configured of
+ translate_formatter_conf/2,
+ translate_journald_fields_conf/2]).
+-export([filter_log_event/2]).
+
+-ifdef(TEST).
+-export([levels/0,
+ determine_prefix/1]).
+-endif.
+
+-define(CONFIGURED_KEY, {?MODULE, configured}).
+
+setup_early_logging(#{log_levels := undefined} = Context) ->
+ setup_early_logging(Context#{log_levels => get_default_log_level()});
+setup_early_logging(Context) ->
+ case is_configured() of
true -> ok;
- false -> do_setup_early_logging(Context, LagerEventToStdout)
+ false -> do_setup_early_logging(Context)
end.
get_default_log_level() ->
- #{"prelaunch" => warning}.
-
-do_setup_early_logging(#{log_levels := LogLevels} = Context,
- LagerEventToStdout) ->
- redirect_logger_messages_to_lager(),
- Colored = use_colored_logging(Context),
- application:set_env(lager, colored, Colored),
- ConsoleBackend = lager_console_backend,
- case LagerEventToStdout of
- true ->
- GLogLevel = case LogLevels of
- #{global := Level} -> Level;
- _ -> warning
- end,
- _ = lager_app:start_handler(
- lager_event, ConsoleBackend, [{level, GLogLevel}]),
- ok;
- false ->
- ok
- end,
- lists:foreach(
- fun(Sink) ->
- CLogLevel = get_log_level(LogLevels, Sink),
- lager_app:configure_sink(
- Sink,
- [{handlers, [{ConsoleBackend, [{level, CLogLevel}]}]}])
- end, list_expected_sinks()),
+ #{"prelaunch" => notice}.
+
+do_setup_early_logging(#{log_levels := LogLevels} = Context) ->
+ add_rmqlog_filter(LogLevels),
+ ok = logger:update_handler_config(
+ default, main_handler_config(Context)).
+
+is_configured() ->
+ persistent_term:get(?CONFIGURED_KEY, false).
+
+add_rmqlog_filter(LogLevels) ->
+ add_erlang_specific_filters(LogLevels),
+ FilterConfig0 = lists:foldl(
+ fun
+ ({_, V}, FC) when is_boolean(V) -> FC;
+ ({K, V}, FC) when is_atom(K) -> FC#{K => V};
+ ({K, V}, FC) -> FC#{list_to_atom(K) => V}
+ end, #{}, maps:to_list(LogLevels)),
+ FilterConfig1 = case maps:is_key(global, FilterConfig0) of
+ true -> FilterConfig0;
+ false -> FilterConfig0#{global => ?DEFAULT_LOG_LEVEL}
+ end,
+ ok = logger:add_handler_filter(
+ default, ?FILTER_NAME, {fun filter_log_event/2, FilterConfig1}),
+ ok = logger:set_primary_config(level, all),
+ ok = persistent_term:put(?CONFIGURED_KEY, true).
+
+add_erlang_specific_filters(_) ->
+ _ = logger:add_handler_filter(
+ default, progress_reports, {fun logger_filters:progress/2, stop}),
ok.
-redirect_logger_messages_to_lager() ->
- io:format(standard_error, "Configuring logger redirection~n", []),
- ok = logger:add_handler(rabbit_log, rabbit_log, #{}),
- ok = logger:set_primary_config(level, all).
+filter_log_event(
+ #{meta := #{domain := ?RMQLOG_DOMAIN_GLOBAL}} = LogEvent,
+ FilterConfig) ->
+ MinLevel = get_min_level(global, FilterConfig),
+ do_filter_log_event(LogEvent, MinLevel);
+filter_log_event(
+ #{meta := #{domain := [?RMQLOG_SUPER_DOMAIN_NAME, CatName | _]}} = LogEvent,
+ FilterConfig) ->
+ MinLevel = get_min_level(CatName, FilterConfig),
+ do_filter_log_event(LogEvent, MinLevel);
+filter_log_event(
+ #{meta := #{domain := [CatName | _]}} = LogEvent,
+ FilterConfig) ->
+ MinLevel = get_min_level(CatName, FilterConfig),
+ do_filter_log_event(LogEvent, MinLevel);
+filter_log_event(LogEvent, FilterConfig) ->
+ MinLevel = get_min_level(global, FilterConfig),
+ do_filter_log_event(LogEvent, MinLevel).
+
+get_min_level(global, FilterConfig) ->
+ maps:get(global, FilterConfig, none);
+get_min_level(CatName, FilterConfig) ->
+ case maps:is_key(CatName, FilterConfig) of
+ true -> maps:get(CatName, FilterConfig);
+ false -> get_min_level(global, FilterConfig)
+ end.
+
+do_filter_log_event(_, none) ->
+ stop;
+do_filter_log_event(#{level := Level} = LogEvent, MinLevel) ->
+ case logger:compare_levels(Level, MinLevel) of
+ lt -> stop;
+ _ -> LogEvent
+ end.
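+
+%% Taken together, the functions above mean that an event whose domain is
+%% [?RMQLOG_SUPER_DOMAIN_NAME, prelaunch | _] is filtered against the
+%% `prelaunch' key of the filter config, falling back to the `global' key
+%% (and its minimum level) when that category has no explicit entry
+%% (category name used here for illustration).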
+
+main_handler_config(Context) ->
+ #{filter_default => log,
+ formatter => default_formatter(Context)}.
+
+default_formatter(#{log_levels := #{json := true}} = Context) ->
+ SingleLine = format_msgs_as_single_lines(Context),
+ {rabbit_logger_json_fmt, #{single_line => SingleLine}};
+default_formatter(Context) ->
+ Color = use_colored_logging(Context),
+ SingleLine = format_msgs_as_single_lines(Context),
+ {rabbit_logger_text_fmt, #{use_colors => Color,
+ single_line => SingleLine}}.
+
+default_console_formatter(Context) ->
+ default_formatter(Context).
+
+default_file_formatter(Context) ->
+ default_formatter(Context#{output_supports_colors => false}).
+
+default_journald_formatter(_Context) ->
+ {rabbit_logger_text_fmt, #{prefix_format => [],
+ use_colors => false}}.
+
+default_syslog_formatter(Context) ->
+ {Module, Config} = default_file_formatter(Context),
+ case Module of
+ rabbit_logger_text_fmt -> {Module, Config#{prefix_format => []}};
+ rabbit_logger_json_fmt -> {Module, Config}
+ end.
use_colored_logging() ->
use_colored_logging(rabbit_prelaunch:get_context()).
@@ -65,45 +146,13 @@ use_colored_logging(#{log_levels := #{color := true},
use_colored_logging(_) ->
false.
-list_expected_sinks() ->
- Key = {?MODULE, lager_extra_sinks},
- case persistent_term:get(Key, undefined) of
- undefined ->
- CompileOptions = proplists:get_value(options,
- module_info(compile),
- []),
- AutoList = [lager_util:make_internal_sink_name(M)
- || M <- proplists:get_value(lager_extra_sinks,
- CompileOptions, [])],
- List = case lists:member(?LAGER_SINK, AutoList) of
- true -> AutoList;
- false -> [?LAGER_SINK | AutoList]
- end,
- %% Store the list in the application environment. If this
- %% module is later cover-compiled, the compile option will
- %% be lost, so we will be able to retrieve the list from the
- %% application environment.
- persistent_term:put(Key, List),
- List;
- List ->
- List
- end.
-
-sink_to_category(Sink) when is_atom(Sink) ->
- re:replace(
- atom_to_list(Sink),
- "^rabbit_log_(.+)_lager_event$",
- "\\1",
- [{return, list}]).
-
-get_log_level(LogLevels, Sink) ->
- Category = sink_to_category(Sink),
- case LogLevels of
- #{Category := Level} -> Level;
- #{global := Level} -> Level;
- _ -> warning
- end.
+format_msgs_as_single_lines(#{log_levels := #{single_line := true}}) ->
+ true;
+format_msgs_as_single_lines(_) ->
+ false.
+enable_quick_dbg(#{dbg_mods := []}) ->
+ ok;
enable_quick_dbg(#{dbg_output := Output, dbg_mods := Mods}) ->
case Output of
stdout -> {ok, _} = dbg:tracer(),
@@ -113,3 +162,423 @@ enable_quick_dbg(#{dbg_output := Output, dbg_mods := Mods}) ->
end,
{ok, _} = dbg:p(all, c),
lists:foreach(fun(M) -> {ok, _} = dbg:tp(M, cx) end, Mods).
+
+%% -------------------------------------------------------------------
+%% Internal function used by our Cuttlefish schema.
+%% -------------------------------------------------------------------
+
+-spec translate_formatter_conf(string(), cuttlefish_conf:conf()) ->
+ {rabbit_logger_text_fmt, formatter_plaintext_conf()} |
+ {rabbit_logger_json_fmt, formatter_json_conf()}.
+%% @doc
+%% Called from the Cuttlefish schema to derive the actual formatter
+%% configuration from several Cuttlefish variables.
+
+translate_formatter_conf(Var, Conf) when is_list(Var) ->
+ try
+ %% Part of the formatter configuration is common to all the
+ %% formatters, the rest is formatter-specific.
+ GenericConfig = translate_generic_conf(Var, Conf),
+ case cuttlefish:conf_get(Var, Conf) of
+ plaintext ->
+ translate_plaintext_formatter_conf(Var, Conf, GenericConfig);
+ json ->
+ translate_json_formatter_conf(Var, Conf, GenericConfig)
+ end
+ catch
+ Class:Reason:Stacktrace ->
+ ?LOG_ERROR(
+ rabbit_prelaunch_errors:format_exception(
+ Class, Reason, Stacktrace),
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
+ throw({configuration_translation_failure, Reason})
+ end.
+
+-type time_format_string_arg() :: year | month | day |
+ hour | minute | second |
+ {second_fractional, non_neg_integer()}.
+-type time_format() :: {rfc3339, char(), string() | integer()} |
+ {epoch, secs | usecs, binary | int} |
+ {local | universal,
+ string(),
+ [time_format_string_arg()]}.
+-type level_format() :: lc | uc | lc3 | uc3 | lc4 | uc4.
+-type formatter_generic_conf() :: #{time_format := time_format(),
+ level_format := level_format(),
+ single_line := boolean()}.
+
+-spec translate_generic_conf(string(), cuttlefish_conf:conf()) ->
+ formatter_generic_conf().
+%% @doc
+%% Handles variables common to all formatters.
+
+translate_generic_conf(Var, Conf) ->
+ %% log.*.formatter.time_format
+ %% It accepts either a "named pattern" like `rfc3339_T' or a custom
+ %% pattern.
+ Formatter = cuttlefish:conf_get(Var, Conf),
+ TimeFormat = case cuttlefish:conf_get(Var ++ ".time_format", Conf) of
+ rfc3339_T ->
+ {rfc3339, $T, ""};
+ rfc3339_space ->
+ {rfc3339, $\s, ""};
+ epoch_secs when Formatter =:= json ->
+ {epoch, secs, int};
+ epoch_usecs when Formatter =:= json ->
+ {epoch, usecs, int};
+ epoch_secs ->
+ {epoch, secs, binary};
+ epoch_usecs ->
+ {epoch, usecs, binary};
+ lager_default ->
+ {local,
+ "~4..0b-~2..0b-~2..0b "
+ "~2..0b:~2..0b:~2..0b.~3..0b",
+ [year, month, day,
+ hour, minute, second,
+ {second_fractional, 3}]}
+ end,
+
+ %% log.*.formatter.level_format
+ %% It basically determines whether the level should be printed in lowercase
+ %% or uppercase, and in full or truncated (to align messages horizontally).
+ LevelFormat = cuttlefish:conf_get(Var ++ ".level_format", Conf),
+
+ %% log.*.formatter.single_line
+ %% It tells if multi-line messages should be kept as-is or reformatted to
+ %% stay on a single line.
+ SingleLine = cuttlefish:conf_get(Var ++ ".single_line", Conf),
+
+ #{time_format => TimeFormat,
+ level_format => LevelFormat,
+ single_line => SingleLine}.
+
+-type line_format() :: [atom() | string()].
+-type color_esc_seqs() :: #{logger:level() => string()}.
+-type formatter_plaintext_conf() :: #{time_format := time_format(),
+ level_format := level_format(),
+ single_line := boolean(),
+ prefix_format := line_format(),
+ line_format := line_format(),
+ use_colors := boolean(),
+ color_esc_seqs := color_esc_seqs()}.
+
+-spec translate_plaintext_formatter_conf(
+ string(), cuttlefish_conf:conf(), formatter_generic_conf()) ->
+ {rabbit_logger_text_fmt, formatter_plaintext_conf()}.
+%% @doc
+%% Handles variables specific to the plaintext formatter.
+
+translate_plaintext_formatter_conf(Var, Conf, GenericConfig) ->
+ %% log.*.formatter.plaintext.format
+ %% This is a variable-based string used to indicate the message format.
+ %% Here, we parse that pattern to make it easier and more efficient for
+ %% the formatter to format the final message.
+ Format0 = cuttlefish:conf_get(Var ++ ".plaintext.format", Conf),
+ Format = prepare_fmt_format(Format0),
+ {PrefixFormat, LineFormat} = determine_prefix(Format),
+
+ %% log.console.use_colors
+ %% log.console.color_esc_seqs
+ %% Those variables indicate whether colors should be used and which ones. They
+ %% are specific to the console handler.
+ {UseColors, ColorEscSeqs} = translate_colors_conf(Var, Conf),
+
+ Mod = rabbit_logger_text_fmt,
+ Config = GenericConfig#{prefix_format => PrefixFormat,
+ line_format => LineFormat,
+ use_colors => UseColors,
+ color_esc_seqs => ColorEscSeqs},
+ {Mod, Config}.
+
+-spec prepare_fmt_format(string()) -> [atom() | string()].
+%% @doc
+%% Parse the pattern and prepare a list which makes it easy for the formatter
+%% to format the final message.
+%%
+%% The initial pattern will use variables; for example:
+%% `$time [$level] $pid - $msg'
+%%
+%% Once parsed, the pattern will look like:
+%% `[time, " [", level, "] ", pid, " - ", msg]'
+%%
+%% Variables are taken from the log event structure: `msg' and `level' are
+%% taken from the top-level, other variables come from the `meta' map.
+
+prepare_fmt_format(Format) ->
+ prepare_fmt_format(Format, []).
+
+prepare_fmt_format([$$ | Rest], Parsed) ->
+ {match, [Var, Rest1]} = re:run(Rest, "^([a-zA-Z0-9_]+)(.*)",
+ [{capture, all_but_first, list}]),
+ Var1 = list_to_atom(Var),
+ prepare_fmt_format(Rest1, [Var1 | Parsed]);
+prepare_fmt_format(Rest, Parsed) when Rest =/= "" ->
+ %% We made sure in the guard expression that `Rest' contains at least
+ %% one character. The following regex "eats" at least that character. This
+ %% avoids an infinite loop which would happen if the returned `String' were
+ %% empty and `Rest1' were therefore the same as `Rest'.
+ {match, [String, Rest1]} = re:run(Rest, "^(.[^$]*)(.*)",
+ [{capture, all_but_first, list}]),
+ prepare_fmt_format(Rest1, [String | Parsed]);
+prepare_fmt_format("", Parsed) ->
+ lists:reverse(Parsed).
+
+determine_prefix(Format) ->
+ %% Based on where the `msg' variable is, we determine the prefix of the
+ %% message. This is later used by the formatter to repeat the prefix for
+ %% each line making a multi-line message.
+ %%
+ %% If `msg' is not logged at all, we consider the line has no prefix.
+ {PrefixFormat0, LineFormat0} =
+ lists:foldl(
+ fun
+ (msg, {PF, LF}) -> {PF, LF ++ [msg]};
+ (Elem, {PF, [] = LF}) -> {PF ++ [Elem], LF};
+ (Elem, {PF, LF}) -> {PF, LF ++ [Elem]}
+ end, {[], []}, Format),
+ case {PrefixFormat0, LineFormat0} of
+ {_, []} -> {[], PrefixFormat0};
+ _ -> {PrefixFormat0, LineFormat0}
+ end.
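+
+%% For example, a parsed format of [time, " [", level, "] ", pid, " - ", msg]
+%% is split into the prefix [time, " [", level, "] ", pid, " - "] and the
+%% line format [msg]; when `msg' is absent, the prefix is empty and the
+%% whole format is used as the line format.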
+
+-spec translate_colors_conf(string(), cuttlefish_conf:conf()) ->
+ {boolean(), map()}.
+%% @doc
+%% Computes the color configuration.
+%%
+%% The function uses the following two variables:
+%% `log.console.use_colors'
+%% `log.console.color_esc_seqs'
+%%
+%% It does not verify what escape sequences are actually configured. It is
+%% entirely possible to play with the cursor position or other control
+%% characters.
+%%
+%% This is only valid for the console output.
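+%%
+%% For instance, a configured value of "\\033[38;5;87m" (with an escaped
+%% ESC) is rewritten below into the same sequence starting with a real ESC
+%% character, i.e. "\e[38;5;87m" (escape sequence shown for illustration only).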
+
+translate_colors_conf("log.console.formatter", Conf) ->
+ {
+ cuttlefish:conf_get("log.console.use_colors", Conf),
+ lists:foldl(
+ fun(Lvl, Acc) ->
+ LvlS = atom_to_list(Lvl),
+ Key = "log.console.color_esc_seqs." ++ LvlS,
+ RawVal = cuttlefish:conf_get(Key, Conf),
+ %% The ESC character will be escaped if the user entered the
+ %% string "\033" for instance. We need to convert it back to an
+ %% actual ESC character.
+ Val = re:replace(
+ RawVal,
+ "\\\\(e|033)",
+ "\033",
+ [global, {return, list}]),
+ Acc#{Lvl => Val}
+ end,
+ #{},
+ levels())
+ };
+translate_colors_conf(_, _) ->
+ {false, #{}}.
+
+-type json_field_map() :: [{atom(), atom()} | {atom() | '$REST', false}].
+-type json_verbosity_map() :: #{logger:level() => non_neg_integer(),
+ '$REST' => non_neg_integer()}.
+-type formatter_json_conf() :: #{time_format := time_format(),
+ level_format := level_format(),
+ single_line := boolean(),
+ field_map := json_field_map(),
+ verbosity_map := json_verbosity_map()}.
+
+-spec translate_json_formatter_conf(
+ string(), cuttlefish_conf:conf(), map()) ->
+ {rabbit_logger_json_fmt, formatter_json_conf()}.
+%% @doc
+%% Handles variables specific to the JSON formatter.
+
+translate_json_formatter_conf(Var, Conf, GenericConfig) ->
+ %% log.*.formatter.json.field_map
+ %% It indicates several things:
+ %% - the order of fields; non-mentioned fields go unordered at the end
+ %% of the JSON object
+ %% - if fields should be renamed
+ %% - if fields should be removed from the final object
+ RawFieldMapping = cuttlefish:conf_get(Var ++ ".json.field_map", Conf),
+ FieldMapping = parse_json_field_mapping(RawFieldMapping),
+
+ %% log.*.formatter.json.verbosity_map
+ %% It indicates if a `verbosity' field should be added and how its value
+ %% should be derived from `level'.
+ RawVerbMapping = cuttlefish:conf_get(
+ Var ++ ".json.verbosity_map", Conf),
+ VerbMapping = parse_json_verbosity_mapping(RawVerbMapping),
+
+ Mod = rabbit_logger_json_fmt,
+ Config = GenericConfig#{field_map => FieldMapping,
+ verbosity_map => VerbMapping},
+ {Mod, Config}.
+
+-spec parse_json_field_mapping(string()) -> json_field_map().
+%% @doc
+%% Parses the JSON formatter field_map pattern.
+%%
+%% The pattern is of the form: `time:ts level msg *:-'.
+%%
+%% `time:ts' means the `time' field should be renamed to `ts'.
+%%
+%% `level' means that field should be kept as-is.
+%%
+%% `gl:-' means the `gl' field should be dropped.
+%%
+%% `*:-' means all non-mentioned fields should be dropped.
+%%
+%% The order of fields in the pattern is important: it tells the order of
+%% fields in the final JSON object.
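+%%
+%% For illustration, parsing the example pattern above should yield:
+%%
+%%   parse_json_field_mapping("time:ts level msg *:-")
+%%   %% => [{time, ts}, {level, level}, {msg, msg}, {'$REST', false}]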
+
+parse_json_field_mapping(RawMapping) ->
+ parse_json_field_mapping(string:split(RawMapping, " ", all), []).
+
+parse_json_field_mapping([Entry | Rest], Mapping) ->
+ Mapping1 = case string:split(Entry, ":", leading) of
+ ["*", "-"] ->
+ [{'$REST', false} | Mapping];
+ [OldS, "-"] ->
+ Old = list_to_atom(OldS),
+ [{Old, false} | Mapping];
+ ["*", _] ->
+ throw({bad_json_mapping, Entry});
+ [OldS, NewS] ->
+ Old = list_to_atom(OldS),
+ New = list_to_atom(NewS),
+ [{Old, New} | Mapping];
+ [KeepS] ->
+ Keep = list_to_atom(KeepS),
+ [{Keep, Keep} | Mapping]
+ end,
+ parse_json_field_mapping(Rest, Mapping1);
+parse_json_field_mapping([], Mapping) ->
+ %% We parsed everything. Now we want to organize fields a bit:
+ %% - All `{atom(), atom()}' (kept or renamed fields) go at the
+ %% beginning, preserving their order
+ %% - All `{_, false}' (removed fields) go at the end
+ {Renames0, Removes0} = lists:partition(
+ fun
+ ({_, false}) -> false;
+ (_) -> true
+ end,
+ Mapping),
+ Renames = lists:reverse(Renames0),
+ %% If all non-mentioned fields are to be removed, only the `{'$REST',
+ %% false}' entry is useful.
+ Removes = case lists:member({'$REST', false}, Removes0) of
+ true -> [{'$REST', false}];
+ false -> Removes0
+ end,
+ Renames ++ Removes.
+
+-spec parse_json_verbosity_mapping(string()) -> json_verbosity_map().
+%% @doc
+%% Parses the verbosity_map pattern.
+%%
+%% The pattern is of the form: `debug:2 info:1 *:0'.
+%%
+%% `debug:2' means that the verbosity of the debug level is 2.
+%%
+%% `*:0' means that the verbosity of all non-mentioned levels is 0.
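+%%
+%% For illustration, parsing the example pattern above should yield a map
+%% roughly equivalent to:
+%%
+%%   parse_json_verbosity_mapping("debug:2 info:1 *:0")
+%%   %% => #{debug => 2, info => 1, notice => 0, warning => 0,
+%%   %%      error => 0, critical => 0, alert => 0, emergency => 0}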
+
+parse_json_verbosity_mapping("") ->
+ #{};
+parse_json_verbosity_mapping(RawMapping) ->
+ parse_json_verbosity_mapping(string:split(RawMapping, " ", all), #{}).
+
+parse_json_verbosity_mapping([Entry | Rest], Mapping) ->
+ Mapping1 = case string:split(Entry, ":", leading) of
+ ["*", VerbS] ->
+ Verb = list_to_integer(VerbS),
+ Mapping#{'$REST' => Verb};
+ [LvlS, VerbS] ->
+ Lvl = list_to_atom(LvlS),
+ Verb = list_to_integer(VerbS),
+ Mapping#{Lvl => Verb}
+ end,
+ parse_json_verbosity_mapping(Rest, Mapping1);
+parse_json_verbosity_mapping([], #{'$REST' := Default} = Mapping) ->
+ DefaultMapping = lists:foldl(
+ fun(Lvl, Acc) -> Acc#{Lvl => Default} end,
+ #{}, levels()),
+ maps:merge(
+ DefaultMapping,
+ maps:remove('$REST', Mapping));
+parse_json_verbosity_mapping([], Mapping) ->
+ Mapping.
+
+-spec translate_journald_fields_conf(string(), cuttlefish_conf:conf()) ->
+ proplists:proplist().
+%% @doc
+%% Called from the Cuttlefish schema to create the actual journald handler
+%% configuration.
+
+translate_journald_fields_conf(Var, Conf) when is_list(Var) ->
+ try
+ RawFieldMapping = cuttlefish:conf_get(Var, Conf),
+ parse_journald_field_mapping(RawFieldMapping)
+ catch
+ Class:Reason:Stacktrace ->
+ ?LOG_ERROR(
+ rabbit_prelaunch_errors:format_exception(
+ Class, Reason, Stacktrace),
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
+ throw({configuration_translation_failure, Reason})
+ end.
+
+-spec parse_journald_field_mapping(string()) ->
+ [atom() | {atom(), atom()}].
+%% @doc
+%% Parses the journald fields pattern.
+%%
+%% The pattern is of the form: `SYSLOG_IDENTIFIER="rabbitmq-server" pid
+%% CODE_FILE=file'.
+%%
+%% `SYSLOG_IDENTIFIER="rabbitmq-server"' means the `SYSLOG_IDENTIFIER' field should
+%% be set to the string `rabbitmq-server'.
+%%
+%% `pid' means that field should be kept as-is.
+%%
+%% `CODE_FILE=file' means the `CODE_FILE' field should be set to the value of
+%% the `file' field.
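+%%
+%% For illustration, parsing the example pattern above should yield:
+%%
+%%   parse_journald_field_mapping(
+%%     "SYSLOG_IDENTIFIER=\"rabbitmq-server\" pid CODE_FILE=file")
+%%   %% => [{"SYSLOG_IDENTIFIER", "rabbitmq-server"}, pid, {"CODE_FILE", file}]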
+
+parse_journald_field_mapping(RawMapping) ->
+ parse_journald_field_mapping(string:split(RawMapping, " ", all), []).
+
+parse_journald_field_mapping([Entry | Rest], Mapping) ->
+ Mapping1 = case string:split(Entry, "=", leading) of
+ [[$_ | _], _] ->
+ throw({bad_journald_mapping,
+ leading_underscore_forbidden,
+ Entry});
+ [Name, Value] ->
+ case re:run(Name, "^[A-Z0-9_]+$", [{capture, none}]) of
+ match ->
+ ReOpts = [{capture, all_but_first, list}],
+ case re:run(Value, "^\"(.+)\"$", ReOpts) of
+ {match, [Data]} ->
+ [{Name, Data} | Mapping];
+ nomatch ->
+ Field = list_to_atom(Value),
+ [{Name, Field} | Mapping]
+ end;
+ nomatch ->
+ throw({bad_journald_mapping,
+ name_with_invalid_characters,
+ Entry})
+ end;
+ [FieldS] ->
+ Field = list_to_atom(FieldS),
+ [Field | Mapping]
+ end,
+ parse_journald_field_mapping(Rest, Mapping1);
+parse_journald_field_mapping([], Mapping) ->
+ lists:reverse(Mapping).
+
+levels() ->
+ [debug, info, notice, warning, error, critical, alert, emergency].
diff --git a/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_prelaunch_erlang_compat.erl b/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_prelaunch_erlang_compat.erl
index 1e8fe2690d..eb19bbe86e 100644
--- a/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_prelaunch_erlang_compat.erl
+++ b/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_prelaunch_erlang_compat.erl
@@ -1,25 +1,32 @@
-module(rabbit_prelaunch_erlang_compat).
+-include_lib("kernel/include/logger.hrl").
+
+-include_lib("rabbit_common/include/logging.hrl").
+
-export([check/1]).
--define(OTP_MINIMUM, "21.3").
--define(ERTS_MINIMUM, "10.3").
+-define(OTP_MINIMUM, "23.2").
+-define(ERTS_MINIMUM, "11.1").
check(_Context) ->
- rabbit_log_prelaunch:debug(""),
- rabbit_log_prelaunch:debug("== Erlang/OTP compatibility check =="),
+ ?LOG_DEBUG(
+ "~n== Erlang/OTP compatibility check ==", [],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
ERTSVer = erlang:system_info(version),
OTPRel = rabbit_misc:otp_release(),
- rabbit_log_prelaunch:debug(
- "Requiring: Erlang/OTP ~s (ERTS ~s)", [?OTP_MINIMUM, ?ERTS_MINIMUM]),
- rabbit_log_prelaunch:debug(
- "Running: Erlang/OTP ~s (ERTS ~s)", [OTPRel, ERTSVer]),
+ ?LOG_DEBUG(
+ "Requiring: Erlang/OTP ~s (ERTS ~s)~n"
+ "Running: Erlang/OTP ~s (ERTS ~s)",
+ [?OTP_MINIMUM, ?ERTS_MINIMUM, OTPRel, ERTSVer],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
case rabbit_misc:version_compare(?ERTS_MINIMUM, ERTSVer, lte) of
true when ?ERTS_MINIMUM =/= ERTSVer ->
- rabbit_log_prelaunch:debug(
- "Erlang/OTP version requirement satisfied"),
+ ?LOG_DEBUG(
+ "Erlang/OTP version requirement satisfied", [],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
ok;
true when ?ERTS_MINIMUM =:= ERTSVer andalso ?OTP_MINIMUM =< OTPRel ->
%% When a critical regression or bug is found, a new OTP
@@ -35,7 +42,7 @@ check(_Context) ->
"This RabbitMQ version cannot run on Erlang ~s (erts ~s): "
"minimum required version is ~s (erts ~s)",
Args = [OTPRel, ERTSVer, ?OTP_MINIMUM, ?ERTS_MINIMUM],
- rabbit_log_prelaunch:error(Msg, Args),
+ ?LOG_ERROR(Msg, Args, #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
%% Also print to stderr to make this more visible
io:format(standard_error, "Error: " ++ Msg ++ "~n", Args),
diff --git a/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_prelaunch_errors.erl b/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_prelaunch_errors.erl
index b2cc03d069..6e74fb04a0 100644
--- a/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_prelaunch_errors.erl
+++ b/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_prelaunch_errors.erl
@@ -1,5 +1,9 @@
-module(rabbit_prelaunch_errors).
+-include_lib("kernel/include/logger.hrl").
+
+-include_lib("rabbit_common/include/logging.hrl").
+
-export([format_error/1,
format_exception/3,
log_error/1,
@@ -94,9 +98,35 @@ log_exception(Class, Exception, Stacktrace) ->
log_message(Message).
format_exception(Class, Exception, Stacktrace) ->
+ StacktraceStrs = [begin
+ case proplists:get_value(line, Props) of
+ undefined when is_list(ArgListOrArity) ->
+ io_lib:format(
+ " ~ts:~ts/~b~n"
+ " args: ~p",
+ [Mod, Fun, length(ArgListOrArity),
+ ArgListOrArity]);
+ undefined when is_integer(ArgListOrArity) ->
+ io_lib:format(
+ " ~ts:~ts/~b",
+ [Mod, Fun, ArgListOrArity]);
+ Line when is_list(ArgListOrArity) ->
+ io_lib:format(
+ " ~ts:~ts/~b, line ~b~n"
+ " args: ~p",
+ [Mod, Fun, length(ArgListOrArity), Line,
+ ArgListOrArity]);
+ Line when is_integer(ArgListOrArity) ->
+ io_lib:format(
+ " ~ts:~ts/~b, line ~b",
+ [Mod, Fun, ArgListOrArity, Line])
+ end
+ end
+ || {Mod, Fun, ArgListOrArity, Props} <- Stacktrace],
+ ExceptionStr = io_lib:format("~ts:~0p", [Class, Exception]),
rabbit_misc:format(
- "Exception during startup:~n~s",
- [lager:pr_stacktrace(Stacktrace, {Class, Exception})]).
+ "Exception during startup:~n~n~s~n~n~s",
+ [ExceptionStr, string:join(StacktraceStrs, "\n")]).
log_message(Message) ->
Lines = string:split(
@@ -105,9 +135,11 @@ log_message(Message) ->
?BOOT_FAILED_FOOTER,
[$\n],
all),
+ ?LOG_ERROR(
+ "~s", [string:join(Lines, "\n")],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
lists:foreach(
fun(Line) ->
- rabbit_log_prelaunch:error("~s", [Line]),
io:format(standard_error, "~s~n", [Line])
end, Lines),
timer:sleep(1000),
diff --git a/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_prelaunch_sighandler.erl b/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_prelaunch_sighandler.erl
index f9a60effda..95f2916a8e 100644
--- a/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_prelaunch_sighandler.erl
+++ b/deps/rabbit/apps/rabbitmq_prelaunch/src/rabbit_prelaunch_sighandler.erl
@@ -69,13 +69,13 @@ handle_event(Signal, State) ->
%% which should stop RabbitMQ.
%
%#{Signal := stop} ->
- % error_logger:info_msg(
- % "~s received - shutting down~n",
+ % logger:info(
+ % "~s received - shutting down",
% [string:uppercase(atom_to_list(Signal))]),
% ok = init:stop();
_ ->
- error_logger:info_msg(
- "~s received - unhandled signal~n",
+ logger:info(
+ "~s received - unhandled signal",
[string:uppercase(atom_to_list(Signal))])
end,
{ok, State}.
diff --git a/deps/rabbit/apps/rabbitmq_prelaunch/test/rabbit_logger_std_h_SUITE.erl b/deps/rabbit/apps/rabbitmq_prelaunch/test/rabbit_logger_std_h_SUITE.erl
new file mode 100644
index 0000000000..6f5b48aa6c
--- /dev/null
+++ b/deps/rabbit/apps/rabbitmq_prelaunch/test/rabbit_logger_std_h_SUITE.erl
@@ -0,0 +1,264 @@
+-module(rabbit_logger_std_h_SUITE).
+
+-include_lib("common_test/include/ct.hrl").
+-include_lib("eunit/include/eunit.hrl").
+
+-compile(export_all).
+
+all() ->
+ [
+ {group, parallel_tests}
+ ].
+
+groups() ->
+ [
+ {parallel_tests, [parallel], [
+ every_day_rotation_is_detected,
+ every_week_rotation_is_detected,
+ every_month_rotation_is_detected,
+
+ parse_date_spec_case1,
+ parse_date_spec_case2,
+ parse_date_spec_case3,
+ parse_date_spec_case4,
+ parse_date_spec_case5,
+ parse_date_spec_case6
+ ]}
+ ].
+
+init_per_suite(Config) -> Config.
+end_per_suite(Config) -> Config.
+
+init_per_group(_, Config) -> Config.
+end_per_group(_, Config) -> Config.
+
+init_per_testcase(_, Config) -> Config.
+end_per_testcase(_, Config) -> Config.
+
+every_day_rotation_is_detected(_) ->
+ ?assertNot(
+ rabbit_logger_std_h:is_date_based_rotation_needed(
+ #{every => day, hour => 12},
+ {{2021, 01, 15}, {10, 00, 00}},
+ {{2021, 01, 15}, {11, 00, 00}})),
+ ?assert(
+ rabbit_logger_std_h:is_date_based_rotation_needed(
+ #{every => day, hour => 12},
+ {{2021, 01, 15}, {10, 00, 00}},
+ {{2021, 01, 15}, {12, 00, 00}})),
+ ?assert(
+ rabbit_logger_std_h:is_date_based_rotation_needed(
+ #{every => day, hour => 12},
+ {{2021, 01, 15}, {10, 00, 00}},
+ {{2021, 01, 15}, {13, 00, 00}})),
+ ?assert(
+ rabbit_logger_std_h:is_date_based_rotation_needed(
+ #{every => day, hour => 12},
+ {{2021, 01, 15}, {11, 00, 00}},
+ {{2021, 01, 15}, {13, 00, 00}})),
+ ?assertNot(
+ rabbit_logger_std_h:is_date_based_rotation_needed(
+ #{every => day, hour => 12},
+ {{2021, 01, 15}, {12, 00, 00}},
+ {{2021, 01, 15}, {13, 00, 00}})),
+ ?assert(
+ rabbit_logger_std_h:is_date_based_rotation_needed(
+ #{every => day, hour => 12},
+ {{2021, 01, 14}, {12, 00, 00}},
+ {{2021, 01, 15}, {12, 00, 00}})),
+ ?assertNot(
+ rabbit_logger_std_h:is_date_based_rotation_needed(
+ #{every => day, hour => 12},
+ {{2021, 01, 14}, {12, 00, 00}},
+ {{2021, 01, 15}, {11, 00, 00}})),
+ ?assert(
+ rabbit_logger_std_h:is_date_based_rotation_needed(
+ #{every => day, hour => 12},
+ {{2020, 11, 15}, {12, 00, 00}},
+ {{2021, 01, 15}, {11, 00, 00}})),
+ ?assert(
+ rabbit_logger_std_h:is_date_based_rotation_needed(
+ #{every => day, hour => 12},
+ {{2020, 11, 15}, {12, 00, 00}},
+ {{2021, 01, 15}, {12, 00, 00}})).
+
+every_week_rotation_is_detected(_) ->
+ ?assertNot(
+ rabbit_logger_std_h:is_date_based_rotation_needed(
+ #{every => week, day_of_week => 3, hour => 12},
+ {{2021, 01, 11}, {12, 00, 00}},
+ {{2021, 01, 12}, {12, 00, 00}})),
+ ?assert(
+ rabbit_logger_std_h:is_date_based_rotation_needed(
+ #{every => week, day_of_week => 3, hour => 12},
+ {{2021, 01, 11}, {12, 00, 00}},
+ {{2021, 01, 13}, {12, 00, 00}})),
+ ?assert(
+ rabbit_logger_std_h:is_date_based_rotation_needed(
+ #{every => week, day_of_week => 3, hour => 12},
+ {{2021, 01, 11}, {12, 00, 00}},
+ {{2021, 01, 14}, {12, 00, 00}})),
+ ?assertNot(
+ rabbit_logger_std_h:is_date_based_rotation_needed(
+ #{every => week, day_of_week => 3, hour => 12},
+ {{2021, 01, 13}, {12, 00, 00}},
+ {{2021, 01, 14}, {12, 00, 00}})),
+ ?assertNot(
+ rabbit_logger_std_h:is_date_based_rotation_needed(
+ #{every => week, day_of_week => 3, hour => 12},
+ {{2021, 01, 14}, {12, 00, 00}},
+ {{2021, 01, 15}, {12, 00, 00}})),
+ ?assert(
+ rabbit_logger_std_h:is_date_based_rotation_needed(
+ #{every => week, day_of_week => 3, hour => 12},
+ {{2021, 01, 13}, {11, 00, 00}},
+ {{2021, 01, 13}, {12, 00, 00}})),
+ ?assert(
+ rabbit_logger_std_h:is_date_based_rotation_needed(
+ #{every => week, day_of_week => 3, hour => 12},
+ {{2021, 01, 06}, {12, 00, 00}},
+ {{2021, 01, 13}, {12, 00, 00}})),
+ ?assert(
+ rabbit_logger_std_h:is_date_based_rotation_needed(
+ #{every => week, day_of_week => 3, hour => 12},
+ {{2021, 01, 07}, {12, 00, 00}},
+ {{2021, 01, 14}, {12, 00, 00}})),
+ ?assertNot(
+ rabbit_logger_std_h:is_date_based_rotation_needed(
+ #{every => week, day_of_week => 3, hour => 12},
+ {{2021, 01, 06}, {12, 00, 00}},
+ {{2021, 01, 12}, {12, 00, 00}})),
+ ?assert(
+ rabbit_logger_std_h:is_date_based_rotation_needed(
+ #{every => week, day_of_week => 3, hour => 12},
+ {{2021, 01, 06}, {11, 00, 00}},
+ {{2021, 01, 12}, {12, 00, 00}})),
+ ?assertNot(
+ rabbit_logger_std_h:is_date_based_rotation_needed(
+ #{every => week, day_of_week => 3, hour => 12},
+ {{2021, 01, 06}, {12, 00, 00}},
+ {{2021, 01, 13}, {11, 00, 00}})).
+
+every_month_rotation_is_detected(_) ->
+ ?assertNot(
+ rabbit_logger_std_h:is_date_based_rotation_needed(
+ #{every => month, day_of_month => 15, hour => 12},
+ {{2021, 01, 15}, {10, 00, 00}},
+ {{2021, 01, 15}, {11, 00, 00}})),
+ ?assert(
+ rabbit_logger_std_h:is_date_based_rotation_needed(
+ #{every => month, day_of_month => 15, hour => 12},
+ {{2021, 01, 15}, {10, 00, 00}},
+ {{2021, 01, 15}, {12, 00, 00}})),
+ ?assertNot(
+ rabbit_logger_std_h:is_date_based_rotation_needed(
+ #{every => month, day_of_month => 15, hour => 12},
+ {{2021, 01, 13}, {12, 00, 00}},
+ {{2021, 01, 14}, {12, 00, 00}})),
+ ?assert(
+ rabbit_logger_std_h:is_date_based_rotation_needed(
+ #{every => month, day_of_month => 15, hour => 12},
+ {{2021, 01, 14}, {12, 00, 00}},
+ {{2021, 01, 15}, {12, 00, 00}})),
+ ?assertNot(
+ rabbit_logger_std_h:is_date_based_rotation_needed(
+ #{every => month, day_of_month => 15, hour => 12},
+ {{2021, 01, 15}, {12, 00, 00}},
+ {{2021, 01, 16}, {12, 00, 00}})),
+ ?assert(
+ rabbit_logger_std_h:is_date_based_rotation_needed(
+ #{every => month, day_of_month => 15, hour => 12},
+ {{2021, 01, 14}, {12, 00, 00}},
+ {{2021, 02, 14}, {12, 00, 00}})),
+ ?assert(
+ rabbit_logger_std_h:is_date_based_rotation_needed(
+ #{every => month, day_of_month => 15, hour => 12},
+ {{2021, 01, 16}, {12, 00, 00}},
+ {{2021, 02, 16}, {12, 00, 00}})),
+
+ ?assert(
+ rabbit_logger_std_h:is_date_based_rotation_needed(
+ #{every => month, day_of_month => 30, hour => 12},
+ {{2021, 01, 29}, {12, 00, 00}},
+ {{2021, 01, 30}, {12, 00, 00}})),
+ ?assertNot(
+ rabbit_logger_std_h:is_date_based_rotation_needed(
+ #{every => month, day_of_month => 30, hour => 12},
+ {{2021, 01, 30}, {12, 00, 00}},
+ {{2021, 01, 31}, {12, 00, 00}})),
+ ?assert(
+ rabbit_logger_std_h:is_date_based_rotation_needed(
+ #{every => month, day_of_month => 30, hour => 12},
+ {{2021, 02, 27}, {12, 00, 00}},
+ {{2021, 02, 28}, {12, 00, 00}})),
+
+ ?assertNot(
+ rabbit_logger_std_h:is_date_based_rotation_needed(
+ #{every => month, day_of_month => last, hour => 12},
+ {{2021, 01, 29}, {12, 00, 00}},
+ {{2021, 01, 30}, {12, 00, 00}})),
+ ?assert(
+ rabbit_logger_std_h:is_date_based_rotation_needed(
+ #{every => month, day_of_month => last, hour => 12},
+ {{2021, 01, 30}, {12, 00, 00}},
+ {{2021, 01, 31}, {12, 00, 00}})),
+ ?assert(
+ rabbit_logger_std_h:is_date_based_rotation_needed(
+ #{every => month, day_of_month => last, hour => 12},
+ {{2021, 01, 30}, {12, 00, 00}},
+ {{2021, 02, 01}, {12, 00, 00}})).
+
+parse_date_spec_case1(_) ->
+ ?assertEqual(false, rabbit_logger_std_h:parse_date_spec("")).
+
+parse_date_spec_case2(_) ->
+ ?assertEqual(#{every => day, hour => 0},
+ rabbit_logger_std_h:parse_date_spec("$D0")),
+ ?assertEqual(#{every => day, hour => 16},
+ rabbit_logger_std_h:parse_date_spec("$D16")),
+ ?assertEqual(#{every => day, hour => 23},
+ rabbit_logger_std_h:parse_date_spec("$D23")).
+
+parse_date_spec_case3(_) ->
+ ?assertEqual(
+ #{every => week, day_of_week => 0, hour => 0},
+ rabbit_logger_std_h:parse_date_spec("$W0")),
+ ?assertEqual(
+ #{every => week, day_of_week => 0, hour => 23},
+ rabbit_logger_std_h:parse_date_spec("$W0D23")),
+ ?assertEqual(
+ #{every => week, day_of_week => 5, hour => 16},
+ rabbit_logger_std_h:parse_date_spec("$W5D16")).
+
+parse_date_spec_case4(_) ->
+ ?assertEqual(
+ #{every => month, day_of_month => 1, hour => 0},
+ rabbit_logger_std_h:parse_date_spec("$M1D0")),
+ ?assertEqual(
+ #{every => month, day_of_month => 5, hour => 6},
+ rabbit_logger_std_h:parse_date_spec("$M5D6")).
+
+parse_date_spec_case5(_) ->
+ ?assertEqual(
+ error,
+ rabbit_logger_std_h:parse_date_spec("INVALID")),
+ ?assertEqual(
+ error,
+ rabbit_logger_std_h:parse_date_spec("in$valid")),
+ ?assertEqual(
+ error,
+ rabbit_logger_std_h:parse_date_spec("$$D0")),
+ ?assertEqual(
+ error,
+ rabbit_logger_std_h:parse_date_spec("$D99")).
+
+parse_date_spec_case6(_) ->
+ ?assertEqual(
+ #{every => hour, minute => 30},
+ rabbit_logger_std_h:parse_date_spec("$H30")),
+ ?assertEqual(
+ #{every => hour, minute => 3},
+ rabbit_logger_std_h:parse_date_spec("$H3")),
+ ?assertEqual(
+ #{day_of_week => 0,every => week,hour => 0, minute => 30},
+ rabbit_logger_std_h:parse_date_spec("$W0H30")).
diff --git a/deps/rabbit/bats.bzl b/deps/rabbit/bats.bzl
new file mode 100644
index 0000000000..b86e04c790
--- /dev/null
+++ b/deps/rabbit/bats.bzl
@@ -0,0 +1,36 @@
+def _impl(ctx):
+ script = """set -euo pipefail
+
+external/bats/libexec/bats {test_files}
+""".format(
+ package_dir = ctx.label.package,
+ test_files = " ".join([t.short_path for t in ctx.files.srcs]),
+ )
+
+ ctx.actions.write(
+ output = ctx.outputs.executable,
+ content = script,
+ )
+
+ runfiles = ctx.runfiles(ctx.files.bats + ctx.files.srcs + ctx.files.data)
+ return [DefaultInfo(runfiles = runfiles)]
+
+bats_test = rule(
+ implementation = _impl,
+ attrs = {
+ "bats": attr.label(),
+ "srcs": attr.label_list(
+ allow_files = [".bats"],
+ mandatory = True,
+ ),
+ "data": attr.label_list(allow_files = True),
+ },
+ test = True,
+)
+
+def bats(**kwargs):
+ bats_test(
+ name = "bats",
+ bats = "@bats//:bin_dir",
+ **kwargs
+ )
diff --git a/deps/rabbit/docs/rabbitmq-diagnostics.8 b/deps/rabbit/docs/rabbitmq-diagnostics.8
index 7a6d53d097..5b789680db 100644
--- a/deps/rabbit/docs/rabbitmq-diagnostics.8
+++ b/deps/rabbit/docs/rabbitmq-diagnostics.8
@@ -1,11 +1,11 @@
.\" vim:ft=nroff:
-.\" This Source Code Form is subject to the terms of the Mozilla Public
+.\" This Source Code Form is subject to the terms of the Mozilla Public
.\" License, v. 2.0. If a copy of the MPL was not distributed with this
.\" file, You can obtain one at https://mozilla.org/MPL/2.0/.
.\"
-.\" Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+.\" Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
.\"
-.Dd September 28, 2019
+.Dd June 19, 2021
.Dt RABBITMQ-DIAGNOSTICS 8
.Os "RabbitMQ Server"
.Sh NAME
diff --git a/deps/rabbit/docs/rabbitmq-echopid.8 b/deps/rabbit/docs/rabbitmq-echopid.8
index f51dab854c..2bca078cfa 100644
--- a/deps/rabbit/docs/rabbitmq-echopid.8
+++ b/deps/rabbit/docs/rabbitmq-echopid.8
@@ -1,9 +1,9 @@
.\" vim:ft=nroff:
-.\" This Source Code Form is subject to the terms of the Mozilla Public
+.\" This Source Code Form is subject to the terms of the Mozilla Public
.\" License, v. 2.0. If a copy of the MPL was not distributed with this
.\" file, You can obtain one at https://mozilla.org/MPL/2.0/.
.\"
-.\" Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+.\" Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
.\"
.Dd September 28, 2019
.Dt RABBITMQ-ECHOPID.BAT 8
diff --git a/deps/rabbit/docs/rabbitmq-env.conf.5 b/deps/rabbit/docs/rabbitmq-env.conf.5
index b1bb26281b..76eb7048ed 100644
--- a/deps/rabbit/docs/rabbitmq-env.conf.5
+++ b/deps/rabbit/docs/rabbitmq-env.conf.5
@@ -1,9 +1,9 @@
.\" vim:ft=nroff:
-.\" This Source Code Form is subject to the terms of the Mozilla Public
+.\" This Source Code Form is subject to the terms of the Mozilla Public
.\" License, v. 2.0. If a copy of the MPL was not distributed with this
.\" file, You can obtain one at https://mozilla.org/MPL/2.0/.
.\"
-.\" Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+.\" Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
.\"
.Dd September 28, 2019
.Dt RABBITMQ-ENV.CONF 5
diff --git a/deps/rabbit/docs/rabbitmq-plugins.8 b/deps/rabbit/docs/rabbitmq-plugins.8
index 4cec8cfded..563f9dee88 100644
--- a/deps/rabbit/docs/rabbitmq-plugins.8
+++ b/deps/rabbit/docs/rabbitmq-plugins.8
@@ -1,9 +1,9 @@
.\" vim:ft=nroff:
-.\" This Source Code Form is subject to the terms of the Mozilla Public
+.\" This Source Code Form is subject to the terms of the Mozilla Public
.\" License, v. 2.0. If a copy of the MPL was not distributed with this
.\" file, You can obtain one at https://mozilla.org/MPL/2.0/.
.\"
-.\" Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+.\" Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
.\"
.Dd September 28, 2019
.Dt RABBITMQ-PLUGINS 8
diff --git a/deps/rabbit/docs/rabbitmq-queues.8 b/deps/rabbit/docs/rabbitmq-queues.8
index a0bc41a19c..c0d4c2402a 100644
--- a/deps/rabbit/docs/rabbitmq-queues.8
+++ b/deps/rabbit/docs/rabbitmq-queues.8
@@ -1,11 +1,11 @@
.\" vim:ft=nroff:
-.\" This Source Code Form is subject to the terms of the Mozilla Public
+.\" This Source Code Form is subject to the terms of the Mozilla Public
.\" License, v. 2.0. If a copy of the MPL was not distributed with this
.\" file, You can obtain one at https://mozilla.org/MPL/2.0/.
.\"
-.\" Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+.\" Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
.\"
-.Dd September 28, 2019
+.Dd June 19, 2021
.Dt RABBITMQ-QUEUES 8
.Os "RabbitMQ Server"
.Sh NAME
@@ -27,11 +27,11 @@
.\" ------------------------------------------------------------------
.Nm
is a command line tool that provides commands used to manage queues,
-mainly member handling for quorum queues.
+for example, grow, shrink or rebalance replicas of replicated queue types.
See the
.Lk https://www.rabbitmq.com/quorum-queues.html "RabbitMQ quorum queues guide"
-and
-.Lk https://www.rabbitmq.com/ha.html "RabbitMQ classic mirrored queues guide"
+and the general
+.Lk https://www.rabbitmq.com/queues.html "RabbitMQ queues guide"
to learn more about queue types in RabbitMQ.
.
.\" ------------------------------------------------------------------
@@ -115,7 +115,7 @@ Example:
.\" ------------------------------------
.It Cm rebalance Ar type Fl -vhost-pattern Ar pattern Fl -queue-pattern Ar pattern
.Pp
-Rebalances queue master replicas across cluster nodes.
+Rebalances queue leader replicas across cluster nodes.
.Pp
Supported
.Ar type
diff --git a/deps/rabbit/docs/rabbitmq-server.8 b/deps/rabbit/docs/rabbitmq-server.8
index 6a5e411cb3..dba7ca4479 100644
--- a/deps/rabbit/docs/rabbitmq-server.8
+++ b/deps/rabbit/docs/rabbitmq-server.8
@@ -1,9 +1,9 @@
.\" vim:ft=nroff:
-.\" This Source Code Form is subject to the terms of the Mozilla Public
+.\" This Source Code Form is subject to the terms of the Mozilla Public
.\" License, v. 2.0. If a copy of the MPL was not distributed with this
.\" file, You can obtain one at https://mozilla.org/MPL/2.0/.
.\"
-.\" Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+.\" Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
.\"
.Dd September 28, 2019
.Dt RABBITMQ-SERVER 8
diff --git a/deps/rabbit/docs/rabbitmq-server.service.example b/deps/rabbit/docs/rabbitmq-server.service.example
index dec70eb635..69531b1ff6 100644
--- a/deps/rabbit/docs/rabbitmq-server.service.example
+++ b/deps/rabbit/docs/rabbitmq-server.service.example
@@ -5,6 +5,19 @@ After=network.target epmd@0.0.0.0.socket
Wants=network.target epmd@0.0.0.0.socket
[Service]
+# Note: You *may* wish to uncomment the following lines to apply systemd
+# hardening to RabbitMQ, to limit the damage that undiscovered
+# vulnerabilities in RabbitMQ could do to the rest of the system.
+# ProtectSystem=full
+# ProtectHome=true
+# PrivateDevices=true
+# ProtectHostname=true
+# ProtectClock=true
+# ProtectKernelTunables=true
+# ProtectKernelModules=true
+# ProtectKernelLogs=true
+# ProtectControlGroups=true
+# RestrictRealtime=true
Type=notify
User=rabbitmq
Group=rabbitmq
diff --git a/deps/rabbit/docs/rabbitmq-service.8 b/deps/rabbit/docs/rabbitmq-service.8
index 154388fcfc..2f33d9d4e9 100644
--- a/deps/rabbit/docs/rabbitmq-service.8
+++ b/deps/rabbit/docs/rabbitmq-service.8
@@ -1,9 +1,9 @@
.\" vim:ft=nroff:
-.\" This Source Code Form is subject to the terms of the Mozilla Public
+.\" This Source Code Form is subject to the terms of the Mozilla Public
.\" License, v. 2.0. If a copy of the MPL was not distributed with this
.\" file, You can obtain one at https://mozilla.org/MPL/2.0/.
.\"
-.\" Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+.\" Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
.\"
.Dd September 28, 2019
.Dt RABBITMQ-SERVICE.BAT 8
diff --git a/deps/rabbit/docs/rabbitmq-upgrade.8 b/deps/rabbit/docs/rabbitmq-upgrade.8
index 4fe7283f13..c4d4bd16fa 100644
--- a/deps/rabbit/docs/rabbitmq-upgrade.8
+++ b/deps/rabbit/docs/rabbitmq-upgrade.8
@@ -1,11 +1,11 @@
.\" vim:ft=nroff:
-.\" This Source Code Form is subject to the terms of the Mozilla Public
+.\" This Source Code Form is subject to the terms of the Mozilla Public
.\" License, v. 2.0. If a copy of the MPL was not distributed with this
.\" file, You can obtain one at https://mozilla.org/MPL/2.0/.
.\"
-.\" Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+.\" Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
.\"
-.Dd September 28, 2019
+.Dd June 19, 2021
.Dt RABBITMQ-UPGRADE 8
.Os "RabbitMQ Server"
.Sh NAME
@@ -92,6 +92,27 @@ Displays general help and commands supported by
.It Cm post_upgrade
.Pp
Runs post-upgrade tasks. In the current version, it performs the rebalance of mirrored and quorum queues across all nodes in the cluster.
+.\" ------------------------------------
+.It Cm await_online_quorum_plus_one
+.Pp
+Waits for all quorum queues to have an above-minimum online quorum.
+This makes sure that no queue will lose its quorum if the target node is shut down.
+.\" ------------------------------------
+.It Cm drain
+.Pp
+Puts the node in maintenance mode. Such nodes will not serve any
+client traffic or be considered for hosting any queue leader replicas.
+.Pp
+To learn more, see the
+.Lk https://www.rabbitmq.com/upgrade.html#maintenance-mode "RabbitMQ Upgrade guide"
+.\" ------------------------------------
+.It Cm revive
+.Pp
+Puts the node out of maintenance and into regular operating mode.
+Such nodes will again serve client traffic and be considered for queue leader replica placement.
+.Pp
+To learn more, see the
+.Lk https://www.rabbitmq.com/upgrade.html#maintenance-mode "RabbitMQ Upgrade guide"
.\" ------------------------------------------------------------------
.Sh SEE ALSO
.\" ------------------------------------------------------------------
diff --git a/deps/rabbit/docs/rabbitmq.conf.example b/deps/rabbit/docs/rabbitmq.conf.example
index 17e023e62c..dfd4268f4f 100644
--- a/deps/rabbit/docs/rabbitmq.conf.example
+++ b/deps/rabbit/docs/rabbitmq.conf.example
@@ -1,9 +1,24 @@
+## This example configuration file demonstrates various settings
+## available via rabbitmq.conf. It primarily focuses on core broker settings,
+## but some tier 1 plugin settings are also covered.
+##
+## This file is AN EXAMPLE. It is NOT MEANT TO BE USED IN PRODUCTION. Instead of
+## copying the entire (large!) file, create or generate a new rabbitmq.conf for the target system
+## and populate it with the necessary settings.
+##
+## See https://rabbitmq.com/configure.html to learn about how to configure RabbitMQ,
+## the ini-style format used by rabbitmq.conf, how it is different from `advanced.config`,
+## how to verify effective configuration, and so on.
+##
+## See https://rabbitmq.com/documentation.html for the rest of RabbitMQ documentation.
+##
+## In case you have questions, please use RabbitMQ community Slack and the rabbitmq-users Google group
+## instead of GitHub issues.
+
# ======================================
-# RabbitMQ broker section
+# Core broker section
# ======================================
-## Related doc guide: https://rabbitmq.com/configure.html. See
-## https://rabbitmq.com/documentation.html for documentation ToC.
## Networking
## ====================
@@ -85,6 +100,8 @@
##
## Related doc guide: https://rabbitmq.com/ssl.html.
##
+# listeners.ssl.1 = 5671
+#
# ssl_options.verify = verify_peer
# ssl_options.fail_if_no_peer_cert = false
# ssl_options.cacertfile = /path/to/cacert.pem
@@ -93,7 +110,41 @@
#
# ssl_options.honor_cipher_order = true
# ssl_options.honor_ecc_order = true
-
+#
+## These are highly recommended for TLSv1.2 but cannot be used
+## with TLSv1.3. If TLSv1.3 is enabled, these lines MUST be removed.
+# ssl_options.client_renegotiation = false
+# ssl_options.secure_renegotiate = true
+#
+## Limits what TLS versions the server enables for client TLS
+## connections. See https://www.rabbitmq.com/ssl.html#tls-versions for details.
+##
+## Cutting edge TLS version which requires recent client runtime
+## versions and has no cipher suite in common with earlier TLS versions.
+# ssl_options.versions.1 = tlsv1.3
+## Enables TLSv1.2 for best compatibility
+# ssl_options.versions.2 = tlsv1.2
+## Older TLS versions have known vulnerabilities and are being phased out
+## from wide use.
+
+## Limits what cipher suites the server will use for client TLS
+## connections. Narrowing this down can prevent some clients
+## from connecting.
+## If TLSv1.3 is enabled and cipher suites are overridden, TLSv1.3-specific
+## cipher suites must also be explicitly enabled.
+## See https://www.rabbitmq.com/ssl.html#cipher-suites and https://wiki.openssl.org/index.php/TLS1.3#Ciphersuites
+## for details.
+#
+## The example below uses TLSv1.3 cipher suites only
+#
+# ssl_options.ciphers.1 = TLS_AES_256_GCM_SHA384
+# ssl_options.ciphers.2 = TLS_AES_128_GCM_SHA256
+# ssl_options.ciphers.3 = TLS_CHACHA20_POLY1305_SHA256
+# ssl_options.ciphers.4 = TLS_AES_128_CCM_SHA256
+# ssl_options.ciphers.5 = TLS_AES_128_CCM_8_SHA256
+#
+## The example below uses TLSv1.2 cipher suites only
+#
# ssl_options.ciphers.1 = ECDHE-ECDSA-AES256-GCM-SHA384
# ssl_options.ciphers.2 = ECDHE-RSA-AES256-GCM-SHA384
# ssl_options.ciphers.3 = ECDHE-ECDSA-AES256-SHA384
@@ -131,6 +182,8 @@
# ssl_options.ciphers.35 = ECDH-ECDSA-AES128-SHA
# ssl_options.ciphers.36 = ECDH-RSA-AES128-SHA
+# ssl_options.bypass_pem_cache = true
+
## Select an authentication/authorisation backend to use.
##
## Alternative backends are provided by plugins, such as rabbitmq-auth-backend-ldap.
@@ -263,7 +316,14 @@
## =====================================================
##
-## Set the default AMQP 0-9-1 heartbeat interval (in seconds).
+## Set the server AMQP 0-9-1 heartbeat timeout in seconds.
+## RabbitMQ nodes will send heartbeat frames at roughly
+## the (timeout / 2) interval. Two missed heartbeats from
+## a client will close its connection.
+##
+## Values lower than 6 seconds are very likely to produce
+## false positives and are not recommended.
+##
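+## For example, to use a 60 second heartbeat timeout (the value shown here is
+## illustrative, not a recommendation):
+# heartbeat = 60
+##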
## Related doc guides:
##
## * https://rabbitmq.com/heartbeats.html
@@ -389,6 +449,10 @@
##
# cluster_partition_handling = ignore
+## Pauses all nodes on the minority side of a partition. The cluster
+## MUST have an odd number of nodes (3, 5, etc)
+# cluster_partition_handling = pause_minority
+
## pause_if_all_down strategy require additional configuration
# cluster_partition_handling = pause_if_all_down
@@ -527,6 +591,13 @@
## on Windows.
# motd_file = /etc/rabbitmq/motd
+## Consumer timeout
+## If a message delivered to a consumer has not been acknowledged before this timer
+## triggers, the channel will be force closed by the broker. This ensures that
+## faulty consumers that never ack will not hold on to messages indefinitely.
+##
+# consumer_timeout = 900000
+
## ----------------------------------------------------------------------------
## Advanced Erlang Networking/Clustering Options.
##
@@ -591,12 +662,14 @@
## More TLS options
# management.ssl.honor_cipher_order = true
# management.ssl.honor_ecc_order = true
+
+## These are highly recommended for TLSv1.2 but cannot be used
+## with TLSv1.3. If TLSv1.3 is enabled, these lines MUST be removed.
# management.ssl.client_renegotiation = false
# management.ssl.secure_renegotiate = true
## Supported TLS versions
# management.ssl.versions.1 = tlsv1.2
-# management.ssl.versions.2 = tlsv1.1
## Cipher suites the server is allowed to use
# management.ssl.ciphers.1 = ECDHE-ECDSA-AES256-GCM-SHA384
@@ -836,7 +909,7 @@
## Logging settings.
##
-## See https://rabbitmq.com/logging.html and https://github.com/erlang-lager/lager for details.
+## See https://rabbitmq.com/logging.html for details.
##
## Log directory, taken from the RABBITMQ_LOG_BASE env variable by default.
diff --git a/deps/rabbit/docs/rabbitmqctl.8 b/deps/rabbit/docs/rabbitmqctl.8
index 3e041ad2c8..5adc7e24f6 100644
--- a/deps/rabbit/docs/rabbitmqctl.8
+++ b/deps/rabbit/docs/rabbitmqctl.8
@@ -1,11 +1,11 @@
.\" vim:ft=nroff:
-.\" This Source Code Form is subject to the terms of the Mozilla Public
+.\" This Source Code Form is subject to the terms of the Mozilla Public
.\" License, v. 2.0. If a copy of the MPL was not distributed with this
.\" file, You can obtain one at https://mozilla.org/MPL/2.0/.
.\"
-.\" Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+.\" Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
.\"
-.Dd September 28, 2019
+.Dd June 19, 2021
.Dt RABBITMQCTL 8
.Os "RabbitMQ Server"
.Sh NAME
@@ -28,7 +28,13 @@
RabbitMQ is an open source multi-protocol messaging broker.
.Pp
.Nm
-is a command line tool for managing a RabbitMQ server node.
+is the main command line tool for managing a RabbitMQ server node,
+together with
+.Cm rabbitmq-diagnostics
+,
+.Cm rabbitmq-upgrade
+, and others.
+.Pp
It performs all actions by connecting to the target RabbitMQ node
on a dedicated CLI tool communication port and authenticating
using a shared secret (known as the cookie file).
@@ -150,20 +156,14 @@ For example, to reset the RabbitMQ node:
.Pp
Instructs the RabbitMQ node to perform internal log rotation.
.Pp
-Log rotation is performed according to lager settings specified in
-configuration file.
+Log rotation is performed according to the logging settings specified in the configuration file.
+The rotation operation is asynchronous; there is no guarantee that it has completed when this command returns.
.Pp
-Note that there is no need to call this command in case of external log
-rotation (e.g. from logrotate(8)), because lager detects renames and
-automatically reopens log files.
+Note that there is no need to call this command in case of external log rotation (e.g. from logrotate(8)).
.Pp
-For example, this command starts internal log rotation
-process:
+For example, to initiate log rotation:
.sp
.Dl rabbitmqctl rotate_logs
-.Pp
-Rotation is performed asynchronously, so there is no guarantee that it
-will be completed when this command returns.
.\" ------------------------------------------------------------------
.It Cm shutdown
.Pp
@@ -338,7 +338,7 @@ will be lost.
If the last node to go down is permanently lost then you should use
.Cm forget_cluster_node Fl -offline
in preference to this command, as it will ensure that mirrored queues
-which were mastered on the lost node get promoted.
+which had their leader replica on the lost node get promoted.
.Pp
For example, this will force the node not to wait for other nodes next
time it is started:
@@ -720,7 +720,7 @@ is an administrator:
.sp
.Dl rabbitmqctl set_user_tags janeway administrator
.Pp
-This has no effect when the user logs in via AMQP, but can be used to
+This has no effect when the user authenticates using a messaging protocol, but can be used to
permit the user to manage users, virtual hosts and permissions when
the user logs in via some other means (for example with the management
plugin).
@@ -846,7 +846,7 @@ virtual hosts to which the user named
.Qq janeway
has been granted access, and the topic permissions the user has in these virtual hosts:
.sp
-.Dl rabbitmqctl list_topic_user_permissions janeway
+.Dl rabbitmqctl list_user_topic_permissions janeway
.\" ------------------------------------------------------------------
.It Cm list_vhosts Op Ar vhostinfoitem ...
.Pp
@@ -1289,11 +1289,11 @@ of the following mutually exclusive options:
.Bl -tag -width Ds
.It Fl -offline
List only those durable queues that are not currently available (more
-specifically, their master node isn't).
+specifically, their leader node isn't).
.It Fl -online
-List queues that are currently available (their master node is).
+List queues that are currently available (their leader node is).
.It Fl -local
-List only those queues whose master process is located on the current
+List only those queues whose leader replica is located on the current
node.
.El
.Pp
@@ -1392,7 +1392,7 @@ To learn more, see the
.Lk https://www.rabbitmq.com/ha.html "RabbitMQ Mirroring guide"
.It Cm synchronised_slave_pids
If the queue is mirrored, this gives the IDs of the mirrors (follower replicas) which
-are synchronised with the master (leader). To learn more, see the
+are in sync with the leader replica. To learn more, see the
.Lk https://www.rabbitmq.com/ha.html "RabbitMQ Mirroring guide"
.It Cm state
The state of the queue.
@@ -1424,8 +1424,8 @@ each queue of the virtual host named
.Pp
Tests queues to respond within timeout. Lists those which did not respond
.Pp
-For example, this command lists only those unresponsive queues whose master process
-is located on the current node.
+For example, this command lists only those unresponsive queues whose leader replica
+is hosted on the target node.
.Sp
.Dl rabbitmqctl list_unresponsive_queues --local name
.\" ------------------------------------------------------------------
@@ -1912,7 +1912,7 @@ none
.Pp
Example:
.Sp
-.Dl rabbitmqctl log_level debug
+.Dl rabbitmqctl set_log_level debug
.\" ------------------------------------------------------------------
.It Cm set_vm_memory_high_watermark Ar fraction
.Bl -tag -width Ds
@@ -1957,6 +1957,10 @@ Enables a feature flag on the target node.
Example:
.Sp
.Dl rabbitmqctl enable_feature_flag quorum_queue
+.Pp
+You can also enable all feature flags by specifying "all":
+.Sp
+.Dl rabbitmqctl enable_feature_flag "all"
.\" ------------------------------------------------------------------
.It Cm list_feature_flags Op Ar column ...
.Pp
diff --git a/deps/rabbit/erlang.mk b/deps/rabbit/erlang.mk
deleted file mode 100644
index fce4be0b0a..0000000000
--- a/deps/rabbit/erlang.mk
+++ /dev/null
@@ -1,7808 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
-ERLANG_MK_WITHOUT =
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
-
-# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
-
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
-
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of statc BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simplify writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating Github-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = erlang library for JSON Web Token
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple non intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = redis erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += esh_mk
-pkg_esh_mk_name = esh_mk
-pkg_esh_mk_description = esh template engine plugin for erlang.mk
-pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
-pkg_esh_mk_fetch = git
-pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
-pkg_esh_mk_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = TOML language erlang parser
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = http://fixprotocol.org/ implementation.
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - an Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut Erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = Guard helpers for Erlang, implemented as a parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
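For context, index entries such as the pkg_gproc_* block above are what erlang.mk falls back to when a project lists a bare dependency name: the fetch method, repository and commit recorded here become the defaults. A minimal sketch of a consuming Makefile, assuming the standard erlang.mk workflow (the application name and the 0.9.0 tag are placeholders, not values taken from this index):

# Placeholder application name; gproc is resolved through the
# pkg_gproc_* defaults recorded in the index above.
PROJECT = my_app
DEPS = gproc

# Optionally pin a ref other than the index default of "master"
# (0.9.0 is a placeholder tag):
dep_gproc_commit = 0.9.0

include erlang.mk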
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
-pkg_gun_homepage = http://ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template-printing (scaffolding) tool for Erlang, similar to rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang IRC client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSON Schema Erlang) is a JSON Schema validator implementation for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = A high-level JSON library for Erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding and decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = Yet another, but slightly different, Erlang <-> JSON encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library for efficiently decoding and encoding JSON, written in C.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an Erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = Erlang driver for RethinkDB
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for Erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = Library to handle MIME types
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's HTML parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map an internal port to an external one using UPnP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang OAuth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure Erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transform for using new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in Erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for Erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
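A few entries above pin a release tag instead of master (ranch at 1.2.1, mysql at 1.5.1). A project that needs a different version would normally override the pin from its own Makefile rather than edit this index; a minimal sketch using erlang.mk's usual dep_* override form (2.1.0 is a placeholder tag, not a value from this index):

DEPS = ranch

# Full override: the fetch method, repository and ref below replace the
# pkg_ranch_* defaults pinned above (2.1.0 is a placeholder tag).
dep_ranch = git https://github.com/ninenines/ranch 2.1.0

include erlang.mk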
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid simple, internal pub/sub event bus written in and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile Erlang record definitions into modules to convert them to/from JSON easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between record and proplist
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = automatic Erlang node discovery via Redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = pipelined Erlang Redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang REST client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors ASAP
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = Erlang Riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy as nif for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an ID generator for message services.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elastic Search
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX style broken HTML parser in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = transit format for erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU based collation Erlang module
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = a simple erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler svn repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired by rebar xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based yaml loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = Zab protocol implementation in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
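A brief usage sketch for the search target above; the query value is an example, and matching is case-insensitive against each package's name and description:

    make search q=riak    # print only packages whose name or description mentions "riak"
    make search           # with no q, print the whole package index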
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
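A minimal migration sketch for the deprecation warning above, assuming a project that previously listed extra OTP applications in OTP_DEPS (the application names are illustrative):

    # Deprecated:
    # OTP_DEPS = mnesia ssl
    # Preferred; these applications are added to the generated .app file's
    # applications list (see the app_file templates later in this file).
    LOCAL_DEPS = mnesia ssl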
-
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
-
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# Both kinds of plugins use the core_dep_plugin macro.
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
-
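A hypothetical sketch of how a project Makefile might use the early-plugin hook above; my_dep and the file paths are made-up names:

    # Include $(DEPS_DIR)/my_dep/early-plugins.mk before the rest of the build.
    DEP_EARLY_PLUGINS = my_dep
    # An entry containing a slash is taken as a path relative to $(DEPS_DIR),
    # with its first component naming the dependency that provides it:
    # DEP_EARLY_PLUGINS = my_dep/mk/early-plugins.mk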
-# Query functions.
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
-
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
-
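An illustrative pair of dependency declarations and what the query functions above derive from them; the packages and versions are examples, not part of this repository's configuration:

    DEPS = cowboy jsx
    dep_cowboy = git https://github.com/ninenines/cowboy 2.9.0
    dep_jsx = hex 3.1.0
    # query_fetch_method: cowboy -> git, jsx -> hex
    # query_repo:    cowboy -> https://github.com/ninenines/cowboy
    #                jsx    -> https://hex.pm/packages/jsx
    # query_version: cowboy -> 2.9.0, jsx -> 3.1.0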
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly we don't want to include it here,
-# otherwise it'll be treated both as an app and a top-level project.
-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
-
-export NO_AUTOPATCH
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
-
-# Optimization: don't recompile deps unless truly necessary.
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create ebin directory for all apps to make sure Erlang recognizes them
-# as proper OTP applications when using -include_lib. This is a temporary
-# fix; a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
-
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies after they were compiled
-# once. If a developer is working on the top-level project and some
-# dependencies at the same time, they may want to change this behavior.
-# There are two solutions:
-# 1. Set `FULL=1` so that all dependencies are visited and
-# recursively recompiled if necessary.
-# 2. Set `FORCE_REBUILD=` to the specific list of dependencies that
-# should be recompiled (instead of the whole set).
-
-FORCE_REBUILD ?=
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
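Two illustrative invocations matching the options described in the comment above (the dependency names are examples):

    make FULL=1                         # visit all dependencies and rebuild as needed
    make FORCE_REBUILD="ranch cowlib"   # force a rebuild of just these dependencies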
-
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
-
-# Deps related targets.
-
-# @todo rename GNUmakefile and makefile into Makefile first, if they exist
-# While the Makefile could be named GNUmakefile or makefile,
-# in practice only Makefile has been needed so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
-# if given. Do it for all 3 possible Makefile file names.
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
-
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-# Hex only has a package version. No need to look in the Erlang.mk packages.
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
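Hypothetical dependency specifications, one per fetch method defined above; every name, URL and version is illustrative. The first word of a dep_<name> variable selects the matching dep_fetch_* macro:

    dep_app1 = git https://github.com/example/app1 master
    dep_app2 = git-subfolder https://github.com/example/umbrella master apps/app2
    dep_app3 = git-submodule
    dep_app4 = hg https://example.org/hg/app4 default
    dep_app5 = svn https://example.org/svn/app5/trunk
    dep_app6 = cp /path/to/a/local/copy/app6
    dep_app7 = ln /path/to/a/local/checkout/app7
    dep_app8 = hex 1.0.0
    dep_app9 = hex 1.0.0 published_package_name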
-
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility purposes with older Erlang.mk configuration.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
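For completeness, an illustrative declaration in the deprecated legacy format handled above, as it might appear in an older dependency's Makefile: word 1 is the repository and word 2 the commit, with master used when the commit is omitted:

    dep_app1 = https://github.com/example/app1 1.0.0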
-
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
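A small illustrative opt-out from the autopatch step wired in above; the dependency names are placeholders:

    # Leave these dependencies' build files untouched (no autopatching).
    NO_AUTOPATCH = app1 app2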
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
-
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
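The two app_file variants above are driven entirely by project-level variables; which one is rendered depends on whether src/$(PROJECT_MOD).erl exists. A sketch with hypothetical values (my_app, the port setting and crypto are placeholders):

PROJECT = my_app
PROJECT_DESCRIPTION = Example application
PROJECT_VERSION = 1.0.0
PROJECT_MOD = my_app_app
PROJECT_ENV = [{listen_port, 8080}]
LOCAL_DEPS = crypto

With src/my_app_app.erl present, the second variant is used, so ebin/my_app.app ends up with {mod, {my_app_app, []}} and my_app_sup in its registered list.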
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- Output0 = [
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ],
- Output = case "é" of
- [233] -> unicode:characters_to_binary(Output0);
- _ -> Output0
- end,
- ok = file:write_file("$(1)", Output),
- halt()
-endef
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
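The $(PROJECT).d file written by makedep.erl is itself a Makefile fragment. For a hypothetical project where src/my_mod.erl implements a local behaviour my_behaviour and includes include/my_header.hrl (all names invented here), the generated file would look roughly like:

# Generated by Erlang.mk. Edit at your own risk!

src/my_mod.erl:: src/my_behaviour.erl include/my_header.hrl; @touch $@

COMPILE_FIRST += my_behaviour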
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
- echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
-
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
- @:
-
-test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
-test_erlc_verbose_2 = set -x;
-test_erlc_verbose = $(test_erlc_verbose_$(V))
-
-define compile_test_erl
- $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(1)
-endef
-
-ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
-$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
- $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want to fail due to
-# warnings when used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
-		"  new t=T n=N in=APP  Generate a module N based on the template T in APP" \
- " list-templates List available templates"
-
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
-
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
-
-list-templates:
- $(verbose) @echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
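Tying the templates and targets above together, a hypothetical session (the application and module names are invented) could be:

# Create a release-ready skeleton, then add an application and a gen_server to it:
#   make bootstrap bootstrap-rel
#   make new-app in=my_backend
#   make new t=gen_server n=my_worker in=my_backend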
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code. The "gcc" MSYS2 package also doesn't.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
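When c_src/ contains plain sources rather than its own Makefile, the rules above build either a shared library (the default, suitable for NIFs) or a standalone executable. A sketch of a port-program style override, with placeholder names:

C_SRC_TYPE = executable
C_SRC_OUTPUT = $(CURDIR)/priv/my_port
# Extra link inputs go through the standard variables picked up above:
LDLIBS += -lpthread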
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
- "The CI_OTP variable must be defined with the Erlang versions" \
- "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
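As the help text explains, ci simply fans out over the configured Erlang versions. A hypothetical configuration (the version strings are placeholders):

CI_OTP = OTP-22.3 OTP-23.1
# ci_target above then defines ci-OTP-22.3 and ci-OTP-23.1, each pointing
# PATH at the matching kerl build and running '$(MAKE) ci-setup tests'.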
-
-# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifdef CONCUERROR_TESTS
-
-.PHONY: concuerror distclean-concuerror
-
-# Configuration
-
-CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
-CONCUERROR_OPTS ?=
-
-# Core targets.
-
-check:: concuerror
-
-ifndef KEEP_LOGS
-distclean:: distclean-concuerror
-endif
-
-# Plugin-specific targets.
-
-$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
- $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
- $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
-
-$(CONCUERROR_LOGS_DIR):
- $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
-
-define concuerror_html_report
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="utf-8">
-<title>Concuerror HTML report</title>
-</head>
-<body>
-<h1>Concuerror HTML report</h1>
-<p>Generated on $(concuerror_date)</p>
-<ul>
-$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
-</ul>
-</body>
-</html>
-endef
-
-concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
- $(eval concuerror_date := $(shell date))
- $(eval concuerror_targets := $^)
- $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
-
-define concuerror_target
-.PHONY: concuerror-$1-$2
-
-concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
- $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
- --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
- -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
- $$(CONCUERROR_OPTS) -m $1 -t $2
-endef
-
-$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
-
-distclean-concuerror:
- $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
-
-endif
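CONCUERROR_TESTS entries are module:test pairs; the colon becomes a dash in both the generated target name and the log file. A sketch with invented names:

CONCUERROR_TESTS = my_mod:interleaving_test
# Generates the concuerror-my_mod-interleaving_test target, which writes
# $(CONCUERROR_LOGS_DIR)/concuerror-my_mod-interleaving_test.txt and is
# linked from the generated concuerror.html report.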
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ct apps-ct distclean-ct
-
-# Configuration.
-
-CT_OPTS ?=
-
-ifneq ($(wildcard $(TEST_DIR)),)
-ifndef CT_SUITES
-CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
-endif
-endif
-CT_SUITES ?=
-CT_LOGS_DIR ?= $(CURDIR)/logs
-
-# Core targets.
-
-tests:: ct
-
-ifndef KEEP_LOGS
-distclean:: distclean-ct
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Common_test targets:" \
- " ct Run all the common_test suites for this project" \
- "" \
- "All your common_test suites have their associated targets." \
-		"A suite named http_SUITE can be run using the ct-http target."
-
-# Plugin-specific targets.
-
-CT_RUN = ct_run \
- -no_auto_compile \
- -noinput \
- -pa $(CURDIR)/ebin $(TEST_DIR) \
- -dir $(TEST_DIR) \
- -logdir $(CT_LOGS_DIR)
-
-ifeq ($(CT_SUITES),)
-ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
-else
-# We do not run tests if we are in an apps/* with no test directory.
-ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
-ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
-endif
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-define ct_app_target
-apps-ct-$1: test-build
- $$(MAKE) -C $1 ct IS_APP=1
-endef
-
-$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
-
-apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
-endif
-
-ifdef t
-ifeq (,$(findstring :,$t))
-CT_EXTRA = -group $t
-else
-t_words = $(subst :, ,$t)
-CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
-endif
-else
-ifdef c
-CT_EXTRA = -case $c
-else
-CT_EXTRA =
-endif
-endif
-
-define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
-endef
-
-$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
-
-distclean-ct:
- $(gen_verbose) rm -rf $(CT_LOGS_DIR)
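Each suite therefore gets its own ct-<suite> target, and t= or c= narrow a run to a group and/or case. Hypothetical suite, group and case names:

# Run only http_SUITE, restricted to one group and one case:
#   make ct-http t=slow_requests:request_timeout
# Run a single case without naming a group:
#   make ct-http c=request_timeout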
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: plt distclean-plt dialyze
-
-# Configuration.
-
-DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
-export DIALYZER_PLT
-
-PLT_APPS ?=
-DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-DIALYZER_PLT_OPTS ?=
-
-# Core targets.
-
-check:: dialyze
-
-distclean:: distclean-plt
-
-help::
- $(verbose) printf "%s\n" "" \
- "Dialyzer targets:" \
- " plt Build a PLT file for this project" \
- " dialyze Analyze the project using Dialyzer"
-
-# Plugin-specific targets.
-
-define filter_opts.erl
- Opts = init:get_plain_arguments(),
- {Filtered, _} = lists:foldl(fun
- (O, {Os, true}) -> {[O|Os], false};
- (O = "-D", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-I", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-pa", {Os, _}) -> {[O|Os], true};
- (_, Acc) -> Acc
- end, {[], false}, Opts),
- io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
- halt().
-endef
-
-# DIALYZER_PLT is a variable understood directly by Dialyzer.
-#
-# We append the path to erts at the end of the PLT. This works
-# because the PLT file is in the external term format and the
-# function binary_to_term/1 ignores any trailing data.
-$(DIALYZER_PLT): deps app
- $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
- while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
- $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
- erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
- $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
-
-plt: $(DIALYZER_PLT)
-
-distclean-plt:
- $(gen_verbose) rm -f $(DIALYZER_PLT)
-
-ifneq ($(wildcard $(DIALYZER_PLT)),)
-dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
- $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
- grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
- rm $(DIALYZER_PLT); \
- $(MAKE) plt; \
- fi
-else
-dialyze: $(DIALYZER_PLT)
-endif
- $(verbose) dialyzer --no_native `$(ERL) \
- -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
- -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
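PLT_APPS and DIALYZER_OPTS are the intended extension points for the rules above; a sketch adding OTP applications to the PLT and trimming the warning set (the values are illustrative):

PLT_APPS = crypto public_key ssl
DIALYZER_OPTS = -Werror_handling -Wunmatched_returns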
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
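EDOC_SRC_DIRS widens the source_path handed to edoc (useful for umbrella projects) and EDOC_OPTS is spliced into its option list. A sketch, assuming the standard edoc preprocess option is wanted:

EDOC_SRC_DIRS = $(ALL_APPS_DIRS)
EDOC_OPTS = {preprocess, true}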
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
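Assuming erlydtl is available as a dependency, templates under DTL_PATH are compiled straight into ebin/ as modules named <prefix><name><suffix>. A sketch with placeholder names:

DTL_PATH = priv/templates/
DTL_PREFIX = my_app_
DTL_SUFFIX = _view
# priv/templates/login.dtl would then be compiled to the my_app_login_view module.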
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
- " escript Build an executable escript archive" \
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
- $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: eunit apps-eunit
-
-# Configuration
-
-EUNIT_OPTS ?=
-EUNIT_ERL_OPTS ?=
-
-# Core targets.
-
-tests:: eunit
-
-help::
- $(verbose) printf "%s\n" "" \
- "EUnit targets:" \
- " eunit Run all the EUnit tests for this project"
-
-# Plugin-specific targets.
-
-define eunit.erl
- $(call cover.erl)
- CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
- ok -> ok;
- error -> halt(2)
- end,
- CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
- halt()
-endef
-
-EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
-else
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
-endif
-else
-EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
-EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
-
-EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
- $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
-
-eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
-ifneq ($(wildcard src/ $(TEST_DIR)),)
- $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-apps-eunit: test-build
- $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
- [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
- exit $$eunit_retcode
-endif
-endif
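The t variable selects either a whole module or a single function, depending on whether it contains a colon. With invented names:

# Run every EUnit test in one module:
#   make eunit t=my_mod
# Run a single test generator (passed to eunit:test as fun my_mod:some_test_/0):
#   make eunit t=my_mod:some_test_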
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper)
-.PHONY: proper
-
-# Targets.
-
-tests:: proper
-
-define proper_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- Module = fun(M) ->
- [true] =:= lists:usort([
- case atom_to_list(F) of
- "prop_" ++ _ ->
- io:format("Testing ~p:~p/0~n", [M, F]),
- proper:quickcheck(M:F(), nocolors);
- _ ->
- true
- end
- || {F, 0} <- M:module_info(exports)])
- end,
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]);
- module -> Module($(2));
- function -> proper:quickcheck($(2), nocolors)
- end,
- CoverExport("$(COVER_DATA_DIR)/proper.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-proper: test-build cover-data-dir
- $(verbose) $(call erlang,$(call proper_check.erl,module,$(t)))
-else
-proper: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)()))
-endif
-else
-proper: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES)))
-endif
-endif
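proper_check.erl runs every exported prop_* property by default; t narrows this to one module or, with a colon, a single property invoked as $(t)(). Placeholder names again:

# All properties exported from one module:
#   make proper t=prop_queue
# A single property, called as prop_queue:prop_in_order():
#   make proper t=prop_queue:prop_in_order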
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Verbosity.
-
-proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
-proto_verbose = $(proto_verbose_$(V))
-
-# Core targets.
-
-ifneq ($(wildcard src/),)
-ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
-PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
-ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
-
-ifeq ($(PROTO_FILES),)
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
- $(verbose) :
-else
-# Rebuild proto files when the Makefile changes.
-# We exclude $(PROJECT).d to avoid a circular dependency.
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(PROTO_FILES); \
- fi
- $(verbose) touch $@
-
-$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
-endif
-
-ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
-define compile_proto.erl
- [begin
- protobuffs_compile:generate_source(F, [
- {output_include_dir, "./include"},
- {output_src_dir, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-else
-define compile_proto.erl
- [begin
- gpb_compile:file(F, [
- {include_as_lib, true},
- {module_name_suffix, "_pb"},
- {o_hrl, "./include"},
- {o_erl, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-endif
-
-ifneq ($(PROTO_FILES),)
-$(PROJECT).d:: $(PROTO_FILES)
- $(verbose) mkdir -p ebin/ include/
- $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
-endif
-endif
-endif
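These rules only activate when gpb or protobuffs appears among the (build) dependencies; with gpb, each src/*.proto is compiled into src/<name>_pb.erl plus a header under include/. A sketch (gpb resolution is assumed to come from a dep_gpb entry or the package index elsewhere in this file):

BUILD_DEPS += gpb
# src/ping.proto would then yield src/ping_pb.erl and include/ping_pb.hrl.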
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
-
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
- " shell Run an erlang shell with SHELL_OPTS or reasonable default"
-
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
-
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
- "ReST sources and 'conf.py' file are expected in directory pointed by" \
- "SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists formats to build (only" \
- "'html' format is generated by default); target directory can be specified by" \
- 'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
- "Additional Sphinx options can be set in SPHINX_OPTS."
-
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
-
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
-.PHONY: triq
-
-# Targets.
-
-tests:: triq
-
-define triq_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
- module -> triq:check($(2));
- function -> triq:check($(2))
- end,
- CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-triq: test-build cover-data-dir
- $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
-else
-triq: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
-endif
-else
-triq: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
- ' xref Run Xrefr using $$XREF_CONFIG as config file if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-COVER_REPORT_DIR ?= cover
-COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
-
-ifdef COVER
-COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
-COVER_DEPS ?=
-endif
-
-# Code coverage for Common Test.
-
-ifdef COVER
-ifdef CT_RUN
-ifneq ($(wildcard $(TEST_DIR)),)
-test-build:: $(TEST_DIR)/ct.cover.spec
-
-$(TEST_DIR)/ct.cover.spec: cover-data-dir
- $(gen_verbose) printf "%s\n" \
- "{incl_app, '$(PROJECT)', details}." \
- "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
- $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
- $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
-
-CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
-endif
-endif
-endif
-
-# Code coverage for other tools.
-
-ifdef COVER
-define cover.erl
- CoverSetup = fun() ->
- Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
- $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
- $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
- end
- end || Dir <- Dirs]
- end,
- CoverExport = fun(Filename) -> cover:export(Filename) end,
-endef
-else
-define cover.erl
- CoverSetup = fun() -> ok end,
- CoverExport = fun(_) -> ok end,
-endef
-endif
-
-# Core targets
-
-ifdef COVER
-ifneq ($(COVER_REPORT_DIR),)
-tests::
- $(verbose) $(MAKE) --no-print-directory cover-report
-endif
-
-cover-data-dir: | $(COVER_DATA_DIR)
-
-$(COVER_DATA_DIR):
- $(verbose) mkdir -p $(COVER_DATA_DIR)
-else
-cover-data-dir:
-endif
-
-clean:: coverdata-clean
-
-ifneq ($(COVER_REPORT_DIR),)
-distclean:: cover-report-clean
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Cover targets:" \
- " cover-report Generate a HTML coverage report from previously collected" \
- " cover data." \
- " all.coverdata Merge all coverdata files into all.coverdata." \
- "" \
- "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
- "target tests additionally generates a HTML coverage report from the combined" \
- "coverdata files from each of these testing tools. HTML reports can be disabled" \
- "by setting COVER_REPORT_DIR to empty."
-
-# Plugin specific targets
-
-COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
-
-.PHONY: coverdata-clean
-coverdata-clean:
- $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
-
-# Merge all coverdata files into one.
-define cover_export.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
-endef
-
-all.coverdata: $(COVERDATA) cover-data-dir
- $(gen_verbose) $(call erlang,$(cover_export.erl))
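# Editor's sketch (illustrative, not part of erlang.mk): run a test suite with
# coverage enabled, then merge the per-tool coverdata files into one:
#
#   make ct COVER=1
#   make all.coverdata
#
# The merged file is written to $(COVER_DATA_DIR)/all.coverdata.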
-
-# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
-# empty if you want the coverdata files but not the HTML report.
-ifneq ($(COVER_REPORT_DIR),)
-
-.PHONY: cover-report-clean cover-report
-
-cover-report-clean:
- $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
-ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
- $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
-endif
-
-ifeq ($(COVERDATA),)
-cover-report:
-else
-
-# Modules which include eunit.hrl always contain one line without coverage
-# because eunit defines test/0 which is never called. We compensate for this.
-EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
- grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
- | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
-
-define cover_report.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- Ms = cover:imported_modules(),
- [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
- ++ ".COVER.html", [html]) || M <- Ms],
- Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
- EunitHrlMods = [$(EUNIT_HRL_MODS)],
- Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
- true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
- TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
- TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
- Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
- TotalPerc = Perc(TotalY, TotalN),
- {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
- io:format(F, "<!DOCTYPE html><html>~n"
- "<head><meta charset=\"UTF-8\">~n"
- "<title>Coverage report</title></head>~n"
- "<body>~n", []),
- io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
- io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
- [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
- "<td>~p%</td></tr>~n",
- [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
- How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
- Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
- io:format(F, "</table>~n"
- "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
- "</body></html>", [How, Date]),
- halt().
-endef
-
-cover-report:
- $(verbose) mkdir -p $(COVER_REPORT_DIR)
- $(gen_verbose) $(call erlang,$(cover_report.erl))
-
-endif
-endif # ifneq ($(COVER_REPORT_DIR),)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
-
-endif
-endif
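# Editor's sketch (illustrative, not part of erlang.mk): building with SFX=1
# appends the release tarball to the stub above, producing a single
# self-extracting file that unpacks to a temporary directory and boots the
# release in console mode:
#
#   make rel SFX=1
#   ./_rel/<release_name>.run
#
# <release_name> stands for $(RELX_REL_NAME); the output path assumes the
# default RELX_OUTPUT_DIR.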
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
-
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
-
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are also included no
-# matter the type of requested dependencies.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
-
-# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of
-# dependencies with a single target.
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
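# Editor's sketch (illustrative, not part of erlang.mk): fetch the regular
# dependencies plus the doc and test ones in a single pass:
#
#   make fetch-deps DEP_TYPES='doc test'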
-
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
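# Editor's sketch (illustrative, not part of erlang.mk): print the name, fetch
# method, repository and version of every recursive dependency, or restrict the
# reported fields via QUERY:
#
#   make query-deps
#   make query-deps QUERY='name version'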
diff --git a/deps/rabbit/include/gm_specs.hrl b/deps/rabbit/include/gm_specs.hrl
index 2a16c862c4..1e7282988a 100644
--- a/deps/rabbit/include/gm_specs.hrl
+++ b/deps/rabbit/include/gm_specs.hrl
@@ -2,14 +2,9 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-type callback_result() :: 'ok' | {'stop', any()} | {'become', atom(), args()}.
-type args() :: any().
-type members() :: [pid()].
-
--spec joined(args(), members()) -> callback_result().
--spec members_changed(args(), members(),members()) -> callback_result().
--spec handle_msg(args(), pid(), any()) -> callback_result().
--spec handle_terminate(args(), term()) -> any().
diff --git a/deps/rabbit/include/rabbit_global_counters.hrl b/deps/rabbit/include/rabbit_global_counters.hrl
new file mode 100644
index 0000000000..f4eac1268e
--- /dev/null
+++ b/deps/rabbit/include/rabbit_global_counters.hrl
@@ -0,0 +1,2 @@
+-define(NUM_PROTOCOL_COUNTERS, 8).
+-define(NUM_PROTOCOL_QUEUE_TYPE, 8).
diff --git a/deps/rabbit/priv/schema/rabbit.schema b/deps/rabbit/priv/schema/rabbit.schema
index 518403c20d..1537639b10 100644
--- a/deps/rabbit/priv/schema/rabbit.schema
+++ b/deps/rabbit/priv/schema/rabbit.schema
@@ -121,18 +121,139 @@ end}.
%% Definition import
%%
-%% Load definitions from a JSON file or directory of files. See
+%% Original key for definition loading from a JSON file or directory of files. See
%% https://www.rabbitmq.com/management.html#load-definitions
-%%
-%% {load_definitions, "/path/to/schema.json"},
-%% {load_definitions, "/path/to/schemas"},
{mapping, "load_definitions", "rabbit.load_definitions",
[{datatype, string},
{validators, ["file_accessible"]}]}.
+%% Newer syntax for definition loading from a JSON file or directory of files. See
+%% https://www.rabbitmq.com/management.html#load-definitions
+{mapping, "definitions.local.path", "rabbit.definitions.local_path",
+ [{datatype, string},
+ {validators, ["file_accessible"]}]}.
+
+%% Extensible mechanism for loading definitions from a local or remote source
+{mapping, "definitions.import_backend", "rabbit.definitions.import_backend", [
+ {datatype, atom}
+]}.
+
+{translation, "rabbit.definitions.import_backend",
+fun(Conf) ->
+ case cuttlefish:conf_get("definitions.import_backend", Conf, rabbit_definitions_import_local_filesystem) of
+ %% short aliases for known backends
+ local_filesystem -> rabbit_definitions_import_local_filesystem;
+ local -> rabbit_definitions_import_local_filesystem;
+ https -> rabbit_definitions_import_https;
+ http -> rabbit_definitions_import_https;
+ %% accept both rabbitmq_ and rabbit_ (typical core module prefix)
+ rabbitmq_definitions_import_local_filesystem -> rabbit_definitions_import_local_filesystem;
+ rabbitmq_definitions_import_https -> rabbit_definitions_import_https;
+ %% any other value is used as is
+ Module -> Module
+ end
+end}.
+
+%% Load definitions from a remote URL over HTTPS. See
+%% https://www.rabbitmq.com/management.html#load-definitions
+{mapping, "definitions.https.url", "rabbit.definitions.url",
+ [{datatype, string}]}.
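%% Editor's sketch (illustrative, not part of the schema): with the mappings
%% above, rabbitmq.conf can import definitions from a remote HTTPS endpoint:
%%
%%   definitions.import_backend = https
%%   definitions.https.url      = https://definitions.example.local/defs.json
%%   definitions.tls.verify     = verify_peer
%%
%% The URL above is a placeholder.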
+
+%% Client-side TLS settings used, for example, by the HTTPS definition loading mechanism.
+%% These can be reused by other clients.
+
+{mapping, "definitions.tls.verify", "rabbit.definitions.ssl_options.verify", [
+ {datatype, {enum, [verify_peer, verify_none]}}]}.
+
+{mapping, "definitions.tls.fail_if_no_peer_cert", "rabbit.definitions.ssl_options.fail_if_no_peer_cert", [
+ {datatype, {enum, [true, false]}}]}.
+
+{mapping, "definitions.tls.cacertfile", "rabbit.definitions.ssl_options.cacertfile",
+ [{datatype, string}, {validators, ["file_accessible"]}]}.
+
+{mapping, "definitions.tls.certfile", "rabbit.definitions.ssl_options.certfile",
+ [{datatype, string}, {validators, ["file_accessible"]}]}.
+
+{mapping, "definitions.tls.cacerts.$name", "rabbit.definitions.ssl_options.cacerts",
+ [{datatype, string}]}.
+
+{translation, "rabbit.definitions.ssl_options.cacerts",
+fun(Conf) ->
+ Settings = cuttlefish_variable:filter_by_prefix("definitions.tls.cacerts", Conf),
+ [ list_to_binary(V) || {_, V} <- Settings ]
+end}.
+
+{mapping, "definitions.tls.cert", "rabbit.definitions.ssl_options.cert",
+ [{datatype, string}]}.
+
+{translation, "rabbit.definitions.ssl_options.cert",
+fun(Conf) ->
+ list_to_binary(cuttlefish:conf_get("definitions.tls.cert", Conf))
+end}.
+
+{mapping, "definitions.tls.reuse_session", "rabbit.definitions.ssl_options.reuse_session",
+ [{datatype, {enum, [true, false]}}]}.
+
+{mapping, "definitions.tls.crl_check", "rabbit.definitions.ssl_options.crl_check",
+ [{datatype, [{enum, [true, false, peer, best_effort]}]}]}.
+
+{mapping, "definitions.tls.depth", "rabbit.definitions.ssl_options.depth",
+ [{datatype, integer}, {validators, ["byte"]}]}.
+
+{mapping, "definitions.tls.dh", "rabbit.definitions.ssl_options.dh",
+ [{datatype, string}]}.
+
+{translation, "rabbit.definitions.ssl_options.dh",
+fun(Conf) ->
+ list_to_binary(cuttlefish:conf_get("definitions.tls.dh", Conf))
+end}.
+
+{translation, "rabbit.definitions.ssl_options.key",
+fun(Conf) ->
+ case cuttlefish_variable:filter_by_prefix("definitions.tls.key", Conf) of
+ [{[_,_,Key], Val}|_] -> {list_to_atom(Key), list_to_binary(Val)};
+ _ -> cuttlefish:unset()
+ end
+end}.
+
+{mapping, "definitions.tls.keyfile", "rabbit.definitions.ssl_options.keyfile",
+ [{datatype, string}, {validators, ["file_accessible"]}]}.
+
+{mapping, "definitions.tls.log_alert", "rabbit.definitions.ssl_options.log_alert",
+ [{datatype, {enum, [true, false]}}]}.
+
+{mapping, "definitions.tls.password", "rabbit.definitions.ssl_options.password",
+ [{datatype, string}]}.
+
+{mapping, "definitions.tls.secure_renegotiate", "rabbit.definitions.ssl_options.secure_renegotiate",
+ [{datatype, {enum, [true, false]}}]}.
+
+{mapping, "definitions.tls.reuse_sessions", "rabbit.definitions.ssl_options.reuse_sessions",
+ [{datatype, {enum, [true, false]}}]}.
+
+{mapping, "definitions.tls.versions.$version", "rabbit.definitions.ssl_options.versions",
+ [{datatype, atom}]}.
+
+{translation, "rabbit.definitions.ssl_options.versions",
+fun(Conf) ->
+ Settings = cuttlefish_variable:filter_by_prefix("definitions.tls.versions", Conf),
+ [V || {_, V} <- Settings]
+end}.
+
+{mapping, "definitions.tls.ciphers.$cipher", "rabbit.definitions.ssl_options.ciphers",
+ [{datatype, string}]}.
+
+{translation, "rabbit.definitions.ssl_options.ciphers",
+fun(Conf) ->
+ Settings = cuttlefish_variable:filter_by_prefix("definitions.tls.ciphers", Conf),
+ lists:reverse([V || {_, V} <- Settings])
+end}.
+
+{mapping, "definitions.tls.log_level", "rabbit.definitions.ssl_options.log_level",
+ [{datatype, {enum, [emergency, alert, critical, error, warning, notice, info, debug]}}]}.
+
%%
-%% Security / AAA
-%% ==============
+%% Seed User, Authentication, Access Control
%%
%% The default "guest" user is only permitted to access the server
@@ -254,13 +375,16 @@ end}.
fun(Conf) ->
case cuttlefish_variable:filter_by_prefix("ssl_options.key", Conf) of
[{[_,_,Key], Val}|_] -> {list_to_atom(Key), list_to_binary(Val)};
- _ -> undefined
+ _ -> cuttlefish:unset()
end
end}.
{mapping, "ssl_options.keyfile", "rabbit.ssl_options.keyfile",
[{datatype, string}, {validators, ["file_accessible"]}]}.
+{mapping, "ssl_options.log_level", "rabbit.ssl_options.log_level",
+ [{datatype, {enum, [emergency, alert, critical, error, warning, notice, info, debug]}}]}.
+
{mapping, "ssl_options.log_alert", "rabbit.ssl_options.log_alert",
[{datatype, {enum, [true, false]}}]}.
@@ -294,6 +418,9 @@ fun(Conf) ->
lists:reverse([V || {_, V} <- Settings])
end}.
+{mapping, "ssl_options.bypass_pem_cache", "ssl.bypass_pem_cache",
+ [{datatype, {enum, [true, false]}}]}.
+
%% ===========================================================================
%% Choose the available SASL mechanism(s) to expose.
@@ -616,7 +743,7 @@ end}.
{mapping, "max_message_size", "rabbit.max_message_size",
- [{datatype, integer}, {validators, ["less_then_512MB"]}]}.
+ [{datatype, integer}, {validators, ["max_message_size"]}]}.
%% Customising Socket Options.
%%
@@ -892,7 +1019,24 @@ end}.
%% {mirroring_sync_batch_size, 4096},
{mapping, "mirroring_sync_batch_size", "rabbit.mirroring_sync_batch_size",
- [{datatype, bytesize}, {validators, ["size_less_than_2G"]}]}.
+ [{datatype, bytesize}, {validators, ["mirroring_sync_batch_size"]}]}.
+
+%% Mirror sync max throughput (in bytes) per second.
+%% Supported unit symbols:
+%% k, kiB: kibibytes (2^10 = 1,024 bytes)
+%% M, MiB: mebibytes (2^20 = 1,048,576 bytes)
+%% G, GiB: gibibytes (2^30 = 1,073,741,824 bytes)
+%% kB: kilobytes (10^3 = 1,000 bytes)
+%% MB: megabytes (10^6 = 1,000,000 bytes)
+%% GB: gigabytes (10^9 = 1,000,000,000 bytes)
+%%
+%% 0 means "no limit".
+%%
+%% {mirroring_sync_max_throughput, 0},
+
+{mapping, "mirroring_sync_max_throughput", "rabbit.mirroring_sync_max_throughput", [
+ {datatype, [integer, string]}
+]}.
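%% Editor's sketch (illustrative, not part of the schema): cap mirror
%% synchronisation traffic at roughly 50 mebibytes per second:
%%
%%   mirroring_sync_max_throughput = 50MiB
%%
%% A value of 0 keeps the default behaviour of no limit.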
%% Peer discovery backend used by cluster formation.
%%
@@ -942,11 +1086,11 @@ fun(Conf) ->
end}.
%% Cluster formation: Randomized startup delay
+%%
+%% DEPRECATED: This is a no-op. Old configs are still allowed, but a warning will be printed.
-{mapping, "cluster_formation.randomized_startup_delay_range.min", "rabbit.cluster_formation.randomized_startup_delay_range",
- [{datatype, integer}]}.
-{mapping, "cluster_formation.randomized_startup_delay_range.max", "rabbit.cluster_formation.randomized_startup_delay_range",
- [{datatype, integer}]}.
+{mapping, "cluster_formation.randomized_startup_delay_range.min", "rabbit.cluster_formation.randomized_startup_delay_range", []}.
+{mapping, "cluster_formation.randomized_startup_delay_range.max", "rabbit.cluster_formation.randomized_startup_delay_range", []}.
{translation, "rabbit.cluster_formation.randomized_startup_delay_range",
fun(Conf) ->
@@ -954,19 +1098,25 @@ fun(Conf) ->
Max = cuttlefish:conf_get("cluster_formation.randomized_startup_delay_range.max", Conf, undefined),
case {Min, Max} of
- {undefined, undefined} ->
- cuttlefish:unset();
- {undefined, Max} ->
- %% fallback default
- {5, Max};
- {Min, undefined} ->
- %% fallback default
- {Min, 60};
- {Min, Max} ->
- {Min, Max}
- end
+ {undefined, undefined} ->
+ ok;
+ _ ->
+ cuttlefish:warn("cluster_formation.randomized_startup_delay_range.min and "
+ "cluster_formation.randomized_startup_delay_range.max are deprecated")
+ end,
+ cuttlefish:unset()
end}.
+%% Cluster formation: lock acquisition retries as passed to https://erlang.org/doc/man/global.html#set_lock-3
+%%
+%% Currently used in classic, k8s, and aws peer discovery backends.
+
+{mapping, "cluster_formation.internal_lock_retries", "rabbit.cluster_formation.internal_lock_retries",
+ [
+ {datatype, integer},
+ {validators, ["non_zero_positive_integer"]}
+ ]}.
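%% Editor's sketch (illustrative, not part of the schema):
%%
%%   cluster_formation.internal_lock_retries = 10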
+
%% Cluster formation: discovery failure retries
{mapping, "cluster_formation.lock_retry_limit", "rabbit.cluster_formation.lock_retry_limit",
@@ -991,6 +1141,18 @@ end}.
{validators, ["non_zero_positive_integer"]}
]}.
+
+%% The target cluster size hint may be used by certain core features or plugins to
+%% delay actions until a certain number of nodes (or a quorum of that number)
+%% has joined and started.
+%%
+
+{mapping, "cluster_formation.target_cluster_size_hint", "rabbit.cluster_formation.target_cluster_size_hint", [
+ {datatype, integer},
+ {validators, ["non_zero_positive_integer"]}
+]}.
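%% Editor's sketch (illustrative, not part of the schema): tell the node that
%% the cluster is expected to eventually have three members:
%%
%%   cluster_formation.target_cluster_size_hint = 3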
+
+
%% Classic config-driven peer discovery backend.
%%
%% Make clustering happen *automatically* at startup - only applied
@@ -1149,9 +1311,9 @@ end}.
[{datatype, {enum, [stop_node, continue, transient, persistent]}}]}.
%% Approximate maximum time a consumer can spend processing a message before
-%% the channel is terminated, in milliseconds. Default is no timeout.
+%% the channel is terminated, in milliseconds.
%%
-%% {consumer_timeout, 10000},
+%% {consumer_timeout, 1800000},
{mapping, "consumer_timeout", "rabbit.consumer_timeout", [
{datatype, integer},
@@ -1176,10 +1338,10 @@ end}.
]}.
% ==========================
-% Lager section
+% Logging section
% ==========================
-{mapping, "log.dir", "lager.log_root", [
+{mapping, "log.dir", "rabbit.log_root", [
{datatype, string},
{validators, ["dir_writable"]}]}.
@@ -1189,6 +1351,78 @@ end}.
{mapping, "log.console.level", "rabbit.log.console.level", [
{datatype, {enum, [debug, info, notice, warning, error, critical, alert, emergency, none]}}
]}.
+{mapping, "log.console.stdio", "rabbit.log.console.stdio", [
+ {default, stdout},
+ {datatype, {enum, [stdout, stderr]}}
+]}.
+{mapping, "log.console.use_colors", "rabbit.log.console.formatter", [
+ {default, on},
+ {datatype, flag}
+]}.
+{mapping, "log.console.color_esc_seqs.debug", "rabbit.log.console.formatter", [
+ {default, "\033[38;5;246m"},
+ {datatype, string}
+]}.
+{mapping, "log.console.color_esc_seqs.info", "rabbit.log.console.formatter", [
+ {default, ""},
+ {datatype, string}
+]}.
+{mapping, "log.console.color_esc_seqs.notice", "rabbit.log.console.formatter", [
+ {default, "\033[38;5;87m"},
+ {datatype, string}
+]}.
+{mapping, "log.console.color_esc_seqs.warning", "rabbit.log.console.formatter", [
+ {default, "\033[38;5;214m"},
+ {datatype, string}
+]}.
+{mapping, "log.console.color_esc_seqs.error", "rabbit.log.console.formatter", [
+ {default, "\033[38;5;160m"},
+ {datatype, string}
+]}.
+{mapping, "log.console.color_esc_seqs.critical", "rabbit.log.console.formatter", [
+ {default, "\033[1;37m\033[48;5;20m"},
+ {datatype, string}
+]}.
+{mapping, "log.console.color_esc_seqs.alert", "rabbit.log.console.formatter", [
+ {default, "\033[1;37m\033[48;5;93m"},
+ {datatype, string}
+]}.
+{mapping, "log.console.color_esc_seqs.emergency", "rabbit.log.console.formatter", [
+ {default, "\033[1;37m\033[48;5;196m"},
+ {datatype, string}
+]}.
+{mapping, "log.console.formatter", "rabbit.log.console.formatter", [
+ {default, plaintext},
+ {datatype, {enum, [plaintext, json]}}
+]}.
+{mapping, "log.console.formatter.time_format", "rabbit.log.console.formatter", [
+ {default, rfc3339_space},
+ {datatype, {enum, [rfc3339_space, rfc3339_T, epoch_usecs, epoch_secs, lager_default]}}
+]}.
+{mapping, "log.console.formatter.level_format", "rabbit.log.console.formatter", [
+ {default, lc},
+ {datatype, {enum, [lc, uc, lc3, uc3, lc4, uc4]}}
+]}.
+{mapping, "log.console.formatter.single_line", "rabbit.log.console.formatter", [
+ {default, off},
+ {datatype, flag}
+]}.
+{mapping, "log.console.formatter.plaintext.format", "rabbit.log.console.formatter", [
+ {default, "$time [$level] $pid $msg"},
+ {datatype, string}
+]}.
+{mapping, "log.console.formatter.json.field_map", "rabbit.log.console.formatter", [
+ {default, "time level msg gl:- logger_formatter:- report_cb:- error_logger:-"},
+ {datatype, string}
+]}.
+{mapping, "log.console.formatter.json.verbosity_map", "rabbit.log.console.formatter", [
+ {default, ""},
+ {datatype, string}
+]}.
+{translation, "rabbit.log.console.formatter",
+ fun(Conf) ->
+ rabbit_prelaunch_early_logging:translate_formatter_conf("log.console.formatter", Conf)
+ end}.
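%% Editor's sketch (illustrative, not part of the schema): emit console log
%% entries as single-line JSON on standard error:
%%
%%   log.console.formatter = json
%%   log.console.formatter.single_line = on
%%   log.console.stdio = stderr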
{mapping, "log.exchange", "rabbit.log.exchange.enabled", [
{datatype, {enum, [true, false]}}
@@ -1196,6 +1430,53 @@ end}.
{mapping, "log.exchange.level", "rabbit.log.exchange.level", [
{datatype, {enum, [debug, info, notice, warning, error, critical, alert, emergency, none]}}
]}.
+{mapping, "log.exchange.formatter", "rabbit.log.exchange.formatter", [
+ {default, plaintext},
+ {datatype, {enum, [plaintext, json]}}
+]}.
+{mapping, "log.exchange.formatter.time_format", "rabbit.log.console.formatter", [
+ {default, rfc3339_space},
+ {datatype, [{enum, [rfc3339_space, rfc3339_T, epoch_usecs, epoch_secs, lager_default]}, string]}
+]}.
+{mapping, "log.exchange.formatter.level_format", "rabbit.log.exchange.formatter", [
+ {default, lc},
+ {datatype, {enum, [lc, uc, lc3, uc3, lc4, uc4]}}
+]}.
+{mapping, "log.exchange.formatter.single_line", "rabbit.log.exchange.formatter", [
+ {default, off},
+ {datatype, flag}
+]}.
+{mapping, "log.exchange.formatter.plaintext.format", "rabbit.log.exchange.formatter", [
+ {default, "$time [$level] $pid $msg"},
+ {datatype, string}
+]}.
+{mapping, "log.exchange.formatter.json.field_map", "rabbit.log.exchange.formatter", [
+ {default, "time level msg gl:- logger_formatter:- report_cb:- error_logger:-"},
+ {datatype, string}
+]}.
+{mapping, "log.exchange.formatter.json.verbosity_map", "rabbit.log.exchange.formatter", [
+ {default, ""},
+ {datatype, string}
+]}.
+{translation, "rabbit.log.exchange.formatter",
+ fun(Conf) ->
+ rabbit_prelaunch_early_logging:translate_formatter_conf("log.exchange.formatter", Conf)
+ end}.
+
+{mapping, "log.journald", "rabbit.log.journald.enabled", [
+ {datatype, {enum, [true, false]}}
+]}.
+{mapping, "log.journald.level", "rabbit.log.journald.level", [
+ {datatype, {enum, [debug, info, notice, warning, error, critical, alert, emergency, none]}}
+]}.
+{mapping, "log.journald.fields", "rabbit.log.journald.fields", [
+ {default, "SYSLOG_IDENTIFIER=\"rabbitmq-server\" syslog_timestamp syslog_pid priority ERL_PID=pid CODE_FILE=file CODE_LINE=line CODE_MFA=mfa"},
+ {datatype, string}
+]}.
+{translation, "rabbit.log.journald.fields",
+ fun(Conf) ->
+ rabbit_prelaunch_early_logging:translate_journald_fields_conf("log.journald.fields", Conf)
+ end}.
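%% Editor's sketch (illustrative, not part of the schema): send log entries of
%% level info and above to journald in addition to the other configured sinks:
%%
%%   log.journald = true
%%   log.journald.level = info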
{mapping, "log.syslog", "rabbit.log.syslog.enabled", [
{datatype, {enum, [true, false]}}
@@ -1204,6 +1485,38 @@ end}.
{mapping, "log.syslog.level", "rabbit.log.syslog.level", [
{datatype, {enum, [debug, info, notice, warning, error, critical, alert, emergency, none]}}
]}.
+{mapping, "log.syslog.formatter", "rabbit.log.syslog.formatter", [
+ {default, plaintext},
+ {datatype, {enum, [plaintext, json]}}
+]}.
+{mapping, "log.syslog.formatter.time_format", "rabbit.log.console.formatter", [
+ {default, rfc3339_space},
+ {datatype, [{enum, [rfc3339_space, rfc3339_T, epoch_usecs, epoch_secs, lager_default]}, string]}
+]}.
+{mapping, "log.syslog.formatter.level_format", "rabbit.log.syslog.formatter", [
+ {default, lc},
+ {datatype, {enum, [lc, uc, lc3, uc3, lc4, uc4]}}
+]}.
+{mapping, "log.syslog.formatter.single_line", "rabbit.log.syslog.formatter", [
+ {default, off},
+ {datatype, flag}
+]}.
+{mapping, "log.syslog.formatter.plaintext.format", "rabbit.log.syslog.formatter", [
+ {default, "$msg"},
+ {datatype, string}
+]}.
+{mapping, "log.syslog.formatter.json.field_map", "rabbit.log.syslog.formatter", [
+ {default, "time level msg gl:- logger_formatter:- report_cb:- error_logger:-"},
+ {datatype, string}
+]}.
+{mapping, "log.syslog.formatter.json.verbosity_map", "rabbit.log.syslog.formatter", [
+ {default, ""},
+ {datatype, string}
+]}.
+{translation, "rabbit.log.syslog.formatter",
+ fun(Conf) ->
+ rabbit_prelaunch_early_logging:translate_formatter_conf("log.syslog.formatter", Conf)
+ end}.
{mapping, "log.syslog.identity", "syslog.app_name", [
{datatype, string}
@@ -1404,12 +1717,48 @@ end}.
{mapping, "log.file.rotation.date", "rabbit.log.file.date", [
{datatype, string}
]}.
+{mapping, "log.file.rotation.compress", "rabbit.log.file.compress", [
+ {default, false},
+ {datatype, {enum, [true, false]}}
+]}.
{mapping, "log.file.rotation.size", "rabbit.log.file.size", [
{datatype, integer}
]}.
{mapping, "log.file.rotation.count", "rabbit.log.file.count", [
{datatype, integer}
]}.
+{mapping, "log.file.formatter", "rabbit.log.file.formatter", [
+ {default, plaintext},
+ {datatype, {enum, [plaintext, json]}}
+]}.
+{mapping, "log.file.formatter.time_format", "rabbit.log.file.formatter", [
+ {default, rfc3339_space},
+ {datatype, [{enum, [rfc3339_space, rfc3339_T, epoch_usecs, epoch_secs, lager_default]}, string]}
+]}.
+{mapping, "log.file.formatter.level_format", "rabbit.log.file.formatter", [
+ {default, lc},
+ {datatype, {enum, [lc, uc, lc3, uc3, lc4, uc4]}}
+]}.
+{mapping, "log.file.formatter.single_line", "rabbit.log.file.formatter", [
+ {default, off},
+ {datatype, flag}
+]}.
+{mapping, "log.file.formatter.plaintext.format", "rabbit.log.file.formatter", [
+ {default, "$time [$level] $pid $msg"},
+ {datatype, string}
+]}.
+{mapping, "log.file.formatter.json.field_map", "rabbit.log.file.formatter", [
+ {default, "time level msg gl:- logger_formatter:- report_cb:- error_logger:-"},
+ {datatype, string}
+]}.
+{mapping, "log.file.formatter.json.verbosity_map", "rabbit.log.file.formatter", [
+ {default, ""},
+ {datatype, string}
+]}.
+{translation, "rabbit.log.file.formatter",
+ fun(Conf) ->
+ rabbit_prelaunch_early_logging:translate_formatter_conf("log.file.formatter", Conf)
+ end}.
%% Log categories
@@ -1475,6 +1824,37 @@ end}.
{validators, ["non_zero_positive_integer"]}
]}.
+{mapping, "distribution.listener.port_range.min", "kernel.inet_dist_listen_min", [
+ {datatype, [integer]},
+ {validators, ["non_zero_positive_integer"]}
+]}.
+
+{mapping, "distribution.listener.port_range.max", "kernel.inet_dist_listen_max", [
+ {datatype, [integer]},
+ {validators, ["non_zero_positive_integer"]}
+]}.
+
+{mapping, "distribution.listener.interface", "kernel.inet_dist_use_interface", [
+ {datatype, [string]},
+ {validators, ["is_ip"]}
+]}.
+
+{translation, "kernel.inet_dist_use_interface",
+ fun(Conf) ->
+ case cuttlefish:conf_get("distribution.listener.interface", Conf, undefined) of
+ undefined ->
+ cuttlefish:unset();
+ Value when is_list(Value) ->
+ case inet:parse_address(Value) of
+ {ok, Parsed} -> Parsed;
+ {error, _} -> cuttlefish:invalid("should be a valid IP address")
+ end;
+ _ ->
+ cuttlefish:invalid("should be a valid IP address")
+ end
+ end
+}.
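%% Editor's sketch (illustrative, not part of the schema): pin Erlang
%% distribution to a single port and a specific interface:
%%
%%   distribution.listener.port_range.min = 25672
%%   distribution.listener.port_range.max = 25672
%%   distribution.listener.interface = 192.168.0.10
%%
%% The port and address above are placeholders.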
+
% ==========================
% sysmon_handler section
% ==========================
@@ -1734,14 +2114,14 @@ end}.
% Validators
% ===============================
-{validator, "size_less_than_2G", "Byte size should be less than 2G and greater than 0",
+{validator, "mirroring_sync_batch_size", "Batch size should be greater than 0 and less than 1M",
fun(Size) when is_integer(Size) ->
- Size > 0 andalso Size < 2147483648
+ Size > 0 andalso Size =< 1000000
end}.
-{validator, "less_then_512MB", "Max message size should be less than 512MB and gre than 0",
+{validator, "max_message_size", "Max message size should be between 0 and 512MB",
fun(Size) when is_integer(Size) ->
- Size > 0 andalso Size < 536870912
+ Size > 0 andalso Size =< 536870912
end}.
{validator, "less_than_1", "Float is not between 0 and 1",
@@ -1759,7 +2139,7 @@ fun(Int) when is_integer(Int) ->
Int >= 0 andalso Int =< 255
end}.
-{validator, "dir_writable", "Cannot create file in dir",
+{validator, "dir_writable", "Directory must be writable",
fun(Dir) ->
TestFile = filename:join(Dir, "test_file"),
file:delete(TestFile),
@@ -1768,13 +2148,15 @@ fun(Dir) ->
Res
end}.
-{validator, "file_accessible", "file doesn't exist or isn't readable",
+{validator, "file_accessible", "file does not exist or cannot be read by the node",
fun(File) ->
- ReadFile = file:read_file_info(File),
- element(1, ReadFile) == ok
+ case file:read_file_info(File) of
+ {ok, FileInfo} -> (element(4, FileInfo) == read) or (element(4, FileInfo) == read_write);
+ _ -> false
+ end
end}.
-{validator, "is_ip", "string is a valid IP address",
+{validator, "is_ip", "value should be a valid IP address",
fun(IpStr) ->
Res = inet:parse_address(IpStr),
element(1, Res) == ok
diff --git a/deps/rabbit/rabbitmq-components.mk b/deps/rabbit/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/rabbit/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Define default goal to `all` because this file defines some targets
-# before the inclusion of erlang.mk leading to the wrong target becoming
-# the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
-
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to checkout branches which match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch or fallback to `stable` or `master` whichever was the
-# base of the topic branch.
-
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
-
-# Third-party dependencies version pinning.
-#
-# We do that in this file, which is copied in all projects, to ensure
-# all projects use the same versions. It avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# Erlang.mk does not rebuild dependencies by default, once they were
-# compiled once, except for those listed in the `$(FORCE_REBUILD)`
-# variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this eases
-# the work on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project
-# repository URL, if it's a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name is replaced by the
-# target's properties:
-# eg. rabbitmq-common is replaced by rabbitmq-codegen
-# eg. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fallback to RabbitMQ
-# upstream which is GitHub.
-
-# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following pattern:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
-
-# Macro to replace both the project's name (eg. "rabbit_common") and
-# repository name (eg. "rabbitmq-common") by the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespaces in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level's one.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is a coincidence that we found a
-# `rabbitmq-components.mk` up upper directories.
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
diff --git a/deps/rabbit/scripts/rabbitmq-env b/deps/rabbit/scripts/rabbitmq-env
index 90702c43bb..4dde6f3948 100755
--- a/deps/rabbit/scripts/rabbitmq-env
+++ b/deps/rabbit/scripts/rabbitmq-env
@@ -119,8 +119,12 @@ DEFAULT_MAX_NUMBER_OF_ATOMS=5000000
[ -n "$MAX_NUMBER_OF_ATOMS" ] || MAX_NUMBER_OF_ATOMS="$DEFAULT_MAX_NUMBER_OF_ATOMS"
[ -n "$RABBITMQ_MAX_NUMBER_OF_ATOMS" ] || RABBITMQ_MAX_NUMBER_OF_ATOMS="$MAX_NUMBER_OF_ATOMS"
+DEFAULT_SCHEDULER_BUSY_WAIT_THRESHOLD=none
+[ -n "$SCHEDULER_BUSY_WAIT_THRESHOLD" ] || SCHEDULER_BUSY_WAIT_THRESHOLD="$DEFAULT_SCHEDULER_BUSY_WAIT_THRESHOLD"
+[ -n "$RABBITMQ_SCHEDULER_BUSY_WAIT_THRESHOLD" ] || RABBITMQ_SCHEDULER_BUSY_WAIT_THRESHOLD="$SCHEDULER_BUSY_WAIT_THRESHOLD"
+
## Common server defaults
-SERVER_ERL_ARGS=" +P $RABBITMQ_MAX_NUMBER_OF_PROCESSES +t $RABBITMQ_MAX_NUMBER_OF_ATOMS +stbt $RABBITMQ_SCHEDULER_BIND_TYPE +zdbbl $RABBITMQ_DISTRIBUTION_BUFFER_SIZE "
+SERVER_ERL_ARGS=" +P $RABBITMQ_MAX_NUMBER_OF_PROCESSES +t $RABBITMQ_MAX_NUMBER_OF_ATOMS +stbt $RABBITMQ_SCHEDULER_BIND_TYPE +zdbbl $RABBITMQ_DISTRIBUTION_BUFFER_SIZE +sbwt $RABBITMQ_SCHEDULER_BUSY_WAIT_THRESHOLD +sbwtdcpu $RABBITMQ_SCHEDULER_BUSY_WAIT_THRESHOLD +sbwtdio $RABBITMQ_SCHEDULER_BUSY_WAIT_THRESHOLD "
##--- Set environment vars RABBITMQ_<var_name> to defaults if not set
diff --git a/deps/rabbit/scripts/rabbitmq-env.bat b/deps/rabbit/scripts/rabbitmq-env.bat
index 1db57b33c5..049010556c 100644
--- a/deps/rabbit/scripts/rabbitmq-env.bat
+++ b/deps/rabbit/scripts/rabbitmq-env.bat
@@ -88,8 +88,16 @@ if "!RABBITMQ_MAX_NUMBER_OF_ATOMS!"=="" (
set RABBITMQ_MAX_NUMBER_OF_ATOMS=!DEFAULT_MAX_NUMBER_OF_ATOMS!
)
+set DEFAULT_SCHEDULER_BUSY_WAIT_THRESHOLD=none
+if "!RABBITMQ_SCHEDULER_BUSY_WAIT_THRESHOLD!"=="" (
+ set RABBITMQ_SCHEDULER_BUSY_WAIT_THRESHOLD=!SCHEDULER_BUSY_WAIT_THRESHOLD!
+)
+if "!RABBITMQ_SCHEDULER_BUSY_WAIT_THRESHOLD!"=="" (
+ set RABBITMQ_SCHEDULER_BUSY_WAIT_THRESHOLD=!DEFAULT_SCHEDULER_BUSY_WAIT_THRESHOLD!
+)
+
REM Common server defaults
-set SERVER_ERL_ARGS=+P !RABBITMQ_MAX_NUMBER_OF_PROCESSES! +t !RABBITMQ_MAX_NUMBER_OF_ATOMS! +stbt !RABBITMQ_SCHEDULER_BIND_TYPE! +zdbbl !RABBITMQ_DISTRIBUTION_BUFFER_SIZE!
+set SERVER_ERL_ARGS=+P !RABBITMQ_MAX_NUMBER_OF_PROCESSES! +t !RABBITMQ_MAX_NUMBER_OF_ATOMS! +stbt !RABBITMQ_SCHEDULER_BIND_TYPE! +zdbbl !RABBITMQ_DISTRIBUTION_BUFFER_SIZE! +sbwt !RABBITMQ_SCHEDULER_BUSY_WAIT_THRESHOLD! +sbwtdcpu !RABBITMQ_SCHEDULER_BUSY_WAIT_THRESHOLD! +sbwtdio !RABBITMQ_SCHEDULER_BUSY_WAIT_THRESHOLD!
REM ##--- Set environment vars RABBITMQ_<var_name> to defaults if not set
diff --git a/deps/rabbit/scripts/rabbitmq-server b/deps/rabbit/scripts/rabbitmq-server
index 82058dcb26..bccd91cdcf 100755
--- a/deps/rabbit/scripts/rabbitmq-server
+++ b/deps/rabbit/scripts/rabbitmq-server
@@ -79,8 +79,8 @@ start_rabbitmq_server() {
${RABBITMQ_SERVER_ERL_ARGS} \
${RABBITMQ_SERVER_ADDITIONAL_ERL_ARGS} \
${RABBITMQ_SERVER_START_ARGS} \
- -lager crash_log false \
- -lager handlers '[]' \
+ -syslog logger '[]' \
+ -syslog syslog_error_logger false \
"$@"
}
diff --git a/deps/rabbit/scripts/rabbitmq-server.bat b/deps/rabbit/scripts/rabbitmq-server.bat
index 3a386b63c4..19cfff2674 100644
--- a/deps/rabbit/scripts/rabbitmq-server.bat
+++ b/deps/rabbit/scripts/rabbitmq-server.bat
@@ -68,8 +68,8 @@ if "!RABBITMQ_ALLOW_INPUT!"=="" (
!RABBITMQ_SERVER_ERL_ARGS! ^
!RABBITMQ_SERVER_ADDITIONAL_ERL_ARGS! ^
!RABBITMQ_SERVER_START_ARGS! ^
--lager crash_log false ^
--lager handlers "[]" ^
+-syslog logger [] ^
+-syslog syslog_error_logger false ^
!STAR!
if ERRORLEVEL 1 (
diff --git a/deps/rabbit/scripts/rabbitmq-service.bat b/deps/rabbit/scripts/rabbitmq-service.bat
index 0b7906d4bf..7554890530 100644
--- a/deps/rabbit/scripts/rabbitmq-service.bat
+++ b/deps/rabbit/scripts/rabbitmq-service.bat
@@ -198,8 +198,8 @@ set ERLANG_SERVICE_ARGUMENTS= ^
!RABBITMQ_SERVER_ADDITIONAL_ERL_ARGS! ^
!RABBITMQ_SERVER_START_ARGS! ^
!RABBITMQ_DIST_ARG! ^
--lager crash_log false ^
--lager handlers "[]" ^
+-syslog logger [] ^
+-syslog syslog_error_logger false ^
!STARVAR!
set ERLANG_SERVICE_ARGUMENTS=!ERLANG_SERVICE_ARGUMENTS:\=\\!
@@ -223,6 +223,8 @@ rem user-specific directory.
-env ERL_LIBS="!ERL_LIBS!" ^
-env ERL_MAX_ETS_TABLES="!ERL_MAX_ETS_TABLES!" ^
-env ERL_MAX_PORTS="!ERL_MAX_PORTS!" ^
+-env RABBITMQ_BASE="!RABBITMQ_BASE!" ^
+-env RABBITMQ_NODENAME="!RABBITMQ_NODENAME!" ^
-workdir "!RABBITMQ_BASE!" ^
-stopaction "rabbit:stop_and_halt()." ^
!RABBITMQ_NAME_TYPE! !RABBITMQ_NODENAME! ^
diff --git a/deps/rabbit/scripts/rabbitmq-streams b/deps/rabbit/scripts/rabbitmq-streams
index 376cc497df..5b64991d75 100755
--- a/deps/rabbit/scripts/rabbitmq-streams
+++ b/deps/rabbit/scripts/rabbitmq-streams
@@ -1,18 +1,10 @@
#!/bin/sh
-## The contents of this file are subject to the Mozilla Public License
-## Version 1.1 (the "License"); you may not use this file except in
-## compliance with the License. You may obtain a copy of the License
-## at https://www.mozilla.org/MPL/
-##
-## Software distributed under the License is distributed on an "AS IS"
-## basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-## the License for the specific language governing rights and
-## limitations under the License.
-##
-## The Original Code is RabbitMQ.
+
+## This Source Code Form is subject to the terms of the Mozilla Public
+## License, v. 2.0. If a copy of the MPL was not distributed with this
+## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## The Initial Developer of the Original Code is GoPivotal, Inc.
-## Copyright (c) 2007-2020 Pivotal Software, Inc. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
##
# Exit immediately if a pipeline, which may consist of a single simple command,
diff --git a/deps/rabbit/scripts/rabbitmq-streams.bat b/deps/rabbit/scripts/rabbitmq-streams.bat
index 83572a8d62..7f4fca8f9e 100644
--- a/deps/rabbit/scripts/rabbitmq-streams.bat
+++ b/deps/rabbit/scripts/rabbitmq-streams.bat
@@ -1,18 +1,10 @@
@echo off
-REM The contents of this file are subject to the Mozilla Public License
-REM Version 1.1 (the "License"); you may not use this file except in
-REM compliance with the License. You may obtain a copy of the License
-REM at https://www.mozilla.org/MPL/
-REM
-REM Software distributed under the License is distributed on an "AS IS"
-REM basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-REM the License for the specific language governing rights and
-REM limitations under the License.
-REM
-REM The Original Code is RabbitMQ.
+
+REM This Source Code Form is subject to the terms of the Mozilla Public
+REM License, v. 2.0. If a copy of the MPL was not distributed with this
+REM file, You can obtain one at https://mozilla.org/MPL/2.0/.
REM
-REM The Initial Developer of the Original Code is GoPivotal, Inc.
-REM Copyright (c) 2007-2020 Pivotal Software, Inc. All rights reserved.
+REM Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
REM
REM Scopes the variables to the current batch file
diff --git a/deps/rabbit/scripts/rabbitmq-tanzu b/deps/rabbit/scripts/rabbitmq-tanzu
new file mode 100755
index 0000000000..dbfb3f8685
--- /dev/null
+++ b/deps/rabbit/scripts/rabbitmq-tanzu
@@ -0,0 +1,23 @@
+#!/bin/sh
+## This Source Code Form is subject to the terms of the Mozilla Public
+## License, v. 2.0. If a copy of the MPL was not distributed with this
+## file, You can obtain one at https://mozilla.org/MPL/2.0/.
+##
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
+##
+
+# Exit immediately if a pipeline, which may consist of a single simple command,
+# a list, or a compound command returns a non-zero status
+set -e
+
+# Each variable or function that is created or modified is given the export
+# attribute and marked for export to the environment of subsequent commands.
+set -a
+
+# shellcheck source=/dev/null
+#
+# TODO: when shellcheck adds support for relative paths, change to
+# shellcheck source=./rabbitmq-env
+. "${0%/*}"/rabbitmq-env
+
+run_escript rabbitmqctl_escript "${ESCRIPT_DIR:?must be defined}"/rabbitmq-tanzu "$@"
diff --git a/deps/rabbit/scripts/rabbitmq-tanzu.bat b/deps/rabbit/scripts/rabbitmq-tanzu.bat
new file mode 100644
index 0000000000..1b95d764d6
--- /dev/null
+++ b/deps/rabbit/scripts/rabbitmq-tanzu.bat
@@ -0,0 +1,56 @@
+@echo off
+REM This Source Code Form is subject to the terms of the Mozilla Public
+REM License, v. 2.0. If a copy of the MPL was not distributed with this
+REM file, You can obtain one at https://mozilla.org/MPL/2.0/.
+REM
+REM Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
+REM
+
+REM Scopes the variables to the current batch file
+setlocal
+
+rem Preserve values that might contain exclamation marks before
+rem enabling delayed expansion
+set TDP0=%~dp0
+set STAR=%*
+setlocal enabledelayedexpansion
+
+REM Get default settings with user overrides for (RABBITMQ_)<var_name>
+REM Non-empty defaults should be set in rabbitmq-env
+call "%TDP0%\rabbitmq-env.bat" %~n0
+
+if not exist "!ERLANG_HOME!\bin\erl.exe" (
+ echo.
+ echo ******************************
+ echo ERLANG_HOME not set correctly.
+ echo ******************************
+ echo.
+ echo Please either set ERLANG_HOME to point to your Erlang installation or place the
+ echo RabbitMQ server distribution in the Erlang lib folder.
+ echo.
+ exit /B 1
+)
+
+REM Disable erl_crash.dump by default for control scripts.
+if not defined ERL_CRASH_DUMP_SECONDS (
+ set ERL_CRASH_DUMP_SECONDS=0
+)
+
+"!ERLANG_HOME!\bin\erl.exe" +B ^
+-boot !CLEAN_BOOT_FILE! ^
+-noinput -noshell -hidden -smp enable ^
+!RABBITMQ_CTL_ERL_ARGS! ^
+-kernel inet_dist_listen_min !RABBITMQ_CTL_DIST_PORT_MIN! ^
+-kernel inet_dist_listen_max !RABBITMQ_CTL_DIST_PORT_MAX! ^
+-run escript start ^
+-escript main rabbitmqctl_escript ^
+-extra "%RABBITMQ_HOME%\escript\rabbitmq-tanzu" !STAR!
+
+if ERRORLEVEL 1 (
+ exit /B %ERRORLEVEL%
+)
+
+EXIT /B 0
+
+endlocal
+endlocal
diff --git a/deps/rabbit/src/amqqueue.erl b/deps/rabbit/src/amqqueue.erl
index 3415ebd073..31cc650879 100644
--- a/deps/rabbit/src/amqqueue.erl
+++ b/deps/rabbit/src/amqqueue.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2018-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2018-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(amqqueue). %% Could become amqqueue_v2 in the future.
@@ -38,7 +38,9 @@
% operator_policy
get_operator_policy/1,
set_operator_policy/2,
+ % options
get_options/1,
+ set_options/2,
% pid
get_pid/1,
set_pid/2,
@@ -482,6 +484,13 @@ set_name(Queue, Name) ->
get_options(#amqqueue{options = Options}) -> Options;
get_options(Queue) -> amqqueue_v1:get_options(Queue).
+-spec set_options(amqqueue(), map()) -> amqqueue().
+
+set_options(#amqqueue{} = Queue, Options) ->
+ Queue#amqqueue{options = Options};
+set_options(Queue, Options) ->
+ amqqueue_v1:set_options(Queue, Options).
+
% pid
-spec get_pid
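
The new set_options/2 accessor pairs with the existing get_options/1 (which returns a map(), per the spec above), so callers can update a queue's options without reaching into the record definition. A minimal usage sketch, assuming Q is an amqqueue() value obtained elsewhere; the helper name and the option key are illustrative, not part of this change:

%% Hypothetical helper: add or overwrite a single option on a queue record.
with_option(Q, Key, Value) ->
    Opts = amqqueue:get_options(Q),               %% map(), per -spec get_options/1
    amqqueue:set_options(Q, Opts#{Key => Value}). %% returns an updated amqqueue()
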
diff --git a/deps/rabbit/src/amqqueue_v1.erl b/deps/rabbit/src/amqqueue_v1.erl
index dd1de74a4e..2821b07b56 100644
--- a/deps/rabbit/src/amqqueue_v1.erl
+++ b/deps/rabbit/src/amqqueue_v1.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2018-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2018-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(amqqueue_v1).
@@ -38,7 +38,9 @@
% operator_policy
get_operator_policy/1,
set_operator_policy/2,
+ % options
get_options/1,
+ set_options/2,
% pid
get_pid/1,
set_pid/2,
@@ -392,10 +394,17 @@ get_name(#amqqueue{name = Name}) -> Name.
set_name(#amqqueue{} = Queue, Name) ->
Queue#amqqueue{name = Name}.
+%% options
+
-spec get_options(amqqueue()) -> map().
get_options(#amqqueue{options = Options}) -> Options.
+-spec set_options(amqqueue(), map()) -> amqqueue().
+
+set_options(#amqqueue{} = Queue, Options) ->
+ Queue#amqqueue{options = Options}.
+
% pid
-spec get_pid
diff --git a/deps/rabbit/src/background_gc.erl b/deps/rabbit/src/background_gc.erl
index be5bf0c995..bd8b8cde2c 100644
--- a/deps/rabbit/src/background_gc.erl
+++ b/deps/rabbit/src/background_gc.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(background_gc).
diff --git a/deps/rabbit/src/code_server_cache.erl b/deps/rabbit/src/code_server_cache.erl
index b53f5dcee9..2eabc23b36 100644
--- a/deps/rabbit/src/code_server_cache.erl
+++ b/deps/rabbit/src/code_server_cache.erl
@@ -4,7 +4,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(code_server_cache).
@@ -70,7 +70,7 @@ handle_maybe_call_mfa(true, {Module, Function, Args, Default}, State) ->
error:undef ->
handle_maybe_call_mfa_error(Module, Default, State);
Err:Reason ->
- rabbit_log:error("Calling ~p:~p failed: ~p:~p~n",
+ rabbit_log:error("Calling ~p:~p failed: ~p:~p",
[Module, Function, Err, Reason]),
handle_maybe_call_mfa_error(Module, Default, State)
end.
diff --git a/deps/rabbit/src/gatherer.erl b/deps/rabbit/src/gatherer.erl
index 2b46ec02b1..85cda60d84 100644
--- a/deps/rabbit/src/gatherer.erl
+++ b/deps/rabbit/src/gatherer.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(gatherer).
diff --git a/deps/rabbit/src/gm.erl b/deps/rabbit/src/gm.erl
index af24a2958a..56880100b7 100644
--- a/deps/rabbit/src/gm.erl
+++ b/deps/rabbit/src/gm.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(gm).
diff --git a/deps/rabbit/src/internal_user.erl b/deps/rabbit/src/internal_user.erl
index b2bdcb6785..1cc2052fe8 100644
--- a/deps/rabbit/src/internal_user.erl
+++ b/deps/rabbit/src/internal_user.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(internal_user).
diff --git a/deps/rabbit/src/internal_user_v1.erl b/deps/rabbit/src/internal_user_v1.erl
index edb956436f..d98665cfb0 100644
--- a/deps/rabbit/src/internal_user_v1.erl
+++ b/deps/rabbit/src/internal_user_v1.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(internal_user_v1).
diff --git a/deps/rabbit/src/lager_exchange_backend.erl b/deps/rabbit/src/lager_exchange_backend.erl
deleted file mode 100644
index cd96f2230e..0000000000
--- a/deps/rabbit/src/lager_exchange_backend.erl
+++ /dev/null
@@ -1,233 +0,0 @@
-%% This Source Code Form is subject to the terms of the Mozilla Public
-%% License, v. 2.0. If a copy of the MPL was not distributed with this
-%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
-%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
-%%
-
-%% @doc RabbitMQ backend for lager.
-%% Configuration is a proplist with the following keys:
-%% <ul>
-%% <li>`level' - log level to use</li>
-%% <li>`formatter' - the module to use when formatting log messages. Defaults to
-%% `lager_default_formatter'</li>
-%% <li>`formatter_config' - the format configuration string. Defaults to
-%% `time [ severity ] message'</li>
-%% </ul>
-
--module(lager_exchange_backend).
-
--behaviour(gen_event).
-
--export([init/1, terminate/2, code_change/3,
- handle_call/2, handle_event/2, handle_info/2]).
-
--export([maybe_init_exchange/0]).
-
--include("rabbit.hrl").
--include("rabbit_framing.hrl").
-
--include_lib("lager/include/lager.hrl").
-
--record(state, {level :: {'mask', integer()},
- formatter :: atom(),
- format_config :: any(),
- init_exchange_ts = undefined :: integer() | undefined,
- exchange = undefined :: #resource{} | undefined}).
-
--ifdef(TEST).
--include_lib("eunit/include/eunit.hrl").
--compile([{parse_transform, lager_transform}]).
--endif.
-
--define(INIT_EXCHANGE_INTERVAL_SECS, 5).
--define(TERSE_FORMAT, [time, " [", severity, "] ", message]).
--define(DEFAULT_FORMAT_CONFIG, ?TERSE_FORMAT).
--define(FORMAT_CONFIG_OFF, []).
-
--ifdef(TEST).
--define(DEPRECATED(_Msg), ok).
--else.
--define(DEPRECATED(Msg),
- io:format(user, "WARNING: This is a deprecated lager_exchange_backend configuration. Please use \"~w\" instead.~n", [Msg])).
--endif.
-
--define(LOG_EXCH_NAME, <<"amq.rabbitmq.log">>).
-
-init([Level]) when is_atom(Level) ->
- ?DEPRECATED([{level, Level}]),
- init([{level, Level}]);
-init([Level, true]) when is_atom(Level) -> % for backwards compatibility
- ?DEPRECATED([{level, Level}, {formatter_config, [{eol, "\\r\\n\\"}]}]),
- init([{level, Level}, {formatter_config, ?FORMAT_CONFIG_OFF}]);
-init([Level, false]) when is_atom(Level) -> % for backwards compatibility
- ?DEPRECATED([{level, Level}]),
- init([{level, Level}]);
-
-init(Options) when is_list(Options) ->
- true = validate_options(Options),
- Level = get_option(level, Options, undefined),
- try lager_util:config_to_mask(Level) of
- L ->
- DefaultOptions = [{formatter, lager_default_formatter},
- {formatter_config, ?DEFAULT_FORMAT_CONFIG}],
- [Formatter, Config] = [get_option(K, Options, Default) || {K, Default} <- DefaultOptions],
- State0 = #state{level=L,
- formatter=Formatter,
- format_config=Config},
- % NB: this will probably always fail since the / vhost isn't available
- State1 = maybe_init_exchange(State0),
- {ok, State1}
- catch
- _:_ ->
- {error, {fatal, bad_log_level}}
- end;
-init(Level) when is_atom(Level) ->
- ?DEPRECATED([{level, Level}]),
- init([{level, Level}]);
-init(Other) ->
- {error, {fatal, {bad_lager_exchange_backend_config, Other}}}.
-
-% rabbitmq/rabbitmq-server#1973
-% This is called immediatly after the / vhost is created
-% or recovered
-maybe_init_exchange() ->
- case lists:member(?MODULE, gen_event:which_handlers(lager_event)) of
- true ->
- _ = init_exchange(true),
- ok;
- _ ->
- ok
- end.
-
-validate_options([]) -> true;
-validate_options([{level, L}|T]) when is_atom(L) ->
- case lists:member(L, ?LEVELS) of
- false ->
- throw({error, {fatal, {bad_level, L}}});
- true ->
- validate_options(T)
- end;
-validate_options([{formatter, M}|T]) when is_atom(M) ->
- validate_options(T);
-validate_options([{formatter_config, C}|T]) when is_list(C) ->
- validate_options(T);
-validate_options([H|_]) ->
- throw({error, {fatal, {bad_lager_exchange_backend_config, H}}}).
-
-get_option(K, Options, Default) ->
- case lists:keyfind(K, 1, Options) of
- {K, V} -> V;
- false -> Default
- end.
-
-handle_call(get_loglevel, #state{level=Level} = State) ->
- {ok, Level, State};
-handle_call({set_loglevel, Level}, State) ->
- try lager_util:config_to_mask(Level) of
- Levels ->
- {ok, ok, State#state{level=Levels}}
- catch
- _:_ ->
- {ok, {error, bad_log_level}, State}
- end;
-handle_call(_Request, State) ->
- {ok, ok, State}.
-
-handle_event({log, _Message} = Event, State0) ->
- State1 = maybe_init_exchange(State0),
- handle_log_event(Event, State1);
-handle_event(_Event, State) ->
- {ok, State}.
-
-handle_info(_Info, State) ->
- {ok, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-
-%% @private
-handle_log_event({log, _Message}, #state{exchange=undefined} = State) ->
- % NB: tried to define the exchange but still undefined,
- % so not logging this message. Note: we can't log this dropped
- % message because it will start an infinite loop
- {ok, State};
-handle_log_event({log, Message},
- #state{level=L, exchange=LogExch,
- formatter=Formatter, format_config=FormatConfig} = State) ->
- case lager_util:is_loggable(Message, L, ?MODULE) of
- true ->
- %% 0-9-1 says the timestamp is a "64 bit POSIX timestamp". That's
- %% second resolution, not millisecond.
- RoutingKey = rabbit_data_coercion:to_binary(lager_msg:severity(Message)),
- Timestamp = os:system_time(seconds),
- Node = rabbit_data_coercion:to_binary(node()),
- Headers = [{<<"node">>, longstr, Node}],
- AmqpMsg = #'P_basic'{content_type = <<"text/plain">>,
- timestamp = Timestamp,
- headers = Headers},
- Body = rabbit_data_coercion:to_binary(Formatter:format(Message, FormatConfig)),
- case rabbit_basic:publish(LogExch, RoutingKey, AmqpMsg, Body) of
- ok -> ok;
- {error, not_found} -> ok
- end,
- {ok, State};
- false ->
- {ok, State}
- end.
-
-%% @private
-maybe_init_exchange(#state{exchange=undefined, init_exchange_ts=undefined} = State) ->
- Now = erlang:monotonic_time(second),
- handle_init_exchange(init_exchange(true), Now, State);
-maybe_init_exchange(#state{exchange=undefined, init_exchange_ts=Timestamp} = State) ->
- Now = erlang:monotonic_time(second),
- % NB: since we may try to declare the exchange on every log message, this ensures
- % that we only try once every 5 seconds
- HasEnoughTimeElapsed = Now - Timestamp > ?INIT_EXCHANGE_INTERVAL_SECS,
- Result = init_exchange(HasEnoughTimeElapsed),
- handle_init_exchange(Result, Now, State);
-maybe_init_exchange(State) ->
- State.
-
-%% @private
-init_exchange(true) ->
- {ok, DefaultVHost} = application:get_env(rabbit, default_vhost),
- Exchange = rabbit_misc:r(DefaultVHost, exchange, ?LOG_EXCH_NAME),
- try
- %% durable
- #exchange{} = rabbit_exchange:declare(Exchange, topic, true, false, true, [], ?INTERNAL_USER),
- rabbit_log:info("Declared exchange '~s' in vhost '~s'", [?LOG_EXCH_NAME, DefaultVHost]),
- {ok, Exchange}
- catch
- ErrType:Err ->
- rabbit_log:error("Could not declare exchange '~s' in vhost '~s', reason: ~p:~p",
- [?LOG_EXCH_NAME, DefaultVHost, ErrType, Err]),
- {ok, undefined}
- end;
-init_exchange(_) ->
- {ok, undefined}.
-
-%% @private
-handle_init_exchange({ok, undefined}, Now, State) ->
- State#state{init_exchange_ts=Now};
-handle_init_exchange({ok, Exchange}, Now, State) ->
- State#state{exchange=Exchange, init_exchange_ts=Now}.
-
--ifdef(TEST).
-console_config_validation_test_() ->
- Good = [{level, info}],
- Bad1 = [{level, foo}],
- Bad2 = [{larval, info}],
- AllGood = [{level, info}, {formatter, my_formatter},
- {formatter_config, ["blort", "garbage"]}],
- [
- ?_assertEqual(true, validate_options(Good)),
- ?_assertThrow({error, {fatal, {bad_level, foo}}}, validate_options(Bad1)),
- ?_assertThrow({error, {fatal, {bad_lager_exchange_backend_config, {larval, info}}}}, validate_options(Bad2)),
- ?_assertEqual(true, validate_options(AllGood))
- ].
--endif.
diff --git a/deps/rabbit/src/lqueue.erl b/deps/rabbit/src/lqueue.erl
index 1e267210d9..a018a98448 100644
--- a/deps/rabbit/src/lqueue.erl
+++ b/deps/rabbit/src/lqueue.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(lqueue).
diff --git a/deps/rabbit/src/mirrored_supervisor_sups.erl b/deps/rabbit/src/mirrored_supervisor_sups.erl
index b29d4d48e6..e927a167ee 100644
--- a/deps/rabbit/src/mirrored_supervisor_sups.erl
+++ b/deps/rabbit/src/mirrored_supervisor_sups.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(mirrored_supervisor_sups).
diff --git a/deps/rabbit/src/pg_local.erl b/deps/rabbit/src/pg_local.erl
index 263e743d1f..75f2578d79 100644
--- a/deps/rabbit/src/pg_local.erl
+++ b/deps/rabbit/src/pg_local.erl
@@ -13,7 +13,7 @@
%% versions of Erlang/OTP. The remaining type specs have been
%% removed.
-%% All modifications are (C) 2010-2020 VMware, Inc. or its affiliates.
+%% All modifications are (C) 2010-2021 VMware, Inc. or its affiliates.
%% %CopyrightBegin%
%%
diff --git a/deps/rabbit/src/pid_recomposition.erl b/deps/rabbit/src/pid_recomposition.erl
new file mode 100644
index 0000000000..c98411b6bc
--- /dev/null
+++ b/deps/rabbit/src/pid_recomposition.erl
@@ -0,0 +1,71 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(pid_recomposition).
+
+
+%% API
+-export([
+ to_binary/1,
+ from_binary/1,
+ decompose/1,
+ recompose/1
+]).
+
+-define(TTB_PREFIX, 131).
+
+-define(NEW_PID_EXT, 88).
+-define(PID_EXT, 103).
+-define(ATOM_UTF8_EXT, 118).
+-define(SMALL_ATOM_UTF8_EXT, 119).
+
+%%
+%% API
+%%
+
+-spec decompose(pid()) -> #{atom() => any()}.
+decompose(Pid) ->
+ from_binary(term_to_binary(Pid, [{minor_version, 2}])).
+
+-spec from_binary(binary()) -> #{atom() => any()}.
+from_binary(Bin) ->
+ PidData = case Bin of
+ %% Erlang 23+
+ <<?TTB_PREFIX, ?NEW_PID_EXT, Val0/binary>> -> Val0;
+ %% Erlang 22
+ <<?TTB_PREFIX, ?PID_EXT, Val1/binary>> -> Val1
+ end,
+ {Node, Rest2} = case PidData of
+ <<?ATOM_UTF8_EXT, AtomLen:16/integer, Node0:AtomLen/binary, Rest1/binary>> ->
+ {Node0, Rest1};
+ <<?SMALL_ATOM_UTF8_EXT, AtomLen/integer, Node0:AtomLen/binary, Rest1/binary>> ->
+ {Node0, Rest1}
+ end,
+ {ID, Serial, Creation} = case Rest2 of
+ %% NEW_PID_EXT on Erlang 23+
+ <<ID0:32/integer, Serial0:32/integer, Creation0:32/integer>> ->
+ {ID0, Serial0, Creation0};
+ %% PID_EXT on Erlang 22
+ <<ID1:32/integer, Serial1:32/integer, Creation1:8/integer>> ->
+ {ID1, Serial1, Creation1}
+ end,
+ #{
+ node => binary_to_atom(Node, utf8),
+ id => ID,
+ serial => Serial,
+ creation => Creation
+ }.
+
+-spec to_binary(#{atom() => any()}) -> binary().
+to_binary(#{node := Node, id := ID, serial := Serial, creation := Creation}) ->
+ BinNode = atom_to_binary(Node, utf8),
+ NodeLen = byte_size(BinNode),
+ <<?TTB_PREFIX:8/unsigned, ?NEW_PID_EXT:8/unsigned, ?ATOM_UTF8_EXT:8/unsigned, NodeLen:16/unsigned, BinNode/binary, ID:32, Serial:32, Creation:32>>.
+
+-spec recompose(#{atom() => any()}) -> pid().
+recompose(M) ->
+ binary_to_term(to_binary(M)).
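
pid_recomposition converts between a pid and a plain map of its node, id, serial and creation fields by going through the external term format bytes shown above. A rough round-trip sketch, assuming a recent Erlang/OTP release that emits NEW_PID_EXT for local pids (the function name is illustrative):

%% Decompose a pid into a map and rebuild it; expected to compare equal for local pids.
round_trip(Pid) when is_pid(Pid) ->
    Map = pid_recomposition:decompose(Pid),
    #{node := _, id := _, serial := _, creation := _} = Map,
    Pid =:= pid_recomposition:recompose(Map).
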
diff --git a/deps/rabbit/src/rabbit.erl b/deps/rabbit/src/rabbit.erl
index 9248c945dc..48ef9ec439 100644
--- a/deps/rabbit/src/rabbit.erl
+++ b/deps/rabbit/src/rabbit.erl
@@ -2,11 +2,17 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit).
+-include_lib("eunit/include/eunit.hrl").
+-include_lib("kernel/include/logger.hrl").
+-include_lib("rabbit_common/include/logging.hrl").
+
+-ignore_xref({rabbit_direct, force_event_refresh, 1}).
+-ignore_xref({rabbit_networking, force_connection_event_refresh, 1}).
%% Transitional step until we can require Erlang/OTP 21 and
%% use the now recommended try/catch syntax for obtaining the stack trace.
-compile(nowarn_deprecated_function).
@@ -28,7 +34,8 @@
base_product_version/0,
motd_file/0,
motd/0]).
--export([log_locations/0, config_files/0]). %% for testing and mgmt-agent
+%% For CLI, testing and mgmt-agent.
+-export([set_log_level/1, log_locations/0, config_files/0]).
-export([is_booted/1, is_booted/0, is_booting/1, is_booting/0]).
%%---------------------------------------------------------------------------
@@ -121,12 +128,12 @@
{requires, pre_boot},
{enables, external_infrastructure}]}).
-%% -rabbit_boot_step({rabbit_stream_coordinator,
-%% [{description, "stream queues coordinator"},
-%% {mfa, {rabbit_stream_coordinator, start,
-%% []}},
-%% {requires, pre_boot},
-%% {enables, external_infrastructure}]}).
+-rabbit_boot_step({rabbit_global_counters,
+ [{description, "global counters"},
+ {mfa, {rabbit_global_counters, boot_step,
+ []}},
+ {requires, pre_boot},
+ {enables, external_infrastructure}]}).
-rabbit_boot_step({rabbit_event,
[{description, "statistics event manager"},
@@ -261,17 +268,17 @@
-rabbit_boot_step({networking,
[{description, "TCP and TLS listeners (backwards compatibility)"},
- {mfa, {rabbit_log, debug, ["'networking' boot step skipped and moved to end of startup", []]}},
+ {mfa, {logger, debug, ["'networking' boot step skipped and moved to end of startup", [], #{domain => ?RMQLOG_DOMAIN_GLOBAL}]}},
{requires, notify_cluster}]}).
%%---------------------------------------------------------------------------
--include("rabbit_framing.hrl").
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit_framing.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-define(APPS, [os_mon, mnesia, rabbit_common, rabbitmq_prelaunch, ra, sysmon_handler, rabbit, osiris]).
--define(ASYNC_THREADS_WARNING_THRESHOLD, 8).
+-define(DIRTY_IO_SCHEDULERS_WARNING_THRESHOLD, 10).
%% 1 minute
-define(BOOT_START_TIMEOUT, 1 * 60 * 1000).
@@ -280,6 +287,8 @@
%% 100 ms
-define(BOOT_STATUS_CHECK_INTERVAL, 100).
+-define(COORD_WAL_MAX_SIZE_B, 64_000_000).
+
%%----------------------------------------------------------------------------
-type restart_type() :: 'permanent' | 'transient' | 'temporary'.
@@ -335,12 +344,12 @@ run_prelaunch_second_phase() ->
case IsInitialPass of
true ->
- rabbit_log_prelaunch:debug(""),
- rabbit_log_prelaunch:debug(
+ ?LOG_DEBUG(""),
+ ?LOG_DEBUG(
"== Prelaunch phase [2/2] (initial pass) ==");
false ->
- rabbit_log_prelaunch:debug(""),
- rabbit_log_prelaunch:debug("== Prelaunch phase [2/2] =="),
+ ?LOG_DEBUG(""),
+ ?LOG_DEBUG("== Prelaunch phase [2/2] =="),
ok
end,
@@ -357,11 +366,28 @@ run_prelaunch_second_phase() ->
ok = rabbit_prelaunch_cluster:setup(Context),
%% Start Mnesia now that everything is ready.
- rabbit_log_prelaunch:debug("Starting Mnesia"),
+ ?LOG_DEBUG("Starting Mnesia"),
ok = mnesia:start(),
- rabbit_log_prelaunch:debug(""),
- rabbit_log_prelaunch:debug("== Prelaunch DONE =="),
+ ?LOG_DEBUG(""),
+ ?LOG_DEBUG("== Prelaunch DONE =="),
+
+ ?LOG_DEBUG("Starting Ra Systems"),
+ Default = ra_system:default_config(),
+ Quorum = Default#{name => quorum_queues},
+ % names => ra_system:derive_names(quorum)},
+ CoordDataDir = filename:join([rabbit_mnesia:dir(), "coordination", node()]),
+ Coord = Default#{name => coordination,
+ data_dir => CoordDataDir,
+ wal_data_dir => CoordDataDir,
+ wal_max_size_bytes => ?COORD_WAL_MAX_SIZE_B,
+ names => ra_system:derive_names(coordination)},
+
+ {ok, _} = ra_system:start(Quorum),
+ {ok, _} = ra_system:start(Coord),
+
+ ?LOG_DEBUG(""),
+ ?LOG_DEBUG("== Ra System Start done DONE =="),
case IsInitialPass of
true -> rabbit_prelaunch:initial_pass_finished();
@@ -373,7 +399,8 @@ start_it(StartType) ->
case spawn_boot_marker() of
{ok, Marker} ->
T0 = erlang:timestamp(),
- rabbit_log:info("RabbitMQ is asked to start...", []),
+ ?LOG_INFO("RabbitMQ is asked to start...", [],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
try
{ok, _} = application:ensure_all_started(rabbitmq_prelaunch,
StartType),
@@ -382,7 +409,7 @@ start_it(StartType) ->
ok = wait_for_ready_or_stopped(),
T1 = erlang:timestamp(),
- rabbit_log_prelaunch:debug(
+ ?LOG_DEBUG(
"Time to start RabbitMQ: ~p µs",
[timer:now_diff(T1, T0)]),
stop_boot_marker(Marker),
@@ -433,11 +460,13 @@ stop() ->
case rabbit_boot_state:get() of
ready ->
Product = product_name(),
- rabbit_log:info("~s is asked to stop...", [Product]),
+ ?LOG_INFO("~s is asked to stop...", [Product],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
do_stop(),
- rabbit_log:info(
+ ?LOG_INFO(
"Successfully stopped ~s and its dependencies",
- [Product]),
+ [Product],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
ok;
stopped ->
ok
@@ -461,19 +490,22 @@ stop_and_halt() ->
try
stop()
catch Type:Reason ->
- rabbit_log:error(
+ ?LOG_ERROR(
"Error trying to stop ~s: ~p:~p",
- [product_name(), Type, Reason]),
+ [product_name(), Type, Reason],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
error({Type, Reason})
after
%% Enclose all the logging in the try block.
%% init:stop() will be called regardless of any errors.
try
AppsLeft = [ A || {A, _, _} <- application:which_applications() ],
- rabbit_log:info(
- lists:flatten(["Halting Erlang VM with the following applications:~n",
- [" ~p~n" || _ <- AppsLeft]]),
- AppsLeft),
+ ?LOG_INFO(
+ lists:flatten(
+ ["Halting Erlang VM with the following applications:~n",
+ [" ~p~n" || _ <- AppsLeft]]),
+ AppsLeft,
+ #{domain => ?RMQLOG_DOMAIN_GLOBAL}),
%% Also duplicate this information to stderr, so console where
%% foreground broker was running (or systemd journal) will
%% contain information about graceful termination.
@@ -493,7 +525,7 @@ start_apps(Apps) ->
#{app_name() => restart_type()}) -> 'ok'.
%% TODO: start_apps/2 and is now specific to plugins. This function
-%% should be moved over `rabbit_plugins`, along with stop_apps/1, once
+%% should be moved over `rabbit_plugins', along with stop_apps/1, once
%% the latter stops using app_utils as well.
start_apps(Apps, RestartTypes) ->
@@ -518,10 +550,12 @@ start_apps(Apps, RestartTypes) ->
stop_apps([]) ->
ok;
stop_apps(Apps) ->
- rabbit_log:info(
- lists:flatten(["Stopping ~s applications and their dependencies in the following order:~n",
- [" ~p~n" || _ <- Apps]]),
- [product_name() | lists:reverse(Apps)]),
+ ?LOG_INFO(
+ lists:flatten(
+ ["Stopping ~s applications and their dependencies in the following order:~n",
+ [" ~p~n" || _ <- Apps]]),
+ [product_name() | lists:reverse(Apps)],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
ok = app_utils:stop_applications(
Apps, handle_app_error(error_during_shutdown)),
case lists:member(rabbit, Apps) of
@@ -542,10 +576,9 @@ handle_app_error(Term) ->
is_booting() -> is_booting(node()).
is_booting(Node) when Node =:= node() ->
- case rabbit_boot_state:get() of
- booting -> true;
- _ -> false
- end;
+ rabbit_boot_state:has_reached_and_is_active(booting)
+ andalso
+ not rabbit_boot_state:has_reached(ready);
is_booting(Node) ->
case rpc:call(Node, rabbit, is_booting, []) of
{badrpc, _} = Err -> Err;
@@ -786,28 +819,10 @@ environment(App) ->
-spec rotate_logs() -> rabbit_types:ok_or_error(any()).
rotate_logs() ->
- rabbit_lager:fold_sinks(
- fun
- (_, [], Acc) ->
- Acc;
- (SinkName, FileNames, Acc) ->
- lager:log(SinkName, info, self(),
- "Log file rotation forced", []),
- %% FIXME: We use an internal message, understood by
- %% lager_file_backend. We should use a proper API, when
- %% it's added to Lager.
- %%
- %% FIXME: This call is effectively asynchronous: at the
- %% end of this function, we can't guaranty the rotation
- %% is completed.
- [ok = gen_event:call(SinkName,
- {lager_file_backend, FileName},
- rotate,
- infinity) || FileName <- FileNames],
- lager:log(SinkName, info, self(),
- "Log file re-opened after forced rotation", []),
- Acc
- end, ok).
+ ?LOG_ERROR(
+ "Forcing log rotation is currently unsupported",
+ #{domain => ?RMQLOG_DOMAIN_GLOBAL}),
+ {error, unsupported}.
%%--------------------------------------------------------------------
@@ -836,14 +851,20 @@ start(normal, []) ->
#{product_overridden := true,
product_base_name := BaseName,
product_base_version := BaseVersion} ->
- rabbit_log:info("~n Starting ~s ~s on Erlang ~s~n Based on ~s ~s~n ~s~n ~s~n",
- [product_name(), product_version(), rabbit_misc:otp_release(),
- BaseName, BaseVersion,
- ?COPYRIGHT_MESSAGE, ?INFORMATION_MESSAGE]);
+ ?LOG_INFO(
+ "~n Starting ~s ~s on Erlang ~s [~s]~n Based on ~s ~s~n ~s~n ~s",
+ [product_name(), product_version(), rabbit_misc:otp_release(),
+ emu_flavor(),
+ BaseName, BaseVersion,
+ ?COPYRIGHT_MESSAGE, ?INFORMATION_MESSAGE],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH});
_ ->
- rabbit_log:info("~n Starting ~s ~s on Erlang ~s~n ~s~n ~s~n",
- [product_name(), product_version(), rabbit_misc:otp_release(),
- ?COPYRIGHT_MESSAGE, ?INFORMATION_MESSAGE])
+ ?LOG_INFO(
+ "~n Starting ~s ~s on Erlang ~s [~s]~n ~s~n ~s",
+ [product_name(), product_version(), rabbit_misc:otp_release(),
+ emu_flavor(),
+ ?COPYRIGHT_MESSAGE, ?INFORMATION_MESSAGE],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH})
end,
log_motd(),
{ok, SupPid} = rabbit_sup:start_link(),
@@ -861,7 +882,7 @@ start(normal, []) ->
%%
%% Note that plugins were not taken care of at this point
%% either.
- rabbit_log_prelaunch:debug(
+ ?LOG_DEBUG(
"Register `rabbit` process (~p) for rabbit_node_monitor",
[self()]),
true = register(rabbit, self()),
@@ -870,11 +891,30 @@ start(normal, []) ->
log_banner(),
warn_if_kernel_config_dubious(),
warn_if_disc_io_options_dubious(),
- %% We run `rabbit` boot steps only for now. Plugins boot steps
- %% will be executed as part of the postlaunch phase after they
- %% are started.
- rabbit_boot_steps:run_boot_steps([rabbit]),
- run_postlaunch_phase(),
+
+ ?LOG_DEBUG(""),
+ ?LOG_DEBUG("== Plugins (prelaunch phase) =="),
+
+ ?LOG_DEBUG("Setting plugins up"),
+ %% `Plugins` contains all the enabled plugins, plus their
+ %% dependencies. The order is important: dependencies appear
+ %% before plugins which depend on them.
+ Plugins = rabbit_plugins:setup(),
+ ?LOG_DEBUG(
+ "Loading the following plugins: ~p", [Plugins]),
+ %% We can load all plugins and refresh their feature flags at
+ %% once, because it does not involve running code from the
+ %% plugins.
+ ok = app_utils:load_applications(Plugins),
+ ok = rabbit_feature_flags:refresh_feature_flags_after_app_load(
+ Plugins),
+
+ ?LOG_DEBUG(""),
+ ?LOG_DEBUG("== Boot steps =="),
+
+ ok = rabbit_boot_steps:run_boot_steps([rabbit | Plugins]),
+ rabbit_boot_state:set(core_started),
+ run_postlaunch_phase(Plugins),
{ok, SupPid}
catch
throw:{error, _} = Error ->
@@ -893,50 +933,38 @@ start(normal, []) ->
Error
end.
-run_postlaunch_phase() ->
- spawn(fun() -> do_run_postlaunch_phase() end).
+run_postlaunch_phase(Plugins) ->
+ spawn(fun() -> do_run_postlaunch_phase(Plugins) end).
-do_run_postlaunch_phase() ->
+do_run_postlaunch_phase(Plugins) ->
%% Once RabbitMQ itself is started, we need to run a few more steps,
%% in particular start plugins.
- rabbit_log_prelaunch:debug(""),
- rabbit_log_prelaunch:debug("== Postlaunch phase =="),
+ ?LOG_DEBUG(""),
+ ?LOG_DEBUG("== Postlaunch phase =="),
try
- rabbit_log_prelaunch:debug(""),
- rabbit_log_prelaunch:debug("== Plugins =="),
+ %% Successful boot resets node maintenance state.
+ ?LOG_DEBUG(""),
+ ?LOG_INFO("Resetting node maintenance status"),
+ _ = rabbit_maintenance:unmark_as_being_drained(),
+
+ ?LOG_DEBUG(""),
+ ?LOG_DEBUG("== Plugins (postlaunch phase) =="),
- rabbit_log_prelaunch:debug("Setting plugins up"),
- %% `Plugins` contains all the enabled plugins, plus their
- %% dependencies. The order is important: dependencies appear
- %% before plugin which depend on them.
- Plugins = rabbit_plugins:setup(),
- rabbit_log_prelaunch:debug(
- "Starting the following plugins: ~p", [Plugins]),
- %% We can load all plugins and refresh their feature flags at
- %% once, because it does not involve running code from the
- %% plugins.
- app_utils:load_applications(Plugins),
- ok = rabbit_feature_flags:refresh_feature_flags_after_app_load(
- Plugins),
%% However, we want to run their boot steps and actually start
%% them one by one, to ensure a dependency is fully started
%% before a plugin which depends on it gets a chance to start.
+ ?LOG_DEBUG("Starting the following plugins: ~p", [Plugins]),
lists:foreach(
fun(Plugin) ->
- ok = rabbit_boot_steps:run_boot_steps([Plugin]),
case application:ensure_all_started(Plugin) of
{ok, _} -> ok;
Error -> throw(Error)
end
end, Plugins),
- %% Successful boot resets node maintenance state.
- rabbit_log_prelaunch:info("Resetting node maintenance status"),
- _ = rabbit_maintenance:unmark_as_being_drained(),
-
%% Export definitions after all plugins have been enabled,
- %% see rabbitmq/rabbitmq-server#2384
+ %% see rabbitmq/rabbitmq-server#2384.
case rabbit_definitions:maybe_load_definitions() of
ok -> ok;
DefLoadError -> throw(DefLoadError)
@@ -944,17 +972,16 @@ do_run_postlaunch_phase() ->
%% Start listeners after all plugins have been enabled,
%% see rabbitmq/rabbitmq-server#2405.
- rabbit_log_prelaunch:info(
- "Ready to start client connection listeners"),
+ ?LOG_INFO("Ready to start client connection listeners"),
ok = rabbit_networking:boot(),
%% The node is ready: mark it as such and log it.
%% NOTE: PLEASE DO NOT ADD CRITICAL NODE STARTUP CODE AFTER THIS.
- ok = rabbit_lager:broker_is_started(),
- ok = log_broker_started(
- rabbit_plugins:strictly_plugins(rabbit_plugins:active())),
+ ActivePlugins = rabbit_plugins:active(),
+ StrictlyPlugins = rabbit_plugins:strictly_plugins(ActivePlugins),
+ ok = log_broker_started(StrictlyPlugins),
- rabbit_log_prelaunch:debug("Marking ~s as running", [product_name()]),
+ ?LOG_DEBUG("Marking ~s as running", [product_name()]),
rabbit_boot_state:set(ready)
catch
throw:{error, _} = Error ->
@@ -1003,7 +1030,7 @@ boot_delegate() ->
recover() ->
ok = rabbit_policy:recover(),
ok = rabbit_vhost:recover(),
- ok = lager_exchange_backend:maybe_init_exchange().
+ ok.
-spec maybe_insert_default_data() -> 'ok'.
@@ -1011,18 +1038,20 @@ maybe_insert_default_data() ->
NoDefsToImport = not rabbit_definitions:has_configured_definitions_to_load(),
case rabbit_table:needs_default_data() andalso NoDefsToImport of
true ->
- rabbit_log:info("Will seed default virtual host and user..."),
+ ?LOG_INFO("Will seed default virtual host and user...",
+ #{domain => ?RMQLOG_DOMAIN_GLOBAL}),
insert_default_data();
false ->
- rabbit_log:info("Will not seed default virtual host and user: have definitions to load..."),
+ ?LOG_INFO("Will not seed default virtual host and user: have definitions to load...",
+ #{domain => ?RMQLOG_DOMAIN_GLOBAL}),
ok
end.
insert_default_data() ->
- {ok, DefaultUser} = application:get_env(default_user),
- {ok, DefaultPass} = application:get_env(default_pass),
+ DefaultUser = get_default_data_param(default_user),
+ DefaultPass = get_default_data_param(default_pass),
{ok, DefaultTags} = application:get_env(default_user_tags),
- {ok, DefaultVHost} = application:get_env(default_vhost),
+ DefaultVHost = get_default_data_param(default_vhost),
{ok, [DefaultConfigurePerm, DefaultWritePerm, DefaultReadPerm]} =
application:get_env(default_permissions),
@@ -1034,7 +1063,6 @@ insert_default_data() ->
DefaultReadPermBin = rabbit_data_coercion:to_binary(DefaultReadPerm),
ok = rabbit_vhost:add(DefaultVHostBin, <<"Default virtual host">>, [], ?INTERNAL_USER),
- ok = lager_exchange_backend:maybe_init_exchange(),
ok = rabbit_auth_backend_internal:add_user(
DefaultUserBin,
DefaultPassBin,
@@ -1050,12 +1078,28 @@ insert_default_data() ->
?INTERNAL_USER),
ok.
+get_default_data_param(Param) ->
+ #{var_origins := Origins} = Context = rabbit_prelaunch:get_context(),
+ case maps:get(Param, Origins, default) of
+ environment ->
+ Value = maps:get(Param, Context),
+ ?assert(is_binary(Value)),
+ Value;
+ default ->
+ {ok, Value} = application:get_env(Param),
+ Value
+ end.
+
%%---------------------------------------------------------------------------
%% logging
--spec log_locations() -> [rabbit_lager:log_location()].
+-spec set_log_level(logger:level()) -> ok.
+set_log_level(Level) ->
+ rabbit_prelaunch_logging:set_log_level(Level).
+
+-spec log_locations() -> [rabbit_prelaunch_logging:log_location()].
log_locations() ->
- rabbit_lager:log_locations().
+ rabbit_prelaunch_logging:log_locations().
-spec config_locations() -> [rabbit_config:config_location()].
config_locations() ->
@@ -1086,7 +1130,8 @@ log_broker_started(Plugins) ->
Message = string:strip(rabbit_misc:format(
"Server startup complete; ~b plugins started.~n~s",
[length(Plugins), PluginList]), right, $\n),
- rabbit_log:info(Message),
+ ?LOG_INFO(Message,
+ #{domain => ?RMQLOG_DOMAIN_GLOBAL}),
io:format(" completed with ~p plugins.~n", [length(Plugins)]).
-define(RABBIT_TEXT_LOGO,
@@ -1153,19 +1198,34 @@ print_banner() ->
io:format(Logo ++
"~n" ++
MOTDFormat ++
- "~n Doc guides: https://rabbitmq.com/documentation.html"
- "~n Support: https://rabbitmq.com/contact.html"
- "~n Tutorials: https://rabbitmq.com/getstarted.html"
- "~n Monitoring: https://rabbitmq.com/monitoring.html"
+ "~n Erlang: ~ts [~ts]"
+ "~n TLS Library: ~ts"
+ "~n"
+ "~n Doc guides: https://rabbitmq.com/documentation.html"
+ "~n Support: https://rabbitmq.com/contact.html"
+ "~n Tutorials: https://rabbitmq.com/getstarted.html"
+ "~n Monitoring: https://rabbitmq.com/monitoring.html"
"~n"
"~n Logs: ~ts" ++ LogFmt ++ "~n"
"~n Config file(s): ~ts" ++ CfgFmt ++ "~n"
"~n Starting broker...",
[Product, Version, ?COPYRIGHT_MESSAGE, ?INFORMATION_MESSAGE] ++
+ [rabbit_misc:otp_release(), emu_flavor(), crypto_version()] ++
MOTDArgs ++
LogLocations ++
CfgLocations).
+emu_flavor() ->
+ %% emu_flavor was introduced in Erlang 24 so we need to catch the error on Erlang 23
+ case catch(erlang:system_info(emu_flavor)) of
+ {'EXIT', _} -> "emu";
+ EmuFlavor -> EmuFlavor
+ end.
+
+crypto_version() ->
+ [{CryptoLibName, _, CryptoLibVersion}] = crypto:info_lib(),
+ [CryptoLibName, " - ", CryptoLibVersion].
+
log_motd() ->
case motd() of
undefined ->
@@ -1177,7 +1237,8 @@ log_motd() ->
_ -> [" ", Line, "\n"]
end
|| Line <- Lines],
- rabbit_log:info("~n~ts", [string:trim(Padded, trailing, [$\r, $\n])])
+ ?LOG_INFO("~n~ts", [string:trim(Padded, trailing, [$\r, $\n])],
+ #{domain => ?RMQLOG_DOMAIN_GLOBAL})
end.
log_banner() ->
@@ -1208,7 +1269,8 @@ log_banner() ->
{K, V} ->
Format(K, V)
end || S <- Settings]), right, $\n),
- rabbit_log:info("~n~ts", [Banner]).
+ ?LOG_INFO("~n~ts", [Banner],
+ #{domain => ?RMQLOG_DOMAIN_GLOBAL}).
warn_if_kernel_config_dubious() ->
case os:type() of
@@ -1217,16 +1279,18 @@ warn_if_kernel_config_dubious() ->
_ ->
case erlang:system_info(kernel_poll) of
true -> ok;
- false -> rabbit_log:warning(
- "Kernel poll (epoll, kqueue, etc) is disabled. Throughput "
- "and CPU utilization may worsen.~n")
+ false -> ?LOG_WARNING(
+ "Kernel poll (epoll, kqueue, etc) is disabled. "
+ "Throughput and CPU utilization may worsen.",
+ #{domain => ?RMQLOG_DOMAIN_GLOBAL})
end
end,
- AsyncThreads = erlang:system_info(thread_pool_size),
- case AsyncThreads < ?ASYNC_THREADS_WARNING_THRESHOLD of
- true -> rabbit_log:warning(
- "Erlang VM is running with ~b I/O threads, "
- "file I/O performance may worsen~n", [AsyncThreads]);
+ DirtyIOSchedulers = erlang:system_info(dirty_io_schedulers),
+ case DirtyIOSchedulers < ?DIRTY_IO_SCHEDULERS_WARNING_THRESHOLD of
+ true -> ?LOG_WARNING(
+ "Erlang VM is running with ~b dirty I/O schedulers, "
+ "file I/O performance may worsen", [DirtyIOSchedulers],
+ #{domain => ?RMQLOG_DOMAIN_GLOBAL});
false -> ok
end,
IDCOpts = case application:get_env(kernel, inet_default_connect_options) of
@@ -1234,8 +1298,9 @@ warn_if_kernel_config_dubious() ->
{ok, Val} -> Val
end,
case proplists:get_value(nodelay, IDCOpts, false) of
- false -> rabbit_log:warning("Nagle's algorithm is enabled for sockets, "
- "network I/O latency will be higher~n");
+ false -> ?LOG_WARNING("Nagle's algorithm is enabled for sockets, "
+ "network I/O latency will be higher",
+ #{domain => ?RMQLOG_DOMAIN_GLOBAL});
true -> ok
end.
@@ -1251,7 +1316,8 @@ warn_if_disc_io_options_dubious() ->
CreditDiscBound, IoBatchSize) of
ok -> ok;
{error, {Reason, Vars}} ->
- rabbit_log:warning(Reason, Vars)
+ ?LOG_WARNING(Reason, Vars,
+ #{domain => ?RMQLOG_DOMAIN_GLOBAL})
end.
validate_msg_store_io_batch_size_and_credit_disc_bound(CreditDiscBound,
@@ -1263,7 +1329,7 @@ validate_msg_store_io_batch_size_and_credit_disc_bound(CreditDiscBound,
if IoBatchSize < ?IO_BATCH_SIZE ->
throw({error,
{"io_batch_size of ~b lower than recommended value ~b, "
- "paging performance may worsen~n",
+ "paging performance may worsen",
[IoBatchSize, ?IO_BATCH_SIZE]}});
true ->
ok
@@ -1284,7 +1350,7 @@ validate_msg_store_io_batch_size_and_credit_disc_bound(CreditDiscBound,
throw({error,
{"msg_store_credit_disc_bound {~b, ~b} lower than"
"recommended value {~b, ~b},"
- " paging performance may worsen~n",
+ " paging performance may worsen",
[IC, MCA, RIC, RMCA]}});
true ->
ok
@@ -1312,7 +1378,7 @@ validate_msg_store_io_batch_size_and_credit_disc_bound(CreditDiscBound,
{error,
{"msg_store_io_batch_size ~b should be bigger than the initial "
"credit value from msg_store_credit_disc_bound ~b,"
- " paging performance may worsen~n",
+ " paging performance may worsen",
[IoBatchSize, InitialCredit]}});
true ->
ok
@@ -1459,7 +1525,7 @@ motd() ->
home_dir() ->
case init:get_argument(home) of
- {ok, [[Home]]} -> Home;
+ {ok, [[Home]]} -> filename:absname(Home);
Other -> Other
end.
@@ -1490,8 +1556,10 @@ ensure_working_fhc() ->
{ok, true} -> "ON";
{ok, false} -> "OFF"
end,
- rabbit_log:info("FHC read buffering: ~s~n", [ReadBuf]),
- rabbit_log:info("FHC write buffering: ~s~n", [WriteBuf]),
+ ?LOG_INFO("FHC read buffering: ~s", [ReadBuf],
+ #{domain => ?RMQLOG_DOMAIN_GLOBAL}),
+ ?LOG_INFO("FHC write buffering: ~s", [WriteBuf],
+ #{domain => ?RMQLOG_DOMAIN_GLOBAL}),
Filename = filename:join(code:lib_dir(kernel, ebin), "kernel.app"),
{ok, Fd} = file_handle_cache:open(Filename, [raw, binary, read], []),
{ok, _} = file_handle_cache:read(Fd, 1),
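
Among the rabbit.erl changes above, emu_flavor/0 wraps erlang:system_info(emu_flavor) in a catch so the banner code keeps working on Erlang 23, where that key does not exist yet. The same probe works for any optional system_info key; a small hedged sketch (the function name and default are illustrative, not part of this diff):

%% Generic "use the value if the key exists, else a default" probe.
system_info_or(Key, Default) ->
    case catch erlang:system_info(Key) of
        {'EXIT', _} -> Default;   %% key not supported on this OTP release
        Value       -> Value
    end.
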
diff --git a/deps/rabbit/src/rabbit_access_control.erl b/deps/rabbit/src/rabbit_access_control.erl
index 72260d5723..d9670d87ec 100644
--- a/deps/rabbit/src/rabbit_access_control.erl
+++ b/deps/rabbit/src/rabbit_access_control.erl
@@ -2,12 +2,12 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_access_control).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-export([check_user_pass_login/2, check_user_login/2, check_user_loopback/2,
check_vhost_access/4, check_resource_access/4, check_topic_access/4]).
@@ -38,35 +38,42 @@ check_user_pass_login(Username, Password) ->
check_user_login(Username, AuthProps) ->
%% extra auth properties like MQTT client id are in AuthProps
{ok, Modules} = application:get_env(rabbit, auth_backends),
- R = lists:foldl(
- fun (rabbit_auth_backend_cache=ModN, {refused, _, _, _}) ->
- %% It is possible to specify authn/authz within the cache module settings,
- %% so we have to do both auth steps here
- %% See this rabbitmq-users discussion:
- %% https://groups.google.com/d/topic/rabbitmq-users/ObqM7MQdA3I/discussion
- try_authenticate_and_try_authorize(ModN, ModN, Username, AuthProps);
- ({ModN, ModZs}, {refused, _, _, _}) ->
- %% Different modules for authN vs authZ. So authenticate
- %% with authN module, then if that succeeds do
- %% passwordless (i.e pre-authenticated) login with authZ.
- try_authenticate_and_try_authorize(ModN, ModZs, Username, AuthProps);
- (Mod, {refused, _, _, _}) ->
- %% Same module for authN and authZ. Just take the result
- %% it gives us
- case try_authenticate(Mod, Username, AuthProps) of
- {ok, ModNUser = #auth_user{username = Username2, impl = Impl}} ->
- rabbit_log:debug("User '~s' authenticated successfully by backend ~s", [Username2, Mod]),
- user(ModNUser, {ok, [{Mod, Impl}], []});
- Else ->
- rabbit_log:debug("User '~s' failed authenticatation by backend ~s", [Username, Mod]),
- Else
- end;
- (_, {ok, User}) ->
- %% We've successfully authenticated. Skip to the end...
- {ok, User}
- end,
- {refused, Username, "No modules checked '~s'", [Username]}, Modules),
- R.
+ try
+ lists:foldl(
+ fun (rabbit_auth_backend_cache=ModN, {refused, _, _, _}) ->
+ %% It is possible to specify authn/authz within the cache module settings,
+ %% so we have to do both auth steps here
+ %% See this rabbitmq-users discussion:
+ %% https://groups.google.com/d/topic/rabbitmq-users/ObqM7MQdA3I/discussion
+ try_authenticate_and_try_authorize(ModN, ModN, Username, AuthProps);
+ ({ModN, ModZs}, {refused, _, _, _}) ->
+ %% Different modules for authN vs authZ. So authenticate
+ %% with authN module, then if that succeeds do
+ %% passwordless (i.e pre-authenticated) login with authZ.
+ try_authenticate_and_try_authorize(ModN, ModZs, Username, AuthProps);
+ (Mod, {refused, _, _, _}) ->
+ %% Same module for authN and authZ. Just take the result
+ %% it gives us
+ case try_authenticate(Mod, Username, AuthProps) of
+ {ok, ModNUser = #auth_user{username = Username2, impl = Impl}} ->
+ rabbit_log:debug("User '~s' authenticated successfully by backend ~s", [Username2, Mod]),
+ user(ModNUser, {ok, [{Mod, Impl}], []});
+ Else ->
+ rabbit_log:debug("User '~s' failed authenticatation by backend ~s", [Username, Mod]),
+ Else
+ end;
+ (_, {ok, User}) ->
+ %% We've successfully authenticated. Skip to the end...
+ {ok, User}
+ end,
+ {refused, Username, "No modules checked '~s'", [Username]}, Modules)
+ catch
+ Type:Error:Stacktrace ->
+ rabbit_log:debug("User '~s' authentication failed with ~s:~p:~n~p", [Username, Type, Error, Stacktrace]),
+ {refused, Username, "User '~s' authentication failed with internal error. "
+ "Enable debug logs to see the real error.", [Username]}
+
+ end.
try_authenticate_and_try_authorize(ModN, ModZs0, Username, AuthProps) ->
ModZs = case ModZs0 of
@@ -85,7 +92,7 @@ try_authenticate(Module, Username, AuthProps) ->
case Module:user_login_authentication(Username, AuthProps) of
{ok, AuthUser} -> {ok, AuthUser};
{error, E} -> {refused, Username,
- "~s failed authenticating ~s: ~p~n",
+ "~s failed authenticating ~s: ~p",
[Module, Username, E]};
{refused, F, A} -> {refused, Username, F, A}
end.
@@ -97,7 +104,7 @@ try_authorize(Modules, Username, AuthProps) ->
{ok, Impl, Tags}-> {ok, [{Module, Impl} | ModsImpls], ModsTags ++ Tags};
{ok, Impl} -> {ok, [{Module, Impl} | ModsImpls], ModsTags};
{error, E} -> {refused, Username,
- "~s failed authorizing ~s: ~p~n",
+ "~s failed authorizing ~s: ~p",
[Module, Username, E]};
{refused, F, A} -> {refused, Username, F, A}
end;
@@ -215,7 +222,7 @@ check_access(Fun, Module, ErrStr, ErrArgs, ErrName) ->
false ->
rabbit_misc:protocol_error(ErrName, ErrStr, ErrArgs);
{error, E} ->
- FullErrStr = ErrStr ++ ", backend ~s returned an error: ~p~n",
+ FullErrStr = ErrStr ++ ", backend ~s returned an error: ~p",
FullErrArgs = ErrArgs ++ [Module, E],
rabbit_log:error(FullErrStr, FullErrArgs),
rabbit_misc:protocol_error(ErrName, FullErrStr, FullErrArgs)
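
The new try/catch above turns any crash inside an authentication backend into a generic refusal, while the real reason only reaches the debug log; the fold itself still walks the auth_backends application environment entry one backend at a time. For orientation, a minimal sketch of such an entry (the file name and backend choice are illustrative, not part of this commit): a bare module handles both authN and authZ, a {AuthN, AuthZ} pair splits the two steps, matching the two fold clauses above.

    %% illustrative advanced.config fragment (assumed deployment file)
    [
     {rabbit, [
       {auth_backends, [
         %% one module used for both authentication and authorisation
         rabbit_auth_backend_internal,
         %% pair: authenticate against LDAP, authorise via the internal database
         {rabbit_auth_backend_ldap, rabbit_auth_backend_internal}
       ]}
     ]}
    ].
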
diff --git a/deps/rabbit/src/rabbit_alarm.erl b/deps/rabbit/src/rabbit_alarm.erl
index 3f1ab7ae62..e9942136a9 100644
--- a/deps/rabbit/src/rabbit_alarm.erl
+++ b/deps/rabbit/src/rabbit_alarm.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% There are two types of alarms handled by this module:
%%
@@ -232,7 +232,7 @@ handle_event({node_down, Node}, #alarms{alarmed_nodes = AN} = State) ->
error -> []
end,
{ok, lists:foldr(fun(Source, AccState) ->
- rabbit_log:warning("~s resource limit alarm cleared for dead node ~p~n",
+ rabbit_log:warning("~s resource limit alarm cleared for dead node ~p",
[Source, Node]),
maybe_alert(fun dict_unappend/3, Node, Source, false, AccState)
end, State, AlarmsForDeadNode)};
@@ -284,7 +284,7 @@ maybe_alert(UpdateFun, Node, Source, WasAlertAdded,
StillHasAlerts = lists:any(fun ({_Node, NodeAlerts}) -> lists:member(Source, NodeAlerts) end, dict:to_list(AN1)),
case StillHasAlerts of
true -> ok;
- false -> rabbit_log:warning("~s resource limit alarm cleared across the cluster~n", [Source])
+ false -> rabbit_log:warning("~s resource limit alarm cleared across the cluster", [Source])
end,
Alert = {WasAlertAdded, StillHasAlerts, Node},
case node() of
@@ -336,11 +336,11 @@ handle_set_alarm({file_descriptor_limit, []}, State) ->
"********************************************************************~n"),
{ok, State};
handle_set_alarm(Alarm, State) ->
- rabbit_log:warning("alarm '~p' set~n", [Alarm]),
+ rabbit_log:warning("alarm '~p' set", [Alarm]),
{ok, State}.
handle_clear_resource_alarm(Source, Node, State) ->
- rabbit_log:warning("~s resource limit alarm cleared on node ~p~n",
+ rabbit_log:warning("~s resource limit alarm cleared on node ~p",
[Source, Node]),
{ok, maybe_alert(fun dict_unappend/3, Node, Source, false, State)}.
@@ -348,7 +348,7 @@ handle_clear_alarm(file_descriptor_limit, State) ->
rabbit_log:warning("file descriptor limit alarm cleared~n"),
{ok, State};
handle_clear_alarm(Alarm, State) ->
- rabbit_log:warning("alarm '~p' cleared~n", [Alarm]),
+ rabbit_log:warning("alarm '~p' cleared", [Alarm]),
{ok, State}.
is_node_alarmed(Source, Node, #alarms{alarmed_nodes = AN}) ->
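
Apart from the copyright bump, the rabbit_alarm.erl changes only drop trailing ~n from rabbit_log format strings, presumably because the logging layer now terminates each message itself and the explicit newline would just produce blank lines. A before/after sketch with a made-up alarm source:

    %% old style: the format string carries its own newline
    rabbit_log:warning("memory resource limit alarm cleared on node ~p~n", [Node]),
    %% new style: no trailing ~n, the logger adds the line break
    rabbit_log:warning("memory resource limit alarm cleared on node ~p", [Node]).
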
diff --git a/deps/rabbit/src/rabbit_amqqueue.erl b/deps/rabbit/src/rabbit_amqqueue.erl
index cd5f894680..91f4a3f130 100644
--- a/deps/rabbit/src/rabbit_amqqueue.erl
+++ b/deps/rabbit/src/rabbit_amqqueue.erl
@@ -2,11 +2,12 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_amqqueue).
+-export([store_queue_ram_dirty/1]).
-export([warn_file_limit/0]).
-export([recover/1, stop/1, start/1, declare/6, declare/7,
delete_immediately/1, delete_exclusive/2, delete/4, purge/1,
@@ -35,13 +36,15 @@
-export([update/2, store_queue/1, update_decorators/1, policy_changed/2]).
-export([update_mirroring/1, sync_mirrors/1, cancel_sync_mirrors/1]).
-export([emit_unresponsive/6, emit_unresponsive_local/5, is_unresponsive/2]).
--export([has_synchronised_mirrors_online/1]).
+-export([has_synchronised_mirrors_online/1, is_match/2, is_in_virtual_host/2]).
-export([is_replicated/1, is_exclusive/1, is_not_exclusive/1, is_dead_exclusive/1]).
--export([list_local_quorum_queues/0, list_local_quorum_queue_names/0,
+-export([list_local_quorum_queues/0, list_local_quorum_queue_names/0, list_local_stream_queues/0,
list_local_mirrored_classic_queues/0, list_local_mirrored_classic_names/0,
list_local_leaders/0, list_local_followers/0, get_quorum_nodes/1,
list_local_mirrored_classic_without_synchronised_mirrors/0,
- list_local_mirrored_classic_without_synchronised_mirrors_for_cli/0]).
+ list_local_mirrored_classic_without_synchronised_mirrors_for_cli/0,
+ list_local_quorum_queues_with_name_matching/1,
+ list_local_quorum_queues_with_name_matching/2]).
-export([ensure_rabbit_queue_record_is_initialized/1]).
-export([format/1]).
-export([delete_immediately_by_resource/1]).
@@ -55,11 +58,13 @@
-export([rebalance/3]).
-export([collect_info_all/2]).
--export([is_policy_applicable/2]).
+-export([is_policy_applicable/2, declare_args/0, consume_args/0]).
-export([is_server_named_allowed/1]).
-export([check_max_age/1]).
--export([get_queue_type/1]).
+-export([get_queue_type/1, get_resource_vhost_name/1, get_resource_name/1]).
+
+-export([deactivate_limit_all/2]).
%% internal
-export([internal_declare/2, internal_delete/2, run_backing_queue/3,
@@ -73,8 +78,6 @@
-define(INTEGER_ARG_TYPES, [byte, short, signedint, long,
unsignedbyte, unsignedshort, unsignedint]).
--define(MORE_CONSUMER_CREDIT_AFTER, 50).
-
-define(IS_CLASSIC(QPid), is_pid(QPid)).
-define(IS_QUORUM(QPid), is_tuple(QPid)).
%%----------------------------------------------------------------------------
@@ -111,7 +114,7 @@ warn_file_limit() ->
case file_handle_cache:get_limit() < L of
true ->
rabbit_log:warning(
- "Recovering ~p queues, available file handles: ~p. Please increase max open file handles limit to at least ~p!~n",
+ "Recovering ~p queues, available file handles: ~p. Please increase max open file handles limit to at least ~p!",
[L, file_handle_cache:get_limit(), L]);
false ->
ok
@@ -222,7 +225,7 @@ declare(QueueName = #resource{virtual_host = VHost}, Durable, AutoDelete, Args,
Type = get_queue_type(Args),
case rabbit_queue_type:is_enabled(Type) of
true ->
- Q0 = amqqueue:new(QueueName,
+ Q = amqqueue:new(QueueName,
none,
Durable,
AutoDelete,
@@ -231,8 +234,6 @@ declare(QueueName = #resource{virtual_host = VHost}, Durable, AutoDelete, Args,
VHost,
#{user => ActingUser},
Type),
- Q = rabbit_queue_decorator:set(
- rabbit_policy:set(Q0)),
rabbit_queue_type:declare(Q, Node);
false ->
{protocol_error, internal_error,
@@ -332,6 +333,9 @@ store_queue(Q) when not ?amqqueue_is_durable(Q) ->
store_queue_ram(Q) ->
ok = mnesia:write(rabbit_queue, rabbit_queue_decorator:set(Q), write).
+store_queue_ram_dirty(Q) ->
+ ok = mnesia:dirty_write(rabbit_queue, rabbit_queue_decorator:set(Q)).
+
-spec update_decorators(name()) -> 'ok'.
update_decorators(Name) ->
@@ -436,7 +440,7 @@ rebalance(Type, VhostSpec, QueueSpec) ->
maybe_rebalance({true, Id}, Type, VhostSpec, QueueSpec) ->
rabbit_log:info("Starting queue rebalance operation: '~s' for vhosts matching '~s' and queues matching '~s'",
[Type, VhostSpec, QueueSpec]),
- Running = rabbit_nodes:all_running(),
+ Running = rabbit_maintenance:filter_out_drained_nodes_consistent_read(rabbit_nodes:all_running()),
NumRunning = length(Running),
ToRebalance = [Q || Q <- rabbit_amqqueue:list(),
filter_per_type(Type, Q),
@@ -458,8 +462,9 @@ maybe_rebalance(false, _Type, _VhostSpec, _QueueSpec) ->
rabbit_log:warning("Queue rebalance operation is in progress, please wait."),
{error, rebalance_in_progress}.
-filter_per_type(all, _) ->
- true;
+%% Stream queues don't yet support rebalance
+filter_per_type(all, Q) ->
+ ?amqqueue_is_quorum(Q) or ?amqqueue_is_classic(Q);
filter_per_type(quorum, Q) ->
?amqqueue_is_quorum(Q);
filter_per_type(classic, Q) ->
@@ -470,11 +475,14 @@ rebalance_module(Q) when ?amqqueue_is_quorum(Q) ->
rebalance_module(Q) when ?amqqueue_is_classic(Q) ->
rabbit_mirror_queue_misc.
-get_resource_name(#resource{name = Name}) ->
+get_resource_name(#resource{name = Name}) ->
Name.
-is_match(Subj, E) ->
- nomatch /= re:run(Subj, E).
+get_resource_vhost_name(#resource{virtual_host = VHostName}) ->
+ VHostName.
+
+is_match(Subj, RegEx) ->
+ nomatch /= re:run(Subj, RegEx).
iterative_rebalance(ByNode, MaxQueuesDesired) ->
case maybe_migrate(ByNode, MaxQueuesDesired) of
@@ -496,24 +504,24 @@ maybe_migrate(ByNode, _, []) ->
?amqqueue_is_classic(Q)
end, V),
[[{<<"Node name">>, K}, {<<"Number of quorum queues">>, length(QQs)},
- {<<"Number of classic queues">>, length(CQs)}] | Acc]
+ {<<"Number of replicated classic queues">>, length(CQs)}] | Acc]
end, [], ByNode)};
maybe_migrate(ByNode, MaxQueuesDesired, [N | Nodes]) ->
case maps:get(N, ByNode, []) of
[{_, Q, false} = Queue | Queues] = All when length(All) > MaxQueuesDesired ->
Name = amqqueue:get_name(Q),
Module = rebalance_module(Q),
- OtherNodes = Module:get_replicas(Q) -- [N],
- case OtherNodes of
+ Candidates = rabbit_maintenance:filter_out_drained_nodes_local_read(Module:get_replicas(Q) -- [N]),
+ case Candidates of
[] ->
{not_migrated, update_not_migrated_queue(N, Queue, Queues, ByNode)};
_ ->
- [{Length, Destination} | _] = sort_by_number_of_queues(OtherNodes, ByNode),
- rabbit_log:warning("Migrating queue ~p from node ~p with ~p queues to node ~p with ~p queues",
+ [{Length, Destination} | _] = sort_by_number_of_queues(Candidates, ByNode),
+ rabbit_log:info("Migrating queue ~p from node ~p with ~p queues to node ~p with ~p queues",
[Name, N, length(All), Destination, Length]),
case Module:transfer_leadership(Q, Destination) of
{migrated, NewNode} ->
- rabbit_log:warning("Queue ~p migrated to ~p", [Name, NewNode]),
+ rabbit_log:info("Queue ~p migrated to ~p", [Name, NewNode]),
{migrated, update_migrated_queue(Destination, N, Queue, Queues, ByNode)};
{not_migrated, Reason} ->
rabbit_log:warning("Error migrating queue ~p: ~p", [Name, Reason]),
@@ -525,7 +533,7 @@ maybe_migrate(ByNode, MaxQueuesDesired, [N | Nodes]) ->
"Do nothing", [N, length(All)]),
maybe_migrate(ByNode, MaxQueuesDesired, Nodes);
All ->
- rabbit_log:warning("Node ~p only contains ~p queues, do nothing",
+ rabbit_log:debug("Node ~p only contains ~p queues, do nothing",
[N, length(All)]),
maybe_migrate(ByNode, MaxQueuesDesired, Nodes)
end.
@@ -552,7 +560,7 @@ group_by_node(Queues) ->
Module = rebalance_module(Q),
Length = Module:queue_length(Q),
maps:update_with(amqqueue:qnode(Q),
- fun(L) -> [{Length, Q, false} | L] end,
+ fun(L) -> [{Length, Q, false} | L] end,
[{Length, Q, false}], Acc)
end, #{}, Queues),
maps:map(fun(_K, V) -> lists:keysort(1, V) end, ByNode).
@@ -624,7 +632,7 @@ retry_wait(Q, F, E, RetriesLeft) ->
% The old check would have crashed here,
% instead, log it and run the exit fun. absent & alive is weird,
% but better than crashing with badmatch,true
- rabbit_log:debug("Unexpected alive queue process ~p~n", [QPid]),
+ rabbit_log:debug("Unexpected alive queue process ~p", [QPid]),
E({absent, Q, alive});
false ->
ok % Expected result
@@ -675,7 +683,7 @@ priv_absent(QueueName, QPid, true, nodedown) ->
rabbit_misc:protocol_error(
not_found,
"home node '~s' of durable ~s is down or inaccessible",
- [node(QPid), rabbit_misc:rs(QueueName)]);
+ [amqqueue:qnode(QPid), rabbit_misc:rs(QueueName)]);
priv_absent(QueueName, _QPid, _IsDurable, stopped) ->
rabbit_misc:protocol_error(
@@ -781,27 +789,41 @@ declare_args() ->
{<<"x-queue-type">>, fun check_queue_type/2},
{<<"x-quorum-initial-group-size">>, fun check_initial_cluster_size_arg/2},
{<<"x-max-age">>, fun check_max_age_arg/2},
- {<<"x-max-segment-size">>, fun check_non_neg_int_arg/2},
+ {<<"x-stream-max-segment-size-bytes">>, fun check_non_neg_int_arg/2},
{<<"x-initial-cluster-size">>, fun check_initial_cluster_size_arg/2},
{<<"x-queue-leader-locator">>, fun check_queue_leader_locator_arg/2}].
consume_args() -> [{<<"x-priority">>, fun check_int_arg/2},
- {<<"x-cancel-on-ha-failover">>, fun check_bool_arg/2}].
+ {<<"x-cancel-on-ha-failover">>, fun check_bool_arg/2},
+ {<<"x-stream-offset">>, fun check_stream_offset_arg/2}].
check_int_arg({Type, _}, _) ->
case lists:member(Type, ?INTEGER_ARG_TYPES) of
true -> ok;
- false -> {error, {unacceptable_type, Type}}
- end.
+ false -> {error, rabbit_misc:format("expected integer, got ~p", [Type])}
+ end;
+check_int_arg(Val, _) when is_integer(Val) ->
+ ok;
+check_int_arg(_Val, _) ->
+ {error, {unacceptable_type, "expected integer"}}.
check_bool_arg({bool, _}, _) -> ok;
-check_bool_arg({Type, _}, _) -> {error, {unacceptable_type, Type}}.
+check_bool_arg({Type, _}, _) -> {error, {unacceptable_type, Type}};
+check_bool_arg(true, _) -> ok;
+check_bool_arg(false, _) -> ok;
+check_bool_arg(_Val, _) -> {error, {unacceptable_type, "expected boolean"}}.
check_non_neg_int_arg({Type, Val}, Args) ->
case check_int_arg({Type, Val}, Args) of
ok when Val >= 0 -> ok;
ok -> {error, {value_negative, Val}};
Error -> Error
+ end;
+check_non_neg_int_arg(Val, Args) ->
+ case check_int_arg(Val, Args) of
+ ok when Val >= 0 -> ok;
+ ok -> {error, {value_negative, Val}};
+ Error -> Error
end.
check_expires_arg({Type, Val}, Args) ->
@@ -809,12 +831,23 @@ check_expires_arg({Type, Val}, Args) ->
ok when Val == 0 -> {error, {value_zero, Val}};
ok -> rabbit_misc:check_expiry(Val);
Error -> Error
+ end;
+check_expires_arg(Val, Args) ->
+ case check_int_arg(Val, Args) of
+ ok when Val == 0 -> {error, {value_zero, Val}};
+ ok -> rabbit_misc:check_expiry(Val);
+ Error -> Error
end.
check_message_ttl_arg({Type, Val}, Args) ->
case check_int_arg({Type, Val}, Args) of
ok -> rabbit_misc:check_expiry(Val);
Error -> Error
+ end;
+check_message_ttl_arg(Val, Args) ->
+ case check_int_arg(Val, Args) of
+ ok -> rabbit_misc:check_expiry(Val);
+ Error -> Error
end.
check_max_priority_arg({Type, Val}, Args) ->
@@ -822,19 +855,30 @@ check_max_priority_arg({Type, Val}, Args) ->
ok when Val =< ?MAX_SUPPORTED_PRIORITY -> ok;
ok -> {error, {max_value_exceeded, Val}};
Error -> Error
+ end;
+check_max_priority_arg(Val, Args) ->
+ case check_non_neg_int_arg(Val, Args) of
+ ok when Val =< ?MAX_SUPPORTED_PRIORITY -> ok;
+ ok -> {error, {max_value_exceeded, Val}};
+ Error -> Error
end.
check_single_active_consumer_arg({Type, Val}, Args) ->
- case check_bool_arg({Type, Val}, Args) of
- ok -> ok;
- Error -> Error
- end.
+ check_bool_arg({Type, Val}, Args);
+check_single_active_consumer_arg(Val, Args) ->
+ check_bool_arg(Val, Args).
check_initial_cluster_size_arg({Type, Val}, Args) ->
case check_non_neg_int_arg({Type, Val}, Args) of
ok when Val == 0 -> {error, {value_zero, Val}};
ok -> ok;
Error -> Error
+ end;
+check_initial_cluster_size_arg(Val, Args) ->
+ case check_non_neg_int_arg(Val, Args) of
+ ok when Val == 0 -> {error, {value_zero, Val}};
+ ok -> ok;
+ Error -> Error
end.
check_max_age_arg({longstr, Val}, _Args) ->
@@ -882,7 +926,9 @@ unit_value_in_ms("s") ->
%% Note that the validity of x-dead-letter-exchange is already verified
%% by rabbit_channel's queue.declare handler.
check_dlxname_arg({longstr, _}, _) -> ok;
-check_dlxname_arg({Type, _}, _) -> {error, {unacceptable_type, Type}}.
+check_dlxname_arg({Type, _}, _) -> {error, {unacceptable_type, Type}};
+check_dlxname_arg(Val, _) when is_list(Val) or is_binary(Val) -> ok;
+check_dlxname_arg(_Val, _) -> {error, {unacceptable_type, "expected a string (valid exchange name)"}}.
check_dlxrk_arg({longstr, _}, Args) ->
case rabbit_misc:table_lookup(Args, <<"x-dead-letter-exchange">>) of
@@ -890,43 +936,86 @@ check_dlxrk_arg({longstr, _}, Args) ->
_ -> ok
end;
check_dlxrk_arg({Type, _}, _Args) ->
- {error, {unacceptable_type, Type}}.
+ {error, {unacceptable_type, Type}};
+check_dlxrk_arg(Val, Args) when is_binary(Val) ->
+ case rabbit_misc:table_lookup(Args, <<"x-dead-letter-exchange">>) of
+ undefined -> {error, routing_key_but_no_dlx_defined};
+ _ -> ok
+ end;
+check_dlxrk_arg(_Val, _Args) ->
+ {error, {unacceptable_type, "expected a string"}}.
+-define(KNOWN_OVERFLOW_MODES, [<<"drop-head">>, <<"reject-publish">>, <<"reject-publish-dlx">>]).
check_overflow({longstr, Val}, _Args) ->
- case lists:member(Val, [<<"drop-head">>,
- <<"reject-publish">>,
- <<"reject-publish-dlx">>]) of
+ case lists:member(Val, ?KNOWN_OVERFLOW_MODES) of
true -> ok;
false -> {error, invalid_overflow}
end;
check_overflow({Type, _}, _Args) ->
- {error, {unacceptable_type, Type}}.
+ {error, {unacceptable_type, Type}};
+check_overflow(Val, _Args) when is_binary(Val) ->
+ case lists:member(Val, ?KNOWN_OVERFLOW_MODES) of
+ true -> ok;
+ false -> {error, invalid_overflow}
+ end;
+check_overflow(_Val, _Args) ->
+ {error, invalid_overflow}.
+-define(KNOWN_LEADER_LOCATORS, [<<"client-local">>, <<"random">>, <<"least-leaders">>]).
check_queue_leader_locator_arg({longstr, Val}, _Args) ->
- case lists:member(Val, [<<"client-local">>,
- <<"random">>,
- <<"least-leaders">>]) of
+ case lists:member(Val, ?KNOWN_LEADER_LOCATORS) of
true -> ok;
false -> {error, invalid_queue_locator_arg}
end;
check_queue_leader_locator_arg({Type, _}, _Args) ->
- {error, {unacceptable_type, Type}}.
+ {error, {unacceptable_type, Type}};
+check_queue_leader_locator_arg(Val, _Args) when is_binary(Val) ->
+ case lists:member(Val, ?KNOWN_LEADER_LOCATORS) of
+ true -> ok;
+ false -> {error, invalid_queue_locator_arg}
+ end;
+check_queue_leader_locator_arg(_Val, _Args) ->
+ {error, invalid_queue_locator_arg}.
+
+check_stream_offset_arg(Val, _Args) ->
+ case rabbit_stream_queue:parse_offset_arg(Val) of
+ {ok, _} ->
+ ok;
+ {error, _} ->
+ {error, {invalid_stream_offset_arg, Val}}
+ end.
+-define(KNOWN_QUEUE_MODES, [<<"default">>, <<"lazy">>]).
check_queue_mode({longstr, Val}, _Args) ->
- case lists:member(Val, [<<"default">>, <<"lazy">>]) of
+ case lists:member(Val, ?KNOWN_QUEUE_MODES) of
true -> ok;
- false -> {error, invalid_queue_mode}
+ false -> {error, rabbit_misc:format("unsupported queue mode '~s'", [Val])}
end;
check_queue_mode({Type, _}, _Args) ->
- {error, {unacceptable_type, Type}}.
+ {error, {unacceptable_type, Type}};
+check_queue_mode(Val, _Args) when is_binary(Val) ->
+ case lists:member(Val, ?KNOWN_QUEUE_MODES) of
+ true -> ok;
+ false -> {error, rabbit_misc:format("unsupported queue mode '~s'", [Val])}
+ end;
+check_queue_mode(_Val, _Args) ->
+ {error, invalid_queue_mode}.
+-define(KNOWN_QUEUE_TYPES, [<<"classic">>, <<"quorum">>, <<"stream">>]).
check_queue_type({longstr, Val}, _Args) ->
- case lists:member(Val, [<<"classic">>, <<"quorum">>, <<"stream">>]) of
+ case lists:member(Val, ?KNOWN_QUEUE_TYPES) of
true -> ok;
- false -> {error, invalid_queue_type}
+ false -> {error, rabbit_misc:format("unsupported queue type '~s'", [Val])}
end;
check_queue_type({Type, _}, _Args) ->
- {error, {unacceptable_type, Type}}.
+ {error, {unacceptable_type, Type}};
+check_queue_type(Val, _Args) when is_binary(Val) ->
+ case lists:member(Val, ?KNOWN_QUEUE_TYPES) of
+ true -> ok;
+ false -> {error, rabbit_misc:format("unsupported queue type '~s'", [Val])}
+ end;
+check_queue_type(_Val, _Args) ->
+ {error, invalid_queue_type}.
-spec list() -> [amqqueue:amqqueue()].
@@ -953,8 +1042,8 @@ list_local_names() ->
list_local_names_down() ->
[ amqqueue:get_name(Q) || Q <- list(),
- is_down(Q),
- is_local_to_node(amqqueue:get_pid(Q), node())].
+ is_local_to_node(amqqueue:get_pid(Q), node()),
+ is_down(Q)].
is_down(Q) ->
try
@@ -999,6 +1088,7 @@ sample_n(Queues, N) when is_list(Queues) andalso is_integer(N) andalso N > 0 ->
list_by_type(classic) -> list_by_type(rabbit_classic_queue);
list_by_type(quorum) -> list_by_type(rabbit_quorum_queue);
+list_by_type(stream) -> list_by_type(rabbit_stream_queue);
list_by_type(Type) ->
{atomic, Qs} =
mnesia:sync_transaction(
@@ -1022,6 +1112,12 @@ list_local_quorum_queues() ->
amqqueue:get_state(Q) =/= crashed,
lists:member(node(), get_quorum_nodes(Q))].
+-spec list_local_stream_queues() -> [amqqueue:amqqueue()].
+list_local_stream_queues() ->
+ [ Q || Q <- list_by_type(stream),
+ amqqueue:get_state(Q) =/= crashed,
+ lists:member(node(), get_quorum_nodes(Q))].
+
-spec list_local_leaders() -> [amqqueue:amqqueue()].
list_local_leaders() ->
[ Q || Q <- list(),
@@ -1080,13 +1176,32 @@ list_local_mirrored_classic_without_synchronised_mirrors_for_cli() ->
}
end || Q <- ClassicQs].
+-spec list_local_quorum_queues_with_name_matching(binary()) -> [amqqueue:amqqueue()].
+list_local_quorum_queues_with_name_matching(Pattern) ->
+ [ Q || Q <- list_by_type(quorum),
+ amqqueue:get_state(Q) =/= crashed,
+ lists:member(node(), get_quorum_nodes(Q)),
+ is_match(get_resource_name(amqqueue:get_name(Q)), Pattern)].
+
+-spec list_local_quorum_queues_with_name_matching(vhost:name(), binary()) -> [amqqueue:amqqueue()].
+list_local_quorum_queues_with_name_matching(VHostName, Pattern) ->
+ [ Q || Q <- list_by_type(quorum),
+ amqqueue:get_state(Q) =/= crashed,
+ lists:member(node(), get_quorum_nodes(Q)),
+ is_in_virtual_host(Q, VHostName),
+ is_match(get_resource_name(amqqueue:get_name(Q)), Pattern)].
+
is_local_to_node(QPid, Node) when ?IS_CLASSIC(QPid) ->
Node =:= node(QPid);
is_local_to_node({_, Leader} = QPid, Node) when ?IS_QUORUM(QPid) ->
- Node =:= Leader.
+ Node =:= Leader;
+is_local_to_node(_QPid, _Node) ->
+ false.
--spec list(rabbit_types:vhost()) -> [amqqueue:amqqueue()].
+is_in_virtual_host(Q, VHostName) ->
+ VHostName =:= get_resource_vhost_name(amqqueue:get_name(Q)).
+-spec list(vhost:name()) -> [amqqueue:amqqueue()].
list(VHostPath) ->
list(VHostPath, rabbit_queue).
@@ -1160,7 +1275,7 @@ count(VHost) ->
%% that requires a proper consensus algorithm.
length(list_for_count(VHost))
catch _:Err ->
- rabbit_log:error("Failed to fetch number of queues in vhost ~p:~n~p~n",
+ rabbit_log:error("Failed to fetch number of queues in vhost ~p:~n~p",
[VHost, Err]),
0
end.
@@ -1205,6 +1320,17 @@ is_unresponsive(Q, Timeout) when ?amqqueue_is_quorum(Q) ->
catch
exit:{timeout, _} ->
true
+ end;
+is_unresponsive(Q, Timeout) when ?amqqueue_is_stream(Q) ->
+ try
+ #{leader_pid := LeaderPid} = amqqueue:get_type_state(Q),
+ case gen_batch_server:call(LeaderPid, get_reader_context, Timeout) of
+ #{dir := _} -> false;
+ _ -> true
+ end
+ catch
+ exit:{timeout, _} ->
+ true
end.
format(Q) when ?amqqueue_is_quorum(Q) -> rabbit_quorum_queue:format(Q);
@@ -1494,6 +1620,13 @@ activate_limit_all(QRefs, ChPid) ->
delegate:invoke_no_result(QPids, {gen_server2, cast,
[{activate_limit, ChPid}]}).
+-spec deactivate_limit_all(qpids(), pid()) -> ok.
+
+deactivate_limit_all(QRefs, ChPid) ->
+ QPids = [P || P <- QRefs, ?IS_CLASSIC(P)],
+ delegate:invoke_no_result(QPids, {gen_server2, cast,
+ [{deactivate_limit, ChPid}]}).
+
-spec credit(amqqueue:amqqueue(),
rabbit_types:ctag(),
non_neg_integer(),
@@ -1550,8 +1683,7 @@ basic_cancel(Q, ConsumerTag, OkMsg, ActingUser, QStates) ->
-spec notify_decorators(amqqueue:amqqueue()) -> 'ok'.
notify_decorators(Q) ->
- QPid = amqqueue:get_pid(Q),
- delegate:invoke_no_result(QPid, {gen_server2, cast, [notify_decorators]}).
+ rabbit_queue_type:notify_decorators(Q).
notify_sent(QPid, ChPid) ->
rabbit_amqqueue_common:notify_sent(QPid, ChPid).
@@ -1660,7 +1792,7 @@ forget_node_for_queue(DeadNode, [H|T], Q) when ?is_amqqueue(Q) ->
node_permits_offline_promotion(Node) ->
case node() of
Node -> not rabbit:is_running(); %% [1]
- _ -> All = rabbit_mnesia:cluster_nodes(all),
+ _ -> All = rabbit_nodes:all(),
Running = rabbit_nodes:all_running(),
lists:member(Node, All) andalso
not lists:member(Node, Running) %% [2]
@@ -1714,10 +1846,11 @@ cancel_sync_mirrors(QPid) ->
-spec is_replicated(amqqueue:amqqueue()) -> boolean().
-is_replicated(Q) when ?amqqueue_is_quorum(Q) ->
- true;
-is_replicated(Q) ->
- rabbit_mirror_queue_misc:is_mirrored(Q).
+is_replicated(Q) when ?amqqueue_is_classic(Q) ->
+ rabbit_mirror_queue_misc:is_mirrored(Q);
+is_replicated(_Q) ->
+ %% streams and quorum queues are all replicated
+ true.
is_exclusive(Q) when ?amqqueue_exclusive_owner_is(Q, none) ->
false;
@@ -1785,7 +1918,11 @@ maybe_clear_recoverable_node(Node, Q) ->
-spec on_node_down(node()) -> 'ok'.
on_node_down(Node) ->
- {QueueNames, QueueDeletions} = delete_queues_on_node_down(Node),
+ {Time, {QueueNames, QueueDeletions}} = timer:tc(fun() -> delete_queues_on_node_down(Node) end),
+ case length(QueueNames) of
+ 0 -> ok;
+ _ -> rabbit_log:info("~p transient queues from an old incarnation of node ~p deleted in ~fs", [length(QueueNames), Node, Time/1000000])
+ end,
notify_queue_binding_deletions(QueueDeletions),
rabbit_core_metrics:queues_deleted(QueueNames),
notify_queues_deleted(QueueNames),
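
Two threads run through the rabbit_amqqueue.erl changes above: rebalance and leader placement now consult rabbit_maintenance so drained nodes are skipped, and the check_*_arg validators gain clauses that accept bare Erlang terms next to the AMQP 0-9-1 {Type, Value} tuples, presumably so callers that do not go through the wire protocol can reuse the same checks. A rough sketch of the two accepted input shapes, calling the (non-exported) validators directly with made-up values:

    %% wire-typed and bare forms both pass the integer check
    ok = check_int_arg({long, 5}, []),
    ok = check_int_arg(5, []),
    %% unknown queue types are still rejected, now with a readable reason
    ok = check_queue_type(<<"quorum">>, []),
    {error, _} = check_queue_type({longstr, <<"priority">>}, []),
    {error, _} = check_queue_type(<<"priority">>, []).
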
diff --git a/deps/rabbit/src/rabbit_amqqueue_process.erl b/deps/rabbit/src/rabbit_amqqueue_process.erl
index abad3b5ad4..a9d673d65d 100644
--- a/deps/rabbit/src/rabbit_amqqueue_process.erl
+++ b/deps/rabbit/src/rabbit_amqqueue_process.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_amqqueue_process).
@@ -110,6 +110,7 @@
single_active_consumer_tag,
consumers,
consumer_utilisation,
+ consumer_capacity,
memory,
slave_pids,
synchronised_slave_pids,
@@ -1118,10 +1119,12 @@ i(messages, State) ->
messages_unacknowledged]]);
i(consumers, _) ->
rabbit_queue_consumers:count();
-i(consumer_utilisation, #q{consumers = Consumers}) ->
+i(consumer_utilisation, State) ->
+ i(consumer_capacity, State);
+i(consumer_capacity, #q{consumers = Consumers}) ->
case rabbit_queue_consumers:count() of
- 0 -> '';
- _ -> rabbit_queue_consumers:utilisation(Consumers)
+ 0 -> 0;
+ _ -> rabbit_queue_consumers:capacity(Consumers)
end;
i(memory, _) ->
{memory, M} = process_info(self(), memory),
@@ -1579,6 +1582,10 @@ handle_cast({activate_limit, ChPid}, State) ->
noreply(possibly_unblock(rabbit_queue_consumers:activate_limit_fun(),
ChPid, State));
+handle_cast({deactivate_limit, ChPid}, State) ->
+ noreply(possibly_unblock(rabbit_queue_consumers:deactivate_limit_fun(),
+ ChPid, State));
+
handle_cast({set_ram_duration_target, Duration},
State = #q{backing_queue = BQ, backing_queue_state = BQS}) ->
BQS1 = BQ:set_ram_duration_target(Duration, BQS),
@@ -1617,23 +1624,16 @@ handle_cast({credit, ChPid, CTag, Credit, Drain},
% This event is necessary for the stats timer to be initialized with
% the correct values once the management agent has started
handle_cast({force_event_refresh, Ref},
- State = #q{consumers = Consumers,
- active_consumer = Holder}) ->
+ State = #q{consumers = Consumers}) ->
rabbit_event:notify(queue_created, infos(?CREATION_EVENT_KEYS, State), Ref),
QName = qname(State),
AllConsumers = rabbit_queue_consumers:all(Consumers),
- case Holder of
- none ->
- [emit_consumer_created(
- Ch, CTag, false, AckRequired, QName, Prefetch,
- Args, Ref, ActingUser) ||
- {Ch, CTag, AckRequired, Prefetch, _, _, Args, ActingUser}
- <- AllConsumers];
- {Ch, CTag} ->
- [{Ch, CTag, AckRequired, Prefetch, _, _, Args, ActingUser}] = AllConsumers,
- emit_consumer_created(
- Ch, CTag, true, AckRequired, QName, Prefetch, Args, Ref, ActingUser)
- end,
+ rabbit_log:debug("Queue ~s forced to re-emit events, consumers: ~p", [rabbit_misc:rs(QName), AllConsumers]),
+ [emit_consumer_created(
+ Ch, CTag, ActiveOrExclusive, AckRequired, QName, Prefetch,
+ Args, Ref, ActingUser) ||
+ {Ch, CTag, AckRequired, Prefetch, ActiveOrExclusive, _, Args, ActingUser}
+ <- AllConsumers],
noreply(rabbit_event:init_stats_timer(State, #q.stats_timer));
handle_cast(notify_decorators, State) ->
@@ -1750,8 +1750,7 @@ handle_pre_hibernate(State = #q{backing_queue = BQ,
State, #q.stats_timer,
fun () -> emit_stats(State,
[{idle_since,
- os:system_time(milli_seconds)},
- {consumer_utilisation, ''}])
+ os:system_time(milli_seconds)}])
end),
State1 = rabbit_event:stop_stats_timer(State#q{backing_queue_state = BQS3},
#q.stats_timer),
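
In rabbit_amqqueue_process.erl the consumer_utilisation info key is kept as a legacy alias that simply delegates to the new consumer_capacity, and an idle queue now reports 0 rather than the empty atom ''. A hedged sketch of what an info caller sees (the queue variable Q and the call site are illustrative):

    %% both keys return the same value for a classic queue Q
    [{consumer_utilisation, C}] = rabbit_amqqueue:info(Q, [consumer_utilisation]),
    [{consumer_capacity,    C}] = rabbit_amqqueue:info(Q, [consumer_capacity]).
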
diff --git a/deps/rabbit/src/rabbit_amqqueue_sup.erl b/deps/rabbit/src/rabbit_amqqueue_sup.erl
index a9eaf4087f..421af337a7 100644
--- a/deps/rabbit/src/rabbit_amqqueue_sup.erl
+++ b/deps/rabbit/src/rabbit_amqqueue_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_amqqueue_sup).
@@ -13,7 +13,7 @@
-export([init/1]).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
%%----------------------------------------------------------------------------
@@ -24,8 +24,8 @@ start_link(Q, StartMode) ->
Marker = spawn_link(fun() -> receive stop -> ok end end),
ChildSpec = {rabbit_amqqueue,
{rabbit_prequeue, start_link, [Q, StartMode, Marker]},
- intrinsic, ?WORKER_WAIT, worker, [rabbit_amqqueue_process,
- rabbit_mirror_queue_slave]},
+ intrinsic, ?CLASSIC_QUEUE_WORKER_WAIT, worker,
+ [rabbit_amqqueue_process, rabbit_mirror_queue_slave]},
{ok, SupPid} = supervisor2:start_link(?MODULE, []),
{ok, QPid} = supervisor2:start_child(SupPid, ChildSpec),
unlink(Marker),
diff --git a/deps/rabbit/src/rabbit_amqqueue_sup_sup.erl b/deps/rabbit/src/rabbit_amqqueue_sup_sup.erl
index 732816b79f..b10702c87b 100644
--- a/deps/rabbit/src/rabbit_amqqueue_sup_sup.erl
+++ b/deps/rabbit/src/rabbit_amqqueue_sup_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_amqqueue_sup_sup).
@@ -15,7 +15,7 @@
-export([init/1]).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-define(SERVER, ?MODULE).
diff --git a/deps/rabbit/src/rabbit_auth_backend_internal.erl b/deps/rabbit/src/rabbit_auth_backend_internal.erl
index cb930a1630..9295b65a4c 100644
--- a/deps/rabbit/src/rabbit_auth_backend_internal.erl
+++ b/deps/rabbit/src/rabbit_auth_backend_internal.erl
@@ -2,11 +2,11 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_auth_backend_internal).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-behaviour(rabbit_authn_backend).
-behaviour(rabbit_authz_backend).
@@ -14,12 +14,15 @@
-export([user_login_authentication/2, user_login_authorization/2,
check_vhost_access/3, check_resource_access/4, check_topic_access/4]).
--export([add_user/3, delete_user/2, lookup_user/1, exists/1,
+-export([add_user/3, add_user/4, add_user/5, delete_user/2, lookup_user/1, exists/1,
change_password/3, clear_password/2,
hash_password/2, change_password_hash/2, change_password_hash/3,
set_tags/3, set_permissions/6, clear_permissions/3,
set_topic_permissions/6, clear_topic_permissions/3, clear_topic_permissions/4,
- add_user_sans_validation/3, put_user/2, put_user/3]).
+ add_user_sans_validation/3, put_user/2, put_user/3,
+ update_user/5,
+ update_user_with_hash/5,
+ add_user_sans_validation/6]).
-export([set_user_limits/3, clear_user_limits/3, is_over_connection_limit/1,
is_over_channel_limit/1, get_user_limits/0, get_user_limits/1]).
@@ -39,6 +42,8 @@
%% for testing
-export([hashing_module_for_user/1, expand_topic_permission/2]).
+-import(rabbit_data_coercion, [to_atom/1, to_list/1, to_binary/1]).
+
%%----------------------------------------------------------------------------
-type regexp() :: binary().
@@ -195,7 +200,7 @@ validate_and_alternate_credentials(Username, Password, ActingUser, Fun) ->
ok ->
Fun(Username, Password, ActingUser);
{error, Err} ->
- rabbit_log:error("Credential validation for '~s' failed!~n", [Username]),
+ rabbit_log:error("Credential validation for '~s' failed!", [Username]),
{error, Err}
end.
@@ -206,14 +211,56 @@ add_user(Username, Password, ActingUser) ->
validate_and_alternate_credentials(Username, Password, ActingUser,
fun add_user_sans_validation/3).
+-spec add_user(rabbit_types:username(), rabbit_types:password(),
+ rabbit_types:username(), [atom()]) -> 'ok' | {'error', string()}.
+
+add_user(Username, Password, ActingUser, Tags) ->
+ add_user(Username, Password, ActingUser, undefined, Tags).
+
+add_user(Username, Password, ActingUser, Limits, Tags) ->
+ validate_and_alternate_credentials(Username, Password, ActingUser,
+ add_user_sans_validation(Limits, Tags)).
+
add_user_sans_validation(Username, Password, ActingUser) ->
+ add_user_sans_validation(Username, Password, ActingUser, undefined, []).
+
+add_user_sans_validation(Limits, Tags) ->
+ fun(Username, Password, ActingUser) ->
+ add_user_sans_validation(Username, Password, ActingUser, Limits, Tags)
+ end.
+
+add_user_sans_validation(Username, Password, ActingUser, Limits, Tags) ->
rabbit_log:debug("Asked to create a new user '~s', password length in bytes: ~p", [Username, bit_size(Password)]),
%% hash_password will pick the hashing function configured for us
%% but we also need to store a hint as part of the record, so we
%% retrieve it here one more time
HashingMod = rabbit_password:hashing_mod(),
PasswordHash = hash_password(HashingMod, Password),
- User = internal_user:create_user(Username, PasswordHash, HashingMod),
+ User0 = internal_user:create_user(Username, PasswordHash, HashingMod),
+ ConvertedTags = [rabbit_data_coercion:to_atom(I) || I <- Tags],
+ User1 = internal_user:set_tags(User0, ConvertedTags),
+ User = case Limits of
+ undefined -> User1;
+ Term -> internal_user:update_limits(add, User1, Term)
+ end,
+ add_user_sans_validation_in(Username, User, ConvertedTags, Limits, ActingUser).
+
+add_user_sans_validation(Username, PasswordHash, HashingAlgorithm, Tags, Limits, ActingUser) ->
+ rabbit_log:debug("Asked to create a new user '~s' with password hash", [Username]),
+ ConvertedTags = [rabbit_data_coercion:to_atom(I) || I <- Tags],
+ HashingMod = rabbit_password:hashing_mod(),
+ User0 = internal_user:create_user(Username, PasswordHash, HashingMod),
+ User1 = internal_user:set_tags(
+ internal_user:set_password_hash(User0,
+ PasswordHash, HashingAlgorithm),
+ ConvertedTags),
+ User = case Limits of
+ undefined -> User1;
+ Term -> internal_user:update_limits(add, User1, Term)
+ end,
+ add_user_sans_validation_in(Username, User, ConvertedTags, Limits, ActingUser).
+
+add_user_sans_validation_in(Username, User, ConvertedTags, Limits, ActingUser) ->
try
R = rabbit_misc:execute_mnesia_transaction(
fun () ->
@@ -227,17 +274,22 @@ add_user_sans_validation(Username, Password, ActingUser) ->
rabbit_log:info("Created user '~s'", [Username]),
rabbit_event:notify(user_created, [{name, Username},
{user_who_performed_action, ActingUser}]),
+ case ConvertedTags of
+ [] -> ok;
+ _ -> notify_user_tags_set(Username, ConvertedTags, ActingUser)
+ end,
+ case Limits of
+ undefined -> ok;
+ _ -> notify_limit_set(Username, ActingUser, Limits)
+ end,
R
catch
throw:{error, {user_already_exists, _}} = Error ->
rabbit_log:warning("Failed to add user '~s': the user already exists", [Username]),
throw(Error);
- throw:Error ->
- rabbit_log:warning("Failed to add user '~s': ~p", [Username, Error]),
- throw(Error);
- exit:Error ->
+ Class:Error:Stacktrace ->
rabbit_log:warning("Failed to add user '~s': ~p", [Username, Error]),
- exit(Error)
+ erlang:raise(Class, Error, Stacktrace)
end .
-spec delete_user(rabbit_types:username(), rabbit_types:username()) -> 'ok'.
@@ -273,12 +325,9 @@ delete_user(Username, ActingUser) ->
throw:{error, {no_such_user, _}} = Error ->
rabbit_log:warning("Failed to delete user '~s': the user does not exist", [Username]),
throw(Error);
- throw:Error ->
+ Class:Error:Stacktrace ->
rabbit_log:warning("Failed to delete user '~s': ~p", [Username, Error]),
- throw(Error);
- exit:Error ->
- rabbit_log:warning("Failed to delete user '~s': ~p", [Username, Error]),
- exit(Error)
+ erlang:raise(Class, Error, Stacktrace)
end .
-spec lookup_user
@@ -321,18 +370,51 @@ change_password_sans_validation(Username, Password, ActingUser) ->
throw:{error, {no_such_user, _}} = Error ->
rabbit_log:warning("Failed to change password for user '~s': the user does not exist", [Username]),
throw(Error);
- throw:Error ->
- rabbit_log:warning("Failed to change password for user '~s': ~p", [Username, Error]),
- throw(Error);
- exit:Error ->
+ Class:Error:Stacktrace ->
rabbit_log:warning("Failed to change password for user '~s': ~p", [Username, Error]),
- exit(Error)
+ erlang:raise(Class, Error, Stacktrace)
+ end.
+
+update_user(Username, Password, Tags, Limits, ActingUser) ->
+ validate_and_alternate_credentials(Username, Password, ActingUser,
+ update_user_sans_validation(Tags, Limits)).
+
+update_user_sans_validation(Tags, Limits) ->
+ fun(Username, Password, ActingUser) ->
+ try
+ rabbit_log:debug("Asked to change password of user '~s', new password length in bytes: ~p", [Username, bit_size(Password)]),
+ HashingAlgorithm = rabbit_password:hashing_mod(),
+
+ rabbit_log:debug("Asked to set user tags for user '~s' to ~p", [Username, Tags]),
+
+ ConvertedTags = [rabbit_data_coercion:to_atom(I) || I <- Tags],
+ R = update_user_with_hash(Username,
+ hash_password(rabbit_password:hashing_mod(),
+ Password),
+ HashingAlgorithm,
+ ConvertedTags,
+ Limits),
+ rabbit_log:info("Successfully changed password for user '~s'", [Username]),
+ rabbit_event:notify(user_password_changed,
+ [{name, Username},
+ {user_who_performed_action, ActingUser}]),
+
+ notify_user_tags_set(Username, ConvertedTags, ActingUser),
+ R
+ catch
+ throw:{error, {no_such_user, _}} = Error ->
+ rabbit_log:warning("Failed to change password for user '~s': the user does not exist", [Username]),
+ throw(Error);
+ Class:Error:Stacktrace ->
+ rabbit_log:warning("Failed to change password for user '~s': ~p", [Username, Error]),
+ erlang:raise(Class, Error, Stacktrace)
+ end
end.
-spec clear_password(rabbit_types:username(), rabbit_types:username()) -> 'ok'.
clear_password(Username, ActingUser) ->
- rabbit_log:info("Clearing password for '~s'~n", [Username]),
+ rabbit_log:info("Clearing password for '~s'", [Username]),
R = change_password_hash(Username, <<"">>),
rabbit_event:notify(user_password_cleared,
[{name, Username},
@@ -353,10 +435,22 @@ change_password_hash(Username, PasswordHash) ->
change_password_hash(Username, PasswordHash, HashingAlgorithm) ->
- update_user(Username, fun(User) ->
- internal_user:set_password_hash(User,
- PasswordHash, HashingAlgorithm)
- end).
+ update_user_with_hash(Username, PasswordHash, HashingAlgorithm, [], undefined).
+
+update_user_with_hash(Username, PasswordHash, HashingAlgorithm, ConvertedTags, Limits) ->
+ update_user(Username,
+ fun(User0) ->
+ User1 = internal_user:set_password_hash(User0,
+ PasswordHash, HashingAlgorithm),
+ User2 = case Limits of
+ undefined -> User1;
+ _ -> internal_user:update_limits(add, User1, Limits)
+ end,
+ case ConvertedTags of
+ [] -> User2;
+ _ -> internal_user:set_tags(User2, ConvertedTags)
+ end
+ end).
-spec set_tags(rabbit_types:username(), [atom()], rabbit_types:username()) -> 'ok'.
@@ -367,22 +461,22 @@ set_tags(Username, Tags, ActingUser) ->
R = update_user(Username, fun(User) ->
internal_user:set_tags(User, ConvertedTags)
end),
- rabbit_log:info("Successfully set user tags for user '~s' to ~p", [Username, ConvertedTags]),
- rabbit_event:notify(user_tags_set, [{name, Username}, {tags, ConvertedTags},
- {user_who_performed_action, ActingUser}]),
+ notify_user_tags_set(Username, ConvertedTags, ActingUser),
R
catch
throw:{error, {no_such_user, _}} = Error ->
rabbit_log:warning("Failed to set tags for user '~s': the user does not exist", [Username]),
throw(Error);
- throw:Error ->
+ Class:Error:Stacktrace ->
rabbit_log:warning("Failed to set tags for user '~s': ~p", [Username, Error]),
- throw(Error);
- exit:Error ->
- rabbit_log:warning("Failed to set tags for user '~s': ~p", [Username, Error]),
- exit(Error)
+ erlang:raise(Class, Error, Stacktrace)
end .
+notify_user_tags_set(Username, ConvertedTags, ActingUser) ->
+ rabbit_log:info("Successfully set user tags for user '~s' to ~p", [Username, ConvertedTags]),
+ rabbit_event:notify(user_tags_set, [{name, Username}, {tags, ConvertedTags},
+ {user_who_performed_action, ActingUser}]).
+
-spec set_permissions
(rabbit_types:username(), rabbit_types:vhost(), regexp(), regexp(),
regexp(), rabbit_types:username()) ->
@@ -438,14 +532,10 @@ set_permissions(Username, VirtualHost, ConfigurePerm, WritePerm, ReadPerm, Actin
rabbit_log:warning("Failed to set permissions for '~s': the user does not exist",
[Username]),
throw(Error);
- throw:Error ->
- rabbit_log:warning("Failed to set permissions for '~s' in virtual host '~s': ~p",
- [Username, VirtualHost, Error]),
- throw(Error);
- exit:Error ->
+ Class:Error:Stacktrace ->
rabbit_log:warning("Failed to set permissions for '~s' in virtual host '~s': ~p",
[Username, VirtualHost, Error]),
- exit(Error)
+ erlang:raise(Class, Error, Stacktrace)
end.
-spec clear_permissions
@@ -478,14 +568,10 @@ clear_permissions(Username, VirtualHost, ActingUser) ->
rabbit_log:warning("Failed to clear permissions for '~s': the user does not exist",
[Username]),
throw(Error);
- throw:Error ->
+ Class:Error:Stacktrace ->
rabbit_log:warning("Failed to clear permissions for '~s' in virtual host '~s': ~p",
[Username, VirtualHost, Error]),
- throw(Error);
- exit:Error ->
- rabbit_log:warning("Failed to clear permissions for '~s' in virtual host '~s': ~p",
- [Username, VirtualHost, Error]),
- exit(Error)
+ erlang:raise(Class, Error, Stacktrace)
end.
@@ -555,14 +641,10 @@ set_topic_permissions(Username, VirtualHost, Exchange, WritePerm, ReadPerm, Acti
rabbit_log:warning("Failed to set topic permissions on exchange '~s' for '~s': the user does not exist.",
[Exchange, Username]),
throw(Error);
- throw:Error ->
- rabbit_log:warning("Failed to set topic permissions on exchange '~s' for '~s' in virtual host '~s': ~p.",
- [Exchange, Username, VirtualHost, Error]),
- throw(Error);
- exit:Error ->
+ Class:Error:Stacktrace ->
rabbit_log:warning("Failed to set topic permissions on exchange '~s' for '~s' in virtual host '~s': ~p.",
[Exchange, Username, VirtualHost, Error]),
- exit(Error)
+ erlang:raise(Class, Error, Stacktrace)
end .
clear_topic_permissions(Username, VirtualHost, ActingUser) ->
@@ -594,14 +676,10 @@ clear_topic_permissions(Username, VirtualHost, ActingUser) ->
rabbit_log:warning("Failed to clear topic permissions for '~s': the user does not exist",
[Username]),
throw(Error);
- throw:Error ->
+ Class:Error:Stacktrace ->
rabbit_log:warning("Failed to clear topic permissions for '~s' in virtual host '~s': ~p",
[Username, VirtualHost, Error]),
- throw(Error);
- exit:Error ->
- rabbit_log:warning("Failed to clear topic permissions for '~s' in virtual host '~s': ~p",
- [Username, VirtualHost, Error]),
- exit(Error)
+ erlang:raise(Class, Error, Stacktrace)
end.
clear_topic_permissions(Username, VirtualHost, Exchange, ActingUser) ->
@@ -635,14 +713,10 @@ clear_topic_permissions(Username, VirtualHost, Exchange, ActingUser) ->
rabbit_log:warning("Failed to clear topic permissions on exchange '~s' for '~s': the user does not exist",
[Exchange, Username]),
throw(Error);
- throw:Error ->
- rabbit_log:warning("Failed to clear topic permissions on exchange '~s' for '~s' in virtual host '~s': ~p",
- [Exchange, Username, VirtualHost, Error]),
- throw(Error);
- exit:Error ->
+ Class:Error:Stacktrace ->
rabbit_log:warning("Failed to clear topic permissions on exchange '~s' for '~s' in virtual host '~s': ~p",
[Exchange, Username, VirtualHost, Error]),
- exit(Error)
+ erlang:raise(Class, Error, Stacktrace)
end.
put_user(User, ActingUser) -> put_user(User, undefined, ActingUser).
@@ -662,9 +736,8 @@ put_user(User, Version, ActingUser) ->
true -> [administrator];
false -> []
end;
- {TagsS, _} ->
- [list_to_atom(string:strip(T)) ||
- T <- string:tokens(binary_to_list(TagsS), ",")]
+ {TagsVal, _} ->
+ tag_list_from(TagsVal)
end,
%% pre-configured, only applies to newly created users
@@ -679,13 +752,27 @@ put_user(User, Version, ActingUser) ->
rabbit_credential_validation:validate(Username, Password) =:= ok
end,
+ Limits = case rabbit_feature_flags:is_enabled(user_limits) of
+ false ->
+ undefined;
+ true ->
+ case maps:get(limits, User, undefined) of
+ undefined ->
+ undefined;
+ Term ->
+ case validate_user_limits(Term) of
+ ok -> Term;
+ Error -> throw(Error)
+ end
+ end
+ end,
case exists(Username) of
true ->
case {HasPassword, HasPasswordHash} of
{true, false} ->
- update_user_password(PassedCredentialValidation, Username, Password, Tags, ActingUser);
+ update_user_password(PassedCredentialValidation, Username, Password, Tags, Limits, ActingUser);
{false, true} ->
- update_user_password_hash(Username, PasswordHash, Tags, User, Version, ActingUser);
+ update_user_password_hash(Username, PasswordHash, Tags, Limits, User, Version);
{true, true} ->
throw({error, both_password_and_password_hash_are_provided});
%% clear password, update tags if needed
@@ -696,63 +783,54 @@ put_user(User, Version, ActingUser) ->
false ->
case {HasPassword, HasPasswordHash} of
{true, false} ->
- create_user_with_password(PassedCredentialValidation, Username, Password, Tags, Permissions, ActingUser);
+ create_user_with_password(PassedCredentialValidation, Username, Password, Tags, Permissions, Limits, ActingUser);
{false, true} ->
- create_user_with_password_hash(Username, PasswordHash, Tags, User, Version, Permissions, ActingUser);
+ create_user_with_password_hash(Username, PasswordHash, Tags, User, Version, Permissions, Limits, ActingUser);
{true, true} ->
throw({error, both_password_and_password_hash_are_provided});
{false, false} ->
%% this user won't be able to sign in using
%% a username/password pair but can be used for x509 certificate authentication,
%% with authn backends such as HTTP or LDAP and so on.
- create_user_with_password(PassedCredentialValidation, Username, <<"">>, Tags, Permissions, ActingUser)
+ create_user_with_password(PassedCredentialValidation, Username, <<"">>, Tags, Permissions, Limits, ActingUser)
end
end.
-update_user_password(_PassedCredentialValidation = true, Username, Password, Tags, ActingUser) ->
- rabbit_auth_backend_internal:change_password(Username, Password, ActingUser),
- rabbit_auth_backend_internal:set_tags(Username, Tags, ActingUser);
-update_user_password(_PassedCredentialValidation = false, _Username, _Password, _Tags, _ActingUser) ->
+update_user_password(_PassedCredentialValidation = true, Username, Password, Tags, Limits, ActingUser) ->
+ %% change_password, set_tags and limits
+ rabbit_auth_backend_internal:update_user(Username, Password, Tags, Limits, ActingUser);
+update_user_password(_PassedCredentialValidation = false, _Username, _Password, _Tags, _Limits, _ActingUser) ->
%% we don't log here because
%% rabbit_auth_backend_internal will do it
throw({error, credential_validation_failed}).
-update_user_password_hash(Username, PasswordHash, Tags, User, Version, ActingUser) ->
+update_user_password_hash(Username, PasswordHash, Tags, Limits, User, Version) ->
    %% when a hash is provided, credential validation
%% is not applied
HashingAlgorithm = hashing_algorithm(User, Version),
Hash = rabbit_misc:b64decode_or_throw(PasswordHash),
- rabbit_auth_backend_internal:change_password_hash(
- Username, Hash, HashingAlgorithm),
- rabbit_auth_backend_internal:set_tags(Username, Tags, ActingUser).
-
-create_user_with_password(_PassedCredentialValidation = true, Username, Password, Tags, undefined, ActingUser) ->
- rabbit_auth_backend_internal:add_user(Username, Password, ActingUser),
- rabbit_auth_backend_internal:set_tags(Username, Tags, ActingUser);
-create_user_with_password(_PassedCredentialValidation = true, Username, Password, Tags, PreconfiguredPermissions, ActingUser) ->
- rabbit_auth_backend_internal:add_user(Username, Password, ActingUser),
- rabbit_auth_backend_internal:set_tags(Username, Tags, ActingUser),
+ ConvertedTags = [rabbit_data_coercion:to_atom(I) || I <- Tags],
+ rabbit_auth_backend_internal:update_user_with_hash(
+ Username, Hash, HashingAlgorithm, ConvertedTags, Limits).
+
+create_user_with_password(_PassedCredentialValidation = true, Username, Password, Tags, undefined, Limits, ActingUser) ->
+ rabbit_auth_backend_internal:add_user(Username, Password, ActingUser, Limits, Tags);
+create_user_with_password(_PassedCredentialValidation = true, Username, Password, Tags, PreconfiguredPermissions, Limits, ActingUser) ->
+ rabbit_auth_backend_internal:add_user(Username, Password, ActingUser, Limits, Tags),
preconfigure_permissions(Username, PreconfiguredPermissions, ActingUser);
-create_user_with_password(_PassedCredentialValidation = false, _Username, _Password, _Tags, _, _) ->
+create_user_with_password(_PassedCredentialValidation = false, _Username, _Password, _Tags, _, _, _) ->
%% we don't log here because
%% rabbit_auth_backend_internal will do it
throw({error, credential_validation_failed}).
-create_user_with_password_hash(Username, PasswordHash, Tags, User, Version, PreconfiguredPermissions, ActingUser) ->
+create_user_with_password_hash(Username, PasswordHash, Tags, User, Version, PreconfiguredPermissions, Limits, ActingUser) ->
    %% when a hash is provided, credential validation
%% is not applied
HashingAlgorithm = hashing_algorithm(User, Version),
Hash = rabbit_misc:b64decode_or_throw(PasswordHash),
- %% first we create a user with dummy credentials and no
- %% validation applied, then we update password hash
- TmpPassword = rabbit_guid:binary(rabbit_guid:gen_secure(), "tmp"),
- rabbit_auth_backend_internal:add_user_sans_validation(Username, TmpPassword, ActingUser),
-
- rabbit_auth_backend_internal:change_password_hash(
- Username, Hash, HashingAlgorithm),
- rabbit_auth_backend_internal:set_tags(Username, Tags, ActingUser),
+ rabbit_auth_backend_internal:add_user_sans_validation(Username, Hash, HashingAlgorithm, Tags, Limits, ActingUser),
preconfigure_permissions(Username, PreconfiguredPermissions, ActingUser).
preconfigure_permissions(_Username, undefined, _ActingUser) ->
@@ -787,8 +865,7 @@ set_user_limits(Username, Definition, ActingUser) when is_map(Definition) ->
end.
validate_parameters_and_update_limit(Username, Term, ActingUser) ->
- case flatten_errors(rabbit_parameter_validation:proplist(
- <<"user-limits">>, user_limit_validation(), Term)) of
+ case validate_user_limits(Term) of
ok ->
update_user(Username, fun(User) ->
internal_user:update_limits(add, User, Term)
@@ -798,6 +875,10 @@ validate_parameters_and_update_limit(Username, Term, ActingUser) ->
{error_string, rabbit_misc:format(Reason, Arguments)}
end.
+validate_user_limits(Term) ->
+ flatten_errors(rabbit_parameter_validation:proplist(
+ <<"user-limits">>, user_limit_validation(), Term)).
+
user_limit_validation() ->
[{<<"max-connections">>, fun rabbit_parameter_validation:integer/2, optional},
{<<"max-channels">>, fun rabbit_parameter_validation:integer/2, optional}].
@@ -813,6 +894,11 @@ clear_user_limits(Username, LimitType, ActingUser) ->
end),
notify_limit_clear(Username, ActingUser).
+tag_list_from(Tags) when is_list(Tags) ->
+ [to_atom(string:strip(to_list(T))) || T <- Tags];
+tag_list_from(Tags) when is_binary(Tags) ->
+ [to_atom(string:strip(T)) || T <- string:tokens(to_list(Tags), ",")].
+
flatten_errors(L) ->
case [{F, A} || I <- lists:flatten([L]), {error, F, A} <- [I]] of
[] -> ok;
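
The new add_user/4,5 and update_user_with_hash/5 in rabbit_auth_backend_internal.erl let callers seed tags and, optionally, per-user limits at creation time instead of issuing separate set_tags and limit calls afterwards, and tag_list_from/1 now accepts tags either as a list or as a comma-separated binary. A rough usage sketch (user name, password and acting user are made up; tag_list_from/1 is internal to the module):

    %% both tag encodings normalise to a list of atoms
    [administrator, monitoring] = tag_list_from(<<"administrator, monitoring">>),
    [administrator, monitoring] = tag_list_from([<<"administrator">>, <<"monitoring">>]),
    %% create a user and assign tags in a single call; limits default to undefined
    ok = rabbit_auth_backend_internal:add_user(
           <<"monitoring.svc">>, <<"s3kr3t">>, <<"bootstrap-admin">>, [monitoring]).
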
diff --git a/deps/rabbit/src/rabbit_auth_mechanism_amqplain.erl b/deps/rabbit/src/rabbit_auth_mechanism_amqplain.erl
index c81a337153..3751fafa73 100644
--- a/deps/rabbit/src/rabbit_auth_mechanism_amqplain.erl
+++ b/deps/rabbit/src/rabbit_auth_mechanism_amqplain.erl
@@ -2,11 +2,11 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_auth_mechanism_amqplain).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-behaviour(rabbit_auth_mechanism).
diff --git a/deps/rabbit/src/rabbit_auth_mechanism_cr_demo.erl b/deps/rabbit/src/rabbit_auth_mechanism_cr_demo.erl
index 15439c461f..a4ef834387 100644
--- a/deps/rabbit/src/rabbit_auth_mechanism_cr_demo.erl
+++ b/deps/rabbit/src/rabbit_auth_mechanism_cr_demo.erl
@@ -2,11 +2,11 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_auth_mechanism_cr_demo).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-behaviour(rabbit_auth_mechanism).
diff --git a/deps/rabbit/src/rabbit_auth_mechanism_plain.erl b/deps/rabbit/src/rabbit_auth_mechanism_plain.erl
index d704c72400..893a723b20 100644
--- a/deps/rabbit/src/rabbit_auth_mechanism_plain.erl
+++ b/deps/rabbit/src/rabbit_auth_mechanism_plain.erl
@@ -2,11 +2,11 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_auth_mechanism_plain).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-behaviour(rabbit_auth_mechanism).
diff --git a/deps/rabbit/src/rabbit_autoheal.erl b/deps/rabbit/src/rabbit_autoheal.erl
index 6380d71895..cdf430b973 100644
--- a/deps/rabbit/src/rabbit_autoheal.erl
+++ b/deps/rabbit/src/rabbit_autoheal.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_autoheal).
@@ -118,7 +118,7 @@ init() ->
case State of
{leader_waiting, Winner, _} ->
rabbit_log:info(
- "Autoheal: in progress, requesting report from ~p~n", [Winner]),
+ "Autoheal: in progress, requesting report from ~p", [Winner]),
send(Winner, report_autoheal_status);
_ ->
ok
@@ -129,7 +129,7 @@ maybe_start(not_healing) ->
case enabled() of
true -> Leader = leader(),
send(Leader, {request_start, node()}),
- rabbit_log:info("Autoheal request sent to ~p~n", [Leader]),
+ rabbit_log:info("Autoheal request sent to ~p", [Leader]),
not_healing;
false -> not_healing
end;
@@ -144,13 +144,13 @@ enabled() ->
end.
leader() ->
- [Leader | _] = lists:usort(rabbit_mnesia:cluster_nodes(all)),
+ [Leader | _] = lists:usort(rabbit_nodes:all()),
Leader.
%% This is the winner receiving its last notification that a node has
%% stopped - all nodes can now start again
rabbit_down(Node, {winner_waiting, [Node], Notify}) ->
- rabbit_log:info("Autoheal: final node has stopped, starting...~n",[]),
+ rabbit_log:info("Autoheal: final node has stopped, starting...",[]),
winner_finish(Notify);
rabbit_down(Node, {winner_waiting, WaitFor, Notify}) ->
@@ -173,25 +173,25 @@ node_down(Node, {winner_waiting, _, Notify}) ->
node_down(Node, {leader_waiting, Node, _Notify}) ->
%% The winner went down, we don't know what to do so we simply abort.
- rabbit_log:info("Autoheal: aborting - winner ~p went down~n", [Node]),
+ rabbit_log:info("Autoheal: aborting - winner ~p went down", [Node]),
not_healing;
node_down(Node, {leader_waiting, _, _} = St) ->
%% If it is a partial partition, the winner might continue with the
%% healing process. If it is a full partition, the winner will also
%% see it and abort. Let's wait for it.
- rabbit_log:info("Autoheal: ~p went down, waiting for winner decision ~n", [Node]),
+ rabbit_log:info("Autoheal: ~p went down, waiting for winner decision ", [Node]),
St;
node_down(Node, _State) ->
- rabbit_log:info("Autoheal: aborting - ~p went down~n", [Node]),
+ rabbit_log:info("Autoheal: aborting - ~p went down", [Node]),
not_healing.
%% If the process that has to restart the node crashes for an unexpected reason,
%% we go back to a not healing state so the node is able to recover.
process_down({'EXIT', Pid, Reason}, {restarting, Pid}) when Reason =/= normal ->
rabbit_log:info("Autoheal: aborting - the process responsible for restarting the "
- "node terminated with reason: ~p~n", [Reason]),
+ "node terminated with reason: ~p", [Reason]),
not_healing;
process_down(_, State) ->
@@ -201,17 +201,17 @@ process_down(_, State) ->
%% TODO should we try to debounce this?
handle_msg({request_start, Node},
not_healing, Partitions) ->
- rabbit_log:info("Autoheal request received from ~p~n", [Node]),
+ rabbit_log:info("Autoheal request received from ~p", [Node]),
case check_other_nodes(Partitions) of
{error, E} ->
- rabbit_log:info("Autoheal request denied: ~s~n", [fmt_error(E)]),
+ rabbit_log:info("Autoheal request denied: ~s", [fmt_error(E)]),
not_healing;
{ok, AllPartitions} ->
{Winner, Losers} = make_decision(AllPartitions),
rabbit_log:info("Autoheal decision~n"
" * Partitions: ~p~n"
" * Winner: ~p~n"
- " * Losers: ~p~n",
+ " * Losers: ~p",
[AllPartitions, Winner, Losers]),
case node() =:= Winner of
true -> handle_msg({become_winner, Losers},
@@ -224,12 +224,12 @@ handle_msg({request_start, Node},
handle_msg({request_start, Node},
State, _Partitions) ->
rabbit_log:info("Autoheal request received from ~p when healing; "
- "ignoring~n", [Node]),
+ "ignoring", [Node]),
State;
handle_msg({become_winner, Losers},
not_healing, _Partitions) ->
- rabbit_log:info("Autoheal: I am the winner, waiting for ~p to stop~n",
+ rabbit_log:info("Autoheal: I am the winner, waiting for ~p to stop",
[Losers]),
stop_partition(Losers);
@@ -238,7 +238,7 @@ handle_msg({become_winner, Losers},
%% The leader has aborted the healing, might have seen us down but
%% we didn't see the same. Let's try again as it is the same partition.
rabbit_log:info("Autoheal: I am the winner and received a duplicated "
- "request, waiting again for ~p to stop~n", [Losers]),
+ "request, waiting again for ~p to stop", [Losers]),
stop_partition(Losers);
handle_msg({become_winner, _},
@@ -246,7 +246,7 @@ handle_msg({become_winner, _},
%% Something has happened to the leader, it might have seen us down but we
%% are still alive. Partitions have changed, cannot continue.
rabbit_log:info("Autoheal: I am the winner and received another healing "
- "request, partitions have changed to ~p. Aborting ~n", [Losers]),
+                    "request, partitions have changed to ~p. Aborting", [Losers]),
winner_finish(Losers),
not_healing;
@@ -264,7 +264,7 @@ handle_msg({winner_is, Winner}, State = {leader_waiting, Winner, _},
handle_msg(Request, {restarting, Pid} = St, _Partitions) ->
%% ignore, we can contribute no further
rabbit_log:info("Autoheal: Received the request ~p while waiting for ~p "
- "to restart the node. Ignoring it ~n", [Request, Pid]),
+                    "to restart the node. Ignoring it", [Request, Pid]),
St;
handle_msg(report_autoheal_status, not_healing, _Partitions) ->
@@ -286,14 +286,14 @@ handle_msg({autoheal_finished, Winner},
%% The winner is finished with the autoheal process and notified us
%% (the leader). We can transition to the "not_healing" state and
%% accept new requests.
- rabbit_log:info("Autoheal finished according to winner ~p~n", [Winner]),
+ rabbit_log:info("Autoheal finished according to winner ~p", [Winner]),
not_healing;
handle_msg({autoheal_finished, Winner}, not_healing, _Partitions)
when Winner =:= node() ->
%% We are the leader and the winner. The state already transitioned
%% to "not_healing" at the end of the autoheal process.
- rabbit_log:info("Autoheal finished according to winner ~p~n", [node()]),
+ rabbit_log:info("Autoheal finished according to winner ~p", [node()]),
not_healing;
handle_msg({autoheal_finished, Winner}, not_healing, _Partitions) ->
@@ -301,7 +301,7 @@ handle_msg({autoheal_finished, Winner}, not_healing, _Partitions) ->
%% transitioned to not_healing. However, the winner was still able
%% to finish. Let it pass.
rabbit_log:info("Autoheal finished according to winner ~p."
- " Unexpected, I might have previously seen the winner down~n", [Winner]),
+ " Unexpected, I might have previously seen the winner down", [Winner]),
not_healing.
%%----------------------------------------------------------------------------
@@ -309,7 +309,7 @@ handle_msg({autoheal_finished, Winner}, not_healing, _Partitions) ->
send(Node, Msg) -> {?SERVER, Node} ! {autoheal_msg, Msg}.
abort(Down, Notify) ->
- rabbit_log:info("Autoheal: aborting - ~p down~n", [Down]),
+ rabbit_log:info("Autoheal: aborting - ~p down", [Down]),
%% Make sure any nodes waiting for us start - it won't necessarily
%% heal the partition but at least they won't get stuck.
%% If we are executing this, we are not stopping. Thus, don't wait
@@ -354,15 +354,14 @@ wait_for_supervisors(Monitors) ->
60000 ->
AliveLosers = [Node || {_, Node} <- pmon:monitored(Monitors)],
rabbit_log:info("Autoheal: mnesia in nodes ~p is still up, sending "
- "winner notification again to these ~n", [AliveLosers]),
+                    "winner notification again to these nodes", [AliveLosers]),
[send(L, {winner_is, node()}) || L <- AliveLosers],
wait_for_mnesia_shutdown(AliveLosers)
end
end.
restart_loser(State, Winner) ->
- rabbit_log:warning(
- "Autoheal: we were selected to restart; winner is ~p~n", [Winner]),
+ rabbit_log:warning("Autoheal: we were selected to restart; winner is ~p", [Winner]),
NextStateTimeout = application:get_env(rabbit, autoheal_state_transition_timeout, 60000),
rabbit_node_monitor:run_outside_applications(
fun () ->
@@ -411,7 +410,7 @@ partition_value(Partition) ->
%% only know which nodes we have been partitioned from, not which
%% nodes are partitioned from each other.
check_other_nodes(LocalPartitions) ->
- Nodes = rabbit_mnesia:cluster_nodes(all),
+ Nodes = rabbit_nodes:all(),
{Results, Bad} = rabbit_node_monitor:status(Nodes -- [node()]),
RemotePartitions = [{Node, proplists:get_value(partitions, Res)}
|| {Node, Res} <- Results],
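The autoheal hunks above switch node discovery from rabbit_mnesia:cluster_nodes(all) to rabbit_nodes:all() while keeping the same leader rule: sort the member list and take the first node. A minimal sketch of that rule, assuming only that the input is the list of cluster node names; the module name is illustrative and not part of the patch.

    -module(autoheal_leader_sketch).
    -export([leader/1]).

    %% Every node computes the same leader without coordination, because the
    %% sorted list of node names is identical on every member.
    -spec leader([node()]) -> node().
    leader(Nodes) when Nodes =/= [] ->
        [Leader | _] = lists:usort(Nodes),
        Leader.

On any node that sees the same membership, autoheal_leader_sketch:leader(['b@host', 'a@host']) returns 'a@host'.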
diff --git a/deps/rabbit/src/rabbit_backing_queue.erl b/deps/rabbit/src/rabbit_backing_queue.erl
index 4d709e14d0..3c113eb110 100644
--- a/deps/rabbit/src/rabbit_backing_queue.erl
+++ b/deps/rabbit/src/rabbit_backing_queue.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_backing_queue).
diff --git a/deps/rabbit/src/rabbit_basic.erl b/deps/rabbit/src/rabbit_basic.erl
index cdc9e082e4..cc7c00047e 100644
--- a/deps/rabbit/src/rabbit_basic.erl
+++ b/deps/rabbit/src/rabbit_basic.erl
@@ -2,12 +2,12 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_basic).
--include("rabbit.hrl").
--include("rabbit_framing.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit_framing.hrl").
-export([publish/4, publish/5, publish/1,
message/3, message/4, properties/1, prepend_table_header/3,
diff --git a/deps/rabbit/src/rabbit_binding.erl b/deps/rabbit/src/rabbit_binding.erl
index 6ef25c4e60..6890097169 100644
--- a/deps/rabbit/src/rabbit_binding.erl
+++ b/deps/rabbit/src/rabbit_binding.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_binding).
@@ -76,21 +76,22 @@ new(Src, RoutingKey, Dst, Arguments) ->
%% Global table recovery
--spec recover([rabbit_exchange:name()], [rabbit_amqqueue:name()]) ->
- 'ok'.
-
recover() ->
- rabbit_misc:table_filter(
- fun (Route) ->
- mnesia:read({rabbit_semi_durable_route, Route}) =:= []
- end,
- fun (Route, true) ->
- ok = mnesia:write(rabbit_semi_durable_route, Route, write);
- (_Route, false) ->
- ok
- end, rabbit_durable_route).
+ rabbit_misc:execute_mnesia_transaction(
+ fun () ->
+ mnesia:lock({table, rabbit_durable_route}, read),
+ mnesia:lock({table, rabbit_semi_durable_route}, write),
+ Routes = rabbit_misc:dirty_read_all(rabbit_durable_route),
+ Fun = fun(Route) ->
+ mnesia:dirty_write(rabbit_semi_durable_route, Route)
+ end,
+ lists:foreach(Fun, Routes)
+ end).
%% Virtual host-specific recovery
+
+-spec recover([rabbit_exchange:name()], [rabbit_amqqueue:name()]) ->
+ 'ok'.
recover(XNames, QNames) ->
XNameSet = sets:from_list(XNames),
QNameSet = sets:from_list(QNames),
diff --git a/deps/rabbit/src/rabbit_boot_steps.erl b/deps/rabbit/src/rabbit_boot_steps.erl
index f87448edb7..0463895dd5 100644
--- a/deps/rabbit/src/rabbit_boot_steps.erl
+++ b/deps/rabbit/src/rabbit_boot_steps.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_boot_steps).
@@ -39,7 +39,9 @@ run_step(Attributes, AttributeName) ->
rabbit_log:debug("Applying MFA: M = ~s, F = ~s, A = ~p",
[M, F, A]),
case apply(M,F,A) of
- ok -> ok;
+ ok ->
+ rabbit_log:debug("Finished MFA: M = ~s, F = ~s, A = ~p",
+ [M, F, A]);
{error, Reason} -> exit({error, Reason})
end
end
diff --git a/deps/rabbit/src/rabbit_channel.erl b/deps/rabbit/src/rabbit_channel.erl
index 8e7828a7c0..cd8ebe4446 100644
--- a/deps/rabbit/src/rabbit_channel.erl
+++ b/deps/rabbit/src/rabbit_channel.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_channel).
@@ -165,7 +165,8 @@
delivery_flow,
interceptor_state,
queue_states,
- tick_timer
+ tick_timer,
+ publishing_mode = false :: boolean()
}).
-define(QUEUE, lqueue).
@@ -298,12 +299,21 @@ deliver(Pid, ConsumerTag, AckRequired, Msg) ->
-spec deliver_reply(binary(), rabbit_types:delivery()) -> 'ok'.
-deliver_reply(<<"amq.rabbitmq.reply-to.", Rest/binary>>, Delivery) ->
- case decode_fast_reply_to(Rest) of
+deliver_reply(<<"amq.rabbitmq.reply-to.", EncodedBin/binary>>, Delivery) ->
+ case rabbit_direct_reply_to:decode_reply_to_v2(EncodedBin, rabbit_nodes:all_running_with_hashes()) of
{ok, Pid, Key} ->
- delegate:invoke_no_result(
- Pid, {?MODULE, deliver_reply_local, [Key, Delivery]});
- error ->
+ delegate:invoke_no_result(Pid, {?MODULE, deliver_reply_local, [Key, Delivery]});
+ {error, _} ->
+ deliver_reply_v1(EncodedBin, Delivery)
+ end.
+
+-spec deliver_reply_v1(binary(), rabbit_types:delivery()) -> 'ok'.
+deliver_reply_v1(EncodedBin, Delivery) ->
+    %% the original encoding function
+ case rabbit_direct_reply_to:decode_reply_to_v1(EncodedBin) of
+ {ok, V1Pid, V1Key} ->
+ delegate:invoke_no_result(V1Pid, {?MODULE, deliver_reply_local, [V1Key, Delivery]});
+ {error, _} ->
ok
end.
@@ -320,24 +330,29 @@ deliver_reply_local(Pid, Key, Delivery) ->
declare_fast_reply_to(<<"amq.rabbitmq.reply-to">>) ->
exists;
-declare_fast_reply_to(<<"amq.rabbitmq.reply-to.", Rest/binary>>) ->
- case decode_fast_reply_to(Rest) of
+declare_fast_reply_to(<<"amq.rabbitmq.reply-to.", EncodedBin/binary>>) ->
+ case rabbit_direct_reply_to:decode_reply_to_v2(EncodedBin, rabbit_nodes:all_running_with_hashes()) of
+ {error, _} ->
+ declare_fast_reply_to_v1(EncodedBin);
{ok, Pid, Key} ->
Msg = {declare_fast_reply_to, Key},
rabbit_misc:with_exit_handler(
rabbit_misc:const(not_found),
- fun() -> gen_server2:call(Pid, Msg, infinity) end);
- error ->
- not_found
+ fun() -> gen_server2:call(Pid, Msg, infinity) end)
end;
declare_fast_reply_to(_) ->
not_found.
-decode_fast_reply_to(Rest) ->
- case string:tokens(binary_to_list(Rest), ".") of
- [PidEnc, Key] -> Pid = binary_to_term(base64:decode(PidEnc)),
- {ok, Pid, Key};
- _ -> error
+declare_fast_reply_to_v1(EncodedBin) ->
+    %% the original encoding function
+ case rabbit_direct_reply_to:decode_reply_to_v1(EncodedBin) of
+ {ok, V1Pid, V1Key} ->
+ Msg = {declare_fast_reply_to, V1Key},
+ rabbit_misc:with_exit_handler(
+ rabbit_misc:const(not_found),
+ fun() -> gen_server2:call(V1Pid, Msg, infinity) end);
+ {error, _} ->
+ not_found
end.
-spec send_credit_reply(pid(), non_neg_integer()) -> 'ok'.
@@ -802,8 +817,10 @@ handle_info({'DOWN', _MRef, process, QPid, Reason},
State1 = State0#ch{queue_states = QState1},
State = handle_queue_actions(Actions, State1),
noreply_coalesce(State);
- {eol, QRef} ->
- State1 = handle_consuming_queue_down_or_eol(QRef, State0),
+ {eol, QState1, QRef} ->
+ State1 = handle_consuming_queue_down_or_eol(QRef, State0#ch{
+ queue_states = QState1
+ }),
{ConfirmMXs, UC1} =
rabbit_confirms:remove_queue(QRef, State1#ch.unconfirmed),
%% Deleted queue is a special case.
@@ -812,7 +829,7 @@ handle_info({'DOWN', _MRef, process, QPid, Reason},
State1#ch{unconfirmed = UC1}),
erase_queue_stats(QRef),
noreply_coalesce(
- State2#ch{queue_states = rabbit_queue_type:remove(QRef, QStates0)})
+ State2#ch{queue_states = rabbit_queue_type:remove(QRef, State2#ch.queue_states)})
end;
handle_info({'EXIT', _Pid, Reason}, State) ->
@@ -864,6 +881,7 @@ terminate(_Reason,
rabbit_event:if_enabled(State, #ch.stats_timer,
fun() -> emit_stats(State) end),
[delete_stats(Tag) || {Tag, _} <- get()],
+ maybe_decrease_global_publishers(State),
rabbit_core_metrics:channel_closed(self()),
rabbit_event:notify(channel_closed, [{pid, self()},
{user_who_performed_action, Username}]).
@@ -934,7 +952,7 @@ handle_exception(Reason, State = #ch{cfg = #conf{protocol = Protocol,
{Channel, CloseMethod} ->
rabbit_log_channel:error(
"Channel error on connection ~p (~s, vhost: '~s',"
- " user: '~s'), channel ~p:~n~s~n",
+ " user: '~s'), channel ~p:~n~s",
[ConnPid, ConnName, VHost, User#user.username,
Channel, format_soft_error(Reason)]),
ok = rabbit_writer:send_command(WriterPid, CloseMethod),
@@ -1271,6 +1289,8 @@ handle_method(#'basic.publish'{exchange = ExchangeNameBin,
confirm_enabled = ConfirmEnabled,
delivery_flow = Flow
}) ->
+ State0 = maybe_increase_global_publishers(State),
+ rabbit_global_counters:messages_received(amqp091, 1),
check_msg_size(Content, MaxMessageSize, GCThreshold),
ExchangeName = rabbit_misc:r(VHostPath, exchange, ExchangeNameBin),
check_write_permitted(ExchangeName, User, AuthzContext),
@@ -1286,10 +1306,11 @@ handle_method(#'basic.publish'{exchange = ExchangeNameBin,
check_expiration_header(Props),
DoConfirm = Tx =/= none orelse ConfirmEnabled,
{MsgSeqNo, State1} =
- case DoConfirm orelse Mandatory of
- false -> {undefined, State};
- true -> SeqNo = State#ch.publish_seqno,
- {SeqNo, State#ch{publish_seqno = SeqNo + 1}}
+ case DoConfirm of
+ false -> {undefined, State0};
+ true -> rabbit_global_counters:messages_received_confirm(amqp091, 1),
+ SeqNo = State0#ch.publish_seqno,
+ {SeqNo, State0#ch{publish_seqno = SeqNo + 1}}
end,
case rabbit_basic:message(ExchangeName, RoutingKey, DecodedContent) of
{ok, Message} ->
@@ -1300,9 +1321,11 @@ handle_method(#'basic.publish'{exchange = ExchangeNameBin,
Username, TraceState),
DQ = {Delivery#delivery{flow = Flow}, QNames},
{noreply, case Tx of
- none -> deliver_to_queues(DQ, State1);
- {Msgs, Acks} -> Msgs1 = ?QUEUE:in(DQ, Msgs),
- State1#ch{tx = {Msgs1, Acks}}
+ none ->
+ deliver_to_queues(DQ, State1);
+ {Msgs, Acks} ->
+ Msgs1 = ?QUEUE:in(DQ, Msgs),
+ State1#ch{tx = {Msgs1, Acks}}
end};
{error, Reason} ->
precondition_failed("invalid message: ~p", [Reason])
@@ -1346,14 +1369,14 @@ handle_method(#'basic.get'{queue = QueueNameBin, no_ack = NoAck},
DeliveryTag, QueueStates0)
end) of
{ok, MessageCount, Msg, QueueStates} ->
+ {ok, QueueType} = rabbit_queue_type:module(QueueName, QueueStates),
handle_basic_get(WriterPid, DeliveryTag, NoAck, MessageCount, Msg,
- State#ch{queue_states = QueueStates});
+ QueueType, State#ch{queue_states = QueueStates});
{empty, QueueStates} ->
+ {ok, QueueType} = rabbit_queue_type:module(QueueName, QueueStates),
+ rabbit_global_counters:messages_get_empty(amqp091, QueueType, 1),
?INCR_STATS(queue_stats, QueueName, 1, get_empty, State),
{reply, #'basic.get_empty'{}, State#ch{queue_states = QueueStates}};
- empty ->
- ?INCR_STATS(queue_stats, QueueName, 1, get_empty, State),
- {reply, #'basic.get_empty'{}, State};
{error, {unsupported, single_active_consumer}} ->
rabbit_misc:protocol_error(
resource_locked,
@@ -1384,12 +1407,9 @@ handle_method(#'basic.consume'{queue = <<"amq.rabbitmq.reply-to">>,
rabbit_guid:gen_secure(), "amq.ctag");
Other -> Other
end,
- %% Precalculate both suffix and key; base64 encoding is
- %% expensive
- Key = base64:encode(rabbit_guid:gen_secure()),
- PidEnc = base64:encode(term_to_binary(self())),
- Suffix = <<PidEnc/binary, ".", Key/binary>>,
- Consumer = {CTag, Suffix, binary_to_list(Key)},
+ %% Precalculate both suffix and key
+ {Key, Suffix} = rabbit_direct_reply_to:compute_key_and_suffix_v2(self()),
+ Consumer = {CTag, Suffix, Key},
State1 = State#ch{reply_consumer = Consumer},
case NoWait of
true -> {noreply, State1};
@@ -1454,6 +1474,14 @@ handle_method(#'basic.consume'{queue = QueueNameBin,
{error, global_qos_not_supported_for_queue_type} ->
rabbit_misc:protocol_error(
not_implemented, "~s does not support global qos",
+ [rabbit_misc:rs(QueueName)]);
+ {error, timeout} ->
+ rabbit_misc:protocol_error(
+ internal_error, "~s timeout occurred during consume operation",
+ [rabbit_misc:rs(QueueName)]);
+ {error, no_local_stream_replica_available} ->
+ rabbit_misc:protocol_error(
+            resource_error, "~s does not have a running local replica",
[rabbit_misc:rs(QueueName)])
end;
{ok, _} ->
@@ -1500,6 +1528,7 @@ handle_method(#'basic.cancel'{consumer_tag = ConsumerTag, nowait = NoWait},
Username, QueueStates0)
end) of
{ok, QueueStates} ->
+ rabbit_global_counters:consumer_deleted(amqp091),
{noreply, NewState#ch{queue_states = QueueStates}};
{error, not_found} ->
%% Spec requires we ignore this situation.
@@ -1517,6 +1546,11 @@ handle_method(#'basic.qos'{global = false,
limiter = Limiter}) ->
%% Ensures that if default was set, it's overridden
Limiter1 = rabbit_limiter:unlimit_prefetch(Limiter),
+ case rabbit_limiter:is_active(Limiter) of
+ true -> rabbit_amqqueue:deactivate_limit_all(
+ classic_consumer_queue_pids(State#ch.consumer_mapping), self());
+ false -> ok
+ end,
{reply, #'basic.qos_ok'{}, State#ch{cfg = Cfg#conf{consumer_prefetch = PrefetchCount},
limiter = Limiter1}};
@@ -1524,6 +1558,11 @@ handle_method(#'basic.qos'{global = true,
prefetch_count = 0},
_, State = #ch{limiter = Limiter}) ->
Limiter1 = rabbit_limiter:unlimit_prefetch(Limiter),
+ case rabbit_limiter:is_active(Limiter) of
+ true -> rabbit_amqqueue:deactivate_limit_all(
+ classic_consumer_queue_pids(State#ch.consumer_mapping), self());
+ false -> ok
+ end,
{reply, #'basic.qos_ok'{}, State#ch{limiter = Limiter1}};
handle_method(#'basic.qos'{global = true,
@@ -1677,9 +1716,9 @@ handle_method(#'tx.select'{}, _, State) ->
handle_method(#'tx.commit'{}, _, #ch{tx = none}) ->
precondition_failed("channel is not transactional");
-handle_method(#'tx.commit'{}, _, State = #ch{tx = {Msgs, Acks},
+handle_method(#'tx.commit'{}, _, State = #ch{tx = {Deliveries, Acks},
limiter = Limiter}) ->
- State1 = queue_fold(fun deliver_to_queues/2, State, Msgs),
+ State1 = queue_fold(fun deliver_to_queues/2, State, Deliveries),
Rev = fun (X) -> lists:reverse(lists:sort(X)) end,
{State2, Actions2} =
lists:foldl(fun ({ack, A}, {Acc, Actions}) ->
@@ -1759,6 +1798,7 @@ basic_consume(QueueName, NoAck, ConsumerPrefetch, ActualConsumerTag,
Q}
end) of
{{ok, QueueStates, Actions}, Q} when ?is_amqqueue(Q) ->
+ rabbit_global_counters:consumer_created(amqp091),
CM1 = maps:put(
ActualConsumerTag,
{Q, {NoAck, ConsumerPrefetch, ExclusiveConsume, Args}},
@@ -1775,6 +1815,10 @@ basic_consume(QueueName, NoAck, ConsumerPrefetch, ActualConsumerTag,
E;
{{error, global_qos_not_supported_for_queue_type} = E, _Q} ->
E;
+ {{error, no_local_stream_replica_available} = E, _Q} ->
+ E;
+ {{error, timeout} = E, _Q} ->
+ E;
{{protocol_error, Type, Reason, ReasonArgs}, _Q} ->
rabbit_misc:protocol_error(Type, Reason, ReasonArgs)
end.
@@ -1831,6 +1875,7 @@ cancel_consumer(CTag, QName,
nowait = true}, State);
_ -> ok
end,
+ rabbit_global_counters:consumer_deleted(amqp091),
rabbit_event:notify(consumer_deleted, [{consumer_tag, CTag},
{channel, self()},
{queue, QName}]),
@@ -1937,7 +1982,7 @@ internal_reject(Requeue, Acked, Limiter,
ok = notify_limiter(Limiter, Acked),
{State#ch{queue_states = QueueStates}, Actions}.
-record_sent(Type, Tag, AckRequired,
+record_sent(Type, QueueType, Tag, AckRequired,
Msg = {QName, _QPid, MsgId, Redelivered, _Message},
State = #ch{cfg = #conf{channel = ChannelNum,
trace_state = TraceState,
@@ -1947,15 +1992,28 @@ record_sent(Type, Tag, AckRequired,
unacked_message_q = UAMQ,
next_tag = DeliveryTag
}) ->
- ?INCR_STATS(queue_stats, QName, 1, case {Type, AckRequired} of
- {get, true} -> get;
- {get, false} -> get_no_ack;
- {deliver, true} -> deliver;
- {deliver, false} -> deliver_no_ack
- end, State),
+ rabbit_global_counters:messages_delivered(amqp091, QueueType, 1),
+ ?INCR_STATS(queue_stats, QName, 1,
+ case {Type, AckRequired} of
+ {get, true} ->
+ rabbit_global_counters:messages_delivered_get_manual_ack(amqp091, QueueType, 1),
+ get;
+ {get, false} ->
+ rabbit_global_counters:messages_delivered_get_auto_ack(amqp091, QueueType, 1),
+ get_no_ack;
+ {deliver, true} ->
+ rabbit_global_counters:messages_delivered_consume_manual_ack(amqp091, QueueType, 1),
+ deliver;
+ {deliver, false} ->
+ rabbit_global_counters:messages_delivered_consume_auto_ack(amqp091, QueueType, 1),
+ deliver_no_ack
+ end, State),
case Redelivered of
- true -> ?INCR_STATS(queue_stats, QName, 1, redeliver, State);
- false -> ok
+ true ->
+ rabbit_global_counters:messages_redelivered(amqp091, QueueType, 1),
+ ?INCR_STATS(queue_stats, QName, 1, redeliver, State);
+ false ->
+ ok
end,
DeliveredAt = os:system_time(millisecond),
rabbit_trace:tap_out(Msg, ConnName, ChannelNum, Username, TraceState),
@@ -2017,8 +2075,14 @@ ack(Acked, State = #ch{queue_states = QueueStates0}) ->
ok = notify_limiter(State#ch.limiter, Acked),
{State#ch{queue_states = QueueStates}, Actions}.
-incr_queue_stats(QName, MsgIds, State) ->
+incr_queue_stats(QName, MsgIds, State = #ch{queue_states = QueueStates}) ->
Count = length(MsgIds),
+ case rabbit_queue_type:module(QName, QueueStates) of
+ {ok, QueueType} ->
+ rabbit_global_counters:messages_acknowledged(amqp091, QueueType, Count);
+ _ ->
+ noop
+ end,
?INCR_STATS(queue_stats, QName, Count, ack, State).
%% {Msgs, Acks}
@@ -2075,13 +2139,13 @@ notify_limiter(Limiter, Acked) ->
%% common case.
case rabbit_limiter:is_active(Limiter) of
false -> ok;
- true -> case lists:foldl(fun ({_, CTag, _, _}, Acc) when is_integer(CTag) ->
+ true -> case lists:foldl(fun (#pending_ack{tag = CTag}, Acc) when is_integer(CTag) ->
%% Quorum queues use integer CTags
%% classic queues use binaries
%% Quorum queues do not interact
%% with limiters
Acc;
- ({_, _, _, _}, Acc) -> Acc + 1
+ (_, Acc) -> Acc + 1
end, 0, Acked) of
0 -> ok;
Count -> rabbit_limiter:ack(Limiter, Count)
@@ -2091,51 +2155,126 @@ notify_limiter(Limiter, Acked) ->
deliver_to_queues({#delivery{message = #basic_message{exchange_name = XName},
confirm = false,
mandatory = false},
- _RoutedToQs = []}, State) -> %% optimisation
+ _RoutedToQueueNames = []}, State) -> %% optimisation when there are no queues
?INCR_STATS(exchange_stats, XName, 1, publish, State),
+ rabbit_global_counters:messages_unroutable_dropped(amqp091, 1),
?INCR_STATS(exchange_stats, XName, 1, drop_unroutable, State),
State;
-deliver_to_queues({Delivery = #delivery{message = Message = #basic_message{
- exchange_name = XName},
+deliver_to_queues({Delivery = #delivery{message = Message = #basic_message{exchange_name = XName},
mandatory = Mandatory,
confirm = Confirm,
msg_seq_no = MsgSeqNo},
- DelQNames}, State0 = #ch{queue_states = QueueStates0}) ->
- Qs = rabbit_amqqueue:lookup(DelQNames),
- AllQueueNames = lists:foldl(fun (Q, Acc) ->
- QRef = amqqueue:get_name(Q),
- [QRef | Acc]
- end, [], Qs),
- {ok, QueueStates, Actions} =
- rabbit_queue_type:deliver(Qs, Delivery, QueueStates0),
- %% NB: the order here is important since basic.returns must be
- %% sent before confirms.
- ok = process_routing_mandatory(Mandatory, Qs, Message, State0),
- State1 = process_routing_confirm(Confirm, AllQueueNames,
- MsgSeqNo, XName, State0),
- %% Actions must be processed after registering confirms as actions may
- %% contain rejections of publishes
- State = handle_queue_actions(Actions,
- State1#ch{queue_states = QueueStates}),
- case rabbit_event:stats_level(State, #ch.stats_timer) of
- fine ->
- ?INCR_STATS(exchange_stats, XName, 1, publish),
- [?INCR_STATS(queue_exchange_stats,
- {amqqueue:get_name(Q), XName}, 1, publish)
- || Q <- Qs];
- _ ->
- ok
+ _RoutedToQueueNames = [QName]}, State0 = #ch{queue_states = QueueStates0}) -> %% optimisation when there is one queue
+ AllNames = case rabbit_amqqueue:lookup(QName) of
+ {ok, Q0} ->
+ case amqqueue:get_options(Q0) of
+ #{extra_bcc := BCC} -> [QName, rabbit_misc:r(QName#resource.virtual_host, queue, BCC)];
+ _ -> [QName]
+ end;
+ _ -> []
end,
- State.
+ Qs = rabbit_amqqueue:lookup(AllNames),
+ case rabbit_queue_type:deliver(Qs, Delivery, QueueStates0) of
+ {ok, QueueStates, Actions} ->
+ rabbit_global_counters:messages_routed(amqp091, erlang:min(1, length(Qs))),
+ %% NB: the order here is important since basic.returns must be
+ %% sent before confirms.
+ ok = process_routing_mandatory(Mandatory, Qs, Message, State0),
+ State1 = process_routing_confirm(Confirm, AllNames, MsgSeqNo, XName, State0),
+ %% Actions must be processed after registering confirms as actions may
+ %% contain rejections of publishes
+ State = handle_queue_actions(Actions, State1#ch{queue_states = QueueStates}),
+ case rabbit_event:stats_level(State, #ch.stats_timer) of
+ fine ->
+ ?INCR_STATS(exchange_stats, XName, 1, publish),
+ ?INCR_STATS(queue_exchange_stats, {QName, XName}, 1, publish);
+ _ ->
+ ok
+ end,
+ State;
+ {error, {stream_not_found, Resource}} ->
+ rabbit_misc:protocol_error(
+ resource_error,
+ "Stream not found for ~s",
+ [rabbit_misc:rs(Resource)]);
+ {error, {coordinator_unavailable, Resource}} ->
+ rabbit_misc:protocol_error(
+ resource_error,
+ "Stream coordinator unavailable for ~s",
+ [rabbit_misc:rs(Resource)])
+ end;
+deliver_to_queues({Delivery = #delivery{message = Message = #basic_message{exchange_name = XName},
+ mandatory = Mandatory,
+ confirm = Confirm,
+ msg_seq_no = MsgSeqNo},
+ RoutedToQueueNames}, State0 = #ch{queue_states = QueueStates0}) ->
+ Qs0 = rabbit_amqqueue:lookup(RoutedToQueueNames),
+ AllQueueNames = lists:map(fun amqqueue:get_name/1, Qs0),
+ AllExtraBCCs = infer_extra_bcc(Qs0),
+ %% Collect implicit BCC targets these queues may have
+ Qs = case AllExtraBCCs of
+ [] -> Qs0;
+ ExtraNames -> Qs0 ++ rabbit_amqqueue:lookup(ExtraNames)
+ end,
+ case rabbit_queue_type:deliver(Qs, Delivery, QueueStates0) of
+ {ok, QueueStates, Actions} ->
+ rabbit_global_counters:messages_routed(amqp091, length(Qs)),
+ %% NB: the order here is important since basic.returns must be
+ %% sent before confirms.
+ ok = process_routing_mandatory(Mandatory, Qs, Message, State0),
+ State1 = process_routing_confirm(Confirm, AllQueueNames,
+ MsgSeqNo, XName, State0),
+ %% Actions must be processed after registering confirms as actions may
+ %% contain rejections of publishes
+ State = handle_queue_actions(Actions, State1#ch{queue_states = QueueStates}),
+ case rabbit_event:stats_level(State, #ch.stats_timer) of
+ fine ->
+ ?INCR_STATS(exchange_stats, XName, 1, publish),
+ [?INCR_STATS(queue_exchange_stats, {QName, XName}, 1, publish)
+ || QName <- AllQueueNames];
+ _ ->
+ ok
+ end,
+ State;
+ {error, {coordinator_unavailable, Resource}} ->
+ rabbit_misc:protocol_error(
+ resource_error,
+ "Stream coordinator unavailable for ~s",
+ [rabbit_misc:rs(Resource)])
+ end.
+
+-spec infer_extra_bcc([amqqueue:amqqueue()]) -> [rabbit_amqqueue:name()].
+infer_extra_bcc([]) ->
+ [];
+infer_extra_bcc([Q]) ->
+ case amqqueue:get_options(Q) of
+ #{extra_bcc := BCC} ->
+ #resource{virtual_host = VHost} = amqqueue:get_name(Q),
+ [rabbit_misc:r(VHost, queue, BCC)];
+ _ ->
+ []
+ end;
+infer_extra_bcc(Qs) ->
+ lists:foldl(fun(Q, Acc) ->
+ case amqqueue:get_options(Q) of
+ #{extra_bcc := BCC} ->
+ #resource{virtual_host = VHost} = amqqueue:get_name(Q),
+ [rabbit_misc:r(VHost, queue, BCC) | Acc];
+ _ ->
+ Acc
+ end
+ end, [], Qs).
process_routing_mandatory(_Mandatory = true,
_RoutedToQs = [],
Msg, State) ->
+ rabbit_global_counters:messages_unroutable_returned(amqp091, 1),
ok = basic_return(Msg, State, no_route),
ok;
process_routing_mandatory(_Mandatory = false,
_RoutedToQs = [],
#basic_message{exchange_name = ExchangeName}, State) ->
+ rabbit_global_counters:messages_unroutable_dropped(amqp091, 1),
?INCR_STATS(exchange_stats, ExchangeName, 1, drop_unroutable, State),
ok;
process_routing_mandatory(_, _, _, _) ->
@@ -2163,6 +2302,7 @@ send_confirms_and_nacks(State = #ch{tx = none, confirmed = C, rejected = R}) ->
case rabbit_node_monitor:pause_partition_guard() of
ok ->
Confirms = lists:append(C),
+ rabbit_global_counters:messages_confirmed(amqp091, length(Confirms)),
Rejects = lists:append(R),
ConfirmMsgSeqNos =
lists:foldl(
@@ -2530,6 +2670,9 @@ handle_method(#'queue.delete'{queue = QueueNameBin,
precondition_failed("~s in use", [rabbit_misc:rs(QueueName)]);
{error, not_empty} ->
precondition_failed("~s not empty", [rabbit_misc:rs(QueueName)]);
+ {error, {exit, _, _}} ->
+ %% rabbit_amqqueue:delete()/delegate:invoke might return {error, {exit, _, _}}
+ {ok, 0};
{ok, Count} ->
{ok, Count};
{protocol_error, Type, Reason, ReasonArgs} ->
@@ -2636,8 +2779,9 @@ handle_deliver0(ConsumerTag, AckRequired,
redelivered = Redelivered,
exchange = ExchangeName#resource.name,
routing_key = RoutingKey},
- case rabbit_queue_type:module(QName, Qs) of
- {ok, rabbit_classic_queue} ->
+ {ok, QueueType} = rabbit_queue_type:module(QName, Qs),
+ case QueueType of
+ rabbit_classic_queue ->
ok = rabbit_writer:send_command_and_notify(
WriterPid, QPid, self(), Deliver, Content);
_ ->
@@ -2647,13 +2791,14 @@ handle_deliver0(ConsumerTag, AckRequired,
undefined -> ok;
_ -> rabbit_basic:maybe_gc_large_msg(Content, GCThreshold)
end,
- record_sent(deliver, ConsumerTag, AckRequired, Msg, State).
+ record_sent(deliver, QueueType, ConsumerTag, AckRequired, Msg, State).
handle_basic_get(WriterPid, DeliveryTag, NoAck, MessageCount,
Msg = {_QName, _QPid, _MsgId, Redelivered,
#basic_message{exchange_name = ExchangeName,
routing_keys = [RoutingKey | _CcRoutes],
- content = Content}}, State) ->
+ content = Content}},
+ QueueType, State) ->
ok = rabbit_writer:send_command(
WriterPid,
#'basic.get_ok'{delivery_tag = DeliveryTag,
@@ -2662,7 +2807,7 @@ handle_basic_get(WriterPid, DeliveryTag, NoAck, MessageCount,
routing_key = RoutingKey,
message_count = MessageCount},
Content),
- {noreply, record_sent(get, DeliveryTag, not(NoAck), Msg, State)}.
+ {noreply, record_sent(get, QueueType, DeliveryTag, not(NoAck), Msg, State)}.
init_tick_timer(State = #ch{tick_timer = undefined}) ->
{ok, Interval} = application:get_env(rabbit, channel_tick_interval),
@@ -2698,10 +2843,10 @@ get_operation_timeout_and_deadline() ->
Deadline = now_millis() + Timeout,
{Timeout, Deadline}.
-queue_fold(Fun, Init, Q) ->
- case ?QUEUE:out(Q) of
- {empty, _Q} -> Init;
- {{value, V}, Q1} -> queue_fold(Fun, Fun(V, Init), Q1)
+queue_fold(Fun, Acc, Queue) ->
+ case ?QUEUE:out(Queue) of
+ {empty, _Queue} -> Acc;
+ {{value, Item}, Queue1} -> queue_fold(Fun, Fun(Item, Acc), Queue1)
end.
evaluate_consumer_timeout(State0 = #ch{cfg = #conf{channel = Channel,
@@ -2714,12 +2859,15 @@ evaluate_consumer_timeout(State0 = #ch{cfg = #conf{channel = Channel,
when is_integer(Timeout)
andalso Time < Now - Timeout ->
rabbit_log_channel:warning("Consumer ~s on channel ~w has timed out "
- "waiting on consumer acknowledgement. Timeout used: ~p ms",
+ "waiting for delivery acknowledgement. Timeout used: ~p ms. "
+ "This timeout value can be configured, see consumers doc guide to learn more",
[rabbit_data_coercion:to_binary(ConsumerTag),
Channel, Timeout]),
Ex = rabbit_misc:amqp_error(precondition_failed,
- "consumer ack timed out on channel ~w",
- [Channel], none),
+ "delivery acknowledgement on channel ~w timed out. "
+ "Timeout value used: ~p ms. "
+ "This timeout value can be configured, see consumers doc guide to learn more",
+ [Channel, Timeout], none),
handle_exception(Ex, State0);
_ ->
{noreply, State0}
@@ -2795,3 +2943,14 @@ find_queue_name_from_quorum_name(Name, QStates) ->
end
end,
rabbit_queue_type:fold_state(Fun, undefined, QStates).
+
+maybe_increase_global_publishers(#ch{publishing_mode = true} = State0) ->
+ State0;
+maybe_increase_global_publishers(State0) ->
+ rabbit_global_counters:publisher_created(amqp091),
+ State0#ch{publishing_mode = true}.
+
+maybe_decrease_global_publishers(#ch{publishing_mode = false}) ->
+    ok;
+maybe_decrease_global_publishers(#ch{publishing_mode = true}) ->
+    rabbit_global_counters:publisher_deleted(amqp091).
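Both deliver_reply/2 and declare_fast_reply_to/1 above now try the v2 direct reply-to encoding first and fall back to the original v1 decoder when that fails. A minimal sketch of that try-v2-then-v1 shape; the decoders are passed in as funs so the sketch assumes nothing about the real rabbit_direct_reply_to API beyond the {ok, Pid, Key} | {error, Reason} return shape visible in the hunks.

    -module(reply_to_fallback_sketch).
    -export([decode/3]).

    -type decode_result() :: {ok, pid(), term()} | {error, term()}.

    %% Try the newer decoder first; only when it returns an error, retry with
    %% the original one. Both decoders are supplied by the caller as funs.
    -spec decode(fun((binary()) -> decode_result()),
                 fun((binary()) -> decode_result()),
                 binary()) -> decode_result().
    decode(DecodeV2, DecodeV1, EncodedBin) ->
        case DecodeV2(EncodedBin) of
            {ok, _Pid, _Key} = Ok -> Ok;
            {error, _} -> DecodeV1(EncodedBin)
        end.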
diff --git a/deps/rabbit/src/rabbit_channel_interceptor.erl b/deps/rabbit/src/rabbit_channel_interceptor.erl
index c40b437f10..38d4456549 100644
--- a/deps/rabbit/src/rabbit_channel_interceptor.erl
+++ b/deps/rabbit/src/rabbit_channel_interceptor.erl
@@ -2,13 +2,13 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_channel_interceptor).
--include("rabbit_framing.hrl").
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit_framing.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-export([init/1, intercept_in/3]).
@@ -29,7 +29,7 @@
-callback init(rabbit_channel:channel()) -> interceptor_state().
-callback intercept(original_method(), original_content(),
interceptor_state()) ->
- {processed_method(), processed_content()} |
+ {processed_method(), processed_content()} | rabbit_types:amqp_error() |
rabbit_misc:channel_or_connection_exit().
-callback applies_to() -> list(method_name()).
@@ -53,8 +53,7 @@ check_no_overlap1(Sets) ->
case sets:size(Is) of
0 -> ok;
_ ->
- internal_error("Interceptor: more than one "
- "module handles ~p~n", [Is])
+ internal_error("Interceptor: more than one module handles ~p", [Is])
end,
sets:union(Set, Union)
end,
@@ -89,7 +88,9 @@ validate_response(Mod, M1, C1, R = {M2, C2}) ->
"content iff content is provided but "
"content in = ~p; content out = ~p",
[Mod, C1, C2])
- end.
+ end;
+validate_response(_Mod, _M1, _C1, AMQPError = #amqp_error{}) ->
+ internal_error(AMQPError).
validate_method(M, M2) ->
rabbit_misc:method_record_type(M) =:= rabbit_misc:method_record_type(M2).
@@ -99,6 +100,12 @@ validate_content(#content{}, #content{}) -> true;
validate_content(_, _) -> false.
%% keep dialyzer happy
--spec internal_error(string(), [any()]) -> no_return().
+-spec internal_error(rabbit_types:amqp_error()) ->
+ rabbit_misc:channel_or_connection_exit().
+internal_error(AMQPError = #amqp_error{}) ->
+ rabbit_misc:protocol_error(AMQPError).
+
+-spec internal_error(string(), [any()]) ->
+ rabbit_misc:channel_or_connection_exit().
internal_error(Format, Args) ->
rabbit_misc:protocol_error(internal_error, Format, Args).
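The interceptor hunks above widen the intercept/3 contract so an implementation may also return an #amqp_error{}, which validate_response/4 turns into a protocol error. A pass-through implementation of the callbacks visible in this hunk might look as follows; this is a sketch only, and the real behaviour may require further callbacks (such as description/0) that are not shown in the diff.

    -module(noop_interceptor_sketch).
    %% -behaviour(rabbit_channel_interceptor).  %% enable when built in-tree

    -export([init/1, intercept/3, applies_to/0]).

    init(_Channel) ->
        undefined.                     %% no interceptor state needed

    %% Returning {Method, Content} unchanged leaves the operation untouched;
    %% returning an #amqp_error{} record instead would now fail the operation
    %% with a channel-level protocol error, per validate_response/4 above.
    intercept(Method, Content, _State) ->
        {Method, Content}.

    applies_to() ->
        ['basic.publish'].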
diff --git a/deps/rabbit/src/rabbit_channel_sup.erl b/deps/rabbit/src/rabbit_channel_sup.erl
index 0d405ad3a7..0b54fdfd54 100644
--- a/deps/rabbit/src/rabbit_channel_sup.erl
+++ b/deps/rabbit/src/rabbit_channel_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_channel_sup).
@@ -23,7 +23,7 @@
-export([init/1]).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
%%----------------------------------------------------------------------------
diff --git a/deps/rabbit/src/rabbit_channel_sup_sup.erl b/deps/rabbit/src/rabbit_channel_sup_sup.erl
index 72cf38d6c8..6c7d4f8808 100644
--- a/deps/rabbit/src/rabbit_channel_sup_sup.erl
+++ b/deps/rabbit/src/rabbit_channel_sup_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_channel_sup_sup).
@@ -18,7 +18,7 @@
-export([init/1]).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
%%----------------------------------------------------------------------------
diff --git a/deps/rabbit/src/rabbit_channel_tracking.erl b/deps/rabbit/src/rabbit_channel_tracking.erl
index 42ab664a06..106e95cf1e 100644
--- a/deps/rabbit/src/rabbit_channel_tracking.erl
+++ b/deps/rabbit/src/rabbit_channel_tracking.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_channel_tracking).
@@ -32,7 +32,7 @@
get_all_tracked_channel_table_names_for_node/1,
delete_tracked_channel_user_entry/1]).
--include_lib("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-import(rabbit_misc, [pget/2]).
@@ -86,7 +86,7 @@ handle_cast({channel_created, Details}) ->
ok
end;
handle_cast({channel_closed, Details}) ->
- %% channel has terminated, unregister iff local
+ %% channel has terminated, unregister if local
case get_tracked_channel_by_pid(pget(pid, Details)) of
[#tracked_channel{name = Name}] ->
unregister_tracked(rabbit_tracking:id(node(), Name));
@@ -99,8 +99,8 @@ handle_cast({connection_closed, ConnDetails}) ->
case pget(node, ConnDetails) of
ThisNode ->
TrackedChs = get_tracked_channels_by_connection_pid(ConnPid),
- rabbit_log_connection:info(
- "Closing all channels from connection '~p' "
+ rabbit_log_channel:debug(
+ "Closing all channels from connection '~s' "
"because it has been closed", [pget(name, ConnDetails)]),
%% Shutting down channels will take care of unregistering the
%% corresponding tracking.
@@ -117,7 +117,7 @@ handle_cast({user_deleted, Details}) ->
ok;
handle_cast({node_deleted, Details}) ->
Node = pget(node, Details),
- rabbit_log_connection:info(
+ rabbit_log_channel:info(
"Node '~s' was removed from the cluster, deleting"
" its channel tracking tables...", [Node]),
delete_tracked_channels_table_for_node(Node),
@@ -179,7 +179,18 @@ list() ->
lists:foldl(
fun (Node, Acc) ->
Tab = tracked_channel_table_name_for(Node),
- Acc ++ mnesia:dirty_match_object(Tab, #tracked_channel{_ = '_'})
+ try
+ Acc ++
+ mnesia:dirty_match_object(Tab, #tracked_channel{_ = '_'})
+ catch
+ exit:{aborted, {no_exists, [Tab, _]}} ->
+ %% The table might not exist yet (or is already gone)
+ %% between the time rabbit_nodes:all_running() runs and
+ %% returns a specific node, and
+ %% mnesia:dirty_match_object() is called for that node's
+ %% table.
+ Acc
+ end
end, [], rabbit_nodes:all_running()).
-spec list_of_user(rabbit_types:username()) -> [rabbit_types:tracked_channel()].
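list/0 above now guards mnesia:dirty_match_object/2 against tables that do not exist (yet, or any more) on a node returned by rabbit_nodes:all_running(). The same guard, extracted into a hypothetical helper that is not part of the patch, assuming an empty result is acceptable when the table is missing:

    -module(tracking_table_sketch).
    -export([safe_dirty_match/2]).

    %% Returns [] when Tab does not exist on this node, mirroring the catch
    %% clause added to list/0 above.
    safe_dirty_match(Tab, Pattern) ->
        try
            mnesia:dirty_match_object(Tab, Pattern)
        catch
            exit:{aborted, {no_exists, [Tab, _]}} ->
                []
        end.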
diff --git a/deps/rabbit/src/rabbit_channel_tracking_handler.erl b/deps/rabbit/src/rabbit_channel_tracking_handler.erl
index 0cbe02f39e..8d436829a4 100644
--- a/deps/rabbit/src/rabbit_channel_tracking_handler.erl
+++ b/deps/rabbit/src/rabbit_channel_tracking_handler.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_channel_tracking_handler).
@@ -21,7 +21,7 @@
-export([init/1, handle_call/2, handle_event/2, handle_info/2,
terminate/2, code_change/3]).
--include_lib("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-rabbit_boot_step({?MODULE,
[{description, "channel tracking event handler"},
diff --git a/deps/rabbit/src/rabbit_classic_queue.erl b/deps/rabbit/src/rabbit_classic_queue.erl
index e53c0aecc2..b720cfc96e 100644
--- a/deps/rabbit/src/rabbit_classic_queue.erl
+++ b/deps/rabbit/src/rabbit_classic_queue.erl
@@ -38,7 +38,8 @@
dequeue/4,
info/2,
state_info/1,
- capabilities/0
+ capabilities/0,
+ notify_decorators/1
]).
-export([delete_crashed/1,
@@ -54,11 +55,13 @@ is_enabled() -> true.
declare(Q, Node) when ?amqqueue_is_classic(Q) ->
QName = amqqueue:get_name(Q),
VHost = amqqueue:get_vhost(Q),
- Node1 = case Node of
- {ignore_location, Node0} ->
+ Node1 = case {Node, rabbit_amqqueue:is_exclusive(Q)} of
+ {{ignore_location, Node0}, _} ->
Node0;
+ {_, true} ->
+ Node;
_ ->
- case rabbit_queue_master_location_misc:get_location(Q) of
+ case rabbit_queue_master_location_misc:get_location(Q) of
{ok, Node0} -> Node0;
_ -> Node
end
@@ -143,11 +146,11 @@ stat(Q) ->
delegate:invoke(amqqueue:get_pid(Q),
{gen_server2, call, [stat, infinity]}).
--spec init(amqqueue:amqqueue()) -> state().
+-spec init(amqqueue:amqqueue()) -> {ok, state()}.
init(Q) when ?amqqueue_is_classic(Q) ->
QName = amqqueue:get_name(Q),
- #?STATE{pid = amqqueue:get_pid(Q),
- qref = QName}.
+ {ok, #?STATE{pid = amqqueue:get_pid(Q),
+ qref = QName}}.
-spec close(state()) -> ok.
close(_State) ->
@@ -298,9 +301,9 @@ settlement_action(Type, QRef, MsgSeqs, Acc) ->
deliver(Qs0, #delivery{flow = Flow,
msg_seq_no = MsgNo,
message = #basic_message{exchange_name = _Ex},
- confirm = _Confirm} = Delivery) ->
+ confirm = Confirm} = Delivery) ->
%% TODO: record master and slaves for confirm processing
- {MPids, SPids, Qs, Actions} = qpids(Qs0, MsgNo),
+ {MPids, SPids, Qs, Actions} = qpids(Qs0, Confirm, MsgNo),
QPids = MPids ++ SPids,
case Flow of
%% Here we are tracking messages sent by the rabbit_channel
@@ -360,7 +363,7 @@ purge(Q) when ?is_amqqueue(Q) ->
QPid = amqqueue:get_pid(Q),
delegate:invoke(QPid, {gen_server2, call, [purge, infinity]}).
-qpids(Qs, MsgNo) ->
+qpids(Qs, Confirm, MsgNo) ->
lists:foldl(
fun ({Q, S0}, {MPidAcc, SPidAcc, Qs0, Actions0}) ->
QPid = amqqueue:get_pid(Q),
@@ -368,14 +371,14 @@ qpids(Qs, MsgNo) ->
QRef = amqqueue:get_name(Q),
Actions = [{monitor, QPid, QRef}
| [{monitor, P, QRef} || P <- SPids]] ++ Actions0,
- %% confirm record only if MsgNo isn't undefined
+ %% confirm record only if necessary
S = case S0 of
#?STATE{unconfirmed = U0} ->
Rec = [QPid | SPids],
- U = case MsgNo of
- undefined ->
+ U = case Confirm of
+ false ->
U0;
- _ ->
+ true ->
U0#{MsgNo => #msg_status{pending = Rec}}
end,
S0#?STATE{pid = QPid,
@@ -436,19 +439,14 @@ recover_durable_queues(QueuesAndRecoveryTerms) ->
gen_server2:mcall(
[{rabbit_amqqueue_sup_sup:start_queue_process(node(), Q, recovery),
{init, {self(), Terms}}} || {Q, Terms} <- QueuesAndRecoveryTerms]),
- [rabbit_log:error("Queue ~p failed to initialise: ~p~n",
+ [rabbit_log:error("Queue ~p failed to initialise: ~p",
[Pid, Error]) || {Pid, Error} <- Failures],
[Q || {_, {new, Q}} <- Results].
capabilities() ->
- #{policies => [<<"expires">>, <<"message-ttl">>, <<"dead-letter-exchange">>,
- <<"dead-letter-routing-key">>, <<"max-length">>,
- <<"max-length-bytes">>, <<"max-in-memory-length">>, <<"max-in-memory-bytes">>,
- <<"max-priority">>, <<"overflow">>, <<"queue-mode">>,
- <<"single-active-consumer">>, <<"delivery-limit">>,
- <<"ha-mode">>, <<"ha-params">>, <<"ha-sync-mode">>,
- <<"ha-promote-on-shutdown">>, <<"ha-promote-on-failure">>,
- <<"queue-master-locator">>],
+ #{unsupported_policies => [ %% Stream policies
+ <<"max-age">>, <<"stream-max-segment-size-bytes">>,
+ <<"queue-leader-locator">>, <<"initial-cluster-size">>],
queue_arguments => [<<"x-expires">>, <<"x-message-ttl">>, <<"x-dead-letter-exchange">>,
<<"x-dead-letter-routing-key">>, <<"x-max-length">>,
<<"x-max-length-bytes">>, <<"x-max-in-memory-length">>,
@@ -460,6 +458,10 @@ capabilities() ->
],
server_named => true}.
+notify_decorators(Q) when ?is_amqqueue(Q) ->
+ QPid = amqqueue:get_pid(Q),
+ delegate:invoke_no_result(QPid, {gen_server2, cast, [notify_decorators]}).
+
reject_seq_no(SeqNo, U0) ->
reject_seq_no(SeqNo, U0, []).
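In qpids/3 above, an unconfirmed entry is now recorded only when the delivery actually requests confirmation, instead of inferring that from MsgNo being defined. A reduced sketch of that bookkeeping, with the per-message status simplified to the list of pids that still have to confirm (the real code stores a #msg_status{} record); the module and function names are illustrative.

    -module(confirm_tracking_sketch).
    -export([maybe_track/4]).

    %% Unconfirmed maps a message sequence number to the pids that still have
    %% to confirm it; nothing is recorded when no confirm was requested.
    -spec maybe_track(boolean(), non_neg_integer() | undefined, [pid()], map()) -> map().
    maybe_track(false, _MsgNo, _Pending, Unconfirmed) ->
        Unconfirmed;
    maybe_track(true, MsgNo, Pending, Unconfirmed) ->
        Unconfirmed#{MsgNo => Pending}.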
diff --git a/deps/rabbit/src/rabbit_client_sup.erl b/deps/rabbit/src/rabbit_client_sup.erl
index a28e4ce39c..dc80025796 100644
--- a/deps/rabbit/src/rabbit_client_sup.erl
+++ b/deps/rabbit/src/rabbit_client_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_client_sup).
@@ -13,7 +13,7 @@
-export([init/1]).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
%%----------------------------------------------------------------------------
diff --git a/deps/rabbit/src/rabbit_connection_helper_sup.erl b/deps/rabbit/src/rabbit_connection_helper_sup.erl
index d0509029fd..406e8c95fb 100644
--- a/deps/rabbit/src/rabbit_connection_helper_sup.erl
+++ b/deps/rabbit/src/rabbit_connection_helper_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_connection_helper_sup).
@@ -24,7 +24,7 @@
-export([init/1]).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
%%----------------------------------------------------------------------------
diff --git a/deps/rabbit/src/rabbit_connection_sup.erl b/deps/rabbit/src/rabbit_connection_sup.erl
index c1d1bd0d77..173015d7c4 100644
--- a/deps/rabbit/src/rabbit_connection_sup.erl
+++ b/deps/rabbit/src/rabbit_connection_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_connection_sup).
@@ -19,18 +19,18 @@
-behaviour(supervisor2).
-behaviour(ranch_protocol).
--export([start_link/4, reader/1]).
+-export([start_link/3, reader/1]).
-export([init/1]).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
%%----------------------------------------------------------------------------
--spec start_link(any(), rabbit_net:socket(), module(), any()) ->
+-spec start_link(any(), module(), any()) ->
{'ok', pid(), pid()}.
-start_link(Ref, _Sock, _Transport, _Opts) ->
+start_link(Ref, _Transport, _Opts) ->
{ok, SupPid} = supervisor2:start_link(?MODULE, []),
%% We need to get channels in the hierarchy here so they get shut
%% down after the reader, so the reader gets a chance to terminate
diff --git a/deps/rabbit/src/rabbit_connection_tracking.erl b/deps/rabbit/src/rabbit_connection_tracking.erl
index c0704e6a7c..2ec96e621f 100644
--- a/deps/rabbit/src/rabbit_connection_tracking.erl
+++ b/deps/rabbit/src/rabbit_connection_tracking.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_connection_tracking).
@@ -53,7 +53,7 @@
lookup/1,
count/0]).
--include_lib("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-import(rabbit_misc, [pget/2]).
@@ -351,7 +351,18 @@ list() ->
lists:foldl(
fun (Node, Acc) ->
Tab = tracked_connection_table_name_for(Node),
- Acc ++ mnesia:dirty_match_object(Tab, #tracked_connection{_ = '_'})
+ try
+ Acc ++
+ mnesia:dirty_match_object(Tab, #tracked_connection{_ = '_'})
+ catch
+ exit:{aborted, {no_exists, [Tab, _]}} ->
+ %% The table might not exist yet (or is already gone)
+ %% between the time rabbit_nodes:all_running() runs and
+ %% returns a specific node, and
+ %% mnesia:dirty_match_object() is called for that node's
+ %% table.
+ Acc
+ end
end, [], rabbit_nodes:all_running()).
-spec count() -> non_neg_integer().
@@ -360,6 +371,9 @@ count() ->
lists:foldl(
fun (Node, Acc) ->
Tab = tracked_connection_table_name_for(Node),
+ %% mnesia:table_info() returns 0 if the table doesn't exist. We
+ %% don't need the same kind of protection as the list() function
+ %% above.
Acc + mnesia:table_info(Tab, size)
end, 0, rabbit_nodes:all_running()).
@@ -463,6 +477,7 @@ tracked_connection_from_connection_created(EventDetails) ->
username = pget(user, EventDetails),
connected_at = pget(connected_at, EventDetails),
pid = pget(pid, EventDetails),
+ protocol = pget(protocol, EventDetails),
type = pget(type, EventDetails),
peer_host = pget(peer_host, EventDetails),
peer_port = pget(peer_port, EventDetails)}.
@@ -512,4 +527,8 @@ close_connection(#tracked_connection{pid = Pid, type = network}, Message) ->
close_connection(#tracked_connection{pid = Pid, type = direct}, Message) ->
%% Do an RPC call to the node running the direct client.
Node = node(Pid),
- rpc:call(Node, amqp_direct_connection, server_close, [Pid, 320, Message]).
+ rpc:call(Node, amqp_direct_connection, server_close, [Pid, 320, Message]);
+close_connection(#tracked_connection{pid = Pid}, Message) ->
+ % best effort, this will work for connections to the stream plugin
+ Node = node(Pid),
+ rpc:call(Node, gen_server, call, [Pid, {shutdown, Message}, infinity]).
diff --git a/deps/rabbit/src/rabbit_connection_tracking_handler.erl b/deps/rabbit/src/rabbit_connection_tracking_handler.erl
index 17085d805a..f9df05431d 100644
--- a/deps/rabbit/src/rabbit_connection_tracking_handler.erl
+++ b/deps/rabbit/src/rabbit_connection_tracking_handler.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_connection_tracking_handler).
@@ -21,7 +21,7 @@
%% for compatibility with previous versions of CLI tools
-export([close_connections/3]).
--include_lib("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-rabbit_boot_step({?MODULE,
[{description, "connection tracking event handler"},
diff --git a/deps/rabbit/src/rabbit_control_pbe.erl b/deps/rabbit/src/rabbit_control_pbe.erl
index 95c4fe41f1..4086dbdc33 100644
--- a/deps/rabbit/src/rabbit_control_pbe.erl
+++ b/deps/rabbit/src/rabbit_control_pbe.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_control_pbe).
diff --git a/deps/rabbit/src/rabbit_core_ff.erl b/deps/rabbit/src/rabbit_core_ff.erl
index 6d30846775..7719d3d48d 100644
--- a/deps/rabbit/src/rabbit_core_ff.erl
+++ b/deps/rabbit/src/rabbit_core_ff.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2018-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2018-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_core_ff).
@@ -25,7 +25,7 @@
-rabbit_feature_flag(
{stream_queue,
#{desc => "Support queues of type `stream`",
- doc_url => "https://www.rabbitmq.com/stream-queues.html",
+ doc_url => "https://www.rabbitmq.com/stream.html",
stability => stable,
depends_on => [quorum_queue],
migration_fun => {?MODULE, stream_queue_migration}
diff --git a/deps/rabbit/src/rabbit_core_metrics_gc.erl b/deps/rabbit/src/rabbit_core_metrics_gc.erl
index 890c127586..156517c97e 100644
--- a/deps/rabbit/src/rabbit_core_metrics_gc.erl
+++ b/deps/rabbit/src/rabbit_core_metrics_gc.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_core_metrics_gc).
@@ -102,7 +102,7 @@ gc_exchanges() ->
gc_process_and_entity(channel_exchange_metrics, GbSet).
gc_nodes() ->
- Nodes = rabbit_mnesia:cluster_nodes(all),
+ Nodes = rabbit_nodes:all(),
GbSet = gb_sets:from_list(Nodes),
gc_entity(node_node_metrics, GbSet).
diff --git a/deps/rabbit/src/rabbit_credential_validation.erl b/deps/rabbit/src/rabbit_credential_validation.erl
index 8712628ade..0075f5a58a 100644
--- a/deps/rabbit/src/rabbit_credential_validation.erl
+++ b/deps/rabbit/src/rabbit_credential_validation.erl
@@ -2,12 +2,12 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_credential_validation).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
%% used for backwards compatibility
-define(DEFAULT_BACKEND, rabbit_credential_validator_accept_everything).
diff --git a/deps/rabbit/src/rabbit_credential_validator.erl b/deps/rabbit/src/rabbit_credential_validator.erl
index 3b5d0752bf..50ab695501 100644
--- a/deps/rabbit/src/rabbit_credential_validator.erl
+++ b/deps/rabbit/src/rabbit_credential_validator.erl
@@ -2,12 +2,12 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_credential_validator).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
%% Validates a password. Used by `rabbit_auth_backend_internal`.
%%
diff --git a/deps/rabbit/src/rabbit_credential_validator_accept_everything.erl b/deps/rabbit/src/rabbit_credential_validator_accept_everything.erl
index fea10fd4b6..413b1548bf 100644
--- a/deps/rabbit/src/rabbit_credential_validator_accept_everything.erl
+++ b/deps/rabbit/src/rabbit_credential_validator_accept_everything.erl
@@ -2,12 +2,12 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_credential_validator_accept_everything).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-behaviour(rabbit_credential_validator).
diff --git a/deps/rabbit/src/rabbit_credential_validator_min_password_length.erl b/deps/rabbit/src/rabbit_credential_validator_min_password_length.erl
index 463090127f..1b4e6f7ac3 100644
--- a/deps/rabbit/src/rabbit_credential_validator_min_password_length.erl
+++ b/deps/rabbit/src/rabbit_credential_validator_min_password_length.erl
@@ -2,12 +2,12 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_credential_validator_min_password_length).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-behaviour(rabbit_credential_validator).
diff --git a/deps/rabbit/src/rabbit_credential_validator_password_regexp.erl b/deps/rabbit/src/rabbit_credential_validator_password_regexp.erl
index dc64cf1d31..f37ef1d1a8 100644
--- a/deps/rabbit/src/rabbit_credential_validator_password_regexp.erl
+++ b/deps/rabbit/src/rabbit_credential_validator_password_regexp.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
@@ -10,7 +10,7 @@
%% password against a pre-configured regular expression.
-module(rabbit_credential_validator_password_regexp).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-behaviour(rabbit_credential_validator).
diff --git a/deps/rabbit/src/rabbit_dead_letter.erl b/deps/rabbit/src/rabbit_dead_letter.erl
index 755de5cf53..f13b409dce 100644
--- a/deps/rabbit/src/rabbit_dead_letter.erl
+++ b/deps/rabbit/src/rabbit_dead_letter.erl
@@ -2,15 +2,15 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_dead_letter).
-export([publish/5]).
--include("rabbit.hrl").
--include("rabbit_framing.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit_framing.hrl").
%%----------------------------------------------------------------------------
@@ -140,7 +140,7 @@ update_x_death_header(Info, Headers) ->
[{table, rabbit_misc:sort_field_table(Info1)} | Others]);
{<<"x-death">>, InvalidType, Header} ->
rabbit_log:warning("Message has invalid x-death header (type: ~p)."
- " Resetting header ~p~n",
+ " Resetting header ~p",
[InvalidType, Header]),
%% if x-death is something other than an array (list)
%% then we reset it: this happens when some clients consume
@@ -247,7 +247,7 @@ log_cycle_once(Queues) ->
true -> ok;
undefined -> rabbit_log:warning(
"Message dropped. Dead-letter queues cycle detected" ++
- ": ~p~nThis cycle will NOT be reported again.~n",
+ ": ~p~nThis cycle will NOT be reported again.",
[Queues]),
put(Key, true)
end.
diff --git a/deps/rabbit/src/rabbit_definitions.erl b/deps/rabbit/src/rabbit_definitions.erl
index 0d0212dbae..152a6be5f2 100644
--- a/deps/rabbit/src/rabbit_definitions.erl
+++ b/deps/rabbit/src/rabbit_definitions.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_definitions).
@@ -10,8 +10,13 @@
-export([boot/0]).
%% automatic import on boot
--export([maybe_load_definitions/0, maybe_load_definitions/2, maybe_load_definitions_from/2,
- has_configured_definitions_to_load/0]).
+-export([
+ maybe_load_definitions/0,
+ maybe_load_definitions/2,
+ maybe_load_definitions_from/2,
+
+ has_configured_definitions_to_load/0
+]).
%% import
-export([import_raw/1, import_raw/2, import_parsed/1, import_parsed/2,
apply_defs/2, apply_defs/3, apply_defs/4, apply_defs/5]).
@@ -25,7 +30,8 @@
]).
-export([decode/1, decode/2, args/1]).
--import(rabbit_misc, [pget/2]).
+-import(rabbit_misc, [pget/2, pget/3]).
+-import(rabbit_data_coercion, [to_binary/1]).
%%
%% API
@@ -58,9 +64,10 @@ boot() ->
rabbit_sup:start_supervisor_child(definition_import_pool_sup, worker_pool_sup, [PoolSize, ?IMPORT_WORK_POOL]).
maybe_load_definitions() ->
- %% Note that management.load_definitions is handled in the plugin for backwards compatibility.
- %% This executes the "core" version of load_definitions.
- maybe_load_definitions(rabbit, load_definitions).
+ %% Classic source: local file or data directory
+ maybe_load_definitions_from_local_filesystem(rabbit, load_definitions),
+ %% Extensible sources
+ maybe_load_definitions_from_pluggable_source(rabbit, definitions).
-spec import_raw(Body :: binary() | iolist()) -> ok | {error, term()}.
import_raw(Body) ->
@@ -125,63 +132,76 @@ all_definitions() ->
exchanges => Xs
}.
+-spec has_configured_definitions_to_load() -> boolean().
+has_configured_definitions_to_load() ->
+ has_configured_definitions_to_load_via_classic_option() or has_configured_definitions_to_load_via_modern_option().
+
+%% Retained for backwards compatibility, implicitly assumes the local filesystem source
+maybe_load_definitions(App, Key) ->
+ maybe_load_definitions_from_local_filesystem(App, Key).
+
+maybe_load_definitions_from(IsDir, Path) ->
+ rabbit_definitions_import_local_filesystem:load(IsDir, Path).
+
%%
%% Implementation
%%
--spec has_configured_definitions_to_load() -> boolean().
-has_configured_definitions_to_load() ->
+-spec has_configured_definitions_to_load_via_modern_option() -> boolean().
+has_configured_definitions_to_load_via_modern_option() ->
+ case application:get_env(rabbit, definitions) of
+ undefined -> false;
+ {ok, none} -> false;
+ {ok, []} -> false;
+ {ok, _Options} -> true
+ end.
+
+has_configured_definitions_to_load_via_classic_option() ->
case application:get_env(rabbit, load_definitions) of
undefined -> false;
{ok, none} -> false;
{ok, _Path} -> true
end.
-maybe_load_definitions(App, Key) ->
+maybe_load_definitions_from_local_filesystem(App, Key) ->
case application:get_env(App, Key) of
- undefined ->
- rabbit_log:debug("No definition file configured to import via load_definitions"),
- ok;
- {ok, none} ->
- rabbit_log:debug("No definition file configured to import via load_definitions"),
- ok;
- {ok, FileOrDir} ->
- rabbit_log:debug("Will import definitions file from load_definitions"),
- IsDir = filelib:is_dir(FileOrDir),
- maybe_load_definitions_from(IsDir, FileOrDir)
+ undefined -> ok;
+ {ok, none} -> ok;
+ {ok, Path} ->
+ IsDir = filelib:is_dir(Path),
+ rabbit_definitions_import_local_filesystem:load(IsDir, Path)
end.
-maybe_load_definitions_from(true, Dir) ->
- rabbit_log:info("Applying definitions from directory ~s", [Dir]),
- load_definitions_from_files(file:list_dir(Dir), Dir);
-maybe_load_definitions_from(false, File) ->
- load_definitions_from_file(File).
-
-load_definitions_from_files({ok, Filenames0}, Dir) ->
- Filenames1 = lists:sort(Filenames0),
- Filenames2 = [filename:join(Dir, F) || F <- Filenames1],
- load_definitions_from_filenames(Filenames2);
-load_definitions_from_files({error, E}, Dir) ->
- rabbit_log:error("Could not read definitions from directory ~s, Error: ~p", [Dir, E]),
- {error, {could_not_read_defs, E}}.
-
-load_definitions_from_filenames([]) ->
- ok;
-load_definitions_from_filenames([File|Rest]) ->
- case load_definitions_from_file(File) of
- ok -> load_definitions_from_filenames(Rest);
- {error, E} -> {error, {failed_to_import_definitions, File, E}}
+maybe_load_definitions_from_pluggable_source(App, Key) ->
+ case application:get_env(App, Key) of
+ undefined -> ok;
+ {ok, none} -> ok;
+ {ok, []} -> ok;
+ {ok, Proplist} ->
+ case pget(import_backend, Proplist, undefined) of
+ undefined ->
+ {error, "definition import source is configured but definitions.import_backend is not set"};
+ ModOrAlias ->
+ Mod = normalize_backend_module(ModOrAlias),
+ rabbit_log:debug("Will use module ~s to import definitions", [Mod]),
+ Mod:load(Proplist)
+ end
end.
-load_definitions_from_file(File) ->
- case file:read_file(File) of
- {ok, Body} ->
- rabbit_log:info("Applying definitions from file at '~s'", [File]),
- import_raw(Body);
- {error, E} ->
- rabbit_log:error("Could not read definitions from file at '~s', error: ~p", [File, E]),
- {error, {could_not_read_defs, {File, E}}}
- end.
+normalize_backend_module(local_filesystem) ->
+ rabbit_definitions_import_local_filesystem;
+normalize_backend_module(local) ->
+ rabbit_definitions_import_local_filesystem;
+normalize_backend_module(https) ->
+ rabbit_definitions_import_https;
+normalize_backend_module(http) ->
+ rabbit_definitions_import_https;
+normalize_backend_module(rabbitmq_definitions_import_local_filesystem) ->
+ rabbit_definitions_import_local_filesystem;
+normalize_backend_module(rabbitmq_definitions_import_https) ->
+ rabbit_definitions_import_https;
+normalize_backend_module(Other) ->
+ Other.
decode(Keys, Body) ->
case decode(Body) of
@@ -236,14 +256,17 @@ apply_defs(Map, ActingUser, SuccessFun) when is_function(SuccessFun) ->
validate_limits(Map),
concurrent_for_all(permissions, ActingUser, Map, fun add_permission/2),
concurrent_for_all(topic_permissions, ActingUser, Map, fun add_topic_permission/2),
- sequential_for_all(parameters, ActingUser, Map, fun add_parameter/2),
+
+ concurrent_for_all(queues, ActingUser, Map, fun add_queue/2),
+ concurrent_for_all(exchanges, ActingUser, Map, fun add_exchange/2),
+ concurrent_for_all(bindings, ActingUser, Map, fun add_binding/2),
+
sequential_for_all(global_parameters, ActingUser, Map, fun add_global_parameter/2),
%% importing policies concurrently can be unsafe as queues will be getting
%% potentially out of order notifications of applicable policy changes
sequential_for_all(policies, ActingUser, Map, fun add_policy/2),
- concurrent_for_all(queues, ActingUser, Map, fun add_queue/2),
- concurrent_for_all(exchanges, ActingUser, Map, fun add_exchange/2),
- concurrent_for_all(bindings, ActingUser, Map, fun add_binding/2),
+ sequential_for_all(parameters, ActingUser, Map, fun add_parameter/2),
+
SuccessFun(),
ok
catch {error, E} -> {error, E};
@@ -260,13 +283,16 @@ apply_defs(Map, ActingUser, SuccessFun, VHost) when is_binary(VHost) ->
[VHost, ActingUser]),
try
validate_limits(Map, VHost),
+
+ concurrent_for_all(queues, ActingUser, Map, VHost, fun add_queue/3),
+ concurrent_for_all(exchanges, ActingUser, Map, VHost, fun add_exchange/3),
+ concurrent_for_all(bindings, ActingUser, Map, VHost, fun add_binding/3),
+
sequential_for_all(parameters, ActingUser, Map, VHost, fun add_parameter/3),
%% importing policies concurrently can be unsafe as queues will be getting
%% potentially out of order notifications of applicable policy changes
sequential_for_all(policies, ActingUser, Map, VHost, fun add_policy/3),
- concurrent_for_all(queues, ActingUser, Map, VHost, fun add_queue/3),
- concurrent_for_all(exchanges, ActingUser, Map, VHost, fun add_exchange/3),
- concurrent_for_all(bindings, ActingUser, Map, VHost, fun add_binding/3),
+
SuccessFun()
catch {error, E} -> {error, format(E)};
exit:E -> {error, format(E)}
@@ -283,13 +309,16 @@ apply_defs(Map, ActingUser, SuccessFun, ErrorFun, VHost) ->
[VHost, ActingUser]),
try
validate_limits(Map, VHost),
+
+ concurrent_for_all(queues, ActingUser, Map, VHost, fun add_queue/3),
+ concurrent_for_all(exchanges, ActingUser, Map, VHost, fun add_exchange/3),
+ concurrent_for_all(bindings, ActingUser, Map, VHost, fun add_binding/3),
+
sequential_for_all(parameters, ActingUser, Map, VHost, fun add_parameter/3),
%% importing policies concurrently can be unsafe as queues will be getting
%% potentially out of order notifications of applicable policy changes
sequential_for_all(policies, ActingUser, Map, VHost, fun add_policy/3),
- concurrent_for_all(queues, ActingUser, Map, VHost, fun add_queue/3),
- concurrent_for_all(exchanges, ActingUser, Map, VHost, fun add_exchange/3),
- concurrent_for_all(bindings, ActingUser, Map, VHost, fun add_binding/3),
+
SuccessFun()
catch {error, E} -> ErrorFun(format(E));
exit:E -> ErrorFun(format(E))
@@ -435,6 +464,10 @@ add_policy(Param, Username) ->
add_policy(VHost, Param, Username) ->
Key = maps:get(name, Param, undefined),
+ case Key of
+ undefined -> exit(rabbit_misc:format("policy in virtual host '~s' has undefined name", [VHost]));
+ _ -> ok
+ end,
case rabbit_policy:set(
VHost, Key, maps:get(pattern, Param, undefined),
case maps:get(definition, Param, undefined) of
@@ -452,11 +485,13 @@ add_policy(VHost, Param, Username) ->
-spec add_vhost(map(), rabbit_types:username()) -> ok.
add_vhost(VHost, ActingUser) ->
- VHostName = maps:get(name, VHost, undefined),
- VHostTrace = maps:get(tracing, VHost, undefined),
- VHostDefinition = maps:get(definition, VHost, undefined),
- VHostTags = maps:get(tags, VHost, undefined),
- rabbit_vhost:put_vhost(VHostName, VHostDefinition, VHostTags, VHostTrace, ActingUser).
+ Name = maps:get(name, VHost, undefined),
+ IsTracingEnabled = maps:get(tracing, VHost, undefined),
+ Metadata = rabbit_data_coercion:atomize_keys(maps:get(metadata, VHost, #{})),
+ Description = maps:get(description, VHost, maps:get(description, Metadata, <<"">>)),
+ Tags = maps:get(tags, VHost, maps:get(tags, Metadata, [])),
+
+ rabbit_vhost:put_vhost(Name, Description, Tags, IsTracingEnabled, ActingUser).
add_permission(Permission, ActingUser) ->
rabbit_auth_backend_internal:set_permissions(maps:get(user, Permission, undefined),
@@ -623,6 +658,7 @@ get_or_missing(K, L) ->
V -> V
end.
+args(undefined) -> args(#{});
args([]) -> args(#{});
args(L) -> rabbit_misc:to_amqp_table(L).
@@ -764,4 +800,4 @@ topic_permission_definition(P0) ->
maps:from_list(P).
tags_as_binaries(Tags) ->
- list_to_binary(string:join([atom_to_list(T) || T <- Tags], ",")).
+ [to_binary(T) || T <- Tags].
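
The rewritten rabbit_definitions module above now recognizes two definition sources: the classic load_definitions path and a pluggable source selected via import_backend, whose aliases are resolved by normalize_backend_module/1. A minimal advanced.config sketch using only the option names that appear in this diff (the file path is illustrative, not a shipped default):

[
 {rabbit, [
   %% classic option: a single definitions file or a directory of files
   %% {load_definitions, "/etc/rabbitmq/definitions.json"},

   %% modern option: a pluggable source; the local_filesystem alias is
   %% normalized to rabbit_definitions_import_local_filesystem
   {definitions, [
     {import_backend, local_filesystem},
     {local_path,     "/etc/rabbitmq/definitions.json"}
   ]}
 ]}
].
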
diff --git a/deps/rabbit/src/rabbit_definitions_import_https.erl b/deps/rabbit/src/rabbit_definitions_import_https.erl
new file mode 100644
index 0000000000..f239400edd
--- /dev/null
+++ b/deps/rabbit/src/rabbit_definitions_import_https.erl
@@ -0,0 +1,84 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(rabbit_definitions_import_https).
+-include_lib("rabbit_common/include/rabbit.hrl").
+
+-export([
+ is_enabled/0,
+ load/1
+]).
+
+
+
+-import(rabbit_misc, [pget/2, pget/3]).
+-import(rabbit_data_coercion, [to_binary/1]).
+-import(rabbit_definitions, [import_raw/1]).
+
+%%
+%% API
+%%
+
+-spec is_enabled() -> boolean().
+is_enabled() ->
+ case application:get_env(rabbit, definitions) of
+ undefined -> false;
+ {ok, none} -> false;
+ {ok, []} -> false;
+ {ok, Proplist} ->
+ case proplists:get_value(import_backend, Proplist, undefined) of
+ undefined -> false;
+ ?MODULE -> true;
+ _ -> false
+ end
+ end.
+
+load(Proplist) ->
+ rabbit_log:debug("Definitions proprties: ~p", [Proplist]),
+ URL = pget(url, Proplist),
+ TLSOptions0 = [
+ %% avoids a peer verification warning emitted by default if no certificate chain and peer verification
+ %% settings are provided: these are not essential in this particular case (client-side downloads that likely
+ %% will happen from a local trusted source)
+ {log_level, error},
+ %% use TLSv1.2 by default
+ {versions, ['tlsv1.2']}
+ ],
+ TLSOptions = pget(ssl_options, Proplist, TLSOptions0),
+ HTTPOptions = [
+ {ssl, TLSOptions}
+ ],
+ load_from_url(URL, HTTPOptions).
+
+
+%%
+%% Implementation
+%%
+
+load_from_url(URL, HTTPOptions0) ->
+ inets:start(),
+ Options = [
+ {body_format, binary}
+ ],
+ HTTPOptions = HTTPOptions0 ++ [
+ {connect_timeout, 120000},
+ {autoredirect, true}
+ ],
+ rabbit_log:info("Applying definitions from remote URL"),
+ case httpc:request(get, {URL, []}, lists:usort(HTTPOptions), Options) of
+ %% 2XX
+ {ok, {{_, Code, _}, _Headers, Body}} when Code div 100 == 2 ->
+ rabbit_log:debug("Requested definitions from remote URL '~s', response code: ~b", [URL, Code]),
+ rabbit_log:debug("Requested definitions from remote URL '~s', body: ~p", [URL, Body]),
+ import_raw(Body);
+ {ok, {{_, Code, _}, _Headers, _Body}} when Code >= 400 ->
+ rabbit_log:debug("Requested definitions from remote URL '~s', response code: ~b", [URL, Code]),
+ {error, {could_not_read_defs, {URL, rabbit_misc:format("URL request failed with response code ~b", [Code])}}};
+ {error, Reason} ->
+ rabbit_log:error("Requested definitions from remote URL '~s', error: ~p", [URL, Reason]),
+ {error, {could_not_read_defs, {URL, Reason}}}
+ end.
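
A hedged sketch of how this new HTTPS source could be configured, based on the keys read by load/1 above (url and ssl_options); the URL and certificate path are placeholders, and the TLS options shown simply override the module's defaults of {log_level, error} and TLSv1.2:

[
 {rabbit, [
   {definitions, [
     {import_backend, https},   %% normalized to rabbit_definitions_import_https
     {url, "https://config.eng.example.local/rabbitmq/definitions.json"},
     {ssl_options, [
       {versions, ['tlsv1.2']},
       {verify,   verify_peer},
       {cacertfile, "/path/to/ca_bundle.pem"}
     ]}
   ]}
 ]}
].
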
diff --git a/deps/rabbit/src/rabbit_definitions_import_local_filesystem.erl b/deps/rabbit/src/rabbit_definitions_import_local_filesystem.erl
new file mode 100644
index 0000000000..896fcaa40e
--- /dev/null
+++ b/deps/rabbit/src/rabbit_definitions_import_local_filesystem.erl
@@ -0,0 +1,140 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(rabbit_definitions_import_local_filesystem).
+-include_lib("rabbit_common/include/rabbit.hrl").
+
+-export([
+ is_enabled/0,
+ %% definition source options
+ load/1,
+ %% classic arguments specific to this source
+ load/2,
+ location/0
+]).
+
+
+
+-import(rabbit_misc, [pget/2, pget/3]).
+-import(rabbit_data_coercion, [to_binary/1]).
+-import(rabbit_definitions, [import_raw/1]).
+
+%%
+%% API
+%%
+
+-spec is_enabled() -> boolean().
+is_enabled() ->
+ is_enabled_via_classic_option() or is_enabled_via_modern_option().
+
+load(Proplist) when is_list(Proplist) ->
+ case pget(local_path, Proplist, undefined) of
+ undefined -> {error, "local definition file path is not configured: local_path is not set"};
+ Path ->
+ rabbit_log:debug("Asked to import definitions from a local file or directory at '~s'", [Path]),
+ case file:read_file_info(Path) of
+ {ok, FileInfo} ->
+ %% same check is used by Cuttlefish validation, this is to be extra defensive
+ IsReadable = (element(4, FileInfo) == read) or (element(4, FileInfo) == read_write),
+ case IsReadable of
+ true ->
+ load_from_single_file(Path);
+ false ->
+ Msg = rabbit_misc:format("local definition file '~s' does not exist or cannot be read by the node", [Path]),
+ {error, Msg}
+ end;
+ _ ->
+ Msg = rabbit_misc:format("local definition file '~s' does not exist or cannot be read by the node", [Path]),
+ {error, {could_not_read_defs, Msg}}
+ end
+ end.
+
+load(IsDir, Path) ->
+ load_from_local_path(IsDir, Path).
+
+location() ->
+ case location_from_classic_option() of
+ undefined -> location_from_modern_option();
+ Value -> Value
+ end.
+
+load_from_local_path(true, Dir) ->
+ rabbit_log:info("Applying definitions from directory ~s", [Dir]),
+ load_from_files(file:list_dir(Dir), Dir);
+load_from_local_path(false, File) ->
+ load_from_single_file(File).
+
+%%
+%% Implementation
+%%
+
+-spec is_enabled_via_classic_option() -> boolean().
+is_enabled_via_classic_option() ->
+ %% Classic way of defining a local filesystem definition source
+ case application:get_env(rabbit, load_definitions) of
+ undefined -> false;
+ {ok, none} -> false;
+ {ok, _Path} -> true
+ end.
+
+-spec is_enabled_via_modern_option() -> boolean().
+is_enabled_via_modern_option() ->
+ %% Modern way of defining a local filesystem definition source
+ case application:get_env(rabbit, definitions) of
+ undefined -> false;
+ {ok, none} -> false;
+ {ok, []} -> false;
+ {ok, Proplist} ->
+ case pget(import_backend, Proplist, undefined) of
+ undefined -> false;
+ ?MODULE -> true;
+ _ -> false
+ end
+ end.
+
+location_from_classic_option() ->
+ case application:get_env(rabbit, load_definitions) of
+ undefined -> undefined;
+ {ok, none} -> undefined;
+ {ok, Path} -> Path
+ end.
+
+location_from_modern_option() ->
+ case application:get_env(rabbit, definitions) of
+ undefined -> undefined;
+ {ok, none} -> undefined;
+ {ok, Proplist} ->
+ pget(local_path, Proplist)
+ end.
+
+
+load_from_files({ok, Filenames0}, Dir) ->
+ Filenames1 = lists:sort(Filenames0),
+ Filenames2 = [filename:join(Dir, F) || F <- Filenames1],
+ load_from_multiple_files(Filenames2);
+load_from_files({error, E}, Dir) ->
+ rabbit_log:error("Could not read definitions from directory ~s, Error: ~p", [Dir, E]),
+ {error, {could_not_read_defs, E}}.
+
+load_from_multiple_files([]) ->
+ ok;
+load_from_multiple_files([File|Rest]) ->
+ case load_from_single_file(File) of
+ ok -> load_from_multiple_files(Rest);
+ {error, E} -> {error, {failed_to_import_definitions, File, E}}
+ end.
+
+load_from_single_file(Path) ->
+ rabbit_log:debug("Will try to load definitions from a local file or directory at '~s'", [Path]),
+ case file:read_file(Path) of
+ {ok, Body} ->
+ rabbit_log:info("Applying definitions from file at '~s'", [Path]),
+ import_raw(Body);
+ {error, E} ->
+ rabbit_log:error("Could not read definitions from file at '~s', error: ~p", [Path, E]),
+ {error, {could_not_read_defs, {Path, E}}}
+ end.
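
For completeness, a brief usage sketch of the two entry points exported above (paths are hypothetical): load/1 takes the definitions proplist and resolves local_path itself, while load/2 preserves the classic IsDir/Path calling convention used by rabbit_definitions:maybe_load_definitions_from/2:

%% pluggable-source form, invoked via the import_backend module
Result1 = rabbit_definitions_import_local_filesystem:load(
            [{local_path, "/etc/rabbitmq/definitions.json"}]).

%% classic form: the caller determines up front whether the path is a directory
Dir = "/etc/rabbitmq/definitions.d".
Result2 = rabbit_definitions_import_local_filesystem:load(filelib:is_dir(Dir), Dir).
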
diff --git a/deps/rabbit/src/rabbit_diagnostics.erl b/deps/rabbit/src/rabbit_diagnostics.erl
index 999596cdc9..1f4a7bdbe0 100644
--- a/deps/rabbit/src/rabbit_diagnostics.erl
+++ b/deps/rabbit/src/rabbit_diagnostics.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_diagnostics).
diff --git a/deps/rabbit/src/rabbit_direct.erl b/deps/rabbit/src/rabbit_direct.erl
index 3fc2d75908..e2d057802b 100644
--- a/deps/rabbit/src/rabbit_direct.erl
+++ b/deps/rabbit/src/rabbit_direct.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_direct).
@@ -18,8 +18,8 @@
%% For testing only
-export([extract_extra_auth_props/4]).
--include("rabbit.hrl").
--include("rabbit_misc.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit_misc.hrl").
%%----------------------------------------------------------------------------
@@ -75,7 +75,7 @@ auth_fun({Username, Password}, VHost, ExtraAuthProps) ->
connect(Creds, VHost, Protocol, Pid, Infos) ->
ExtraAuthProps = extract_extra_auth_props(Creds, VHost, Pid, Infos),
AuthFun = auth_fun(Creds, VHost, ExtraAuthProps),
- case rabbit:is_running() of
+ case rabbit_boot_state:has_reached_and_is_active(core_started) of
true ->
case whereis(rabbit_direct_client_sup) of
undefined ->
diff --git a/deps/rabbit/src/rabbit_direct_reply_to.erl b/deps/rabbit/src/rabbit_direct_reply_to.erl
new file mode 100644
index 0000000000..50e2ee6e81
--- /dev/null
+++ b/deps/rabbit/src/rabbit_direct_reply_to.erl
@@ -0,0 +1,78 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(rabbit_direct_reply_to).
+
+%% API
+-export([
+ %% Original amq.rabbitmq.reply-to target channel encoding
+ compute_key_and_suffix_v1/1,
+ decode_reply_to_v1/1,
+
+ %% v2 amq.rabbitmq.reply-to target channel encoding
+ compute_key_and_suffix_v2/1,
+ decode_reply_to_v2/2
+]).
+
+%%
+%% API
+%%
+
+-type decoded_pid_and_key() :: {ok, pid(), binary()} | {error, any()}.
+
+-spec compute_key_and_suffix_v1(pid()) -> {binary(), binary()}.
+%% This original pid encoding function produces values that exceed routing key length limit
+%% on nodes with long (say, 130+ characters) node names.
+compute_key_and_suffix_v1(Pid) ->
+ Key = base64:encode(rabbit_guid:gen()),
+ PidEnc = base64:encode(term_to_binary(Pid)),
+ Suffix = <<PidEnc/binary, ".", Key/binary>>,
+ {Key, Suffix}.
+
+-spec decode_reply_to_v1(binary()) -> decoded_pid_and_key() | {error, any()}.
+decode_reply_to_v1(Bin) ->
+ case string:lexemes(Bin, ".") of
+ [PidEnc, Key] -> Pid = binary_to_term(base64:decode(PidEnc)),
+ {ok, Pid, unicode:characters_to_binary(Key)};
+ _ -> {error, unrecognized_format}
+ end.
+
+
+-spec compute_key_and_suffix_v2(pid()) -> {binary(), binary()}.
+%% This pid encoding function produces values that are of mostly fixed size
+%% regardless of the node name length.
+compute_key_and_suffix_v2(Pid) ->
+ Key = base64:encode(rabbit_guid:gen()),
+
+ PidParts0 = #{node := Node} = pid_recomposition:decompose(Pid),
+ %% Note: we hash the entire node name. This is sufficient for our purpose of shortening the node name
+ %% in the TTB-encoded pid, and it avoids splitting the node name for every single cluster member
+ %% in rabbit_nodes:all_running_with_hashes/0.
+ %%
+ %% We also use a synthetic node prefix so that the hashed value still forms a name@host-style node name that rabbit_nodes_common:parts/1 can split on the decoding side.
+ NodeHash = erlang:phash2(Node),
+ PidParts = maps:update(node, rabbit_nodes_common:make("reply", integer_to_list(NodeHash)), PidParts0),
+ RecomposedEncoded = base64:encode(pid_recomposition:to_binary(PidParts)),
+
+ Suffix = <<RecomposedEncoded/binary, ".", Key/binary>>,
+ {Key, Suffix}.
+
+-spec decode_reply_to_v2(binary(), #{non_neg_integer() => node()}) -> decoded_pid_and_key() | {error, any()}.
+decode_reply_to_v2(Bin, CandidateNodes) ->
+ case string:lexemes(Bin, ".") of
+ [PidEnc, Key] ->
+ RawPidBin = base64:decode(PidEnc),
+ PidParts0 = #{node := ShortenedNodename} = pid_recomposition:from_binary(RawPidBin),
+ {_, NodeHash} = rabbit_nodes_common:parts(ShortenedNodename),
+ case maps:get(list_to_integer(NodeHash), CandidateNodes, undefined) of
+ undefined -> error;
+ Candidate ->
+ PidParts = maps:update(node, Candidate, PidParts0),
+ {ok, pid_recomposition:recompose(PidParts), unicode:characters_to_binary(Key)}
+ end;
+ _ -> {error, unrecognized_format}
+ end.
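
A minimal round-trip sketch of the v2 encoding introduced above, run against the local node; the candidate map is keyed with erlang:phash2/1 on the node name, mirroring what the encoder does (in the broker this map would come from rabbit_nodes:all_running_with_hashes/0, per the comment in compute_key_and_suffix_v2/1):

Pid = self(),
{Key, Suffix} = rabbit_direct_reply_to:compute_key_and_suffix_v2(Pid),
%% map of node-name hash => node name, as the decoder expects
Candidates = #{erlang:phash2(node(Pid)) => node(Pid)},
{ok, DecodedPid, DecodedKey} = rabbit_direct_reply_to:decode_reply_to_v2(Suffix, Candidates),
true = (DecodedPid =:= Pid) andalso (DecodedKey =:= Key).
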
diff --git a/deps/rabbit/src/rabbit_disk_monitor.erl b/deps/rabbit/src/rabbit_disk_monitor.erl
index 8277794098..28b4cb3eba 100644
--- a/deps/rabbit/src/rabbit_disk_monitor.erl
+++ b/deps/rabbit/src/rabbit_disk_monitor.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_disk_monitor).
@@ -33,6 +33,7 @@
get_disk_free/0, set_enabled/1]).
-define(SERVER, ?MODULE).
+-define(ETS_NAME, ?MODULE).
-define(DEFAULT_MIN_DISK_CHECK_INTERVAL, 100).
-define(DEFAULT_MAX_DISK_CHECK_INTERVAL, 10000).
-define(DEFAULT_DISK_FREE_LIMIT, 50000000).
@@ -66,57 +67,49 @@
%%----------------------------------------------------------------------------
--type disk_free_limit() :: (integer() | string() | {'mem_relative', float() | integer()}).
+-type disk_free_limit() :: integer() | {'absolute', integer()} | string() | {'mem_relative', float() | integer()}.
%%----------------------------------------------------------------------------
%% Public API
%%----------------------------------------------------------------------------
-spec get_disk_free_limit() -> integer().
-
get_disk_free_limit() ->
- gen_server:call(?MODULE, get_disk_free_limit, infinity).
+ safe_ets_lookup(disk_free_limit, ?DEFAULT_DISK_FREE_LIMIT).
-spec set_disk_free_limit(disk_free_limit()) -> 'ok'.
-
set_disk_free_limit(Limit) ->
- gen_server:call(?MODULE, {set_disk_free_limit, Limit}, infinity).
+ gen_server:call(?MODULE, {set_disk_free_limit, Limit}).
-spec get_min_check_interval() -> integer().
-
get_min_check_interval() ->
- gen_server:call(?MODULE, get_min_check_interval, infinity).
+ safe_ets_lookup(min_check_interval, ?DEFAULT_MIN_DISK_CHECK_INTERVAL).
-spec set_min_check_interval(integer()) -> 'ok'.
-
set_min_check_interval(Interval) ->
- gen_server:call(?MODULE, {set_min_check_interval, Interval}, infinity).
+ gen_server:call(?MODULE, {set_min_check_interval, Interval}).
-spec get_max_check_interval() -> integer().
-
get_max_check_interval() ->
- gen_server:call(?MODULE, get_max_check_interval, infinity).
+ safe_ets_lookup(max_check_interval, ?DEFAULT_MAX_DISK_CHECK_INTERVAL).
-spec set_max_check_interval(integer()) -> 'ok'.
-
set_max_check_interval(Interval) ->
- gen_server:call(?MODULE, {set_max_check_interval, Interval}, infinity).
+ gen_server:call(?MODULE, {set_max_check_interval, Interval}).
-spec get_disk_free() -> (integer() | 'unknown').
--spec set_enabled(string()) -> 'ok'.
-
get_disk_free() ->
- gen_server:call(?MODULE, get_disk_free, infinity).
+ safe_ets_lookup(disk_free, unknown).
+-spec set_enabled(string()) -> 'ok'.
set_enabled(Enabled) ->
- gen_server:call(?MODULE, {set_enabled, Enabled}, infinity).
+ gen_server:call(?MODULE, {set_enabled, Enabled}).
%%----------------------------------------------------------------------------
%% gen_server callbacks
%%----------------------------------------------------------------------------
-spec start_link(disk_free_limit()) -> rabbit_types:ok_pid_or_error().
-
start_link(Args) ->
gen_server:start_link({local, ?SERVER}, ?MODULE, [Args], []).
@@ -124,18 +117,16 @@ init([Limit]) ->
Dir = dir(),
{ok, Retries} = application:get_env(rabbit, disk_monitor_failure_retries),
{ok, Interval} = application:get_env(rabbit, disk_monitor_failure_retry_interval),
- State = #state{dir = Dir,
- min_interval = ?DEFAULT_MIN_DISK_CHECK_INTERVAL,
- max_interval = ?DEFAULT_MAX_DISK_CHECK_INTERVAL,
- alarmed = false,
- enabled = true,
- limit = Limit,
- retries = Retries,
- interval = Interval},
- {ok, enable(State)}.
-
-handle_call(get_disk_free_limit, _From, State = #state{limit = Limit}) ->
- {reply, Limit, State};
+ ?ETS_NAME = ets:new(?ETS_NAME, [protected, set, named_table]),
+ State0 = #state{dir = Dir,
+ alarmed = false,
+ enabled = true,
+ limit = Limit,
+ retries = Retries,
+ interval = Interval},
+ State1 = set_min_check_interval(?DEFAULT_MIN_DISK_CHECK_INTERVAL, State0),
+ State2 = set_max_check_interval(?DEFAULT_MAX_DISK_CHECK_INTERVAL, State1),
+ {ok, enable(State2)}.
handle_call({set_disk_free_limit, _}, _From, #state{enabled = false} = State) ->
rabbit_log:info("Cannot set disk free limit: "
@@ -145,20 +136,14 @@ handle_call({set_disk_free_limit, _}, _From, #state{enabled = false} = State) ->
handle_call({set_disk_free_limit, Limit}, _From, State) ->
{reply, ok, set_disk_limits(State, Limit)};
-handle_call(get_min_check_interval, _From, State) ->
- {reply, State#state.min_interval, State};
-
handle_call(get_max_check_interval, _From, State) ->
{reply, State#state.max_interval, State};
handle_call({set_min_check_interval, MinInterval}, _From, State) ->
- {reply, ok, State#state{min_interval = MinInterval}};
+ {reply, ok, set_min_check_interval(MinInterval, State)};
handle_call({set_max_check_interval, MaxInterval}, _From, State) ->
- {reply, ok, State#state{max_interval = MaxInterval}};
-
-handle_call(get_disk_free, _From, State = #state { actual = Actual }) ->
- {reply, Actual, State};
+ {reply, ok, set_max_check_interval(MaxInterval, State)};
handle_call({set_enabled, _Enabled = true}, _From, State) ->
start_timer(set_disk_limits(State, State#state.limit)),
@@ -193,14 +178,36 @@ code_change(_OldVsn, State, _Extra) ->
%% Server Internals
%%----------------------------------------------------------------------------
+safe_ets_lookup(Key, Default) ->
+ try
+ case ets:lookup(?ETS_NAME, Key) of
+ [{Key, Value}] ->
+ Value;
+ [] ->
+ Default
+ end
+ catch
+ error:badarg ->
+ Default
+ end.
+
% the partition / drive containing this directory will be monitored
dir() -> rabbit_mnesia:dir().
+set_min_check_interval(MinInterval, State) ->
+ ets:insert(?ETS_NAME, {min_check_interval, MinInterval}),
+ State#state{min_interval = MinInterval}.
+
+set_max_check_interval(MaxInterval, State) ->
+ ets:insert(?ETS_NAME, {max_check_interval, MaxInterval}),
+ State#state{max_interval = MaxInterval}.
+
set_disk_limits(State, Limit0) ->
Limit = interpret_limit(Limit0),
State1 = State#state { limit = Limit },
- rabbit_log:info("Disk free limit set to ~pMB~n",
+ rabbit_log:info("Disk free limit set to ~pMB",
[trunc(Limit / 1000000)]),
+ ets:insert(?ETS_NAME, {disk_free_limit, Limit}),
internal_update(State1).
internal_update(State = #state { limit = Limit,
@@ -218,7 +225,8 @@ internal_update(State = #state { limit = Limit,
_ ->
ok
end,
- State #state {alarmed = NewAlarmed, actual = CurrentFree}.
+ ets:insert(?ETS_NAME, {disk_free, CurrentFree}),
+ State#state{alarmed = NewAlarmed, actual = CurrentFree}.
get_disk_free(Dir) ->
get_disk_free(Dir, os:type()).
@@ -226,11 +234,89 @@ get_disk_free(Dir) ->
get_disk_free(Dir, {unix, Sun})
when Sun =:= sunos; Sun =:= sunos4; Sun =:= solaris ->
Df = os:find_executable("df"),
- parse_free_unix(rabbit_misc:os_cmd(Df ++ " -k " ++ Dir));
+ parse_free_unix(run_cmd(Df ++ " -k " ++ Dir));
get_disk_free(Dir, {unix, _}) ->
Df = os:find_executable("df"),
- parse_free_unix(rabbit_misc:os_cmd(Df ++ " -kP " ++ Dir));
+ parse_free_unix(run_cmd(Df ++ " -kP " ++ Dir));
get_disk_free(Dir, {win32, _}) ->
+ % Dir:
+ % "c:/Users/username/AppData/Roaming/RabbitMQ/db/rabbit2@username-z01-mnesia"
+ case win32_get_drive_letter(Dir) of
+ error ->
+ rabbit_log:warning("Expected the mnesia directory absolute "
+ "path to start with a drive letter like "
+ "'C:'. The path is: '~p'", [Dir]),
+ case win32_get_disk_free_dir(Dir) of
+ {ok, Free} ->
+ Free;
+ _ -> exit(could_not_determine_disk_free)
+ end;
+ DriveLetter ->
+ case win32_get_disk_free_fsutil(DriveLetter) of
+ {ok, Free0} -> Free0;
+ error ->
+ case win32_get_disk_free_pwsh(DriveLetter) of
+ {ok, Free1} -> Free1;
+ _ -> exit(could_not_determine_disk_free)
+ end
+ end
+ end.
+
+parse_free_unix(Str) ->
+ case string:tokens(Str, "\n") of
+ [_, S | _] -> case string:tokens(S, " \t") of
+ [_, _, _, Free | _] -> list_to_integer(Free) * 1024;
+ _ -> exit({unparseable, Str})
+ end;
+ _ -> exit({unparseable, Str})
+ end.
+
+win32_get_drive_letter([DriveLetter, $:, $/ | _]) when
+ (DriveLetter >= $a andalso DriveLetter =< $z) orelse
+ (DriveLetter >= $A andalso DriveLetter =< $Z) ->
+ DriveLetter;
+win32_get_drive_letter(_) ->
+ error.
+
+win32_get_disk_free_fsutil(DriveLetter) when
+ (DriveLetter >= $a andalso DriveLetter =< $z) orelse
+ (DriveLetter >= $A andalso DriveLetter =< $Z) ->
+ % DriveLetter $c
+ FsutilCmd = "fsutil.exe volume diskfree " ++ [DriveLetter] ++ ":",
+
+ % C:\windows\system32>fsutil volume diskfree c:
+ % Total free bytes : 812,733,878,272 (756.9 GB)
+ % Total bytes : 1,013,310,287,872 (943.7 GB)
+ % Total quota free bytes : 812,733,878,272 (756.9 GB)
+ case run_cmd(FsutilCmd) of
+ {error, timeout} ->
+ error;
+ FsutilResult ->
+ case string:slice(FsutilResult, 0, 5) of
+ "Error" ->
+ error;
+ "Total" ->
+ FirstLine = hd(string:tokens(FsutilResult, "\r\n")),
+ {match, [FreeStr]} = re:run(FirstLine, "(\\d+,?)+", [{capture, first, list}]),
+ {ok, list_to_integer(lists:flatten(string:tokens(FreeStr, ",")))}
+ end
+ end.
+
+win32_get_disk_free_pwsh(DriveLetter) when
+ (DriveLetter >= $a andalso DriveLetter =< $z) orelse
+ (DriveLetter >= $A andalso DriveLetter =< $Z) ->
+ % DriveLetter $c
+ PoshCmd = "powershell.exe -NoLogo -NoProfile -NonInteractive -Command (Get-PSDrive " ++ [DriveLetter] ++ ").Free",
+ case run_cmd(PoshCmd) of
+ {error, timeout} ->
+ error;
+ PoshResultStr ->
+ % Note: remove \r\n
+ PoshResult = string:slice(PoshResultStr, 0, length(PoshResultStr) - 2),
+ {ok, list_to_integer(PoshResult)}
+ end.
+
+win32_get_disk_free_dir(Dir) ->
%% On Windows, the Win32 API enforces a limit of 260 characters
%% (MAX_PATH). If we call `dir` with a path longer than that, it
%% fails with "File not found". Starting with Windows 10 version
@@ -252,26 +338,17 @@ get_disk_free(Dir, {win32, _}) ->
%% See the following page to learn more about this:
%% https://ss64.com/nt/syntax-filenames.html
RawDir = "\\\\?\\" ++ string:replace(Dir, "/", "\\", all),
- parse_free_win32(rabbit_misc:os_cmd("dir /-C /W \"" ++ RawDir ++ "\"")).
-
-parse_free_unix(Str) ->
- case string:tokens(Str, "\n") of
- [_, S | _] -> case string:tokens(S, " \t") of
- [_, _, _, Free | _] -> list_to_integer(Free) * 1024;
- _ -> exit({unparseable, Str})
- end;
- _ -> exit({unparseable, Str})
- end.
-
-parse_free_win32(CommandResult) ->
+ CommandResult = run_cmd("dir /-C /W \"" ++ RawDir ++ "\""),
LastLine = lists:last(string:tokens(CommandResult, "\r\n")),
{match, [Free]} = re:run(lists:reverse(LastLine), "(\\d+)",
[{capture, all_but_first, list}]),
- list_to_integer(lists:reverse(Free)).
+ {ok, list_to_integer(lists:reverse(Free))}.
interpret_limit({mem_relative, Relative})
when is_number(Relative) ->
round(Relative * vm_memory_monitor:get_total_memory());
+interpret_limit({absolute, Absolute}) ->
+ interpret_limit(Absolute);
interpret_limit(Absolute) ->
case rabbit_resource_monitor_misc:parse_information_unit(Absolute) of
{ok, ParsedAbsolute} -> ParsedAbsolute;
@@ -283,7 +360,7 @@ interpret_limit(Absolute) ->
emit_update_info(StateStr, CurrentFree, Limit) ->
rabbit_log:info(
- "Free disk space is ~s. Free bytes: ~p. Limit: ~p~n",
+ "Free disk space is ~s. Free bytes: ~p. Limit: ~p",
[StateStr, CurrentFree, Limit]).
start_timer(State) ->
@@ -306,12 +383,29 @@ enable(#state{dir = Dir, interval = Interval, limit = Limit, retries = Retries}
case {catch get_disk_free(Dir),
vm_memory_monitor:get_total_memory()} of
{N1, N2} when is_integer(N1), is_integer(N2) ->
- rabbit_log:info("Enabling free disk space monitoring~n", []),
+ rabbit_log:info("Enabling free disk space monitoring", []),
start_timer(set_disk_limits(State, Limit));
Err ->
rabbit_log:info("Free disk space monitor encountered an error "
- "(e.g. failed to parse output from OS tools): ~p, retries left: ~b~n",
+ "(e.g. failed to parse output from OS tools): ~p, retries left: ~b",
[Err, Retries]),
erlang:send_after(Interval, self(), try_enable),
State#state{enabled = false}
end.
+
+run_cmd(Cmd) ->
+ Pid = self(),
+ Ref = make_ref(),
+ CmdFun = fun() ->
+ CmdResult = rabbit_misc:os_cmd(Cmd),
+ Pid ! {Pid, Ref, CmdResult}
+ end,
+ CmdPid = spawn(CmdFun),
+ receive
+ {Pid, Ref, CmdResult} ->
+ CmdResult
+ after 5000 ->
+ exit(CmdPid, kill),
+ rabbit_log:error("Command timed out: '~s'", [Cmd]),
+ {error, timeout}
+ end.
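
The monitor above now accepts an explicit {absolute, ...} disk_free_limit form (see interpret_limit/1), alongside the existing string and mem_relative forms, and reads such as get_disk_free/0 are served from the protected ETS table instead of a gen_server call. A small configuration sketch, values illustrative:

[
 {rabbit, [
   %% any one of the following forms is accepted:
   %% {disk_free_limit, "2GB"},                %% string with an information unit
   %% {disk_free_limit, {mem_relative, 1.5}},  %% relative to total system memory
   {disk_free_limit, {absolute, 50000000}}     %% absolute number of bytes
 ]}
].
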
diff --git a/deps/rabbit/src/rabbit_epmd_monitor.erl b/deps/rabbit/src/rabbit_epmd_monitor.erl
index 938826dba6..fc2c91d860 100644
--- a/deps/rabbit/src/rabbit_epmd_monitor.erl
+++ b/deps/rabbit/src/rabbit_epmd_monitor.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_epmd_monitor).
@@ -78,18 +78,16 @@ check_epmd(State = #state{mod = Mod,
me = Me,
host = Host,
port = Port0}) ->
- rabbit_log:debug("Asked to [re-]register this node (~s@~s) with epmd...", [Me, Host]),
{ok, Port1} = handle_port_please(check, Mod:port_please(Me, Host), Me, Port0),
rabbit_nodes:ensure_epmd(),
Mod:register_node(Me, Port1),
- rabbit_log:debug("[Re-]registered this node (~s@~s) with epmd at port ~p", [Me, Host, Port1]),
{ok, State#state{port = Port1}}.
handle_port_please(init, noport, Me, Port) ->
- rabbit_log:info("epmd does not know us, re-registering as ~s~n", [Me]),
+ rabbit_log:info("epmd does not know us, re-registering as ~s", [Me]),
{ok, Port};
handle_port_please(check, noport, Me, Port) ->
- rabbit_log:warning("epmd does not know us, re-registering ~s at port ~b~n", [Me, Port]),
+ rabbit_log:warning("epmd does not know us, re-registering ~s at port ~b", [Me, Port]),
{ok, Port};
handle_port_please(_, closed, _Me, Port) ->
rabbit_log:error("epmd monitor failed to retrieve our port from epmd: closed"),
diff --git a/deps/rabbit/src/rabbit_event_consumer.erl b/deps/rabbit/src/rabbit_event_consumer.erl
index 489d39312e..c9dfe89892 100644
--- a/deps/rabbit/src/rabbit_event_consumer.erl
+++ b/deps/rabbit/src/rabbit_event_consumer.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_event_consumer).
diff --git a/deps/rabbit/src/rabbit_exchange.erl b/deps/rabbit/src/rabbit_exchange.erl
index 129b2b868b..5406b541dc 100644
--- a/deps/rabbit/src/rabbit_exchange.erl
+++ b/deps/rabbit/src/rabbit_exchange.erl
@@ -2,12 +2,12 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_exchange).
--include("rabbit.hrl").
--include("rabbit_framing.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit_framing.hrl").
-export([recover/1, policy_changed/2, callback/4, declare/7,
assert_equivalence/6, assert_args_equivalence/2, check_type/1,
@@ -574,7 +574,7 @@ peek_serial(XName, LockType) ->
end.
invalid_module(T) ->
- rabbit_log:warning("Could not find exchange type ~s.~n", [T]),
+ rabbit_log:warning("Could not find exchange type ~s.", [T]),
put({xtype_to_module, T}, rabbit_exchange_type_invalid),
rabbit_exchange_type_invalid.
diff --git a/deps/rabbit/src/rabbit_exchange_decorator.erl b/deps/rabbit/src/rabbit_exchange_decorator.erl
index 02d0258d3c..79ffda7d9c 100644
--- a/deps/rabbit/src/rabbit_exchange_decorator.erl
+++ b/deps/rabbit/src/rabbit_exchange_decorator.erl
@@ -2,12 +2,12 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_exchange_decorator).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-export([select/2, set/1]).
diff --git a/deps/rabbit/src/rabbit_exchange_parameters.erl b/deps/rabbit/src/rabbit_exchange_parameters.erl
index f9de648cfa..4cee3a49cd 100644
--- a/deps/rabbit/src/rabbit_exchange_parameters.erl
+++ b/deps/rabbit/src/rabbit_exchange_parameters.erl
@@ -2,14 +2,14 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_exchange_parameters).
-behaviour(rabbit_runtime_parameter).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-export([register/0]).
-export([validate/5, notify/5, notify_clear/4]).
diff --git a/deps/rabbit/src/rabbit_exchange_type_direct.erl b/deps/rabbit/src/rabbit_exchange_type_direct.erl
index 3f4350e7b0..a4177dd281 100644
--- a/deps/rabbit/src/rabbit_exchange_type_direct.erl
+++ b/deps/rabbit/src/rabbit_exchange_type_direct.erl
@@ -2,11 +2,11 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_exchange_type_direct).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-behaviour(rabbit_exchange_type).
diff --git a/deps/rabbit/src/rabbit_exchange_type_fanout.erl b/deps/rabbit/src/rabbit_exchange_type_fanout.erl
index a8778cf0c7..e376ebb9e4 100644
--- a/deps/rabbit/src/rabbit_exchange_type_fanout.erl
+++ b/deps/rabbit/src/rabbit_exchange_type_fanout.erl
@@ -2,11 +2,11 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_exchange_type_fanout).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-behaviour(rabbit_exchange_type).
diff --git a/deps/rabbit/src/rabbit_exchange_type_headers.erl b/deps/rabbit/src/rabbit_exchange_type_headers.erl
index e40195de7a..233dd50869 100644
--- a/deps/rabbit/src/rabbit_exchange_type_headers.erl
+++ b/deps/rabbit/src/rabbit_exchange_type_headers.erl
@@ -2,12 +2,12 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_exchange_type_headers).
--include("rabbit.hrl").
--include("rabbit_framing.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit_framing.hrl").
-behaviour(rabbit_exchange_type).
diff --git a/deps/rabbit/src/rabbit_exchange_type_invalid.erl b/deps/rabbit/src/rabbit_exchange_type_invalid.erl
index 3fa27d28e9..1c4552c428 100644
--- a/deps/rabbit/src/rabbit_exchange_type_invalid.erl
+++ b/deps/rabbit/src/rabbit_exchange_type_invalid.erl
@@ -2,11 +2,11 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_exchange_type_invalid).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-behaviour(rabbit_exchange_type).
diff --git a/deps/rabbit/src/rabbit_exchange_type_topic.erl b/deps/rabbit/src/rabbit_exchange_type_topic.erl
index 38b05895f2..775500f133 100644
--- a/deps/rabbit/src/rabbit_exchange_type_topic.erl
+++ b/deps/rabbit/src/rabbit_exchange_type_topic.erl
@@ -2,12 +2,12 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_exchange_type_topic).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-behaviour(rabbit_exchange_type).
diff --git a/deps/rabbit/src/rabbit_feature_flags.erl b/deps/rabbit/src/rabbit_feature_flags.erl
index 921ec9ab53..66e9dd49eb 100644
--- a/deps/rabbit/src/rabbit_feature_flags.erl
+++ b/deps/rabbit/src/rabbit_feature_flags.erl
@@ -2,11 +2,11 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2018-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2018-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% @author The RabbitMQ team
-%% @copyright 2018-2020 VMware, Inc. or its affiliates.
+%% @copyright 2018-2021 VMware, Inc. or its affiliates.
%%
%% @doc
%% This module offers a framework to declare capabilities a RabbitMQ node
diff --git a/deps/rabbit/src/rabbit_ff_extra.erl b/deps/rabbit/src/rabbit_ff_extra.erl
index f0728d491e..99a8820bb7 100644
--- a/deps/rabbit/src/rabbit_ff_extra.erl
+++ b/deps/rabbit/src/rabbit_ff_extra.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% @copyright 2018-2020 VMware, Inc. or its affiliates.
+%% @copyright 2018-2021 VMware, Inc. or its affiliates.
%%
%% @doc
%% This module provides extra functions unused by the feature flags
diff --git a/deps/rabbit/src/rabbit_ff_registry.erl b/deps/rabbit/src/rabbit_ff_registry.erl
index 372971f949..b5cec25501 100644
--- a/deps/rabbit/src/rabbit_ff_registry.erl
+++ b/deps/rabbit/src/rabbit_ff_registry.erl
@@ -2,11 +2,11 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2018-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2018-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% @author The RabbitMQ team
-%% @copyright 2018-2020 VMware, Inc. or its affiliates.
+%% @copyright 2018-2021 VMware, Inc. or its affiliates.
%%
%% @doc
%% This module exposes the API of the {@link rabbit_feature_flags}
diff --git a/deps/rabbit/src/rabbit_fhc_helpers.erl b/deps/rabbit/src/rabbit_fhc_helpers.erl
index d310e84008..5ada16b55d 100644
--- a/deps/rabbit/src/rabbit_fhc_helpers.erl
+++ b/deps/rabbit/src/rabbit_fhc_helpers.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_fhc_helpers).
diff --git a/deps/rabbit/src/rabbit_fifo.erl b/deps/rabbit/src/rabbit_fifo.erl
index 51acfffd0d..e7efb21f61 100644
--- a/deps/rabbit/src/rabbit_fifo.erl
+++ b/deps/rabbit/src/rabbit_fifo.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_fifo).
@@ -41,6 +41,7 @@
query_single_active_consumer/1,
query_in_memory_usage/1,
query_peek/2,
+ query_notify_decorators_info/1,
usage/1,
zero/1,
@@ -241,7 +242,7 @@ apply(Meta, #credit{credit = NewCredit, delivery_count = RemoteDelCnt,
{State1, ok, Effects} =
checkout(Meta, State0,
State0#?MODULE{service_queue = ServiceQueue,
- consumers = Cons}, []),
+ consumers = Cons}, [], false),
Response = {send_credit_reply, messages_ready(State1)},
%% by this point all checkouts for the updated credit value
%% should be processed so we can evaluate the drain
@@ -299,7 +300,8 @@ apply(#{index := Index,
Exists = maps:is_key(ConsumerId, Consumers),
case messages_ready(State0) of
0 ->
- {State0, {dequeue, empty}};
+ update_smallest_raft_index(Index, {dequeue, empty}, State0,
+ [notify_decorators_effect(State0)]);
_ when Exists ->
%% a dequeue using the same consumer_id isn't possible at this point
{State0, {dequeue, empty}};
@@ -330,18 +332,21 @@ apply(#{index := Index,
{{dequeue, {MsgId, Msg}, Ready-1}, Effects1}
end,
-
- case evaluate_limit(Index, false, State0, State4, Effects2) of
+ NotifyEffect = notify_decorators_effect(State4),
+ case evaluate_limit(Index, false, State0, State4, [NotifyEffect | Effects2]) of
{State, true, Effects} ->
update_smallest_raft_index(Index, Reply, State, Effects);
{State, false, Effects} ->
{State, Reply, Effects}
end
end;
-apply(Meta, #checkout{spec = cancel, consumer_id = ConsumerId}, State0) ->
- {State, Effects} = cancel_consumer(Meta, ConsumerId, State0, [],
- consumer_cancel),
- checkout(Meta, State0, State, Effects);
+apply(#{index := Idx} = Meta,
+ #checkout{spec = cancel,
+ consumer_id = ConsumerId}, State0) ->
+ {State1, Effects1} = cancel_consumer(Meta, ConsumerId, State0, [],
+ consumer_cancel),
+ {State, Reply, Effects} = checkout(Meta, State0, State1, Effects1),
+ update_smallest_raft_index(Idx, Reply, State, Effects);
apply(Meta, #checkout{spec = Spec, meta = ConsumerMeta,
consumer_id = {_, Pid} = ConsumerId},
State0) ->
@@ -368,10 +373,10 @@ apply(#{index := Index}, #purge{},
Effects0 = [garbage_collection],
Reply = {purge, Total},
{State, _, Effects} = evaluate_limit(Index, false, State0,
- State1, Effects0),
+ State1, Effects0),
update_smallest_raft_index(Index, Reply, State, Effects);
-apply(_Meta, #garbage_collection{}, State) ->
- {State, ok, [{aux, garbage_collection}]};
+apply(#{index := Idx}, #garbage_collection{}, State) ->
+ update_smallest_raft_index(Idx, ok, State, [{aux, garbage_collection}]);
apply(#{system_time := Ts} = Meta, {down, Pid, noconnection},
#?MODULE{consumers = Cons0,
cfg = #cfg{consumer_strategy = single_active},
@@ -456,6 +461,7 @@ apply(#{system_time := Ts} = Meta, {down, Pid, noconnection},
% Monitor the node so that we can "unsuspect" these processes when the node
% comes back, then re-issue all monitors and discover the final fate of
% these processes
+
Effects = case maps:size(State#?MODULE.consumers) of
0 ->
[{aux, inactive}, {monitor, node, Node}];
@@ -503,15 +509,20 @@ apply(Meta, {nodeup, Node}, #?MODULE{consumers = Cons0,
checkout(Meta, State0, State, Effects);
apply(_, {nodedown, _Node}, State) ->
{State, ok};
-apply(Meta, #purge_nodes{nodes = Nodes}, State0) ->
+apply(#{index := Idx} = Meta, #purge_nodes{nodes = Nodes}, State0) ->
{State, Effects} = lists:foldl(fun(Node, {S, E}) ->
purge_node(Meta, Node, S, E)
end, {State0, []}, Nodes),
- {State, ok, Effects};
-apply(Meta, #update_config{config = Conf}, State) ->
- checkout(Meta, State, update_config(Conf, State), []);
+ update_smallest_raft_index(Idx, ok, State, Effects);
+apply(#{index := Idx} = Meta, #update_config{config = Conf}, State0) ->
+ {State, Reply, Effects} = checkout(Meta, State0, update_config(Conf, State0), []),
+ update_smallest_raft_index(Idx, Reply, State, Effects);
apply(_Meta, {machine_version, 0, 1}, V0State) ->
State = convert_v0_to_v1(V0State),
+ {State, ok, []};
+apply(_Meta, Cmd, State) ->
+ %% handle unhandled commands gracefully
+ rabbit_log:debug("rabbit_fifo: unhandled command ~W", [Cmd, 10]),
{State, ok, []}.
convert_v0_to_v1(V0State0) ->
@@ -532,7 +543,7 @@ convert_v0_to_v1(V0State0) ->
list_to_tuple(tuple_to_list(C0) ++ [0])
end, V0Cons),
V0SQ = rabbit_fifo_v0:get_field(service_queue, V0State),
- V1SQ = priority_queue:from_list(queue:to_list(V0SQ)),
+ V1SQ = priority_queue:from_list([{0, C} || C <- queue:to_list(V0SQ)]),
Cfg = #cfg{name = rabbit_fifo_v0:get_cfg_field(name, V0State),
resource = rabbit_fifo_v0:get_cfg_field(resource, V0State),
release_cursor_interval = rabbit_fifo_v0:get_cfg_field(release_cursor_interval, V0State),
@@ -673,7 +684,8 @@ state_enter(eol, #?MODULE{enqueuers = Enqs,
AllConsumers = maps:merge(Custs, WaitingConsumers1),
[{send_msg, P, eol, ra_event}
|| P <- maps:keys(maps:merge(Enqs, AllConsumers))] ++
- [{mod_call, rabbit_quorum_queue, file_handle_release_reservation, []}];
+ [{aux, eol},
+ {mod_call, rabbit_quorum_queue, file_handle_release_reservation, []}];
state_enter(State, #?MODULE{cfg = #cfg{resource = _Resource}}) when State =/= leader ->
FHReservation = {mod_call, rabbit_quorum_queue, file_handle_other_reservation, []},
[FHReservation];
@@ -709,6 +721,7 @@ overview(#?MODULE{consumers = Cons,
enqueue_count = EnqCount,
msg_bytes_enqueue = EnqueueBytes,
msg_bytes_checkout = CheckoutBytes,
+ ra_indexes = Indexes,
cfg = Cfg} = State) ->
Conf = #{name => Cfg#cfg.name,
resource => Cfg#cfg.resource,
@@ -722,18 +735,21 @@ overview(#?MODULE{consumers = Cons,
expires => Cfg#cfg.expires,
delivery_limit => Cfg#cfg.delivery_limit
},
+ Smallest = rabbit_fifo_index:smallest(Indexes),
#{type => ?MODULE,
config => Conf,
num_consumers => maps:size(Cons),
num_checked_out => num_checked_out(State),
num_enqueuers => maps:size(Enqs),
num_ready_messages => messages_ready(State),
+ num_pending_messages => messages_pending(State),
num_messages => messages_total(State),
num_release_cursors => lqueue:len(Cursors),
release_cursors => [I || {_, I, _} <- lqueue:to_list(Cursors)],
release_cursor_enqueue_counter => EnqCount,
enqueue_message_bytes => EnqueueBytes,
- checkout_message_bytes => CheckoutBytes}.
+ checkout_message_bytes => CheckoutBytes,
+ smallest_raft_index => Smallest}.
-spec get_checked_out(consumer_id(), msg_id(), msg_id(), state()) ->
[delivery_msg()].
@@ -755,7 +771,7 @@ which_module(1) -> ?MODULE.
-record(aux_gc, {last_raft_idx = 0 :: ra:index()}).
-record(aux, {name :: atom(),
- utilisation :: term(),
+ capacity :: term(),
gc = #aux_gc{} :: #aux_gc{}}).
init_aux(Name) when is_atom(Name) ->
@@ -765,27 +781,43 @@ init_aux(Name) when is_atom(Name) ->
{write_concurrency, true}]),
Now = erlang:monotonic_time(micro_seconds),
#aux{name = Name,
- utilisation = {inactive, Now, 1, 1.0}}.
+ capacity = {inactive, Now, 1, 1.0}}.
-handle_aux(leader, _, garbage_collection, State, Log, _MacState) ->
- ra_log_wal:force_roll_over(ra_log_wal),
- {no_reply, State, Log};
+handle_aux(leader, _, garbage_collection, State, Log, MacState) ->
+ % ra_log_wal:force_roll_over(ra_log_wal),
+ {no_reply, force_eval_gc(Log, MacState, State), Log};
handle_aux(follower, _, garbage_collection, State, Log, MacState) ->
- ra_log_wal:force_roll_over(ra_log_wal),
+ % ra_log_wal:force_roll_over(ra_log_wal),
{no_reply, force_eval_gc(Log, MacState, State), Log};
handle_aux(_RaState, cast, eval, Aux0, Log, _MacState) ->
{no_reply, Aux0, Log};
-handle_aux(_RaState, cast, Cmd, #aux{utilisation = Use0} = Aux0,
+handle_aux(_RaState, cast, Cmd, #aux{capacity = Use0} = Aux0,
Log, _MacState)
when Cmd == active orelse Cmd == inactive ->
- {no_reply, Aux0#aux{utilisation = update_use(Use0, Cmd)}, Log};
+ {no_reply, Aux0#aux{capacity = update_use(Use0, Cmd)}, Log};
handle_aux(_RaState, cast, tick, #aux{name = Name,
- utilisation = Use0} = State0,
+ capacity = Use0} = State0,
Log, MacState) ->
true = ets:insert(rabbit_fifo_usage,
- {Name, utilisation(Use0)}),
+ {Name, capacity(Use0)}),
Aux = eval_gc(Log, MacState, State0),
{no_reply, Aux, Log};
+handle_aux(_RaState, cast, eol, #aux{name = Name} = Aux, Log, _) ->
+ ets:delete(rabbit_fifo_usage, Name),
+ {no_reply, Aux, Log};
+handle_aux(_RaState, {call, _From}, oldest_entry_timestamp, Aux,
+ Log, #?MODULE{ra_indexes = Indexes}) ->
+ Ts = case rabbit_fifo_index:smallest(Indexes) of
+ %% if there are no entries, we return the current timestamp
+ %% so that any previously obtained entries are considered older than this
+ undefined ->
+ erlang:system_time(millisecond);
+ Idx when is_integer(Idx) ->
+ {{_, _, {_, Meta, _, _}}, _Log1} = ra_log:fetch(Idx, Log),
+ #{ts := Timestamp} = Meta,
+ Timestamp
+ end,
+ {reply, {ok, Ts}, Aux, Log};
handle_aux(_RaState, {call, _From}, {peek, Pos}, Aux0,
Log0, MacState) ->
case rabbit_fifo:query_peek(Pos, MacState) of
@@ -955,6 +987,21 @@ query_peek(Pos, State0) when Pos > 0 ->
query_peek(Pos-1, State)
end.
+query_notify_decorators_info(#?MODULE{consumers = Consumers} = State) ->
+ MaxActivePriority = maps:fold(fun(_, #consumer{credit = C,
+ status = up,
+ priority = P0}, MaxP) when C > 0 ->
+ P = -P0,
+ case MaxP of
+ empty -> P;
+ MaxP when MaxP > P -> MaxP;
+ _ -> P
+ end;
+ (_, _, MaxP) ->
+ MaxP
+ end, empty, Consumers),
+ IsEmpty = (messages_ready(State) == 0),
+ {MaxActivePriority, IsEmpty}.
-spec usage(atom()) -> float().
usage(Name) when is_atom(Name) ->
@@ -965,6 +1012,11 @@ usage(Name) when is_atom(Name) ->
%%% Internal
+messages_pending(#?MODULE{enqueuers = Enqs}) ->
+ maps:fold(fun(_, #enqueuer{pending = P}, Acc) ->
+ length(P) + Acc
+ end, 0, Enqs).
+
messages_ready(#?MODULE{messages = M,
prefix_msgs = {RCnt, _R, PCnt, _P},
returns = R}) ->
@@ -987,9 +1039,11 @@ update_use({inactive, Since, Active, Avg}, active) ->
Now = erlang:monotonic_time(micro_seconds),
{active, Now, use_avg(Active, Now - Since, Avg)}.
-utilisation({active, Since, Avg}) ->
+capacity({active, Since, Avg}) ->
use_avg(erlang:monotonic_time(micro_seconds) - Since, 0, Avg);
-utilisation({inactive, Since, Active, Avg}) ->
+capacity({inactive, _, 1, 1.0}) ->
+ 1.0;
+capacity({inactive, Since, Active, Avg}) ->
use_avg(Active, erlang:monotonic_time(micro_seconds) - Since, Avg).
use_avg(0, 0, Avg) ->
@@ -1058,11 +1112,13 @@ cancel_consumer0(Meta, ConsumerId,
#{ConsumerId := Consumer} ->
{S, Effects2} = maybe_return_all(Meta, ConsumerId, Consumer,
S0, Effects0, Reason),
+
%% The effects are emitted before the consumer is actually removed
%% if the consumer has unacked messages. This is a bit weird but
%% in line with what classic queues do (from an external point of
%% view)
Effects = cancel_consumer_effects(ConsumerId, S, Effects2),
+
case maps:size(S#?MODULE.consumers) of
0 ->
{S, [{aux, inactive} | Effects]};
@@ -1125,7 +1181,7 @@ apply_enqueue(#{index := RaftIdx} = Meta, From, Seq, RawMsg, State0) ->
case maybe_enqueue(RaftIdx, From, Seq, RawMsg, [], State0) of
{ok, State1, Effects1} ->
State2 = append_to_master_index(RaftIdx, State1),
- {State, ok, Effects} = checkout(Meta, State0, State2, Effects1),
+ {State, ok, Effects} = checkout(Meta, State0, State2, Effects1, false),
{maybe_store_dehydrated_state(RaftIdx, State), ok, Effects};
{duplicate, State, Effects} ->
{State, ok, Effects}
@@ -1163,7 +1219,8 @@ enqueue(RaftIdx, RawMsg, #?MODULE{messages = Messages,
case evaluate_memory_limit(Header, State0) of
true ->
% indexed message with header map
- {State0, {RaftIdx, {Header, 'empty'}}};
+ {State0,
+ {RaftIdx, {Header, 'empty'}}};
false ->
{add_in_memory_counts(Header, State0),
{RaftIdx, {Header, RawMsg}}} % indexed message with header map
@@ -1250,7 +1307,7 @@ maybe_enqueue(RaftIdx, From, MsgSeqNo, RawMsg, Effects0,
#enqueuer{next_seqno = Next,
pending = Pending0} = Enq0
when MsgSeqNo > Next ->
- % out of order delivery
+ % out of order enqueue
Pending = [{MsgSeqNo, RaftIdx, RawMsg} | Pending0],
Enq = Enq0#enqueuer{pending = lists:sort(Pending)},
{ok, State0#?MODULE{enqueuers = Enqueuers0#{From => Enq}}, Effects0};
@@ -1283,7 +1340,7 @@ return(#{index := IncomingRaftIdx} = Meta, ConsumerId, Returned,
_ ->
State1
end,
- {State, ok, Effects} = checkout(Meta, State0, State2, Effects1),
+ {State, ok, Effects} = checkout(Meta, State0, State2, Effects1, false),
update_smallest_raft_index(IncomingRaftIdx, State, Effects).
% used to process messages that are finished
@@ -1325,9 +1382,9 @@ complete_and_checkout(#{index := IncomingRaftIdx} = Meta, MsgIds, ConsumerId,
#consumer{checked_out = Checked0} = Con0,
Effects0, State0) ->
Discarded = maps:with(MsgIds, Checked0),
- {State2, Effects1} = complete(Meta, ConsumerId, Discarded, Con0,
+ {State1, Effects1} = complete(Meta, ConsumerId, Discarded, Con0,
Effects0, State0),
- {State, ok, Effects} = checkout(Meta, State0, State2, Effects1),
+ {State, ok, Effects} = checkout(Meta, State0, State1, Effects1, false),
update_smallest_raft_index(IncomingRaftIdx, State, Effects).
dead_letter_effects(_Reason, _Discarded,
@@ -1359,9 +1416,10 @@ dead_letter_effects(Reason, Discarded,
end} | Effects].
cancel_consumer_effects(ConsumerId,
- #?MODULE{cfg = #cfg{resource = QName}}, Effects) ->
+ #?MODULE{cfg = #cfg{resource = QName}} = State, Effects) ->
[{mod_call, rabbit_quorum_queue,
- cancel_consumer_handler, [QName, ConsumerId]} | Effects].
+ cancel_consumer_handler, [QName, ConsumerId]},
+ notify_decorators_effect(State) | Effects].
update_smallest_raft_index(Idx, State, Effects) ->
update_smallest_raft_index(Idx, ok, State, Effects).
@@ -1496,39 +1554,55 @@ return_all(Meta, #?MODULE{consumers = Cons} = State0, Effects0, ConsumerId,
end, {State, Effects0}, Checked).
%% checkout new messages to consumers
-checkout(#{index := Index} = Meta, OldState, State0, Effects0) ->
+checkout(Meta, OldState, State, Effects) ->
+ checkout(Meta, OldState, State, Effects, true).
+
+checkout(#{index := Index} = Meta, #?MODULE{cfg = #cfg{resource = QName}} = OldState, State0,
+ Effects0, HandleConsumerChanges) ->
{State1, _Result, Effects1} = checkout0(Meta, checkout_one(Meta, State0),
- Effects0, {#{}, #{}}),
+ Effects0, #{}),
case evaluate_limit(Index, false, OldState, State1, Effects1) of
{State, true, Effects} ->
- update_smallest_raft_index(Index, State, Effects);
+ case maybe_notify_decorators(State, HandleConsumerChanges) of
+ {true, {MaxActivePriority, IsEmpty}} ->
+ NotifyEffect = notify_decorators_effect(QName, MaxActivePriority, IsEmpty),
+ update_smallest_raft_index(Index, State, [NotifyEffect | Effects]);
+ false ->
+ update_smallest_raft_index(Index, State, Effects)
+ end;
{State, false, Effects} ->
- {State, ok, Effects}
+ case maybe_notify_decorators(State, HandleConsumerChanges) of
+ {true, {MaxActivePriority, IsEmpty}} ->
+ NotifyEffect = notify_decorators_effect(QName, MaxActivePriority, IsEmpty),
+ {State, ok, [NotifyEffect | Effects]};
+ false ->
+ {State, ok, Effects}
+ end
end.
checkout0(Meta, {success, ConsumerId, MsgId, {RaftIdx, {Header, 'empty'}}, State},
- Effects, {SendAcc, LogAcc0}) ->
+ Effects, SendAcc0) ->
DelMsg = {RaftIdx, {MsgId, Header}},
- LogAcc = maps:update_with(ConsumerId,
- fun (M) -> [DelMsg | M] end,
- [DelMsg], LogAcc0),
- checkout0(Meta, checkout_one(Meta, State), Effects, {SendAcc, LogAcc});
+ SendAcc = maps:update_with(ConsumerId,
+ fun ({InMem, LogMsgs}) ->
+ {InMem, [DelMsg | LogMsgs]}
+ end, {[], [DelMsg]}, SendAcc0),
+ checkout0(Meta, checkout_one(Meta, State), Effects, SendAcc);
checkout0(Meta, {success, ConsumerId, MsgId, Msg, State}, Effects,
- {SendAcc0, LogAcc}) ->
+ SendAcc0) ->
DelMsg = {MsgId, Msg},
SendAcc = maps:update_with(ConsumerId,
- fun (M) -> [DelMsg | M] end,
- [DelMsg], SendAcc0),
- checkout0(Meta, checkout_one(Meta, State), Effects, {SendAcc, LogAcc});
-checkout0(_Meta, {Activity, State0}, Effects0, {SendAcc, LogAcc}) ->
+ fun ({InMem, LogMsgs}) ->
+ {[DelMsg | InMem], LogMsgs}
+ end, {[DelMsg], []}, SendAcc0),
+ checkout0(Meta, checkout_one(Meta, State), Effects, SendAcc);
+checkout0(_Meta, {Activity, State0}, Effects0, SendAcc) ->
Effects1 = case Activity of
nochange ->
- append_send_msg_effects(
- append_log_effects(Effects0, LogAcc), SendAcc);
+ append_delivery_effects(Effects0, SendAcc);
inactive ->
[{aux, inactive}
- | append_send_msg_effects(
- append_log_effects(Effects0, LogAcc), SendAcc)]
+ | append_delivery_effects(Effects0, SendAcc)]
end,
{State0, ok, lists:reverse(Effects1)}.
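
checkout0/4 above now threads a single accumulator map instead of the former {SendAcc, LogAcc} pair: one entry per consumer id, holding a {InMemMsgs, LogMsgs} tuple. A small shell sketch of how maps:update_with/4 builds that shape (consumer ids and message terms are fabricated):

    Acc0 = #{},
    %% an in-memory delivery for consumer c1
    Acc1 = maps:update_with(c1,
                            fun ({InMem, Log}) -> {[m1 | InMem], Log} end,
                            {[m1], []}, Acc0),
    %% a log-indexed delivery for the same consumer
    Acc2 = maps:update_with(c1,
                            fun ({InMem, Log}) -> {InMem, [idx42 | Log]} end,
                            {[], [idx42]}, Acc1),
    #{c1 := {[m1], [idx42]}} = Acc2.
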
@@ -1598,18 +1672,14 @@ evaluate_memory_limit(Size,
when is_integer(Size) ->
(Length >= MaxLength) orelse ((Bytes + Size) > MaxBytes).
-append_send_msg_effects(Effects, AccMap) when map_size(AccMap) == 0 ->
- Effects;
-append_send_msg_effects(Effects0, AccMap) ->
- Effects = maps:fold(fun (C, Msgs, Ef) ->
- [send_msg_effect(C, lists:reverse(Msgs)) | Ef]
- end, Effects0, AccMap),
- [{aux, active} | Effects].
-
-append_log_effects(Effects0, AccMap) ->
- maps:fold(fun (C, Msgs, Ef) ->
- [send_log_effect(C, lists:reverse(Msgs)) | Ef]
- end, Effects0, AccMap).
+append_delivery_effects(Effects0, AccMap) when map_size(AccMap) == 0 ->
+ %% does this ever happen?
+ Effects0;
+append_delivery_effects(Effects0, AccMap) ->
+ [{aux, active} |
+ maps:fold(fun (C, {InMemMsgs, LogMsgs}, Ef) ->
+ [delivery_effect(C, lists:reverse(LogMsgs), InMemMsgs) | Ef]
+ end, Effects0, AccMap)].
%% next message is determined as follows:
%% First we check if there are prefix returns
@@ -1659,16 +1729,22 @@ take_next_msg(#?MODULE{returns = Returns,
end
end.
-send_msg_effect({CTag, CPid}, Msgs) ->
- {send_msg, CPid, {delivery, CTag, Msgs}, [local, ra_event]}.
-
-send_log_effect({CTag, CPid}, IdxMsgs) ->
+delivery_effect({CTag, CPid}, [], InMemMsgs) ->
+ {send_msg, CPid, {delivery, CTag, lists:reverse(InMemMsgs)},
+ [local, ra_event]};
+delivery_effect({CTag, CPid}, IdxMsgs, InMemMsgs) ->
{RaftIdxs, Data} = lists:unzip(IdxMsgs),
{log, RaftIdxs,
fun(Log) ->
- Msgs = lists:zipwith(fun ({enqueue, _, _, Msg}, {MsgId, Header}) ->
- {MsgId, {Header, Msg}}
- end, Log, Data),
+ Msgs0 = lists:zipwith(fun ({enqueue, _, _, Msg}, {MsgId, Header}) ->
+ {MsgId, {Header, Msg}}
+ end, Log, Data),
+ Msgs = case InMemMsgs of
+ [] ->
+ Msgs0;
+ _ ->
+ lists:sort(InMemMsgs ++ Msgs0)
+ end,
[{send_msg, CPid, {delivery, CTag, Msgs}, [local, ra_event]}]
end,
{local, node(CPid)}}.
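
delivery_effect/3 above either sends in-memory messages directly or emits a {log, RaftIdxs, Fun, {local, Node}} effect whose closure rebuilds the delivery from the fetched enqueue commands and merges any in-memory messages back in by message id. A sketch of what that closure computes once the log entries are available (all data below is fabricated for illustration):

    %% Pretend the log read returned these enqueue commands for two raft indexes.
    LogEntries = [{enqueue, pid, 1, <<"payload-a">>},
                  {enqueue, pid, 2, <<"payload-b">>}],
    Data = [{0, #{size => 9}}, {2, #{size => 9}}],          %% {MsgId, Header} pairs
    InMemMsgs = [{1, {#{size => 9}, <<"payload-c">>}}],
    Msgs0 = lists:zipwith(fun ({enqueue, _, _, Msg}, {MsgId, Header}) ->
                                  {MsgId, {Header, Msg}}
                          end, LogEntries, Data),
    Msgs = lists:sort(InMemMsgs ++ Msgs0),
    %% the merged delivery is ordered by message id: 0, 1, 2
    [{0, _}, {1, _}, {2, _}] = Msgs.
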
@@ -1684,25 +1760,26 @@ checkout_one(Meta, #?MODULE{service_queue = SQ0,
messages = Messages0,
consumers = Cons0} = InitState) ->
case priority_queue:out(SQ0) of
- {{value, ConsumerId}, SQ1} ->
+ {{value, ConsumerId}, SQ1}
+ when is_map_key(ConsumerId, Cons0) ->
case take_next_msg(InitState) of
{ConsumerMsg, State0} ->
%% there are consumers waiting to be serviced
%% process consumer checkout
- case maps:find(ConsumerId, Cons0) of
- {ok, #consumer{credit = 0}} ->
+ case maps:get(ConsumerId, Cons0) of
+ #consumer{credit = 0} ->
%% no credit but was still on queue
%% can happen when draining
%% recurse without consumer on queue
checkout_one(Meta, InitState#?MODULE{service_queue = SQ1});
- {ok, #consumer{status = cancelled}} ->
+ #consumer{status = cancelled} ->
checkout_one(Meta, InitState#?MODULE{service_queue = SQ1});
- {ok, #consumer{status = suspected_down}} ->
+ #consumer{status = suspected_down} ->
checkout_one(Meta, InitState#?MODULE{service_queue = SQ1});
- {ok, #consumer{checked_out = Checked0,
- next_msg_id = Next,
- credit = Credit,
- delivery_count = DelCnt} = Con0} ->
+ #consumer{checked_out = Checked0,
+ next_msg_id = Next,
+ credit = Credit,
+ delivery_count = DelCnt} = Con0 ->
Checked = maps:put(Next, ConsumerMsg, Checked0),
Con = Con0#consumer{checked_out = Checked,
next_msg_id = Next + 1,
@@ -1729,14 +1806,14 @@ checkout_one(Meta, #?MODULE{service_queue = SQ0,
add_bytes_checkout(Header, State1)),
M}
end,
- {success, ConsumerId, Next, Msg, State};
- error ->
- %% consumer did not exist but was queued, recurse
- checkout_one(Meta, InitState#?MODULE{service_queue = SQ1})
+ {success, ConsumerId, Next, Msg, State}
end;
empty ->
{nochange, InitState}
end;
+ {{value, _ConsumerId}, SQ1} ->
+ %% consumer did not exist but was queued, recurse
+ checkout_one(Meta, InitState#?MODULE{service_queue = SQ1});
{empty, _} ->
case lqueue:len(Messages0) of
0 -> {nochange, InitState};
@@ -2122,3 +2199,16 @@ get_priority_from_args(#{args := Args}) ->
end;
get_priority_from_args(_) ->
0.
+
+maybe_notify_decorators(_, false) ->
+ false;
+maybe_notify_decorators(State, _) ->
+ {true, query_notify_decorators_info(State)}.
+
+notify_decorators_effect(#?MODULE{cfg = #cfg{resource = QName}} = State) ->
+ {MaxActivePriority, IsEmpty} = query_notify_decorators_info(State),
+ notify_decorators_effect(QName, MaxActivePriority, IsEmpty).
+
+notify_decorators_effect(QName, MaxActivePriority, IsEmpty) ->
+ {mod_call, rabbit_quorum_queue, spawn_notify_decorators,
+ [QName, consumer_state_changed, [MaxActivePriority, IsEmpty]]}.
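
The decorator notification added here is expressed as a ra mod_call effect. A hedged sketch of the effect term the new helpers build, for a hypothetical quorum queue named qq.1 in the default vhost (rabbit_misc:r/3 is assumed to be the usual #resource{} constructor from rabbit_common):

    QName = rabbit_misc:r(<<"/">>, queue, <<"qq.1">>),
    MaxActivePriority = 0,
    IsEmpty = true,
    {mod_call, rabbit_quorum_queue, spawn_notify_decorators,
     [QName, consumer_state_changed, [MaxActivePriority, IsEmpty]]}.

ra is expected to evaluate a mod_call effect by applying the module, function and argument list, which would end up invoking rabbit_quorum_queue:spawn_notify_decorators/3 outside of the state machine itself.
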
diff --git a/deps/rabbit/src/rabbit_fifo_client.erl b/deps/rabbit/src/rabbit_fifo_client.erl
index 3990222b15..7f2cd55acc 100644
--- a/deps/rabbit/src/rabbit_fifo_client.erl
+++ b/deps/rabbit/src/rabbit_fifo_client.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% @doc Provides an easy to consume API for interacting with the {@link rabbit_fifo.}
@@ -31,16 +31,17 @@
update_machine_state/2,
pending_size/1,
stat/1,
- stat/2
+ stat/2,
+ query_single_active_consumer/1
]).
-include_lib("rabbit_common/include/rabbit.hrl").
-define(SOFT_LIMIT, 32).
-define(TIMER_TIME, 10000).
+-define(COMMAND_TIMEOUT, 30000).
-type seq() :: non_neg_integer().
-%% last_applied is initialised to -1
-type maybe_seq() :: integer().
-type action() :: {send_credit_reply, Available :: non_neg_integer()} |
{send_drained, CTagCredit ::
@@ -142,27 +143,33 @@ init(ClusterName = #resource{}, Servers, SoftLimit, BlockFun, UnblockFun) ->
enqueue(Correlation, Msg,
#state{queue_status = undefined,
next_enqueue_seq = 1,
- cfg = #cfg{timeout = Timeout}} = State0) ->
+ cfg = #cfg{servers = Servers,
+ timeout = Timeout}} = State0) ->
%% it is the first enqueue, check the version
- {_, Node} = Server = pick_server(State0),
+ {_, Node} = pick_server(State0),
case rpc:call(Node, ra_machine, version, [{machine, rabbit_fifo, #{}}]) of
0 ->
%% the leader is running the old version
- %% so we can't initialize the enqueuer session safely
- %% fall back on old behavour
enqueue(Correlation, Msg, State0#state{queue_status = go});
1 ->
+ %% we're running the new version on the leader so do sync initialisation
%% of enqueuer session
Reg = rabbit_fifo:make_register_enqueuer(self()),
- case ra:process_command(Server, Reg, Timeout) of
- {ok, reject_publish, _} ->
- {reject_publish, State0#state{queue_status = reject_publish}};
- {ok, ok, _} ->
- enqueue(Correlation, Msg, State0#state{queue_status = go});
+ case ra:process_command(Servers, Reg, Timeout) of
+ {ok, reject_publish, Leader} ->
+ {reject_publish, State0#state{leader = Leader,
+ queue_status = reject_publish}};
+ {ok, ok, Leader} ->
+ enqueue(Correlation, Msg, State0#state{leader = Leader,
+ queue_status = go});
+ {error, {no_more_servers_to_try, _Errs}} ->
+ %% if we are not able to process the register command
+ %% it is safe to reject the message as we never attempted
+ %% to send it
+ {reject_publish, State0};
+ %% TODO: not convinced this can ever happen when using
+ %% a list of servers
{timeout, _} ->
- %% if we timeout it is probably better to reject
- %% the message than being uncertain
{reject_publish, State0};
Err ->
exit(Err)
@@ -292,7 +299,7 @@ settle(ConsumerTag, [_|_] = MsgIds,
%% from {@link rabbit_fifo:delivery/0.}
%% @param State the {@module} state
%% @returns
-%% `{ok | slow, State}' if the command was successfully sent. If the return
+%% `{State, list()}' if the command was successfully sent. If the return
%% tag is `slow' it means the limit is approaching and it is time to slow down
%% the sending rate.
%%
@@ -302,10 +309,8 @@ return(ConsumerTag, [_|_] = MsgIds, #state{slow = false} = State0) ->
Node = pick_server(State0),
% TODO: make rabbit_fifo return support lists of message ids
Cmd = rabbit_fifo:make_return(consumer_id(ConsumerTag), MsgIds),
- case send_command(Node, undefined, Cmd, normal, State0) of
- {_, S} ->
- {S, []}
- end;
+ {_Tag, State1} = send_command(Node, undefined, Cmd, normal, State0),
+ {State1, []};
return(ConsumerTag, [_|_] = MsgIds,
#state{unsent_commands = Unsent0} = State0) ->
ConsumerId = consumer_id(ConsumerTag),
@@ -315,7 +320,8 @@ return(ConsumerTag, [_|_] = MsgIds,
fun ({Settles, Returns, Discards}) ->
{Settles, Returns ++ MsgIds, Discards}
end, {[], MsgIds, []}, Unsent0),
- {State0#state{unsent_commands = Unsent}, []}.
+ State1 = State0#state{unsent_commands = Unsent},
+ {State1, []}.
%% @doc Discards a checked out message.
%% If the queue has a dead_letter_handler configured this will be called.
@@ -387,6 +393,20 @@ checkout(ConsumerTag, NumUnsettled, CreditMode, Meta,
ack = Ack}, CDels0),
try_process_command(Servers, Cmd, State0#state{consumer_deliveries = SDels}).
+
+-spec query_single_active_consumer(state()) ->
+ {ok, term()} | {error, term()} | {timeout, term()}.
+query_single_active_consumer(#state{leader = undefined}) ->
+ {error, leader_not_known};
+query_single_active_consumer(#state{leader = Leader}) ->
+ case ra:local_query(Leader, fun rabbit_fifo:query_single_active_consumer/1,
+ ?COMMAND_TIMEOUT) of
+ {ok, {_, Reply}, _} ->
+ {ok, Reply};
+ Err ->
+ Err
+ end.
+
%% @doc Provide credit to the queue
%%
%% This only has an effect if the consumer uses credit mode: credited
@@ -436,8 +456,8 @@ cancel_checkout(ConsumerTag, #state{consumer_deliveries = CDels} = State0) ->
%% @doc Purges all the messages from a rabbit_fifo queue and returns the number
%% of messages purged.
-spec purge(ra:server_id()) -> {ok, non_neg_integer()} | {error | timeout, term()}.
-purge(Node) ->
- case ra:process_command(Node, rabbit_fifo:make_purge()) of
+purge(Server) ->
+ case ra:process_command(Server, rabbit_fifo:make_purge(), ?COMMAND_TIMEOUT) of
{ok, {purge, Reply}, _} ->
{ok, Reply};
Err ->
@@ -474,7 +494,7 @@ cluster_name(#state{cfg = #cfg{cluster_name = ClusterName}}) ->
ClusterName.
update_machine_state(Server, Conf) ->
- case ra:process_command(Server, rabbit_fifo:make_update_config(Conf)) of
+ case ra:process_command(Server, rabbit_fifo:make_update_config(Conf), ?COMMAND_TIMEOUT) of
{ok, ok, _} ->
ok;
Err ->
@@ -589,7 +609,7 @@ handle_ra_event(Leader, {machine, leader_change}, State0) ->
%% we need to update leader
%% and resend any pending commands
State = resend_all_pending(State0#state{leader = Leader}),
- {ok, State, []};
+ {ok, cancel_timer(State), []};
handle_ra_event(_From, {rejected, {not_leader, undefined, _Seq}}, State0) ->
% TODO: how should these be handled? re-sent on timer or try random
{ok, State0, []};
@@ -632,8 +652,9 @@ untracked_enqueue([Node | _], Msg) ->
%% Internal
-try_process_command([Server | Rem], Cmd, State) ->
- case ra:process_command(Server, Cmd, 30000) of
+try_process_command([Server | Rem], Cmd,
+ #state{cfg = #cfg{timeout = Timeout}} = State) ->
+ case ra:process_command(Server, Cmd, Timeout) of
{ok, _, Leader} ->
{ok, State#state{leader = Leader}};
Err when length(Rem) =:= 0 ->
@@ -682,7 +703,6 @@ maybe_add_action(Action, Acc, State) ->
{[Action | Acc], State}.
do_resends(From, To, State) when From =< To ->
- % ?INFO("rabbit_fifo_client: doing resends From ~w To ~w~n", [From, To]),
lists:foldl(fun resend/2, State, lists:seq(From, To));
do_resends(_, _, State) ->
State.
@@ -710,10 +730,10 @@ maybe_auto_ack(false, {deliver, Tag, _Ack, Msgs} = Deliver, State0) ->
{State, Actions} = settle(Tag, MsgIds, State0),
{ok, State, [Deliver] ++ Actions}.
-
handle_delivery(Leader, {delivery, Tag, [{FstId, _} | _] = IdMsgs},
#state{cfg = #cfg{cluster_name = QName},
- consumer_deliveries = CDels0} = State0) ->
+ consumer_deliveries = CDels0} = State0)
+ when is_map_key(Tag, CDels0) ->
QRef = qref(Leader),
{LastId, _} = lists:last(IdMsgs),
Consumer = #consumer{ack = Ack} = maps:get(Tag, CDels0),
@@ -765,7 +785,17 @@ handle_delivery(Leader, {delivery, Tag, [{FstId, _} | _] = IdMsgs},
length(IdMsgs),
C#consumer{last_msg_id = LastId},
CDels0)})
- end.
+ end;
+handle_delivery(_Leader, {delivery, Tag, [_ | _] = IdMsgs},
+ #state{consumer_deliveries = CDels0} = State0)
+ when not is_map_key(Tag, CDels0) ->
+ %% Note:
+ %% https://github.com/rabbitmq/rabbitmq-server/issues/3729
+ %% If the consumer is no longer in the deliveries map,
+ %% we should return all messages.
+ MsgIntIds = [Id || {Id, _} <- IdMsgs],
+ {State1, Deliveries} = return(Tag, MsgIntIds, State0),
+ {ok, State1, Deliveries}.
transform_msgs(QName, QRef, Msgs) ->
lists:map(
@@ -794,7 +824,7 @@ get_missing_deliveries(Leader, From, To, ConsumerTag) ->
Query = fun (State) ->
rabbit_fifo:get_checked_out(ConsumerId, From, To, State)
end,
- case ra:local_query(Leader, Query) of
+ case ra:local_query(Leader, Query, ?COMMAND_TIMEOUT) of
{ok, {_, Missing}, _} ->
Missing;
{error, Error} ->
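
The client changes above introduce a shared ?COMMAND_TIMEOUT of 30 seconds for ra calls and a query_single_active_consumer/1 that runs a local query against the currently known leader. A hedged usage sketch, assuming State is an existing rabbit_fifo_client state whose leader has already been learned (for example after a successful command), with return shapes taken from the spec in the diff:

    case rabbit_fifo_client:query_single_active_consumer(State) of
        {ok, Reply}               -> Reply;        %% whatever the machine query returns
        {error, leader_not_known} -> retry_later;
        {timeout, _} = Timeout    -> Timeout;
        {error, _} = Err          -> Err
    end.
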
diff --git a/deps/rabbit/src/rabbit_fifo_v0.erl b/deps/rabbit/src/rabbit_fifo_v0.erl
index a61f42616d..971c789217 100644
--- a/deps/rabbit/src/rabbit_fifo_v0.erl
+++ b/deps/rabbit/src/rabbit_fifo_v0.erl
@@ -1,17 +1,8 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at https://www.mozilla.org/MPL/
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_fifo_v0).
diff --git a/deps/rabbit/src/rabbit_file.erl b/deps/rabbit/src/rabbit_file.erl
index f8263d9e77..2060c354c5 100644
--- a/deps/rabbit/src/rabbit_file.erl
+++ b/deps/rabbit/src/rabbit_file.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_file).
@@ -40,7 +40,7 @@ is_file(File) ->
is_dir(Dir) -> is_dir_internal(read_file_info(Dir)).
-is_dir_no_handle(Dir) -> is_dir_internal(prim_file:read_file_info(Dir)).
+is_dir_no_handle(Dir) -> is_dir_internal(file:read_file_info(Dir, [raw])).
is_dir_internal({ok, #file_info{type=directory}}) -> true;
is_dir_internal(_) -> false.
@@ -83,14 +83,23 @@ wildcard(Pattern, Dir) ->
list_dir(Dir) -> with_handle(fun () -> prim_file:list_dir(Dir) end).
read_file_info(File) ->
- with_handle(fun () -> prim_file:read_file_info(File) end).
+ with_handle(fun () -> file:read_file_info(File, [raw]) end).
-spec read_term_file
(file:filename()) -> {'ok', [any()]} | rabbit_types:error(any()).
read_term_file(File) ->
try
- {ok, Data} = with_handle(fun () -> prim_file:read_file(File) end),
+ F = fun() ->
+ {ok, FInfo} = file:read_file_info(File, [raw]),
+ {ok, Fd} = file:open(File, [read, raw, binary]),
+ try
+ file:read(Fd, FInfo#file_info.size)
+ after
+ file:close(Fd)
+ end
+ end,
+ {ok, Data} = with_handle(F),
{ok, Tokens, _} = erl_scan:string(binary_to_list(Data)),
TokenGroups = group_tokens(Tokens),
{ok, [begin
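
read_term_file/1 above now reads the file in raw mode: file:read_file_info/2 and file:open/2 with the raw option keep the operations out of the file_server process, while with_handle/1 still accounts for the descriptor in the file handle cache. A self-contained sketch of the same read pattern as a standalone module (module and function names are made up):

    -module(raw_read_example).
    -include_lib("kernel/include/file.hrl").   %% for #file_info{}
    -export([read_raw/1]).

    %% Read a whole file in raw mode, mirroring what read_term_file/1 now
    %% does inside with_handle/1.
    read_raw(Path) ->
        {ok, FInfo} = file:read_file_info(Path, [raw]),
        {ok, Fd} = file:open(Path, [read, raw, binary]),
        try
            file:read(Fd, FInfo#file_info.size)
        after
            file:close(Fd)
        end.
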
diff --git a/deps/rabbit/src/rabbit_framing.erl b/deps/rabbit/src/rabbit_framing.erl
index 42927b2b68..6ff80120ae 100644
--- a/deps/rabbit/src/rabbit_framing.erl
+++ b/deps/rabbit/src/rabbit_framing.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% TODO auto-generate
diff --git a/deps/rabbit/src/rabbit_global_counters.erl b/deps/rabbit/src/rabbit_global_counters.erl
new file mode 100644
index 0000000000..2689cca0e7
--- /dev/null
+++ b/deps/rabbit/src/rabbit_global_counters.erl
@@ -0,0 +1,216 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(rabbit_global_counters).
+
+-export([
+ boot_step/0,
+ init/1,
+ init/2,
+ overview/0,
+ prometheus_format/0,
+ increase_protocol_counter/3,
+ messages_received/2,
+ messages_received_confirm/2,
+ messages_routed/2,
+ messages_unroutable_dropped/2,
+ messages_unroutable_returned/2,
+ messages_confirmed/2,
+ messages_delivered/3,
+ messages_delivered_consume_manual_ack/3,
+ messages_delivered_consume_auto_ack/3,
+ messages_delivered_get_manual_ack/3,
+ messages_delivered_get_auto_ack/3,
+ messages_get_empty/3,
+ messages_redelivered/3,
+ messages_acknowledged/3,
+ publisher_created/1,
+ publisher_deleted/1,
+ consumer_created/1,
+ consumer_deleted/1
+ ]).
+
+%% PROTOCOL COUNTERS:
+-define(MESSAGES_RECEIVED, 1).
+-define(MESSAGES_RECEIVED_CONFIRM, 2).
+-define(MESSAGES_ROUTED, 3).
+-define(MESSAGES_UNROUTABLE_DROPPED, 4).
+-define(MESSAGES_UNROUTABLE_RETURNED, 5).
+-define(MESSAGES_CONFIRMED, 6).
+-define(PUBLISHERS, 7).
+-define(CONSUMERS, 8).
+%% Note: ?NUM_PROTOCOL_COUNTERS needs to be up-to-date. See include/rabbit_global_counters.hrl
+-define(PROTOCOL_COUNTERS,
+ [
+ {
+ messages_received_total, ?MESSAGES_RECEIVED, counter,
+ "Total number of messages received from publishers"
+ },
+ {
+ messages_received_confirm_total, ?MESSAGES_RECEIVED_CONFIRM, counter,
+ "Total number of messages received from publishers expecting confirmations"
+ },
+ {
+ messages_routed_total, ?MESSAGES_ROUTED, counter,
+ "Total number of messages routed to queues or streams"
+ },
+ {
+ messages_unroutable_dropped_total, ?MESSAGES_UNROUTABLE_DROPPED, counter,
+ "Total number of messages published as non-mandatory into an exchange and dropped as unroutable"
+ },
+ {
+ messages_unroutable_returned_total, ?MESSAGES_UNROUTABLE_RETURNED, counter,
+ "Total number of messages published as mandatory into an exchange and returned to the publisher as unroutable"
+ },
+ {
+ messages_confirmed_total, ?MESSAGES_CONFIRMED, counter,
+ "Total number of messages confirmed to publishers"
+ },
+ {
+ publishers, ?PUBLISHERS, gauge,
+ "Current number of publishers"
+ },
+ {
+ consumers, ?CONSUMERS, gauge,
+ "Current number of consumers"
+ }
+ ]).
+
+%% Protocol & QueueType counters:
+-define(MESSAGES_DELIVERED, 1).
+-define(MESSAGES_DELIVERED_CONSUME_MANUAL_ACK, 2).
+-define(MESSAGES_DELIVERED_CONSUME_AUTO_ACK, 3).
+-define(MESSAGES_DELIVERED_GET_MANUAL_ACK, 4).
+-define(MESSAGES_DELIVERED_GET_AUTO_ACK, 5).
+-define(MESSAGES_GET_EMPTY, 6).
+-define(MESSAGES_REDELIVERED, 7).
+-define(MESSAGES_ACKNOWLEDGED, 8).
+%% Note: ?NUM_PROTOCOL_QUEUE_TYPE_COUNTERS needs to be up-to-date. See include/rabbit_global_counters.hrl
+-define(PROTOCOL_QUEUE_TYPE_COUNTERS,
+ [
+ {
+ messages_delivered_total, ?MESSAGES_DELIVERED, counter,
+ "Total number of messages delivered to consumers"
+ },
+ {
+ messages_delivered_consume_manual_ack_total, ?MESSAGES_DELIVERED_CONSUME_MANUAL_ACK, counter,
+ "Total number of messages delivered to consumers using basic.consume with manual acknowledgment"
+ },
+ {
+ messages_delivered_consume_auto_ack_total, ?MESSAGES_DELIVERED_CONSUME_AUTO_ACK, counter,
+ "Total number of messages delivered to consumers using basic.consume with automatic acknowledgment"
+ },
+ {
+ messages_delivered_get_manual_ack_total, ?MESSAGES_DELIVERED_GET_MANUAL_ACK, counter,
+ "Total number of messages delivered to consumers using basic.get with manual acknowledgment"
+ },
+ {
+ messages_delivered_get_auto_ack_total, ?MESSAGES_DELIVERED_GET_AUTO_ACK, counter,
+ "Total number of messages delivered to consumers using basic.get with automatic acknowledgment"
+ },
+ {
+ messages_get_empty_total, ?MESSAGES_GET_EMPTY, counter,
+ "Total number of times basic.get operations fetched no message"
+ },
+ {
+ messages_redelivered_total, ?MESSAGES_REDELIVERED, counter,
+ "Total number of messages redelivered to consumers"
+ },
+ {
+ messages_acknowledged_total, ?MESSAGES_ACKNOWLEDGED, counter,
+ "Total number of messages acknowledged by consumers"
+ }
+ ]).
+
+boot_step() ->
+ init([{protocol, amqp091}]),
+ init([{protocol, amqp091}, {queue_type, rabbit_classic_queue}]),
+ init([{protocol, amqp091}, {queue_type, rabbit_quorum_queue}]),
+ init([{protocol, amqp091}, {queue_type, rabbit_stream_queue}]).
+
+init(Labels) ->
+ init(Labels, []).
+
+init(Labels = [{protocol, Protocol}, {queue_type, QueueType}], Extra) ->
+ _ = seshat_counters:new_group(?MODULE),
+ Counters = seshat_counters:new(?MODULE, Labels, ?PROTOCOL_QUEUE_TYPE_COUNTERS ++ Extra),
+ persistent_term:put({?MODULE, Protocol, QueueType}, Counters),
+ ok;
+init(Labels = [{protocol, Protocol}], Extra) ->
+ _ = seshat_counters:new_group(?MODULE),
+ Counters = seshat_counters:new(?MODULE, Labels, ?PROTOCOL_COUNTERS ++ Extra),
+ persistent_term:put({?MODULE, Protocol}, Counters),
+ ok.
+
+overview() ->
+ seshat_counters:overview(?MODULE).
+
+prometheus_format() ->
+ seshat_counters:prometheus_format(?MODULE).
+
+increase_protocol_counter(Protocol, Counter, Num) ->
+ counters:add(fetch(Protocol), Counter, Num).
+
+messages_received(Protocol, Num) ->
+ counters:add(fetch(Protocol), ?MESSAGES_RECEIVED, Num).
+
+messages_received_confirm(Protocol, Num) ->
+ counters:add(fetch(Protocol), ?MESSAGES_RECEIVED_CONFIRM, Num).
+
+messages_routed(Protocol, Num) ->
+ counters:add(fetch(Protocol), ?MESSAGES_ROUTED, Num).
+
+messages_unroutable_dropped(Protocol, Num) ->
+ counters:add(fetch(Protocol), ?MESSAGES_UNROUTABLE_DROPPED, Num).
+
+messages_unroutable_returned(Protocol, Num) ->
+ counters:add(fetch(Protocol), ?MESSAGES_UNROUTABLE_RETURNED, Num).
+
+messages_confirmed(Protocol, Num) ->
+ counters:add(fetch(Protocol), ?MESSAGES_CONFIRMED, Num).
+
+messages_delivered(Protocol, QueueType, Num) ->
+ counters:add(fetch(Protocol, QueueType), ?MESSAGES_DELIVERED, Num).
+
+messages_delivered_consume_manual_ack(Protocol, QueueType, Num) ->
+ counters:add(fetch(Protocol, QueueType), ?MESSAGES_DELIVERED_CONSUME_MANUAL_ACK, Num).
+
+messages_delivered_consume_auto_ack(Protocol, QueueType, Num) ->
+ counters:add(fetch(Protocol, QueueType), ?MESSAGES_DELIVERED_CONSUME_AUTO_ACK, Num).
+
+messages_delivered_get_manual_ack(Protocol, QueueType, Num) ->
+ counters:add(fetch(Protocol, QueueType), ?MESSAGES_DELIVERED_GET_MANUAL_ACK, Num).
+
+messages_delivered_get_auto_ack(Protocol, QueueType, Num) ->
+ counters:add(fetch(Protocol, QueueType), ?MESSAGES_DELIVERED_GET_AUTO_ACK, Num).
+
+messages_get_empty(Protocol, QueueType, Num) ->
+ counters:add(fetch(Protocol, QueueType), ?MESSAGES_GET_EMPTY, Num).
+
+messages_redelivered(Protocol, QueueType, Num) ->
+ counters:add(fetch(Protocol, QueueType), ?MESSAGES_REDELIVERED, Num).
+
+messages_acknowledged(Protocol, QueueType, Num) ->
+ counters:add(fetch(Protocol, QueueType), ?MESSAGES_ACKNOWLEDGED, Num).
+
+publisher_created(Protocol) ->
+ counters:add(fetch(Protocol), ?PUBLISHERS, 1).
+
+publisher_deleted(Protocol) ->
+ counters:add(fetch(Protocol), ?PUBLISHERS, -1).
+
+consumer_created(Protocol) ->
+ counters:add(fetch(Protocol), ?CONSUMERS, 1).
+
+consumer_deleted(Protocol) ->
+ counters:add(fetch(Protocol), ?CONSUMERS, -1).
+
+fetch(Protocol) ->
+ persistent_term:get({?MODULE, Protocol}).
+
+fetch(Protocol, QueueType) ->
+ persistent_term:get({?MODULE, Protocol, QueueType}).
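
rabbit_global_counters keeps one counters reference per label set in persistent_term and delegates group bookkeeping to seshat_counters. A hedged usage sketch, assuming the seshat application is available and using the same labels that boot_step/0 registers:

    %% Initialise the amqp091 counter groups (normally done by the boot step),
    %% then bump a few counters and read the aggregated view back.
    ok = rabbit_global_counters:init([{protocol, amqp091}]),
    ok = rabbit_global_counters:init([{protocol, amqp091},
                                      {queue_type, rabbit_classic_queue}]),
    rabbit_global_counters:messages_received(amqp091, 1),
    rabbit_global_counters:publisher_created(amqp091),
    rabbit_global_counters:messages_delivered(amqp091, rabbit_classic_queue, 3),
    Overview = rabbit_global_counters:overview().
    %% Overview should be keyed by the label lists above, e.g. containing a
    %% messages_received_total of 1 for [{protocol, amqp091}].
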
diff --git a/deps/rabbit/src/rabbit_guid.erl b/deps/rabbit/src/rabbit_guid.erl
index 01e6464332..c71321d4d3 100644
--- a/deps/rabbit/src/rabbit_guid.erl
+++ b/deps/rabbit/src/rabbit_guid.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_guid).
diff --git a/deps/rabbit/src/rabbit_health_check.erl b/deps/rabbit/src/rabbit_health_check.erl
index 4674ca7d8e..b04c4f9853 100644
--- a/deps/rabbit/src/rabbit_health_check.erl
+++ b/deps/rabbit/src/rabbit_health_check.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_health_check).
@@ -16,11 +16,12 @@
%% External functions
%%----------------------------------------------------------------------------
--spec node(node(), timeout()) -> ok | {badrpc, term()} | {error_string, string()}.
-
node(Node) ->
%% same default as in CLI
node(Node, 70000).
+
+-spec node(node(), timeout()) -> ok | {badrpc, term()} | {error_string, string()}.
+
node(Node, Timeout) ->
rabbit_misc:rpc_call(Node, rabbit_health_check, local, [], Timeout).
@@ -63,7 +64,11 @@ node_health_check(rabbit_node_monitor) ->
end;
node_health_check(alarms) ->
- case proplists:get_value(alarms, rabbit:status()) of
+ % Note:
+ % Removed call to rabbit:status/0 here due to a memory leak on win32,
+ % plus it uses an excessive amount of resources
+ % Alternative to https://github.com/rabbitmq/rabbitmq-server/pull/3893
+ case rabbit:alarms() of
[] ->
ok;
Alarms ->
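
With this change the alarms health check reads rabbit:alarms/0 directly instead of the full rabbit:status/0 proplist. A short usage sketch of the public entry point, matching the return types in the spec above (the node name and timeout are hypothetical; node/1 defaults to the same 70 second timeout the CLI uses):

    case rabbit_health_check:node('rabbit@hostname', 30000) of
        ok                   -> healthy;
        {badrpc, Reason}     -> {unreachable, Reason};
        {error_string, Text} -> {unhealthy, Text}
    end.
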
diff --git a/deps/rabbit/src/rabbit_lager.erl b/deps/rabbit/src/rabbit_lager.erl
deleted file mode 100644
index 3cbc5e431d..0000000000
--- a/deps/rabbit/src/rabbit_lager.erl
+++ /dev/null
@@ -1,723 +0,0 @@
-%% This Source Code Form is subject to the terms of the Mozilla Public
-%% License, v. 2.0. If a copy of the MPL was not distributed with this
-%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
-%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
-%%
-
--module(rabbit_lager).
-
--include_lib("rabbit_common/include/rabbit_log.hrl").
-
-%% API
--export([start_logger/0, log_locations/0, fold_sinks/2,
- broker_is_started/0, set_log_level/1]).
-
-%% For test purposes
--export([configure_lager/0]).
-
--export_type([log_location/0]).
-
--type log_location() :: string().
-
-start_logger() ->
- ok = maybe_remove_logger_handler(),
- ok = app_utils:stop_applications([lager, syslog]),
- ok = ensure_lager_configured(),
- ok = app_utils:start_applications([lager]),
- fold_sinks(
- fun
- (_, [], Acc) ->
- Acc;
- (SinkName, _, Acc) ->
- lager:log(SinkName, info, self(),
- "Log file opened with Lager", []),
- Acc
- end, ok),
- ensure_log_working().
-
-broker_is_started() ->
- {ok, HwmCurrent} = application:get_env(lager, error_logger_hwm),
- {ok, HwmOrig0} = application:get_env(lager, error_logger_hwm_original),
- HwmOrig = case get_most_verbose_log_level() of
- debug -> HwmOrig0 * 100;
- _ -> HwmOrig0
- end,
- case HwmOrig =:= HwmCurrent of
- false ->
- ok = application:set_env(lager, error_logger_hwm, HwmOrig),
- Handlers = gen_event:which_handlers(lager_event),
- lists:foreach(fun(Handler) ->
- lager:set_loghwm(Handler, HwmOrig)
- end, Handlers),
- ok;
- _ ->
- ok
- end.
-
-set_log_level(Level) ->
- IsValidLevel = lists:member(Level, lager_util:levels()),
- set_log_level(IsValidLevel, Level).
-
-set_log_level(true, Level) ->
- SinksAndHandlers = [{Sink, gen_event:which_handlers(Sink)} ||
- Sink <- lager:list_all_sinks()],
- DefaultHwm = application:get_env(lager, error_logger_hwm_original, 50),
- Hwm = case Level of
- debug -> DefaultHwm * 100;
- _ -> DefaultHwm
- end,
- application:set_env(lager, error_logger_hwm, Hwm),
- set_sink_log_level(SinksAndHandlers, Level, Hwm);
-set_log_level(_, Level) ->
- {error, {invalid_log_level, Level}}.
-
-set_sink_log_level([], _Level, _Hwm) ->
- ok;
-set_sink_log_level([{Sink, Handlers}|Rest], Level, Hwm) ->
- set_sink_handler_log_level(Sink, Handlers, Level, Hwm),
- set_sink_log_level(Rest, Level, Hwm).
-
-set_sink_handler_log_level(_Sink, [], _Level, _Hwm) ->
- ok;
-set_sink_handler_log_level(Sink, [Handler|Rest], Level, Hwm)
- when is_atom(Handler) andalso is_integer(Hwm) ->
- lager:set_loghwm(Sink, Handler, undefined, Hwm),
- ok = lager:set_loglevel(Sink, Handler, undefined, Level),
- set_sink_handler_log_level(Sink, Rest, Level, Hwm);
-set_sink_handler_log_level(Sink, [{Handler, Id}|Rest], Level, Hwm) ->
- lager:set_loghwm(Sink, Handler, Id, Hwm),
- ok = lager:set_loglevel(Sink, Handler, Id, Level),
- set_sink_handler_log_level(Sink, Rest, Level, Hwm);
-set_sink_handler_log_level(Sink, [_|Rest], Level, Hwm) ->
- set_sink_handler_log_level(Sink, Rest, Level, Hwm).
-
-log_locations() ->
- ensure_lager_configured(),
- DefaultHandlers = application:get_env(lager, handlers, []),
- Sinks = application:get_env(lager, extra_sinks, []),
- ExtraHandlers = [proplists:get_value(handlers, Props, [])
- || {_, Props} <- Sinks],
- lists:sort(log_locations1([DefaultHandlers | ExtraHandlers], [])).
-
-log_locations1([Handlers | Rest], Locations) ->
- Locations1 = log_locations2(Handlers, Locations),
- log_locations1(Rest, Locations1);
-log_locations1([], Locations) ->
- Locations.
-
-log_locations2([{lager_file_backend, Settings} | Rest], Locations) ->
- FileName = lager_file_name1(Settings),
- Locations1 = case lists:member(FileName, Locations) of
- false -> [FileName | Locations];
- true -> Locations
- end,
- log_locations2(Rest, Locations1);
-log_locations2([{lager_console_backend, _} | Rest], Locations) ->
- Locations1 = case lists:member("<stdout>", Locations) of
- false -> ["<stdout>" | Locations];
- true -> Locations
- end,
- log_locations2(Rest, Locations1);
-log_locations2([_ | Rest], Locations) ->
- log_locations2(Rest, Locations);
-log_locations2([], Locations) ->
- Locations.
-
-fold_sinks(Fun, Acc) ->
- Handlers = lager_config:global_get(handlers),
- Sinks = dict:to_list(lists:foldl(
- fun
- ({{lager_file_backend, F}, _, S}, Dict) ->
- dict:append(S, F, Dict);
- ({_, _, S}, Dict) ->
- case dict:is_key(S, Dict) of
- true -> dict:store(S, [], Dict);
- false -> Dict
- end
- end,
- dict:new(), Handlers)),
- fold_sinks(Sinks, Fun, Acc).
-
-fold_sinks([{SinkName, FileNames} | Rest], Fun, Acc) ->
- Acc1 = Fun(SinkName, FileNames, Acc),
- fold_sinks(Rest, Fun, Acc1);
-fold_sinks([], _, Acc) ->
- Acc.
-
-ensure_log_working() ->
- {ok, Handlers} = application:get_env(lager, handlers),
- [ ensure_lager_handler_file_exist(Handler)
- || Handler <- Handlers ],
- Sinks = application:get_env(lager, extra_sinks, []),
- ensure_extra_sinks_working(Sinks, list_expected_sinks()).
-
-ensure_extra_sinks_working(Sinks, [SinkName | Rest]) ->
- case proplists:get_value(SinkName, Sinks) of
- undefined -> throw({error, {cannot_log_to_file, unknown,
- rabbit_log_lager_event_sink_undefined}});
- Sink ->
- SinkHandlers = proplists:get_value(handlers, Sink, []),
- [ ensure_lager_handler_file_exist(Handler)
- || Handler <- SinkHandlers ]
- end,
- ensure_extra_sinks_working(Sinks, Rest);
-ensure_extra_sinks_working(_Sinks, []) ->
- ok.
-
-ensure_lager_handler_file_exist(Handler) ->
- case lager_file_name(Handler) of
- false -> ok;
- FileName -> ensure_logfile_exist(FileName)
- end.
-
-lager_file_name({lager_file_backend, Settings}) ->
- lager_file_name1(Settings);
-lager_file_name(_) ->
- false.
-
-lager_file_name1(Settings) when is_list(Settings) ->
- {file, FileName} = proplists:lookup(file, Settings),
- lager_util:expand_path(FileName);
-lager_file_name1({FileName, _}) -> lager_util:expand_path(FileName);
-lager_file_name1({FileName, _, _, _, _}) -> lager_util:expand_path(FileName);
-lager_file_name1(_) ->
- throw({error, {cannot_log_to_file, unknown,
- lager_file_backend_config_invalid}}).
-
-
-ensure_logfile_exist(FileName) ->
- LogFile = lager_util:expand_path(FileName),
- case rabbit_file:read_file_info(LogFile) of
- {ok,_} -> ok;
- {error, Err} -> throw({error, {cannot_log_to_file, LogFile, Err}})
- end.
-
-ensure_lager_configured() ->
- case lager_configured() of
- false -> configure_lager();
- true -> ok
- end.
-
-%% Lager should have handlers and sinks
-%% Error logger forwarding to syslog should be disabled
-lager_configured() ->
- Sinks = lager:list_all_sinks(),
- ExpectedSinks = list_expected_sinks(),
- application:get_env(lager, handlers) =/= undefined
- andalso
- lists:all(fun(S) -> lists:member(S, Sinks) end, ExpectedSinks)
- andalso
- application:get_env(syslog, syslog_error_logger) =/= undefined.
-
-configure_lager() ->
- ok = app_utils:load_applications([lager]),
- %% Turn off reformatting for error_logger messages
- case application:get_env(lager, error_logger_redirect) of
- undefined -> application:set_env(lager, error_logger_redirect, true);
- _ -> ok
- end,
- case application:get_env(lager, error_logger_format_raw) of
- undefined -> application:set_env(lager, error_logger_format_raw, true);
- _ -> ok
- end,
- case application:get_env(lager, log_root) of
- undefined ->
- %% Setting env var to 'undefined' is different from not
- %% setting it at all, and lager is sensitive to this
- %% difference.
- case application:get_env(rabbit, lager_log_root) of
- {ok, Value} ->
- ok = application:set_env(lager, log_root, Value);
- _ ->
- ok
- end;
- _ -> ok
- end,
- case application:get_env(lager, colored) of
- undefined ->
- UseColor = rabbit_prelaunch_early_logging:use_colored_logging(),
- application:set_env(lager, colored, UseColor);
- _ ->
- ok
- end,
- %% Set rabbit.log config variable based on environment.
- prepare_rabbit_log_config(),
- %% Configure syslog library.
- ok = configure_syslog_error_logger(),
- %% At this point we should have rabbit.log application variable
- %% configured to generate RabbitMQ log handlers.
- GeneratedHandlers = generate_lager_handlers(),
-
- %% If there are lager handlers configured,
- %% both lager and generate RabbitMQ handlers are used.
- %% This is because it's hard to decide clear preference rules.
- %% RabbitMQ handlers can be set to [] to use only lager handlers.
- Handlers = case application:get_env(lager, handlers, undefined) of
- undefined -> GeneratedHandlers;
- LagerHandlers ->
- %% Remove handlers generated in previous starts
- FormerRabbitHandlers = application:get_env(lager, rabbit_handlers, []),
- GeneratedHandlers ++ remove_rabbit_handlers(LagerHandlers,
- FormerRabbitHandlers)
- end,
-
- ok = application:set_env(lager, handlers, Handlers),
- ok = application:set_env(lager, rabbit_handlers, GeneratedHandlers),
-
- %% Setup extra sink/handlers. If they are not configured, redirect
- %% messages to the default sink. To know the list of expected extra
- %% sinks, we look at the 'lager_extra_sinks' compilation option.
- LogConfig = application:get_env(rabbit, log, []),
- LogLevels = application:get_env(rabbit, log_levels, []),
- Categories = proplists:get_value(categories, LogConfig, []),
- CategoriesConfig0 = case {Categories, LogLevels} of
- {[], []} -> [];
- {[], LogLevels} ->
- io:format("Using deprecated config parameter 'log_levels'. "
- "Please update your configuration file according to "
- "https://rabbitmq.com/logging.html"),
- lists:map(fun({Name, Level}) -> {Name, [{level, Level}]} end,
- LogLevels);
- {Categories, []} ->
- Categories;
- {Categories, _} ->
- io:format("Using the deprecated config parameter 'rabbit.log_levels' together "
- "with a new parameter for log categories."
- " 'rabbit.log_levels' will be ignored. Please remove it from the config. More at "
- "https://rabbitmq.com/logging.html"),
- Categories
- end,
- LogLevelsFromContext = case rabbit_prelaunch:get_context() of
- #{log_levels := LL} -> LL;
- _ -> undefined
- end,
- Fun = fun
- (global, _, CC) ->
- CC;
- (color, _, CC) ->
- CC;
- (CategoryS, LogLevel, CC) ->
- Category = list_to_atom(CategoryS),
- CCEntry = proplists:get_value(
- Category, CC, []),
- CCEntry1 = lists:ukeymerge(
- 1,
- [{level, LogLevel}],
- lists:ukeysort(1, CCEntry)),
- lists:keystore(
- Category, 1, CC, {Category, CCEntry1})
- end,
- CategoriesConfig = case LogLevelsFromContext of
- undefined ->
- CategoriesConfig0;
- _ ->
- maps:fold(Fun,
- CategoriesConfig0,
- LogLevelsFromContext)
- end,
- SinkConfigs = lists:map(
- fun({Name, Config}) ->
- {rabbit_log:make_internal_sink_name(Name), Config}
- end,
- CategoriesConfig),
- LagerSinks = application:get_env(lager, extra_sinks, []),
- GeneratedSinks = generate_lager_sinks(
- [error_logger_lager_event | list_expected_sinks()],
- SinkConfigs),
- Sinks = merge_lager_sink_handlers(LagerSinks, GeneratedSinks, []),
- ok = application:set_env(lager, extra_sinks, Sinks),
-
- case application:get_env(lager, error_logger_hwm) of
- undefined ->
- ok = application:set_env(lager, error_logger_hwm, 1000),
- % NB: 50 is the default value in lager.app.src
- ok = application:set_env(lager, error_logger_hwm_original, 50);
- {ok, Val} when is_integer(Val) andalso Val < 1000 ->
- ok = application:set_env(lager, error_logger_hwm, 1000),
- ok = application:set_env(lager, error_logger_hwm_original, Val);
- {ok, Val} when is_integer(Val) ->
- ok = application:set_env(lager, error_logger_hwm_original, Val),
- ok
- end,
- ok.
-
-configure_syslog_error_logger() ->
- %% Disable error_logger forwarding to syslog if it's not configured
- case application:get_env(syslog, syslog_error_logger) of
- undefined ->
- application:set_env(syslog, syslog_error_logger, false);
- _ -> ok
- end.
-
-remove_rabbit_handlers(Handlers, FormerHandlers) ->
- lists:filter(fun(Handler) ->
- not lists:member(Handler, FormerHandlers)
- end,
- Handlers).
-
-generate_lager_handlers() ->
- LogConfig = application:get_env(rabbit, log, []),
- LogHandlersConfig = lists:keydelete(categories, 1, LogConfig),
- generate_lager_handlers(LogHandlersConfig).
-
-generate_lager_handlers(LogHandlersConfig) ->
- lists:flatmap(
- fun
- ({file, HandlerConfig}) ->
- case proplists:get_value(file, HandlerConfig, false) of
- false -> [];
- FileName when is_list(FileName) ->
- Backend = lager_backend(file),
- generate_handler(Backend, HandlerConfig)
- end;
- ({Other, HandlerConfig}) when
- Other =:= console; Other =:= syslog; Other =:= exchange ->
- case proplists:get_value(enabled, HandlerConfig, false) of
- false -> [];
- true ->
- Backend = lager_backend(Other),
- generate_handler(Backend,
- lists:keydelete(enabled, 1, HandlerConfig))
- end
- end,
- LogHandlersConfig).
-
-lager_backend(file) -> lager_file_backend;
-lager_backend(console) -> lager_console_backend;
-lager_backend(syslog) -> syslog_lager_backend;
-lager_backend(exchange) -> lager_exchange_backend.
-
-%% Syslog backend is using an old API for configuration and
-%% does not support proplists.
-generate_handler(syslog_lager_backend=Backend, HandlerConfig) ->
- %% The default log level is set to `debug` because the actual
- %% filtering is made at the sink level. We want to accept all
- %% messages here.
- DefaultConfigVal = debug,
- Level = proplists:get_value(level, HandlerConfig, DefaultConfigVal),
- ok = configure_handler_backend(Backend),
- [{Backend,
- [Level,
- {},
- {lager_default_formatter, syslog_formatter_config()}]}];
-generate_handler(Backend, HandlerConfig) ->
- [{Backend,
- lists:ukeymerge(1, lists:ukeysort(1, HandlerConfig),
- lists:ukeysort(1, default_handler_config(Backend)))}].
-
-configure_handler_backend(syslog_lager_backend) ->
- {ok, _} = application:ensure_all_started(syslog),
- ok;
-configure_handler_backend(_Backend) ->
- ok.
-
-default_handler_config(lager_console_backend) ->
- %% The default log level is set to `debug` because the actual
- %% filtering is made at the sink level. We want to accept all
- %% messages here.
- DefaultConfigVal = debug,
- [{level, DefaultConfigVal},
- {formatter_config, default_config_value({formatter_config, console})}];
-default_handler_config(lager_exchange_backend) ->
- %% The default log level is set to `debug` because the actual
- %% filtering is made at the sink level. We want to accept all
- %% messages here.
- DefaultConfigVal = debug,
- [{level, DefaultConfigVal},
- {formatter_config, default_config_value({formatter_config, exchange})}];
-default_handler_config(lager_file_backend) ->
- %% The default log level is set to `debug` because the actual
- %% filtering is made at the sink level. We want to accept all
- %% messages here.
- DefaultConfigVal = debug,
- [{level, DefaultConfigVal},
- {formatter_config, default_config_value({formatter_config, file})},
- {date, ""},
- {size, 0}].
-
-default_config_value(level) ->
- LogConfig = application:get_env(rabbit, log, []),
- FoldFun = fun
- ({_, Cfg}, LL) when is_list(Cfg) ->
- NewLL = proplists:get_value(level, Cfg, LL),
- case LL of
- undefined ->
- NewLL;
- _ ->
- MoreVerbose = lager_util:level_to_num(NewLL) > lager_util:level_to_num(LL),
- case MoreVerbose of
- true -> NewLL;
- false -> LL
- end
- end;
- (_, LL) ->
- LL
- end,
- FoundLL = lists:foldl(FoldFun, undefined, LogConfig),
- case FoundLL of
- undefined -> info;
- _ -> FoundLL
- end;
-default_config_value({formatter_config, console}) ->
- EOL = case application:get_env(lager, colored) of
- {ok, true} -> "\e[0m\r\n";
- _ -> "\r\n"
- end,
- [date, " ", time, " ", color, "[", severity, "] ",
- {pid, ""},
- " ", message, EOL];
-default_config_value({formatter_config, _}) ->
- [date, " ", time, " ", color, "[", severity, "] ",
- {pid, ""},
- " ", message, "\n"].
-
-syslog_formatter_config() ->
- [color, "[", severity, "] ",
- {pid, ""},
- " ", message, "\n"].
-
-prepare_rabbit_log_config() ->
- %% If RABBIT_LOGS is not set, we should ignore it.
- DefaultFile = application:get_env(rabbit, lager_default_file, undefined),
- %% If RABBIT_UPGRADE_LOGS is not set, we should ignore it.
- UpgradeFile = application:get_env(rabbit, lager_upgrade_file, undefined),
- case DefaultFile of
- undefined -> ok;
- false ->
- set_env_default_log_disabled();
- tty ->
- set_env_default_log_console();
- FileName when is_list(FileName) ->
- case rabbit_prelaunch:get_context() of
- %% The user explicitly sets $RABBITMQ_LOGS;
- %% we should override a file location even
- %% if it's set in rabbitmq.config
- #{var_origins := #{main_log_file := environment}} ->
- set_env_default_log_file(FileName, override);
- _ ->
- set_env_default_log_file(FileName, keep)
- end
- end,
-
- %% Upgrade log file never overrides the value set in rabbitmq.config
- case UpgradeFile of
- %% No special env for upgrade logs - redirect to the default sink
- undefined -> ok;
- %% Redirect logs to default output.
- DefaultFile -> ok;
- UpgradeFileName when is_list(UpgradeFileName) ->
- set_env_upgrade_log_file(UpgradeFileName)
- end.
-
-set_env_default_log_disabled() ->
- %% Disabling all the logs.
- ok = application:set_env(rabbit, log, []).
-
-set_env_default_log_console() ->
- LogConfig = application:get_env(rabbit, log, []),
- ConsoleConfig = proplists:get_value(console, LogConfig, []),
- LogConfigConsole =
- lists:keystore(console, 1, LogConfig,
- {console, lists:keystore(enabled, 1, ConsoleConfig,
- {enabled, true})}),
- %% Remove the file handler - disable logging to file
- LogConfigConsoleNoFile = lists:keydelete(file, 1, LogConfigConsole),
- ok = application:set_env(rabbit, log, LogConfigConsoleNoFile).
-
-set_env_default_log_file(FileName, Override) ->
- LogConfig = application:get_env(rabbit, log, []),
- FileConfig = proplists:get_value(file, LogConfig, []),
- NewLogConfig = case proplists:get_value(file, FileConfig, undefined) of
- undefined ->
- lists:keystore(file, 1, LogConfig,
- {file, lists:keystore(file, 1, FileConfig,
- {file, FileName})});
- _ConfiguredFileName ->
- case Override of
- override ->
- lists:keystore(
- file, 1, LogConfig,
- {file, lists:keystore(file, 1, FileConfig,
- {file, FileName})});
- keep ->
- LogConfig
- end
- end,
- ok = application:set_env(rabbit, log, NewLogConfig).
-
-set_env_upgrade_log_file(FileName) ->
- LogConfig = application:get_env(rabbit, log, []),
- SinksConfig = proplists:get_value(categories, LogConfig, []),
- UpgradeSinkConfig = proplists:get_value(upgrade, SinksConfig, []),
- FileConfig = proplists:get_value(file, SinksConfig, []),
- NewLogConfig = case proplists:get_value(file, FileConfig, undefined) of
- undefined ->
- lists:keystore(
- categories, 1, LogConfig,
- {categories,
- lists:keystore(
- upgrade, 1, SinksConfig,
- {upgrade,
- lists:keystore(file, 1, UpgradeSinkConfig,
- {file, FileName})})});
- %% No cahnge. We don't want to override the configured value.
- _File -> LogConfig
- end,
- ok = application:set_env(rabbit, log, NewLogConfig).
-
-generate_lager_sinks(SinkNames, SinkConfigs) ->
- LogLevels = case rabbit_prelaunch:get_context() of
- #{log_levels := LL} -> LL;
- _ -> undefined
- end,
- DefaultLogLevel = case LogLevels of
- #{global := LogLevel} ->
- LogLevel;
- _ ->
- default_config_value(level)
- end,
- lists:map(fun(SinkName) ->
- SinkConfig = proplists:get_value(SinkName, SinkConfigs, []),
- SinkHandlers = case proplists:get_value(file, SinkConfig, false) of
- %% If no file defined - forward everything to the default backend
- false ->
- ForwarderLevel = proplists:get_value(level,
- SinkConfig,
- DefaultLogLevel),
- [{lager_forwarder_backend,
- [lager_util:make_internal_sink_name(lager), ForwarderLevel]}];
- %% If a file defined - add a file backend to handlers and remove all default file backends.
- File ->
- %% Use `debug` as a default handler to not override a handler level
- Level = proplists:get_value(level, SinkConfig, DefaultLogLevel),
- DefaultGeneratedHandlers = application:get_env(lager, rabbit_handlers, []),
- SinkFileHandlers = case proplists:get_value(lager_file_backend, DefaultGeneratedHandlers, undefined) of
- undefined ->
- %% Create a new file handler.
- %% `info` is a default level here.
- FileLevel = proplists:get_value(level, SinkConfig, DefaultLogLevel),
- generate_lager_handlers([{file, [{file, File}, {level, FileLevel}]}]);
- FileHandler ->
- %% Replace a filename in the handler
- FileHandlerChanges = case handler_level_more_verbose(FileHandler, Level) of
- true -> [{file, File}, {level, Level}];
- false -> [{file, File}]
- end,
-
- [{lager_file_backend,
- lists:ukeymerge(1, FileHandlerChanges,
- lists:ukeysort(1, FileHandler))}]
- end,
- %% Remove all file handlers.
- AllLagerHandlers = application:get_env(lager, handlers, []),
- HandlersWithoutFile = lists:filter(
- fun({lager_file_backend, _}) -> false;
- ({_, _}) -> true
- end,
- AllLagerHandlers),
- %% Set level for handlers which are more verbose.
- %% We don't increase verbosity in sinks so it works like forwarder backend.
- HandlersWithoutFileWithLevel = lists:map(fun({Name, Handler}) ->
- case handler_level_more_verbose(Handler, Level) of
- true -> {Name, lists:keystore(level, 1, Handler, {level, Level})};
- false -> {Name, Handler}
- end
- end,
- HandlersWithoutFile),
-
- HandlersWithoutFileWithLevel ++ SinkFileHandlers
- end,
- {SinkName, [{handlers, SinkHandlers}, {rabbit_handlers, SinkHandlers}]}
- end,
- SinkNames).
-
-handler_level_more_verbose(Handler, Level) ->
- HandlerLevel = proplists:get_value(level, Handler, default_config_value(level)),
- lager_util:level_to_num(HandlerLevel) > lager_util:level_to_num(Level).
-
-merge_lager_sink_handlers([{Name, Sink} | RestSinks], GeneratedSinks, Agg) ->
- %% rabbitmq/rabbitmq-server#2044.
- %% We have to take into account that a sink's
- %% handler backend may need additional configuration here.
- %% {rabbit_log_federation_lager_event, [
- %% {handlers, [
- %% {lager_forwarder_backend, [lager_event,inherit]},
- %% {syslog_lager_backend, [debug]}
- %% ]},
- %% {rabbit_handlers, [
- %% {lager_forwarder_backend, [lager_event,inherit]}
- %% ]}
- %% ]}
- case lists:keytake(Name, 1, GeneratedSinks) of
- {value, {Name, GenSink}, RestGeneratedSinks} ->
- Handlers = proplists:get_value(handlers, Sink, []),
- GenHandlers = proplists:get_value(handlers, GenSink, []),
- FormerRabbitHandlers = proplists:get_value(rabbit_handlers, Sink, []),
-
- %% Remove handlers defined in previous starts
- ConfiguredHandlers = remove_rabbit_handlers(Handlers, FormerRabbitHandlers),
- NewHandlers = GenHandlers ++ ConfiguredHandlers,
- ok = maybe_configure_handler_backends(NewHandlers),
- MergedSink = lists:keystore(rabbit_handlers, 1,
- lists:keystore(handlers, 1, Sink,
- {handlers, NewHandlers}),
- {rabbit_handlers, GenHandlers}),
- merge_lager_sink_handlers(
- RestSinks,
- RestGeneratedSinks,
- [{Name, MergedSink} | Agg]);
- false ->
- merge_lager_sink_handlers(
- RestSinks,
- GeneratedSinks,
- [{Name, Sink} | Agg])
- end;
-merge_lager_sink_handlers([], GeneratedSinks, Agg) -> GeneratedSinks ++ Agg.
-
-maybe_configure_handler_backends([]) ->
- ok;
-maybe_configure_handler_backends([{Backend, _}|Backends]) ->
- ok = configure_handler_backend(Backend),
- maybe_configure_handler_backends(Backends).
-
-list_expected_sinks() ->
- rabbit_prelaunch_early_logging:list_expected_sinks().
-
-maybe_remove_logger_handler() ->
- M = logger,
- F = remove_handler,
- try
- ok = erlang:apply(M, F, [default])
- catch
- error:undef ->
- % OK since the logger module only exists in OTP 21.1 or later
- ok;
- error:{badmatch, {error, {not_found, default}}} ->
- % OK - this error happens when running a CLI command
- ok;
- Err:Reason ->
- error_logger:error_msg("calling ~p:~p failed: ~p:~p~n",
- [M, F, Err, Reason])
- end.
-
-get_most_verbose_log_level() ->
- {ok, HandlersA} = application:get_env(lager, handlers),
- {ok, ExtraSinks} = application:get_env(lager, extra_sinks),
- HandlersB = lists:append(
- [H || {_, Keys} <- ExtraSinks,
- {handlers, H} <- Keys]),
- get_most_verbose_log_level(HandlersA ++ HandlersB,
- lager_util:level_to_num(none)).
-
-get_most_verbose_log_level([{_, Props} | Rest], MostVerbose) ->
- LogLevel = proplists:get_value(level, Props, info),
- LogLevelNum = lager_util:level_to_num(LogLevel),
- case LogLevelNum > MostVerbose of
- true ->
- get_most_verbose_log_level(Rest, LogLevelNum);
- false ->
- get_most_verbose_log_level(Rest, MostVerbose)
- end;
-get_most_verbose_log_level([], MostVerbose) ->
- lager_util:num_to_level(MostVerbose).
diff --git a/deps/rabbit/src/rabbit_limiter.erl b/deps/rabbit/src/rabbit_limiter.erl
index d3803957d3..93946836eb 100644
--- a/deps/rabbit/src/rabbit_limiter.erl
+++ b/deps/rabbit/src/rabbit_limiter.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% The purpose of the limiter is to stem the flow of messages from
@@ -110,7 +110,7 @@
-module(rabbit_limiter).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-behaviour(gen_server2).
diff --git a/deps/rabbit/src/rabbit_log_channel.erl b/deps/rabbit/src/rabbit_log_channel.erl
new file mode 100644
index 0000000000..efeac31e89
--- /dev/null
+++ b/deps/rabbit/src/rabbit_log_channel.erl
@@ -0,0 +1,120 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+%% @doc Compatibility module for the old Lager-based logging API.
+-module(rabbit_log_channel).
+
+-export([debug/1, debug/2, debug/3,
+ info/1, info/2, info/3,
+ notice/1, notice/2, notice/3,
+ warning/1, warning/2, warning/3,
+ error/1, error/2, error/3,
+ critical/1, critical/2, critical/3,
+ alert/1, alert/2, alert/3,
+ emergency/1, emergency/2, emergency/3,
+ none/1, none/2, none/3]).
+
+-include_lib("rabbit_common/include/logging.hrl").
+
+-compile({no_auto_import, [error/2, error/3]}).
+
+-spec debug(string()) -> 'ok'.
+debug(Format) -> debug(Format, []).
+
+-spec debug(string(), [any()]) -> 'ok'.
+debug(Format, Args) -> debug(self(), Format, Args).
+
+-spec debug(pid() | [tuple()], string(), [any()]) -> 'ok'.
+debug(Pid, Format, Args) ->
+ logger:debug(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_CHAN}).
+
+-spec info(string()) -> 'ok'.
+info(Format) -> info(Format, []).
+
+-spec info(string(), [any()]) -> 'ok'.
+info(Format, Args) -> info(self(), Format, Args).
+
+-spec info(pid() | [tuple()], string(), [any()]) -> 'ok'.
+info(Pid, Format, Args) ->
+ logger:info(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_CHAN}).
+
+-spec notice(string()) -> 'ok'.
+notice(Format) -> notice(Format, []).
+
+-spec notice(string(), [any()]) -> 'ok'.
+notice(Format, Args) -> notice(self(), Format, Args).
+
+-spec notice(pid() | [tuple()], string(), [any()]) -> 'ok'.
+notice(Pid, Format, Args) ->
+ logger:notice(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_CHAN}).
+
+-spec warning(string()) -> 'ok'.
+warning(Format) -> warning(Format, []).
+
+-spec warning(string(), [any()]) -> 'ok'.
+warning(Format, Args) -> warning(self(), Format, Args).
+
+-spec warning(pid() | [tuple()], string(), [any()]) -> 'ok'.
+warning(Pid, Format, Args) ->
+ logger:warning(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_CHAN}).
+
+-spec error(string()) -> 'ok'.
+error(Format) -> error(Format, []).
+
+-spec error(string(), [any()]) -> 'ok'.
+error(Format, Args) -> error(self(), Format, Args).
+
+-spec error(pid() | [tuple()], string(), [any()]) -> 'ok'.
+error(Pid, Format, Args) ->
+ logger:error(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_CHAN}).
+
+-spec critical(string()) -> 'ok'.
+critical(Format) -> critical(Format, []).
+
+-spec critical(string(), [any()]) -> 'ok'.
+critical(Format, Args) -> critical(self(), Format, Args).
+
+-spec critical(pid() | [tuple()], string(), [any()]) -> 'ok'.
+critical(Pid, Format, Args) ->
+ logger:critical(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_CHAN}).
+
+-spec alert(string()) -> 'ok'.
+alert(Format) -> alert(Format, []).
+
+-spec alert(string(), [any()]) -> 'ok'.
+alert(Format, Args) -> alert(self(), Format, Args).
+
+-spec alert(pid() | [tuple()], string(), [any()]) -> 'ok'.
+alert(Pid, Format, Args) ->
+ logger:alert(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_CHAN}).
+
+-spec emergency(string()) -> 'ok'.
+emergency(Format) -> emergency(Format, []).
+
+-spec emergency(string(), [any()]) -> 'ok'.
+emergency(Format, Args) -> emergency(self(), Format, Args).
+
+-spec emergency(pid() | [tuple()], string(), [any()]) -> 'ok'.
+emergency(Pid, Format, Args) ->
+ logger:emergency(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_CHAN}).
+
+-spec none(string()) -> 'ok'.
+none(_Format) -> ok.
+
+-spec none(string(), [any()]) -> 'ok'.
+none(_Format, _Args) -> ok.
+
+-spec none(pid() | [tuple()], string(), [any()]) -> 'ok'.
+none(_Pid, _Format, _Args) -> ok.
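For reference, a minimal illustration of what these thin compatibility modules do (the message text and the ChPid variable are made up): a call through the old-style API such as

    rabbit_log_channel:warning("channel ~p hit its prefetch limit", [ChPid]),

ends up as a plain OTP logger call with the calling process and the channel logging domain attached as metadata:

    logger:warning("channel ~p hit its prefetch limit", [ChPid],
                   #{pid => self(), domain => ?RMQLOG_DOMAIN_CHAN}).

The other rabbit_log_* modules introduced below are identical except for the domain macro they attach.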
diff --git a/deps/rabbit/src/rabbit_log_connection.erl b/deps/rabbit/src/rabbit_log_connection.erl
new file mode 100644
index 0000000000..0dd8fe6e81
--- /dev/null
+++ b/deps/rabbit/src/rabbit_log_connection.erl
@@ -0,0 +1,120 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+%% @doc Compatibility module for the old Lager-based logging API.
+-module(rabbit_log_connection).
+
+-export([debug/1, debug/2, debug/3,
+ info/1, info/2, info/3,
+ notice/1, notice/2, notice/3,
+ warning/1, warning/2, warning/3,
+ error/1, error/2, error/3,
+ critical/1, critical/2, critical/3,
+ alert/1, alert/2, alert/3,
+ emergency/1, emergency/2, emergency/3,
+ none/1, none/2, none/3]).
+
+-include_lib("rabbit_common/include/logging.hrl").
+
+-compile({no_auto_import, [error/2, error/3]}).
+
+-spec debug(string()) -> 'ok'.
+debug(Format) -> debug(Format, []).
+
+-spec debug(string(), [any()]) -> 'ok'.
+debug(Format, Args) -> debug(self(), Format, Args).
+
+-spec debug(pid() | [tuple()], string(), [any()]) -> 'ok'.
+debug(Pid, Format, Args) ->
+ logger:debug(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_CONN}).
+
+-spec info(string()) -> 'ok'.
+info(Format) -> info(Format, []).
+
+-spec info(string(), [any()]) -> 'ok'.
+info(Format, Args) -> info(self(), Format, Args).
+
+-spec info(pid() | [tuple()], string(), [any()]) -> 'ok'.
+info(Pid, Format, Args) ->
+ logger:info(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_CONN}).
+
+-spec notice(string()) -> 'ok'.
+notice(Format) -> notice(Format, []).
+
+-spec notice(string(), [any()]) -> 'ok'.
+notice(Format, Args) -> notice(self(), Format, Args).
+
+-spec notice(pid() | [tuple()], string(), [any()]) -> 'ok'.
+notice(Pid, Format, Args) ->
+ logger:notice(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_CONN}).
+
+-spec warning(string()) -> 'ok'.
+warning(Format) -> warning(Format, []).
+
+-spec warning(string(), [any()]) -> 'ok'.
+warning(Format, Args) -> warning(self(), Format, Args).
+
+-spec warning(pid() | [tuple()], string(), [any()]) -> 'ok'.
+warning(Pid, Format, Args) ->
+ logger:warning(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_CONN}).
+
+-spec error(string()) -> 'ok'.
+error(Format) -> error(Format, []).
+
+-spec error(string(), [any()]) -> 'ok'.
+error(Format, Args) -> error(self(), Format, Args).
+
+-spec error(pid() | [tuple()], string(), [any()]) -> 'ok'.
+error(Pid, Format, Args) ->
+ logger:error(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_CONN}).
+
+-spec critical(string()) -> 'ok'.
+critical(Format) -> critical(Format, []).
+
+-spec critical(string(), [any()]) -> 'ok'.
+critical(Format, Args) -> critical(self(), Format, Args).
+
+-spec critical(pid() | [tuple()], string(), [any()]) -> 'ok'.
+critical(Pid, Format, Args) ->
+ logger:critical(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_CONN}).
+
+-spec alert(string()) -> 'ok'.
+alert(Format) -> alert(Format, []).
+
+-spec alert(string(), [any()]) -> 'ok'.
+alert(Format, Args) -> alert(self(), Format, Args).
+
+-spec alert(pid() | [tuple()], string(), [any()]) -> 'ok'.
+alert(Pid, Format, Args) ->
+ logger:alert(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_CONN}).
+
+-spec emergency(string()) -> 'ok'.
+emergency(Format) -> emergency(Format, []).
+
+-spec emergency(string(), [any()]) -> 'ok'.
+emergency(Format, Args) -> emergency(self(), Format, Args).
+
+-spec emergency(pid() | [tuple()], string(), [any()]) -> 'ok'.
+emergency(Pid, Format, Args) ->
+ logger:emergency(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_CONN}).
+
+-spec none(string()) -> 'ok'.
+none(_Format) -> ok.
+
+-spec none(string(), [any()]) -> 'ok'.
+none(_Format, _Args) -> ok.
+
+-spec none(pid() | [tuple()], string(), [any()]) -> 'ok'.
+none(_Pid, _Format, _Args) -> ok.
diff --git a/deps/rabbit/src/rabbit_log_feature_flags.erl b/deps/rabbit/src/rabbit_log_feature_flags.erl
new file mode 100644
index 0000000000..da52f39b67
--- /dev/null
+++ b/deps/rabbit/src/rabbit_log_feature_flags.erl
@@ -0,0 +1,120 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+%% @doc Compatibility module for the old Lager-based logging API.
+-module(rabbit_log_feature_flags).
+
+-export([debug/1, debug/2, debug/3,
+ info/1, info/2, info/3,
+ notice/1, notice/2, notice/3,
+ warning/1, warning/2, warning/3,
+ error/1, error/2, error/3,
+ critical/1, critical/2, critical/3,
+ alert/1, alert/2, alert/3,
+ emergency/1, emergency/2, emergency/3,
+ none/1, none/2, none/3]).
+
+-include_lib("rabbit_common/include/logging.hrl").
+
+-compile({no_auto_import, [error/2, error/3]}).
+
+-spec debug(string()) -> 'ok'.
+debug(Format) -> debug(Format, []).
+
+-spec debug(string(), [any()]) -> 'ok'.
+debug(Format, Args) -> debug(self(), Format, Args).
+
+-spec debug(pid() | [tuple()], string(), [any()]) -> 'ok'.
+debug(Pid, Format, Args) ->
+ logger:debug(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_FEAT_FLAGS}).
+
+-spec info(string()) -> 'ok'.
+info(Format) -> info(Format, []).
+
+-spec info(string(), [any()]) -> 'ok'.
+info(Format, Args) -> info(self(), Format, Args).
+
+-spec info(pid() | [tuple()], string(), [any()]) -> 'ok'.
+info(Pid, Format, Args) ->
+ logger:info(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_FEAT_FLAGS}).
+
+-spec notice(string()) -> 'ok'.
+notice(Format) -> notice(Format, []).
+
+-spec notice(string(), [any()]) -> 'ok'.
+notice(Format, Args) -> notice(self(), Format, Args).
+
+-spec notice(pid() | [tuple()], string(), [any()]) -> 'ok'.
+notice(Pid, Format, Args) ->
+ logger:notice(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_FEAT_FLAGS}).
+
+-spec warning(string()) -> 'ok'.
+warning(Format) -> warning(Format, []).
+
+-spec warning(string(), [any()]) -> 'ok'.
+warning(Format, Args) -> warning(self(), Format, Args).
+
+-spec warning(pid() | [tuple()], string(), [any()]) -> 'ok'.
+warning(Pid, Format, Args) ->
+ logger:warning(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_FEAT_FLAGS}).
+
+-spec error(string()) -> 'ok'.
+error(Format) -> error(Format, []).
+
+-spec error(string(), [any()]) -> 'ok'.
+error(Format, Args) -> error(self(), Format, Args).
+
+-spec error(pid() | [tuple()], string(), [any()]) -> 'ok'.
+error(Pid, Format, Args) ->
+ logger:error(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_FEAT_FLAGS}).
+
+-spec critical(string()) -> 'ok'.
+critical(Format) -> critical(Format, []).
+
+-spec critical(string(), [any()]) -> 'ok'.
+critical(Format, Args) -> critical(self(), Format, Args).
+
+-spec critical(pid() | [tuple()], string(), [any()]) -> 'ok'.
+critical(Pid, Format, Args) ->
+ logger:critical(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_FEAT_FLAGS}).
+
+-spec alert(string()) -> 'ok'.
+alert(Format) -> alert(Format, []).
+
+-spec alert(string(), [any()]) -> 'ok'.
+alert(Format, Args) -> alert(self(), Format, Args).
+
+-spec alert(pid() | [tuple()], string(), [any()]) -> 'ok'.
+alert(Pid, Format, Args) ->
+ logger:alert(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_FEAT_FLAGS}).
+
+-spec emergency(string()) -> 'ok'.
+emergency(Format) -> emergency(Format, []).
+
+-spec emergency(string(), [any()]) -> 'ok'.
+emergency(Format, Args) -> emergency(self(), Format, Args).
+
+-spec emergency(pid() | [tuple()], string(), [any()]) -> 'ok'.
+emergency(Pid, Format, Args) ->
+ logger:emergency(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_FEAT_FLAGS}).
+
+-spec none(string()) -> 'ok'.
+none(_Format) -> ok.
+
+-spec none(string(), [any()]) -> 'ok'.
+none(_Format, _Args) -> ok.
+
+-spec none(pid() | [tuple()], string(), [any()]) -> 'ok'.
+none(_Pid, _Format, _Args) -> ok.
diff --git a/deps/rabbit/src/rabbit_log_mirroring.erl b/deps/rabbit/src/rabbit_log_mirroring.erl
new file mode 100644
index 0000000000..b7e6d73650
--- /dev/null
+++ b/deps/rabbit/src/rabbit_log_mirroring.erl
@@ -0,0 +1,122 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+%% @doc Compatibility module for the old Lager-based logging API.
+-module(rabbit_log_mirroring).
+
+-export([debug/1, debug/2, debug/3,
+ info/1, info/2, info/3,
+ notice/1, notice/2, notice/3,
+ warning/1, warning/2, warning/3,
+ error/1, error/2, error/3,
+ critical/1, critical/2, critical/3,
+ alert/1, alert/2, alert/3,
+ emergency/1, emergency/2, emergency/3,
+ none/1, none/2, none/3]).
+
+-include_lib("rabbit_common/include/logging.hrl").
+
+-compile({no_auto_import, [error/2, error/3]}).
+
+%%----------------------------------------------------------------------------
+
+-spec debug(string()) -> 'ok'.
+debug(Format) -> debug(Format, []).
+
+-spec debug(string(), [any()]) -> 'ok'.
+debug(Format, Args) -> debug(self(), Format, Args).
+
+-spec debug(pid() | [tuple()], string(), [any()]) -> 'ok'.
+debug(Pid, Format, Args) ->
+ logger:debug(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_MIRRORING}).
+
+-spec info(string()) -> 'ok'.
+info(Format) -> info(Format, []).
+
+-spec info(string(), [any()]) -> 'ok'.
+info(Format, Args) -> info(self(), Format, Args).
+
+-spec info(pid() | [tuple()], string(), [any()]) -> 'ok'.
+info(Pid, Format, Args) ->
+ logger:info(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_MIRRORING}).
+
+-spec notice(string()) -> 'ok'.
+notice(Format) -> notice(Format, []).
+
+-spec notice(string(), [any()]) -> 'ok'.
+notice(Format, Args) -> notice(self(), Format, Args).
+
+-spec notice(pid() | [tuple()], string(), [any()]) -> 'ok'.
+notice(Pid, Format, Args) ->
+ logger:notice(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_MIRRORING}).
+
+-spec warning(string()) -> 'ok'.
+warning(Format) -> warning(Format, []).
+
+-spec warning(string(), [any()]) -> 'ok'.
+warning(Format, Args) -> warning(self(), Format, Args).
+
+-spec warning(pid() | [tuple()], string(), [any()]) -> 'ok'.
+warning(Pid, Format, Args) ->
+ logger:warning(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_MIRRORING}).
+
+-spec error(string()) -> 'ok'.
+error(Format) -> error(Format, []).
+
+-spec error(string(), [any()]) -> 'ok'.
+error(Format, Args) -> error(self(), Format, Args).
+
+-spec error(pid() | [tuple()], string(), [any()]) -> 'ok'.
+error(Pid, Format, Args) ->
+ logger:error(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_MIRRORING}).
+
+-spec critical(string()) -> 'ok'.
+critical(Format) -> critical(Format, []).
+
+-spec critical(string(), [any()]) -> 'ok'.
+critical(Format, Args) -> critical(self(), Format, Args).
+
+-spec critical(pid() | [tuple()], string(), [any()]) -> 'ok'.
+critical(Pid, Format, Args) ->
+ logger:critical(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_MIRRORING}).
+
+-spec alert(string()) -> 'ok'.
+alert(Format) -> alert(Format, []).
+
+-spec alert(string(), [any()]) -> 'ok'.
+alert(Format, Args) -> alert(self(), Format, Args).
+
+-spec alert(pid() | [tuple()], string(), [any()]) -> 'ok'.
+alert(Pid, Format, Args) ->
+ logger:alert(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_MIRRORING}).
+
+-spec emergency(string()) -> 'ok'.
+emergency(Format) -> emergency(Format, []).
+
+-spec emergency(string(), [any()]) -> 'ok'.
+emergency(Format, Args) -> emergency(self(), Format, Args).
+
+-spec emergency(pid() | [tuple()], string(), [any()]) -> 'ok'.
+emergency(Pid, Format, Args) ->
+ logger:emergency(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_MIRRORING}).
+
+-spec none(string()) -> 'ok'.
+none(_Format) -> ok.
+
+-spec none(string(), [any()]) -> 'ok'.
+none(_Format, _Args) -> ok.
+
+-spec none(pid() | [tuple()], string(), [any()]) -> 'ok'.
+none(_Pid, _Format, _Args) -> ok.
diff --git a/deps/rabbit/src/rabbit_log_prelaunch.erl b/deps/rabbit/src/rabbit_log_prelaunch.erl
new file mode 100644
index 0000000000..93600087f3
--- /dev/null
+++ b/deps/rabbit/src/rabbit_log_prelaunch.erl
@@ -0,0 +1,120 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+%% @doc Compatibility module for the old Lager-based logging API.
+-module(rabbit_log_prelaunch).
+
+-export([debug/1, debug/2, debug/3,
+ info/1, info/2, info/3,
+ notice/1, notice/2, notice/3,
+ warning/1, warning/2, warning/3,
+ error/1, error/2, error/3,
+ critical/1, critical/2, critical/3,
+ alert/1, alert/2, alert/3,
+ emergency/1, emergency/2, emergency/3,
+ none/1, none/2, none/3]).
+
+-include_lib("rabbit_common/include/logging.hrl").
+
+-compile({no_auto_import, [error/2, error/3]}).
+
+-spec debug(string()) -> 'ok'.
+debug(Format) -> debug(Format, []).
+
+-spec debug(string(), [any()]) -> 'ok'.
+debug(Format, Args) -> debug(self(), Format, Args).
+
+-spec debug(pid() | [tuple()], string(), [any()]) -> 'ok'.
+debug(Pid, Format, Args) ->
+ logger:debug(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_PRELAUNCH}).
+
+-spec info(string()) -> 'ok'.
+info(Format) -> info(Format, []).
+
+-spec info(string(), [any()]) -> 'ok'.
+info(Format, Args) -> info(self(), Format, Args).
+
+-spec info(pid() | [tuple()], string(), [any()]) -> 'ok'.
+info(Pid, Format, Args) ->
+ logger:info(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_PRELAUNCH}).
+
+-spec notice(string()) -> 'ok'.
+notice(Format) -> notice(Format, []).
+
+-spec notice(string(), [any()]) -> 'ok'.
+notice(Format, Args) -> notice(self(), Format, Args).
+
+-spec notice(pid() | [tuple()], string(), [any()]) -> 'ok'.
+notice(Pid, Format, Args) ->
+ logger:notice(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_PRELAUNCH}).
+
+-spec warning(string()) -> 'ok'.
+warning(Format) -> warning(Format, []).
+
+-spec warning(string(), [any()]) -> 'ok'.
+warning(Format, Args) -> warning(self(), Format, Args).
+
+-spec warning(pid() | [tuple()], string(), [any()]) -> 'ok'.
+warning(Pid, Format, Args) ->
+ logger:warning(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_PRELAUNCH}).
+
+-spec error(string()) -> 'ok'.
+error(Format) -> error(Format, []).
+
+-spec error(string(), [any()]) -> 'ok'.
+error(Format, Args) -> error(self(), Format, Args).
+
+-spec error(pid() | [tuple()], string(), [any()]) -> 'ok'.
+error(Pid, Format, Args) ->
+ logger:error(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_PRELAUNCH}).
+
+-spec critical(string()) -> 'ok'.
+critical(Format) -> critical(Format, []).
+
+-spec critical(string(), [any()]) -> 'ok'.
+critical(Format, Args) -> critical(self(), Format, Args).
+
+-spec critical(pid() | [tuple()], string(), [any()]) -> 'ok'.
+critical(Pid, Format, Args) ->
+ logger:critical(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_PRELAUNCH}).
+
+-spec alert(string()) -> 'ok'.
+alert(Format) -> alert(Format, []).
+
+-spec alert(string(), [any()]) -> 'ok'.
+alert(Format, Args) -> alert(self(), Format, Args).
+
+-spec alert(pid() | [tuple()], string(), [any()]) -> 'ok'.
+alert(Pid, Format, Args) ->
+ logger:alert(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_PRELAUNCH}).
+
+-spec emergency(string()) -> 'ok'.
+emergency(Format) -> emergency(Format, []).
+
+-spec emergency(string(), [any()]) -> 'ok'.
+emergency(Format, Args) -> emergency(self(), Format, Args).
+
+-spec emergency(pid() | [tuple()], string(), [any()]) -> 'ok'.
+emergency(Pid, Format, Args) ->
+ logger:emergency(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_PRELAUNCH}).
+
+-spec none(string()) -> 'ok'.
+none(_Format) -> ok.
+
+-spec none(string(), [any()]) -> 'ok'.
+none(_Format, _Args) -> ok.
+
+-spec none(pid() | [tuple()], string(), [any()]) -> 'ok'.
+none(_Pid, _Format, _Args) -> ok.
diff --git a/deps/rabbit/src/rabbit_log_queue.erl b/deps/rabbit/src/rabbit_log_queue.erl
new file mode 100644
index 0000000000..08632c015c
--- /dev/null
+++ b/deps/rabbit/src/rabbit_log_queue.erl
@@ -0,0 +1,120 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+%% @doc Compatibility module for the old Lager-based logging API.
+-module(rabbit_log_queue).
+
+-export([debug/1, debug/2, debug/3,
+ info/1, info/2, info/3,
+ notice/1, notice/2, notice/3,
+ warning/1, warning/2, warning/3,
+ error/1, error/2, error/3,
+ critical/1, critical/2, critical/3,
+ alert/1, alert/2, alert/3,
+ emergency/1, emergency/2, emergency/3,
+ none/1, none/2, none/3]).
+
+-include_lib("rabbit_common/include/logging.hrl").
+
+-compile({no_auto_import, [error/2, error/3]}).
+
+-spec debug(string()) -> 'ok'.
+debug(Format) -> debug(Format, []).
+
+-spec debug(string(), [any()]) -> 'ok'.
+debug(Format, Args) -> debug(self(), Format, Args).
+
+-spec debug(pid() | [tuple()], string(), [any()]) -> 'ok'.
+debug(Pid, Format, Args) ->
+ logger:debug(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_QUEUE}).
+
+-spec info(string()) -> 'ok'.
+info(Format) -> info(Format, []).
+
+-spec info(string(), [any()]) -> 'ok'.
+info(Format, Args) -> info(self(), Format, Args).
+
+-spec info(pid() | [tuple()], string(), [any()]) -> 'ok'.
+info(Pid, Format, Args) ->
+ logger:info(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_QUEUE}).
+
+-spec notice(string()) -> 'ok'.
+notice(Format) -> notice(Format, []).
+
+-spec notice(string(), [any()]) -> 'ok'.
+notice(Format, Args) -> notice(self(), Format, Args).
+
+-spec notice(pid() | [tuple()], string(), [any()]) -> 'ok'.
+notice(Pid, Format, Args) ->
+ logger:notice(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_QUEUE}).
+
+-spec warning(string()) -> 'ok'.
+warning(Format) -> warning(Format, []).
+
+-spec warning(string(), [any()]) -> 'ok'.
+warning(Format, Args) -> warning(self(), Format, Args).
+
+-spec warning(pid() | [tuple()], string(), [any()]) -> 'ok'.
+warning(Pid, Format, Args) ->
+ logger:warning(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_QUEUE}).
+
+-spec error(string()) -> 'ok'.
+error(Format) -> error(Format, []).
+
+-spec error(string(), [any()]) -> 'ok'.
+error(Format, Args) -> error(self(), Format, Args).
+
+-spec error(pid() | [tuple()], string(), [any()]) -> 'ok'.
+error(Pid, Format, Args) ->
+ logger:error(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_QUEUE}).
+
+-spec critical(string()) -> 'ok'.
+critical(Format) -> critical(Format, []).
+
+-spec critical(string(), [any()]) -> 'ok'.
+critical(Format, Args) -> critical(self(), Format, Args).
+
+-spec critical(pid() | [tuple()], string(), [any()]) -> 'ok'.
+critical(Pid, Format, Args) ->
+ logger:critical(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_QUEUE}).
+
+-spec alert(string()) -> 'ok'.
+alert(Format) -> alert(Format, []).
+
+-spec alert(string(), [any()]) -> 'ok'.
+alert(Format, Args) -> alert(self(), Format, Args).
+
+-spec alert(pid() | [tuple()], string(), [any()]) -> 'ok'.
+alert(Pid, Format, Args) ->
+ logger:alert(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_QUEUE}).
+
+-spec emergency(string()) -> 'ok'.
+emergency(Format) -> emergency(Format, []).
+
+-spec emergency(string(), [any()]) -> 'ok'.
+emergency(Format, Args) -> emergency(self(), Format, Args).
+
+-spec emergency(pid() | [tuple()], string(), [any()]) -> 'ok'.
+emergency(Pid, Format, Args) ->
+ logger:emergency(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_QUEUE}).
+
+-spec none(string()) -> 'ok'.
+none(_Format) -> ok.
+
+-spec none(string(), [any()]) -> 'ok'.
+none(_Format, _Args) -> ok.
+
+-spec none(pid() | [tuple()], string(), [any()]) -> 'ok'.
+none(_Pid, _Format, _Args) -> ok.
diff --git a/deps/rabbit/src/rabbit_log_tail.erl b/deps/rabbit/src/rabbit_log_tail.erl
index c3faad07fc..ccf48a9136 100644
--- a/deps/rabbit/src/rabbit_log_tail.erl
+++ b/deps/rabbit/src/rabbit_log_tail.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_log_tail).
diff --git a/deps/rabbit/src/rabbit_log_upgrade.erl b/deps/rabbit/src/rabbit_log_upgrade.erl
new file mode 100644
index 0000000000..ff54a2a350
--- /dev/null
+++ b/deps/rabbit/src/rabbit_log_upgrade.erl
@@ -0,0 +1,122 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+%% @doc Compatibility module for the old Lager-based logging API.
+-module(rabbit_log_upgrade).
+
+-export([debug/1, debug/2, debug/3,
+ info/1, info/2, info/3,
+ notice/1, notice/2, notice/3,
+ warning/1, warning/2, warning/3,
+ error/1, error/2, error/3,
+ critical/1, critical/2, critical/3,
+ alert/1, alert/2, alert/3,
+ emergency/1, emergency/2, emergency/3,
+ none/1, none/2, none/3]).
+
+-include_lib("rabbit_common/include/logging.hrl").
+
+-compile({no_auto_import, [error/2, error/3]}).
+
+%%----------------------------------------------------------------------------
+
+-spec debug(string()) -> 'ok'.
+debug(Format) -> debug(Format, []).
+
+-spec debug(string(), [any()]) -> 'ok'.
+debug(Format, Args) -> debug(self(), Format, Args).
+
+-spec debug(pid() | [tuple()], string(), [any()]) -> 'ok'.
+debug(Pid, Format, Args) ->
+ logger:debug(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_UPGRADE}).
+
+-spec info(string()) -> 'ok'.
+info(Format) -> info(Format, []).
+
+-spec info(string(), [any()]) -> 'ok'.
+info(Format, Args) -> info(self(), Format, Args).
+
+-spec info(pid() | [tuple()], string(), [any()]) -> 'ok'.
+info(Pid, Format, Args) ->
+ logger:info(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_UPGRADE}).
+
+-spec notice(string()) -> 'ok'.
+notice(Format) -> notice(Format, []).
+
+-spec notice(string(), [any()]) -> 'ok'.
+notice(Format, Args) -> notice(self(), Format, Args).
+
+-spec notice(pid() | [tuple()], string(), [any()]) -> 'ok'.
+notice(Pid, Format, Args) ->
+ logger:notice(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_UPGRADE}).
+
+-spec warning(string()) -> 'ok'.
+warning(Format) -> warning(Format, []).
+
+-spec warning(string(), [any()]) -> 'ok'.
+warning(Format, Args) -> warning(self(), Format, Args).
+
+-spec warning(pid() | [tuple()], string(), [any()]) -> 'ok'.
+warning(Pid, Format, Args) ->
+ logger:warning(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_UPGRADE}).
+
+-spec error(string()) -> 'ok'.
+error(Format) -> error(Format, []).
+
+-spec error(string(), [any()]) -> 'ok'.
+error(Format, Args) -> error(self(), Format, Args).
+
+-spec error(pid() | [tuple()], string(), [any()]) -> 'ok'.
+error(Pid, Format, Args) ->
+ logger:error(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_UPGRADE}).
+
+-spec critical(string()) -> 'ok'.
+critical(Format) -> critical(Format, []).
+
+-spec critical(string(), [any()]) -> 'ok'.
+critical(Format, Args) -> critical(self(), Format, Args).
+
+-spec critical(pid() | [tuple()], string(), [any()]) -> 'ok'.
+critical(Pid, Format, Args) ->
+ logger:critical(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_UPGRADE}).
+
+-spec alert(string()) -> 'ok'.
+alert(Format) -> alert(Format, []).
+
+-spec alert(string(), [any()]) -> 'ok'.
+alert(Format, Args) -> alert(self(), Format, Args).
+
+-spec alert(pid() | [tuple()], string(), [any()]) -> 'ok'.
+alert(Pid, Format, Args) ->
+ logger:alert(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_UPGRADE}).
+
+-spec emergency(string()) -> 'ok'.
+emergency(Format) -> emergency(Format, []).
+
+-spec emergency(string(), [any()]) -> 'ok'.
+emergency(Format, Args) -> emergency(self(), Format, Args).
+
+-spec emergency(pid() | [tuple()], string(), [any()]) -> 'ok'.
+emergency(Pid, Format, Args) ->
+ logger:emergency(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_UPGRADE}).
+
+-spec none(string()) -> 'ok'.
+none(_Format) -> ok.
+
+-spec none(string(), [any()]) -> 'ok'.
+none(_Format, _Args) -> ok.
+
+-spec none(pid() | [tuple()], string(), [any()]) -> 'ok'.
+none(_Pid, _Format, _Args) -> ok.
diff --git a/deps/rabbit/src/rabbit_logger_exchange_h.erl b/deps/rabbit/src/rabbit_logger_exchange_h.erl
new file mode 100644
index 0000000000..6a905a3452
--- /dev/null
+++ b/deps/rabbit/src/rabbit_logger_exchange_h.erl
@@ -0,0 +1,176 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(rabbit_logger_exchange_h).
+
+-include_lib("kernel/include/logger.hrl").
+
+-include_lib("rabbit_common/include/rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit_framing.hrl").
+-include_lib("rabbit_common/include/logging.hrl").
+
+%% logger callbacks
+-export([log/2, adding_handler/1, removing_handler/1, changing_config/3,
+ filter_config/1]).
+
+-define(DECL_EXCHANGE_INTERVAL_SECS, 5).
+-define(LOG_EXCH_NAME, <<"amq.rabbitmq.log">>).
+-define(DEFAULT_FORMATTER, logger_formatter).
+-define(DEFAULT_FORMATTER_CONFIG, #{}).
+
+%% -------------------------------------------------------------------
+%% Logger handler callbacks.
+%% -------------------------------------------------------------------
+
+adding_handler(Config) ->
+ Config1 = start_setup_proc(Config),
+ {ok, Config1}.
+
+changing_config(_SetOrUpdate, OldConfig, _NewConfig) ->
+ {ok, OldConfig}.
+
+filter_config(Config) ->
+ Config.
+
+log(#{meta := #{mfa := {?MODULE, _, _}}}, _) ->
+ ok;
+log(LogEvent, Config) ->
+ case rabbit_boot_state:get() of
+ ready -> do_log(LogEvent, Config);
+ _ -> ok
+ end.
+
+do_log(LogEvent, #{config := #{exchange := Exchange}} = Config) ->
+ RoutingKey = make_routing_key(LogEvent, Config),
+ AmqpMsg = log_event_to_amqp_msg(LogEvent, Config),
+ Body = try_format_body(LogEvent, Config),
+ case rabbit_basic:publish(Exchange, RoutingKey, AmqpMsg, Body) of
+ ok -> ok;
+ {error, not_found} -> ok
+ end.
+
+removing_handler(Config) ->
+ unconfigure_exchange(Config),
+ ok.
+
+%% -------------------------------------------------------------------
+%% Internal functions.
+%% -------------------------------------------------------------------
+
+log_event_to_amqp_msg(LogEvent, Config) ->
+ ContentType = guess_content_type(Config),
+ Timestamp = make_timestamp(LogEvent, Config),
+ Headers = make_headers(LogEvent, Config),
+ #'P_basic'{
+ content_type = ContentType,
+ timestamp = Timestamp,
+ headers = Headers
+ }.
+
+make_routing_key(#{level := Level}, _) ->
+ rabbit_data_coercion:to_binary(Level).
+
+guess_content_type(#{formatter := {rabbit_logger_json_fmt, _}}) ->
+ <<"application/json">>;
+guess_content_type(_) ->
+ <<"text/plain">>.
+
+make_timestamp(#{meta := #{time := Timestamp}}, _) ->
+ erlang:convert_time_unit(Timestamp, microsecond, second);
+make_timestamp(_, _) ->
+ os:system_time(second).
+
+make_headers(_, _) ->
+ Node = rabbit_data_coercion:to_binary(node()),
+ [{<<"node">>, longstr, Node}].
+
+try_format_body(LogEvent, #{formatter := {Formatter, FormatterConfig}}) ->
+ Formatted = try_format_body(LogEvent, Formatter, FormatterConfig),
+ erlang:iolist_to_binary(Formatted).
+
+try_format_body(LogEvent, Formatter, FormatterConfig) ->
+ try
+ Formatter:format(LogEvent, FormatterConfig)
+ catch
+ C:R:S ->
+ case {?DEFAULT_FORMATTER, ?DEFAULT_FORMATTER_CONFIG} of
+ {Formatter, FormatterConfig} ->
+ "DEFAULT FORMATTER CRASHED\n";
+ {DefaultFormatter, DefaultFormatterConfig} ->
+ Msg = {"FORMATTER CRASH: ~tp -- ~p:~p:~p",
+ [maps:get(msg, LogEvent), C, R, S]},
+ LogEvent1 = LogEvent#{msg => Msg},
+ try_format_body(
+ LogEvent1,
+ DefaultFormatter,
+ DefaultFormatterConfig)
+ end
+ end.
+
+start_setup_proc(#{config := InternalConfig} = Config) ->
+ {ok, DefaultVHost} = application:get_env(rabbit, default_vhost),
+ Exchange = rabbit_misc:r(DefaultVHost, exchange, ?LOG_EXCH_NAME),
+ InternalConfig1 = InternalConfig#{exchange => Exchange},
+
+ Pid = spawn(fun() -> setup_proc(Config#{config => InternalConfig1}) end),
+ InternalConfig2 = InternalConfig1#{setup_proc => Pid},
+ Config#{config => InternalConfig2}.
+
+setup_proc(
+ #{config := #{exchange := #resource{name = Name,
+ virtual_host = VHost}}} = Config) ->
+ case declare_exchange(Config) of
+ ok ->
+ ?LOG_INFO(
+ "Logging to exchange '~s' in vhost '~s' ready", [Name, VHost],
+ #{domain => ?RMQLOG_DOMAIN_GLOBAL});
+ error ->
+ ?LOG_DEBUG(
+ "Logging to exchange '~s' in vhost '~s' not ready, "
+ "trying again in ~b second(s)",
+ [Name, VHost, ?DECL_EXCHANGE_INTERVAL_SECS],
+ #{domain => ?RMQLOG_DOMAIN_GLOBAL}),
+ receive
+ stop -> ok
+ after ?DECL_EXCHANGE_INTERVAL_SECS * 1000 ->
+ setup_proc(Config)
+ end
+ end.
+
+declare_exchange(
+ #{config := #{exchange := #resource{name = Name,
+ virtual_host = VHost} = Exchange}}) ->
+ try
+ %% Durable.
+ #exchange{} = rabbit_exchange:declare(
+ Exchange, topic, true, false, true, [],
+ ?INTERNAL_USER),
+ ?LOG_DEBUG(
+ "Declared exchange '~s' in vhost '~s'",
+ [Name, VHost],
+ #{domain => ?RMQLOG_DOMAIN_GLOBAL}),
+ ok
+ catch
+ Class:Reason ->
+ ?LOG_DEBUG(
+ "Could not declare exchange '~s' in vhost '~s', "
+ "reason: ~0p:~0p",
+ [Name, VHost, Class, Reason],
+ #{domain => ?RMQLOG_DOMAIN_GLOBAL}),
+ error
+ end.
+
+unconfigure_exchange(
+ #{config := #{exchange := #resource{name = Name,
+ virtual_host = VHost} = Exchange,
+ setup_proc := Pid}}) ->
+ Pid ! stop,
+ rabbit_exchange:delete(Exchange, false, ?INTERNAL_USER),
+ ?LOG_INFO(
+ "Logging to exchange '~s' in vhost '~s' disabled",
+ [Name, VHost],
+ #{domain => ?RMQLOG_DOMAIN_GLOBAL}).
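A sketch of how this handler could be attached on a running node through the standard OTP logger API; the handler id, the level and the choice of the JSON formatter are illustrative assumptions, not part of this commit:

    ok = logger:add_handler(
           rabbit_log_exchange, rabbit_logger_exchange_h,
           #{level     => info,
             %% adding_handler/1 expects a config map; the exchange resource
             %% and the setup process are filled in by start_setup_proc/1
             config    => #{},
             formatter => {rabbit_logger_json_fmt, #{}}}).

Events are published to amq.rabbitmq.log only once the node reports the ready boot state; before that, log/2 simply returns ok.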
diff --git a/deps/rabbit/src/rabbit_looking_glass.erl b/deps/rabbit/src/rabbit_looking_glass.erl
index 00b1b6d46b..855d0adf49 100644
--- a/deps/rabbit/src/rabbit_looking_glass.erl
+++ b/deps/rabbit/src/rabbit_looking_glass.erl
@@ -2,21 +2,32 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_looking_glass).
--ignore_xref([{lg, trace, 4}]).
+-ignore_xref([
+ {lg, trace, 4},
+ {lg_callgrind, profile_many, 3}
+]).
-ignore_xref([{maps, from_list, 1}]).
-export([boot/0]).
+-export([trace/1, profile/0, profile/1]).
-export([connections/0]).
boot() ->
case os:getenv("RABBITMQ_TRACER") of
false ->
ok;
+ On when On =:= "1" orelse On =:= "true" ->
+ rabbit_log:info("Loading Looking Glass profiler for interactive use"),
+ case application:ensure_all_started(looking_glass) of
+ {ok, _} -> ok;
+ {error, Error} ->
+ rabbit_log:error("Failed to start Looking Glass, reason: ~p", [Error])
+ end;
Value ->
Input = parse_value(Value),
rabbit_log:info(
@@ -37,6 +48,27 @@ boot() ->
)
end.
+trace(Input) ->
+ lg:trace(Input,
+ lg_file_tracer,
+ "traces.lz4",
+ maps:from_list([
+ {mode, profile},
+ {process_dump, true},
+ {running, true},
+ {send, true}]
+ )).
+
+profile() ->
+ profile("callgrind.out").
+
+profile(Filename) ->
+ lg_callgrind:profile_many("traces.lz4.*", Filename, #{running => true}).
+
+%%
+%% Implementation
+%%
+
parse_value(Value) ->
[begin
[Mod, Fun] = string:tokens(C, ":"),
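A hedged example of interactive use once the Looking Glass application has been loaded via RABBITMQ_TRACER=1; the traced modules are arbitrary examples and lg:stop/0 is assumed to be the Looking Glass call that ends a trace:

    %% trace a couple of modules into traces.lz4.* files
    rabbit_looking_glass:trace([rabbit_channel, rabbit_amqqueue_process]).
    %% ... exercise the node, then stop tracing and render a callgrind file
    lg:stop().
    rabbit_looking_glass:profile("callgrind.out").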
diff --git a/deps/rabbit/src/rabbit_maintenance.erl b/deps/rabbit/src/rabbit_maintenance.erl
index e5434dc888..3f961e45a6 100644
--- a/deps/rabbit/src/rabbit_maintenance.erl
+++ b/deps/rabbit/src/rabbit_maintenance.erl
@@ -2,12 +2,12 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2018-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2018-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_maintenance).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-export([
is_enabled/0,
@@ -25,7 +25,7 @@
resume_all_client_listeners/0,
close_all_client_connections/0,
primary_replica_transfer_candidate_nodes/0,
- random_primary_replica_transfer_candidate_node/1,
+ random_primary_replica_transfer_candidate_node/2,
transfer_leadership_of_quorum_queues/1,
transfer_leadership_of_classic_mirrored_queues/1,
status_table_name/0,
@@ -67,28 +67,28 @@ is_enabled() ->
drain() ->
case is_enabled() of
true -> do_drain();
- false -> rabbit_log:warning("Feature flag `~s` is not enabled, draining is a no-op", [?FEATURE_FLAG])
+ false ->
+ rabbit_log:error("Feature flag '~s' is not enabled, cannot put this node under maintenance", [?FEATURE_FLAG]),
+ {error, rabbit_misc:format("Feature flag '~s' is not enabled, cannot put this node under maintenance", [?FEATURE_FLAG])}
end.
-spec do_drain() -> ok.
do_drain() ->
- rabbit_log:alert("This node is being put into maintenance (drain) mode"),
+ rabbit_log:warning("This node is being put into maintenance (drain) mode"),
mark_as_being_drained(),
rabbit_log:info("Marked this node as undergoing maintenance"),
suspend_all_client_listeners(),
- rabbit_log:alert("Suspended all listeners and will no longer accept client connections"),
+ rabbit_log:warning("Suspended all listeners and will no longer accept client connections"),
{ok, NConnections} = close_all_client_connections(),
%% allow plugins to react e.g. by closing their protocol connections
rabbit_event:notify(maintenance_connections_closed, #{
reason => <<"node is being put into maintenance">>
}),
- rabbit_log:alert("Closed ~b local client connections", [NConnections]),
+ rabbit_log:warning("Closed ~b local client connections", [NConnections]),
TransferCandidates = primary_replica_transfer_candidate_nodes(),
- ReadableCandidates = readable_candidate_list(TransferCandidates),
- rabbit_log:info("Node will transfer primary replicas of its queues to ~b peers: ~s",
- [length(TransferCandidates), ReadableCandidates]),
- transfer_leadership_of_classic_mirrored_queues(TransferCandidates),
+ %% Note: only QQ leadership is transferred because it is a reasonably quick
+ %% operation even with a lot of queues in the cluster, unlike with CMQs.
transfer_leadership_of_quorum_queues(TransferCandidates),
stop_local_quorum_queue_followers(),
@@ -96,7 +96,7 @@ do_drain() ->
rabbit_event:notify(maintenance_draining, #{
reason => <<"node is being put into maintenance">>
}),
- rabbit_log:alert("Node is ready to be shut down for maintenance or upgrade"),
+ rabbit_log:info("Node is ready to be shut down for maintenance or upgrade"),
ok.
@@ -104,16 +104,18 @@ do_drain() ->
revive() ->
case is_enabled() of
true -> do_revive();
- false -> rabbit_log:warning("Feature flag `~s` is not enabled, reviving is a no-op", [?FEATURE_FLAG])
+ false ->
+ rabbit_log:error("Feature flag '~s' is not enabled, cannot put this node out of maintenance", [?FEATURE_FLAG]),
+ {error, rabbit_misc:format("Feature flag '~s' is not enabled, cannot put this node out of maintenance", [?FEATURE_FLAG])}
end.
-spec do_revive() -> ok.
do_revive() ->
- rabbit_log:alert("This node is being revived from maintenance (drain) mode"),
+ rabbit_log:info("This node is being revived from maintenance (drain) mode"),
revive_local_quorum_queue_replicas(),
- rabbit_log:alert("Resumed all listeners and will accept client connections again"),
+ rabbit_log:info("Resumed all listeners and will accept client connections again"),
resume_all_client_listeners(),
- rabbit_log:alert("Resumed all listeners and will accept client connections again"),
+ rabbit_log:info("Resumed all listeners and will accept client connections again"),
unmark_as_being_drained(),
rabbit_log:info("Marked this node as back from maintenance and ready to serve clients"),
@@ -195,7 +197,7 @@ filter_out_drained_nodes_consistent_read(Nodes) ->
-spec suspend_all_client_listeners() -> rabbit_types:ok_or_error(any()).
%% Pauses all listeners on the current node except for
%% Erlang distribution (clustering and CLI tools).
- %% A respausedumed listener will not accept any new client connections
+ %% A suspended listener will not accept any new client connections
%% but previously established connections won't be interrupted.
suspend_all_client_listeners() ->
Listeners = rabbit_networking:node_client_listeners(node()),
@@ -238,7 +240,7 @@ transfer_leadership_of_quorum_queues(_TransferCandidates) ->
%% by simply shutting its local QQ replica (Ra server)
RaLeader = amqqueue:get_pid(Q),
rabbit_log:debug("Will stop Ra server ~p", [RaLeader]),
- case ra:stop_server(RaLeader) of
+ case rabbit_quorum_queue:stop_server(RaLeader) of
ok ->
rabbit_log:debug("Successfully stopped Ra server ~p", [RaLeader]);
{error, nodedown} ->
@@ -248,7 +250,12 @@ transfer_leadership_of_quorum_queues(_TransferCandidates) ->
rabbit_log:info("Leadership transfer for quorum queues hosted on this node has been initiated").
-spec transfer_leadership_of_classic_mirrored_queues([node()]) -> ok.
- transfer_leadership_of_classic_mirrored_queues([]) ->
+%% This function is no longer used by maintenance mode. We retain it in case
+%% classic mirrored queue leadership transfer is ever reconsidered.
+%%
+%% With a lot of CMQs in a cluster, the transfer procedure can take prohibitively long
+%% for a pre-upgrade task.
+transfer_leadership_of_classic_mirrored_queues([]) ->
rabbit_log:warning("Skipping leadership transfer of classic mirrored queues: no candidate "
"(online, not under maintenance) nodes to transfer to!");
transfer_leadership_of_classic_mirrored_queues(TransferCandidates) ->
@@ -256,14 +263,16 @@ transfer_leadership_of_classic_mirrored_queues(TransferCandidates) ->
ReadableCandidates = readable_candidate_list(TransferCandidates),
rabbit_log:info("Will transfer leadership of ~b classic mirrored queues hosted on this node to these peer nodes: ~s",
[length(Queues), ReadableCandidates]),
-
[begin
Name = amqqueue:get_name(Q),
- case random_primary_replica_transfer_candidate_node(TransferCandidates) of
+ ExistingReplicaNodes = [node(Pid) || Pid <- amqqueue:get_sync_slave_pids(Q)],
+ rabbit_log:debug("Local ~s has replicas on nodes ~s",
+ [rabbit_misc:rs(Name), readable_candidate_list(ExistingReplicaNodes)]),
+ case random_primary_replica_transfer_candidate_node(TransferCandidates, ExistingReplicaNodes) of
{ok, Pick} ->
- rabbit_log:debug("Will transfer leadership of local queue ~s to node ~s",
+ rabbit_log:debug("Will transfer leadership of local ~s to node ~s",
[rabbit_misc:rs(Name), Pick]),
- case rabbit_mirror_queue_misc:transfer_leadership(Q, Pick) of
+ case rabbit_mirror_queue_misc:migrate_leadership_to_existing_replica(Q, Pick) of
{migrated, _} ->
rabbit_log:debug("Successfully transferred leadership of queue ~s to node ~s",
[rabbit_misc:rs(Name), Pick]);
@@ -291,7 +300,7 @@ stop_local_quorum_queue_followers() ->
{RegisteredName, _LeaderNode} = amqqueue:get_pid(Q),
RaNode = {RegisteredName, node()},
rabbit_log:debug("Will stop Ra server ~p", [RaNode]),
- case ra:stop_server(RaNode) of
+ case rabbit_quorum_queue:stop_server(RaNode) of
ok ->
rabbit_log:debug("Successfully stopped Ra server ~p", [RaNode]);
{error, nodedown} ->
@@ -300,18 +309,30 @@ stop_local_quorum_queue_followers() ->
end || Q <- Queues],
rabbit_log:info("Stopped all local replicas of quorum queues hosted on this node").
- -spec primary_replica_transfer_candidate_nodes() -> [node()].
+-spec primary_replica_transfer_candidate_nodes() -> [node()].
primary_replica_transfer_candidate_nodes() ->
filter_out_drained_nodes_consistent_read(rabbit_nodes:all_running() -- [node()]).
--spec random_primary_replica_transfer_candidate_node([node()]) -> {ok, node()} | undefined.
-random_primary_replica_transfer_candidate_node([]) ->
+-spec random_primary_replica_transfer_candidate_node([node()], [node()]) -> {ok, node()} | undefined.
+random_primary_replica_transfer_candidate_node([], _Preferred) ->
undefined;
-random_primary_replica_transfer_candidate_node(Candidates) ->
- Nth = erlang:phash2(erlang:monotonic_time(), length(Candidates)),
- Candidate = lists:nth(Nth + 1, Candidates),
+random_primary_replica_transfer_candidate_node(Candidates, PreferredNodes) ->
+ Overlap = sets:to_list(sets:intersection(sets:from_list(Candidates), sets:from_list(PreferredNodes))),
+ Candidate = case Overlap of
+ [] ->
+ %% Since ownership transfer is meant to be run only when we are sure
+ %% there are in-sync replicas to transfer to, this is an edge case.
+ %% We skip the transfer.
+ undefined;
+ Nodes ->
+ random_nth(Nodes)
+ end,
{ok, Candidate}.
+random_nth(Nodes) ->
+ Nth = erlang:phash2(erlang:monotonic_time(), length(Nodes)),
+ lists:nth(Nth + 1, Nodes).
+
revive_local_quorum_queue_replicas() ->
Queues = rabbit_amqqueue:list_local_followers(),
[begin
@@ -322,7 +343,7 @@ revive_local_quorum_queue_replicas() ->
{Prefix, _Node} = amqqueue:get_pid(Q),
RaServer = {Prefix, node()},
rabbit_log:debug("Will start Ra server ~p", [RaServer]),
- case ra:restart_server(RaServer) of
+ case rabbit_quorum_queue:restart_server(RaServer) of
ok ->
rabbit_log:debug("Successfully restarted Ra server ~p", [RaServer]);
{error, {already_started, _Pid}} ->
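To make the new candidate selection concrete, a hypothetical evaluation (node names invented): the transfer candidates are intersected with the nodes that already hold an in-sync replica, and one of the overlapping nodes is picked pseudo-randomly, so a node without a replica is never chosen:

    %% online, not-drained candidates vs. nodes with in-sync replicas
    random_primary_replica_transfer_candidate_node(
        ['rabbit@node2', 'rabbit@node3'],
        ['rabbit@node3', 'rabbit@node5']).
    %% => {ok, 'rabbit@node3'}

With an empty overlap the transfer is skipped for that queue, as the comment in the diff above notes.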
diff --git a/deps/rabbit/src/rabbit_memory_monitor.erl b/deps/rabbit/src/rabbit_memory_monitor.erl
index 5934a97cff..1b1c47c3fb 100644
--- a/deps/rabbit/src/rabbit_memory_monitor.erl
+++ b/deps/rabbit/src/rabbit_memory_monitor.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
diff --git a/deps/rabbit/src/rabbit_metrics.erl b/deps/rabbit/src/rabbit_metrics.erl
index 10418e3884..27c2b57cde 100644
--- a/deps/rabbit/src/rabbit_metrics.erl
+++ b/deps/rabbit/src/rabbit_metrics.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_metrics).
diff --git a/deps/rabbit/src/rabbit_mirror_queue_coordinator.erl b/deps/rabbit/src/rabbit_mirror_queue_coordinator.erl
index 91a7c3ddc8..00c1b62e93 100644
--- a/deps/rabbit/src/rabbit_mirror_queue_coordinator.erl
+++ b/deps/rabbit/src/rabbit_mirror_queue_coordinator.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2010-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2010-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mirror_queue_coordinator).
@@ -418,15 +418,21 @@ handle_pre_hibernate(State = #state { gm = GM }) ->
%% GM
%% ---------------------------------------------------------------------------
+-spec joined(args(), members()) -> callback_result().
+
joined([CPid], Members) ->
CPid ! {joined, self(), Members},
ok.
+-spec members_changed(args(), members(),members()) -> callback_result().
+
members_changed([_CPid], _Births, []) ->
ok;
members_changed([CPid], _Births, Deaths) ->
ok = gen_server2:cast(CPid, {gm_deaths, Deaths}).
+-spec handle_msg(args(), pid(), any()) -> callback_result().
+
handle_msg([CPid], _From, request_depth = Msg) ->
ok = gen_server2:cast(CPid, Msg);
handle_msg([CPid], _From, {ensure_monitoring, _Pids} = Msg) ->
@@ -445,6 +451,8 @@ handle_msg([_CPid], _From, {delete_and_terminate, _Reason}) ->
handle_msg([_CPid], _From, _Msg) ->
ok.
+-spec handle_terminate(args(), term()) -> any().
+
handle_terminate([CPid], Reason) ->
ok = gen_server2:cast(CPid, {delete_and_terminate, Reason}),
ok.
diff --git a/deps/rabbit/src/rabbit_mirror_queue_master.erl b/deps/rabbit/src/rabbit_mirror_queue_master.erl
index 71146e1ce2..e7980d44bc 100644
--- a/deps/rabbit/src/rabbit_mirror_queue_master.erl
+++ b/deps/rabbit/src/rabbit_mirror_queue_master.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2010-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2010-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mirror_queue_master).
@@ -150,19 +150,20 @@ sync_mirrors(HandleInfo, EmitStats,
backing_queue_state = BQS }) ->
Log = fun (Fmt, Params) ->
rabbit_mirror_queue_misc:log_info(
- QName, "Synchronising: " ++ Fmt ++ "~n", Params)
+ QName, "Synchronising: " ++ Fmt ++ "", Params)
end,
Log("~p messages to synchronise", [BQ:len(BQS)]),
{ok, Q} = rabbit_amqqueue:lookup(QName),
SPids = amqqueue:get_slave_pids(Q),
SyncBatchSize = rabbit_mirror_queue_misc:sync_batch_size(Q),
- Log("batch size: ~p", [SyncBatchSize]),
+ SyncThroughput = rabbit_mirror_queue_misc:default_max_sync_throughput(),
+ log_mirror_sync_config(Log, SyncBatchSize, SyncThroughput),
Ref = make_ref(),
Syncer = rabbit_mirror_queue_sync:master_prepare(Ref, QName, Log, SPids),
gm:broadcast(GM, {sync_start, Ref, Syncer, SPids}),
S = fun(BQSN) -> State#state{backing_queue_state = BQSN} end,
case rabbit_mirror_queue_sync:master_go(
- Syncer, Ref, Log, HandleInfo, EmitStats, SyncBatchSize, BQ, BQS) of
+ Syncer, Ref, Log, HandleInfo, EmitStats, SyncBatchSize, SyncThroughput, BQ, BQS) of
{cancelled, BQS1} -> Log(" synchronisation cancelled ", []),
{ok, S(BQS1)};
{shutdown, R, BQS1} -> {stop, R, S(BQS1)};
@@ -173,6 +174,11 @@ sync_mirrors(HandleInfo, EmitStats,
{ok, S(BQS1)}
end.
+log_mirror_sync_config(Log, SyncBatchSize, 0) ->
+ Log("batch size: ~p", [SyncBatchSize]);
+log_mirror_sync_config(Log, SyncBatchSize, SyncThroughput) ->
+ Log("max batch size: ~p; max sync throughput: ~p bytes/s", [SyncBatchSize, SyncThroughput]).
+
terminate({shutdown, dropped} = Reason,
State = #state { backing_queue = BQ,
backing_queue_state = BQS }) ->
@@ -198,7 +204,7 @@ terminate(Reason,
true -> %% Remove the whole queue to avoid data loss
rabbit_mirror_queue_misc:log_warning(
QName, "Stopping all nodes on master shutdown since no "
- "synchronised mirror (replica) is available~n", []),
+ "synchronised mirror (replica) is available", []),
stop_all_slaves(Reason, State);
false -> %% Just let some other mirror take over.
ok
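The synchronisation log line now reflects an optional throughput cap in addition to the batch size. A sketch of how both settings might be supplied in advanced.config; the key names appear in this commit, while the values are arbitrary and the string form assumes rabbit_resource_monitor_misc:parse_information_unit/1 accepts information-unit suffixes:

    [
     {rabbit, [
       %% messages per synchronisation batch (existing setting)
       {mirroring_sync_batch_size, 4096},
       %% optional cap on sync throughput; the default is 0
       {mirroring_sync_max_throughput, "25MB"}
     ]}
    ].

With the throughput value left at its default of 0, log_mirror_sync_config/3 logs only the batch size, as before.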
diff --git a/deps/rabbit/src/rabbit_mirror_queue_misc.erl b/deps/rabbit/src/rabbit_mirror_queue_misc.erl
index 02f590e2fb..6b1e25122f 100644
--- a/deps/rabbit/src/rabbit_mirror_queue_misc.erl
+++ b/deps/rabbit/src/rabbit_mirror_queue_misc.erl
@@ -2,30 +2,32 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2010-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2010-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mirror_queue_misc).
-behaviour(rabbit_policy_validator).
+-include("amqqueue.hrl").
+
-export([remove_from_queue/3, on_vhost_up/1, add_mirrors/3,
report_deaths/4, store_updated_slaves/1,
initial_queue_node/2, suggested_queue_nodes/1, actual_queue_nodes/1,
is_mirrored/1, is_mirrored_ha_nodes/1,
update_mirrors/2, update_mirrors/1, validate_policy/1,
maybe_auto_sync/1, maybe_drop_master_after_sync/1,
- sync_batch_size/1, log_info/3, log_warning/3]).
+ sync_batch_size/1, default_max_sync_throughput/0,
+ log_info/3, log_warning/3]).
-export([stop_all_slaves/5]).
--export([sync_queue/1, cancel_sync_queue/1]).
+-export([sync_queue/1, cancel_sync_queue/1, queue_length/1]).
--export([transfer_leadership/2, queue_length/1, get_replicas/1]).
+-export([get_replicas/1, transfer_leadership/2, migrate_leadership_to_existing_replica/2]).
%% for testing only
-export([module/1]).
-include_lib("rabbit_common/include/rabbit.hrl").
--include("amqqueue.hrl").
-define(HA_NODES_MODULE, rabbit_mirror_queue_mode_nodes).
@@ -201,15 +203,16 @@ drop_mirror(QName, MirrorNode) ->
case rabbit_amqqueue:lookup(QName) of
{ok, Q} when ?is_amqqueue(Q) ->
Name = amqqueue:get_name(Q),
- QPid = amqqueue:get_pid(Q),
- SPids = amqqueue:get_slave_pids(Q),
- case [Pid || Pid <- [QPid | SPids], node(Pid) =:= MirrorNode] of
+ PrimaryPid = amqqueue:get_pid(Q),
+ MirrorPids = amqqueue:get_slave_pids(Q),
+ AllReplicaPids = [PrimaryPid | MirrorPids],
+ case [Pid || Pid <- AllReplicaPids, node(Pid) =:= MirrorNode] of
[] ->
{error, {queue_not_mirrored_on_node, MirrorNode}};
- [QPid] when SPids =:= [] ->
+ [PrimaryPid] when MirrorPids =:= [] ->
{error, cannot_drop_only_mirror};
[Pid] ->
- log_info(Name, "Dropping queue mirror on node ~p~n",
+ log_info(Name, "Dropping queue mirror on node ~p",
[MirrorNode]),
exit(Pid, {shutdown, dropped}),
{ok, dropped}
@@ -235,24 +238,22 @@ add_mirror(QName, MirrorNode, SyncMode) ->
case rabbit_vhost_sup_sup:get_vhost_sup(VHost, MirrorNode) of
{ok, _} ->
try
- SPid = rabbit_amqqueue_sup_sup:start_queue_process(
- MirrorNode, Q, slave),
- log_info(QName, "Adding mirror on node ~p: ~p~n",
- [MirrorNode, SPid]),
- rabbit_mirror_queue_slave:go(SPid, SyncMode)
+ MirrorPid = rabbit_amqqueue_sup_sup:start_queue_process(MirrorNode, Q, slave),
+ log_info(QName, "Adding mirror on node ~p: ~p", [MirrorNode, MirrorPid]),
+ rabbit_mirror_queue_slave:go(MirrorPid, SyncMode)
of
_ -> ok
catch
error:QError ->
log_warning(QName,
"Unable to start queue mirror on node '~p'. "
- "Target queue supervisor is not running: ~p~n",
+ "Target queue supervisor is not running: ~p",
[MirrorNode, QError])
end;
{error, Error} ->
log_warning(QName,
"Unable to start queue mirror on node '~p'. "
- "Target virtual host is not running: ~p~n",
+ "Target virtual host is not running: ~p",
[MirrorNode, Error]),
ok
end
@@ -264,10 +265,10 @@ add_mirror(QName, MirrorNode, SyncMode) ->
report_deaths(_MirrorPid, _IsMaster, _QueueName, []) ->
ok;
report_deaths(MirrorPid, IsMaster, QueueName, DeadPids) ->
- log_info(QueueName, "~s ~s saw deaths of mirrors~s~n",
+ log_info(QueueName, "~s replica of queue ~s detected replica ~s to be down",
[case IsMaster of
- true -> "Master";
- false -> "Slave"
+ true -> "Primary";
+ false -> "Secondary"
end,
rabbit_misc:pid_to_string(MirrorPid),
[[$ , rabbit_misc:pid_to_string(P)] || P <- DeadPids]]).
@@ -342,7 +343,7 @@ stop_all_slaves(Reason, SPids, QName, GM, WaitTimeout) ->
after WaitTimeout ->
rabbit_mirror_queue_misc:log_warning(
QName, "Missing 'DOWN' message from ~p in"
- " node ~p~n", [Pid, node(Pid)]),
+ " node ~p", [Pid, node(Pid)]),
[Pid | Acc]
end;
false ->
@@ -435,26 +436,29 @@ validate_mode(Mode) ->
-spec is_mirrored(amqqueue:amqqueue()) -> boolean().
is_mirrored(Q) ->
- case module(Q) of
+ MatchedByPolicy = case module(Q) of
{ok, _} -> true;
_ -> false
- end.
+ end,
+ MatchedByPolicy andalso (not rabbit_amqqueue:is_exclusive(Q)).
is_mirrored_ha_nodes(Q) ->
- case module(Q) of
+ MatchedByPolicy = case module(Q) of
{ok, ?HA_NODES_MODULE} -> true;
_ -> false
- end.
+ end,
+ MatchedByPolicy andalso (not rabbit_amqqueue:is_exclusive(Q)).
actual_queue_nodes(Q) when ?is_amqqueue(Q) ->
- MPid = amqqueue:get_pid(Q),
- SPids = amqqueue:get_slave_pids(Q),
- SSPids = amqqueue:get_sync_slave_pids(Q),
- Nodes = fun (L) -> [node(Pid) || Pid <- L] end,
- {case MPid of
- none -> none;
- _ -> node(MPid)
- end, Nodes(SPids), Nodes(SSPids)}.
+ PrimaryPid = amqqueue:get_pid(Q),
+ MirrorPids = amqqueue:get_slave_pids(Q),
+ InSyncMirrorPids = amqqueue:get_sync_slave_pids(Q),
+ CollectNodes = fun (L) -> [node(Pid) || Pid <- L] end,
+ NodeHostingPrimary = case PrimaryPid of
+ none -> none;
+ _ -> node(PrimaryPid)
+ end,
+ {NodeHostingPrimary, CollectNodes(MirrorPids), CollectNodes(InSyncMirrorPids)}.
-spec maybe_auto_sync(amqqueue:amqqueue()) -> 'ok'.
@@ -503,6 +507,25 @@ default_batch_size() ->
rabbit_misc:get_env(rabbit, mirroring_sync_batch_size,
?DEFAULT_BATCH_SIZE).
+-define(DEFAULT_MAX_SYNC_THROUGHPUT, 0).
+
+default_max_sync_throughput() ->
+ case application:get_env(rabbit, mirroring_sync_max_throughput) of
+ {ok, Value} ->
+ case rabbit_resource_monitor_misc:parse_information_unit(Value) of
+ {ok, ParsedThroughput} ->
+ ParsedThroughput;
+ {error, parse_error} ->
+ rabbit_log:warning(
+ "The configured value for the mirroring_sync_max_throughput is "
+ "not a valid value: ~p. Disabled sync throughput control. ",
+ [Value]),
+ ?DEFAULT_MAX_SYNC_THROUGHPUT
+ end;
+ undefined ->
+ ?DEFAULT_MAX_SYNC_THROUGHPUT
+ end.
+
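%% Illustrative configuration sketch (not part of this patch; the "50MB" value and the
%% use of advanced.config-style terms are assumptions). The key above is read from the
%% 'rabbit' application environment, so an operator could set it with Erlang terms, e.g.:
%%
%%   [{rabbit, [
%%       %% caps eager sync throughput in bytes per second; 0 (the default) disables the cap
%%       {mirroring_sync_max_throughput, "50MB"}
%%   ]}].
%%
%% The value is handed to rabbit_resource_monitor_misc:parse_information_unit/1, which is
%% assumed here to accept the same unit suffixes used for settings such as disk_free_limit.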
-spec update_mirrors
(amqqueue:amqqueue(), amqqueue:amqqueue()) -> 'ok'.
@@ -520,19 +543,19 @@ update_mirrors(OldQ, NewQ) when ?amqqueue_pids_are_equal(OldQ, NewQ) ->
update_mirrors(Q) when ?is_amqqueue(Q) ->
QName = amqqueue:get_name(Q),
- {OldMNode, OldSNodes, _} = actual_queue_nodes(Q),
- {NewMNode, NewSNodes} = suggested_queue_nodes(Q),
- OldNodes = [OldMNode | OldSNodes],
- NewNodes = [NewMNode | NewSNodes],
+    {PreTransferPrimaryNode, PreTransferMirrorNodes, _PreTransferInSyncMirrorNodes} = actual_queue_nodes(Q),
+ {NewlySelectedPrimaryNode, NewlySelectedMirrorNodes} = suggested_queue_nodes(Q),
+ PreTransferNodesWithReplicas = [PreTransferPrimaryNode | PreTransferMirrorNodes],
+ NewlySelectedNodesWithReplicas = [NewlySelectedPrimaryNode | NewlySelectedMirrorNodes],
%% When a mirror dies, remove_from_queue/2 might have to add new
- %% mirrors (in "exactly" mode). It will check mnesia to see which
+ %% mirrors (in "exactly" mode). It will check the queue record to see which
%% mirrors there currently are. If drop_mirror/2 is invoked first
%% then when we end up in remove_from_queue/2 it will not see the
%% mirrors that add_mirror/2 will add, and also want to add them
%% (even though we are not responding to the death of a
%% mirror). Breakage ensues.
- add_mirrors (QName, NewNodes -- OldNodes, async),
- drop_mirrors(QName, OldNodes -- NewNodes),
+ add_mirrors(QName, NewlySelectedNodesWithReplicas -- PreTransferNodesWithReplicas, async),
+ drop_mirrors(QName, PreTransferNodesWithReplicas -- NewlySelectedNodesWithReplicas),
%% This is for the case where no extra nodes were added but we changed to
%% a policy requiring auto-sync.
maybe_auto_sync(Q),
@@ -543,38 +566,92 @@ queue_length(Q) ->
M.
get_replicas(Q) ->
- {MNode, SNodes} = suggested_queue_nodes(Q),
- [MNode] ++ SNodes.
+ {PrimaryNode, MirrorNodes} = suggested_queue_nodes(Q),
+ [PrimaryNode] ++ MirrorNodes.
+-spec transfer_leadership(amqqueue:amqqueue(), node()) -> {migrated, node()} | {not_migrated, atom()}.
+%% Moves the primary replica (leader) of a classic mirrored queue to another node.
+%% Target node can be any node in the cluster, and does not have to host a replica
+%% of this queue.
transfer_leadership(Q, Destination) ->
QName = amqqueue:get_name(Q),
- {OldMNode, OldSNodes, _} = actual_queue_nodes(Q),
- OldNodes = [OldMNode | OldSNodes],
- add_mirrors(QName, [Destination] -- OldNodes, async),
- drop_mirrors(QName, OldNodes -- [Destination]),
- {Result, NewQ} = wait_for_new_master(QName, Destination),
- update_mirrors(NewQ),
- Result.
+ {PreTransferPrimaryNode, PreTransferMirrorNodes, _PreTransferInSyncMirrorNodes} = actual_queue_nodes(Q),
+ PreTransferNodesWithReplicas = [PreTransferPrimaryNode | PreTransferMirrorNodes],
+
+ NodesToAddMirrorsOn = [Destination] -- PreTransferNodesWithReplicas,
+ %% This will wait for the transfer/eager sync to finish before we begin dropping
+ %% mirrors on the next step. In this case we cannot add mirrors asynchronously
+ %% as that will race with the dropping step.
+ add_mirrors(QName, NodesToAddMirrorsOn, sync),
+
+ NodesToDropMirrorsOn = PreTransferNodesWithReplicas -- [Destination],
+ drop_mirrors(QName, NodesToDropMirrorsOn),
+
+ case wait_for_new_master(QName, Destination) of
+ not_migrated ->
+ {not_migrated, undefined};
+ {{not_migrated, Destination} = Result, _Q1} ->
+ Result;
+ {Result, NewQ} ->
+ update_mirrors(NewQ),
+ Result
+ end.
+
+-spec migrate_leadership_to_existing_replica(amqqueue:amqqueue(), atom()) -> {migrated, node()} | {not_migrated, atom()}.
+%% Moves the primary replica (leader) of a classic mirrored queue to another node
+%% which already hosts a replica of this queue. In this case we can stop
+%% fewer replicas and reduce the load the operation has on the cluster.
+migrate_leadership_to_existing_replica(Q, Destination) ->
+ QName = amqqueue:get_name(Q),
+ {PreTransferPrimaryNode, PreTransferMirrorNodes, _PreTransferInSyncMirrorNodes} = actual_queue_nodes(Q),
+ PreTransferNodesWithReplicas = [PreTransferPrimaryNode | PreTransferMirrorNodes],
+
+ NodesToAddMirrorsOn = [Destination] -- PreTransferNodesWithReplicas,
+ %% This will wait for the transfer/eager sync to finish before we begin dropping
+ %% mirrors on the next step. In this case we cannot add mirrors asynchronously
+ %% as that will race with the dropping step.
+ add_mirrors(QName, NodesToAddMirrorsOn, sync),
+
+ NodesToDropMirrorsOn = [PreTransferPrimaryNode],
+ drop_mirrors(QName, NodesToDropMirrorsOn),
+
+ case wait_for_new_master(QName, Destination) of
+ not_migrated ->
+ {not_migrated, undefined};
+ {{not_migrated, Destination} = Result, _Q1} ->
+ Result;
+ {Result, NewQ} ->
+ update_mirrors(NewQ),
+ Result
+ end.
+-spec wait_for_new_master(rabbit_amqqueue:name(), atom()) -> {{migrated, node()}, amqqueue:amqqueue()} | {{not_migrated, node()}, amqqueue:amqqueue()} | not_migrated.
wait_for_new_master(QName, Destination) ->
wait_for_new_master(QName, Destination, 100).
wait_for_new_master(QName, _, 0) ->
- {ok, Q} = rabbit_amqqueue:lookup(QName),
- {{not_migrated, ""}, Q};
+ case rabbit_amqqueue:lookup(QName) of
+ {error, not_found} -> not_migrated;
+ {ok, Q} -> {{not_migrated, undefined}, Q}
+ end;
wait_for_new_master(QName, Destination, N) ->
- {ok, Q} = rabbit_amqqueue:lookup(QName),
- case amqqueue:get_pid(Q) of
- none ->
- timer:sleep(100),
- wait_for_new_master(QName, Destination, N - 1);
- Pid ->
- case node(Pid) of
- Destination ->
- {{migrated, Destination}, Q};
- _ ->
+ case rabbit_amqqueue:lookup(QName) of
+ {error, not_found} ->
+ not_migrated;
+ {ok, Q} ->
+ case amqqueue:get_pid(Q) of
+ none ->
timer:sleep(100),
- wait_for_new_master(QName, Destination, N - 1)
+ wait_for_new_master(QName, Destination, N - 1);
+ Pid ->
+ case node(Pid) of
+ Destination ->
+ {{migrated, Destination}, Q};
+ _ ->
+ timer:sleep(100),
+ wait_for_new_master(QName, Destination, N - 1)
+ end
end
end.
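%% A note on the retry budget above (derived from the constants in the code):
%% wait_for_new_master/2 starts with 100 attempts and sleeps 100 ms between
%% lookups, so a transfer that never converges is reported as not migrated
%% after roughly 100 * 100 ms = 10 seconds.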
diff --git a/deps/rabbit/src/rabbit_mirror_queue_mode.erl b/deps/rabbit/src/rabbit_mirror_queue_mode.erl
index 91491efc49..78c1aad8e1 100644
--- a/deps/rabbit/src/rabbit_mirror_queue_mode.erl
+++ b/deps/rabbit/src/rabbit_mirror_queue_mode.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2010-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2010-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mirror_queue_mode).
diff --git a/deps/rabbit/src/rabbit_mirror_queue_mode_all.erl b/deps/rabbit/src/rabbit_mirror_queue_mode_all.erl
index 2da12a5972..ce3e128de1 100644
--- a/deps/rabbit/src/rabbit_mirror_queue_mode_all.erl
+++ b/deps/rabbit/src/rabbit_mirror_queue_mode_all.erl
@@ -2,12 +2,12 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2010-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2010-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mirror_queue_mode_all).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-behaviour(rabbit_mirror_queue_mode).
diff --git a/deps/rabbit/src/rabbit_mirror_queue_mode_exactly.erl b/deps/rabbit/src/rabbit_mirror_queue_mode_exactly.erl
index a8aa7546ac..df6735ecd9 100644
--- a/deps/rabbit/src/rabbit_mirror_queue_mode_exactly.erl
+++ b/deps/rabbit/src/rabbit_mirror_queue_mode_exactly.erl
@@ -2,12 +2,12 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2010-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2010-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mirror_queue_mode_exactly).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-behaviour(rabbit_mirror_queue_mode).
diff --git a/deps/rabbit/src/rabbit_mirror_queue_mode_nodes.erl b/deps/rabbit/src/rabbit_mirror_queue_mode_nodes.erl
index f3e134ba63..3d9ff5d87b 100644
--- a/deps/rabbit/src/rabbit_mirror_queue_mode_nodes.erl
+++ b/deps/rabbit/src/rabbit_mirror_queue_mode_nodes.erl
@@ -2,12 +2,12 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2010-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2010-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mirror_queue_mode_nodes).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-behaviour(rabbit_mirror_queue_mode).
diff --git a/deps/rabbit/src/rabbit_mirror_queue_slave.erl b/deps/rabbit/src/rabbit_mirror_queue_slave.erl
index 0480db9cfe..f8beb77367 100644
--- a/deps/rabbit/src/rabbit_mirror_queue_slave.erl
+++ b/deps/rabbit/src/rabbit_mirror_queue_slave.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2010-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2010-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mirror_queue_slave).
@@ -137,7 +137,7 @@ handle_go(Q0) when ?is_amqqueue(Q0) ->
{ok, State};
{stale, StalePid} ->
rabbit_mirror_queue_misc:log_warning(
- QName, "Detected stale HA master: ~p~n", [StalePid]),
+ QName, "Detected stale classic mirrored queue leader: ~p", [StalePid]),
gm:leave(GM),
{error, {stale_master_pid, StalePid}};
duplicate_live_master ->
@@ -189,7 +189,7 @@ init_it(Self, GM, Node, QName) ->
stop_pending_slaves(QName, Pids) ->
[begin
rabbit_mirror_queue_misc:log_warning(
- QName, "Detected a non-responsive classic queue mirror, stopping it: ~p~n", [Pid]),
+ QName, "Detected a non-responsive classic queue mirror, stopping it: ~p", [Pid]),
case erlang:process_info(Pid, dictionary) of
undefined -> ok;
{dictionary, Dict} ->
@@ -385,8 +385,13 @@ handle_info({bump_credit, Msg}, State) ->
credit_flow:handle_bump_msg(Msg),
noreply(State);
-handle_info(bump_reduce_memory_use, State) ->
- noreply(State);
+handle_info(bump_reduce_memory_use, State = #state{backing_queue = BQ,
+ backing_queue_state = BQS}) ->
+ BQS1 = BQ:handle_info(bump_reduce_memory_use, BQS),
+ BQS2 = BQ:resume(BQS1),
+ noreply(State#state{
+ backing_queue_state = BQS2
+ });
%% In the event of a short partition during sync we can detect the
%% master's 'death', drop out of sync, and then receive sync messages
@@ -478,8 +483,12 @@ format_message_queue(Opt, MQ) -> rabbit_misc:format_message_queue(Opt, MQ).
%% GM
%% ---------------------------------------------------------------------------
+-spec joined(args(), members()) -> callback_result().
+
joined([SPid], _Members) -> SPid ! {joined, self()}, ok.
+-spec members_changed(args(), members(),members()) -> callback_result().
+
members_changed([_SPid], _Births, []) ->
ok;
members_changed([ SPid], _Births, Deaths) ->
@@ -492,6 +501,8 @@ members_changed([ SPid], _Births, Deaths) ->
{promote, CPid} -> {become, rabbit_mirror_queue_coordinator, [CPid]}
end.
+-spec handle_msg(args(), pid(), any()) -> callback_result().
+
handle_msg([_SPid], _From, hibernate_heartbeat) ->
%% See rabbit_mirror_queue_coordinator:handle_pre_hibernate/1
ok;
@@ -518,6 +529,8 @@ handle_msg([SPid], _From, {sync_start, Ref, Syncer, SPids}) ->
handle_msg([SPid], _From, Msg) ->
ok = gen_server2:cast(SPid, {gm, Msg}).
+-spec handle_terminate(args(), term()) -> any().
+
handle_terminate([_SPid], _Reason) ->
ok.
@@ -633,7 +646,7 @@ promote_me(From, #state { q = Q0,
msg_id_status = MS,
known_senders = KS}) when ?is_amqqueue(Q0) ->
QName = amqqueue:get_name(Q0),
- rabbit_mirror_queue_misc:log_info(QName, "Promoting mirror ~s to master~n",
+ rabbit_mirror_queue_misc:log_info(QName, "Promoting mirror ~s to leader",
[rabbit_misc:pid_to_string(self())]),
Q1 = amqqueue:set_pid(Q0, self()),
DeathFun = rabbit_mirror_queue_master:sender_death_fun(),
diff --git a/deps/rabbit/src/rabbit_mirror_queue_sync.erl b/deps/rabbit/src/rabbit_mirror_queue_sync.erl
index a82ee05599..26554ece83 100644
--- a/deps/rabbit/src/rabbit_mirror_queue_sync.erl
+++ b/deps/rabbit/src/rabbit_mirror_queue_sync.erl
@@ -2,17 +2,22 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2010-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2010-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mirror_queue_sync).
-include_lib("rabbit_common/include/rabbit.hrl").
--export([master_prepare/4, master_go/8, slave/7, conserve_resources/3]).
+-export([master_prepare/4, master_go/9, slave/7, conserve_resources/3]).
+
+%% Exported for unit tests
+-export([maybe_master_batch_send/2, get_time_diff/3, append_to_acc/4]).
-define(SYNC_PROGRESS_INTERVAL, 1000000).
+-define(SYNC_THROUGHPUT_EVAL_INTERVAL_MILLIS, 50).
+
%% There are three processes around, the master, the syncer and the
%% slave(s). The syncer is an intermediary, linked to the master in
%% order to make sure we do not mess with the master's credit flow or
@@ -67,23 +72,24 @@ master_prepare(Ref, QName, Log, SPids) ->
rabbit_mirror_queue_master:stats_fun(),
rabbit_mirror_queue_master:stats_fun(),
non_neg_integer(),
+ non_neg_integer(),
bq(), bqs()) ->
{'already_synced', bqs()} | {'ok', bqs()} |
{'cancelled', bqs()} |
{'shutdown', any(), bqs()} |
{'sync_died', any(), bqs()}.
-master_go(Syncer, Ref, Log, HandleInfo, EmitStats, SyncBatchSize, BQ, BQS) ->
+master_go(Syncer, Ref, Log, HandleInfo, EmitStats, SyncBatchSize, SyncThroughput, BQ, BQS) ->
Args = {Syncer, Ref, Log, HandleInfo, EmitStats, rabbit_misc:get_parent()},
receive
{'EXIT', Syncer, normal} -> {already_synced, BQS};
{'EXIT', Syncer, Reason} -> {sync_died, Reason, BQS};
{ready, Syncer} -> EmitStats({syncing, 0}),
- master_batch_go0(Args, SyncBatchSize,
+ master_batch_go0(Args, SyncBatchSize, SyncThroughput,
BQ, BQS)
end.
-master_batch_go0(Args, BatchSize, BQ, BQS) ->
+master_batch_go0(Args, BatchSize, SyncThroughput, BQ, BQS) ->
FoldFun =
fun (Msg, MsgProps, Unacked, Acc) ->
Acc1 = append_to_acc(Msg, MsgProps, Unacked, Acc),
@@ -92,24 +98,27 @@ master_batch_go0(Args, BatchSize, BQ, BQS) ->
false -> {cont, Acc1}
end
end,
- FoldAcc = {[], 0, {0, BQ:depth(BQS)}, erlang:monotonic_time()},
+ FoldAcc = {[], 0, {0, erlang:monotonic_time(), SyncThroughput}, {0, BQ:depth(BQS)}, erlang:monotonic_time()},
bq_fold(FoldFun, FoldAcc, Args, BQ, BQS).
master_batch_send({Syncer, Ref, Log, HandleInfo, EmitStats, Parent},
- {Batch, I, {Curr, Len}, Last}) ->
+ {Batch, I, {TotalBytes, LastCheck, SyncThroughput}, {Curr, Len}, Last}) ->
T = maybe_emit_stats(Last, I, EmitStats, Log),
HandleInfo({syncing, I}),
handle_set_maximum_since_use(),
SyncMsg = {msgs, Ref, lists:reverse(Batch)},
- NewAcc = {[], I + length(Batch), {Curr, Len}, T},
+ NewAcc = {[], I + length(Batch), {TotalBytes, LastCheck, SyncThroughput}, {Curr, Len}, T},
master_send_receive(SyncMsg, NewAcc, Syncer, Ref, Parent).
%% Either send messages when we reach the last one in the queue or
%% whenever we have accumulated BatchSize messages.
-maybe_master_batch_send({_, _, {Len, Len}, _}, _BatchSize) ->
+maybe_master_batch_send({_, _, _, {Len, Len}, _}, _BatchSize) ->
+ true;
+maybe_master_batch_send({_, _, _, {Curr, _Len}, _}, BatchSize)
+ when Curr rem BatchSize =:= 0 ->
true;
-maybe_master_batch_send({_, _, {Curr, _Len}, _}, BatchSize)
- when Curr rem BatchSize =:= 0 ->
+maybe_master_batch_send({_, _, {TotalBytes, _, SyncThroughput}, {_Curr, _Len}, _}, _BatchSize)
+ when TotalBytes > SyncThroughput ->
true;
maybe_master_batch_send(_Acc, _BatchSize) ->
false.
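%% Minimal sketch of the extended accumulator (illustrative values only; T0 stands for
%% any erlang:monotonic_time() result and the byte counts and sizes are made up). It
%% shows the three conditions under which a batch is flushed: reaching the end of the
%% queue, having accumulated a multiple of BatchSize messages, or having sent more
%% bytes than the per-second throughput budget since the last check:
%%
%%   true  = maybe_master_batch_send({[], 0, {0, T0, 0}, {10, 10}, T0}, 4096),
%%   true  = maybe_master_batch_send({[], 0, {0, T0, 0}, {8192, 100000}, T0}, 4096),
%%   true  = maybe_master_batch_send({[], 0, {60000000, T0, 50000000}, {5, 100000}, T0}, 4096),
%%   false = maybe_master_batch_send({[], 0, {1000, T0, 50000000}, {5, 100000}, T0}, 4096).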
@@ -121,8 +130,10 @@ bq_fold(FoldFun, FoldAcc, Args, BQ, BQS) ->
{_, BQS1} -> master_done(Args, BQS1)
end.
-append_to_acc(Msg, MsgProps, Unacked, {Batch, I, {Curr, Len}, T}) ->
- {[{Msg, MsgProps, Unacked} | Batch], I, {Curr + 1, Len}, T}.
+append_to_acc(Msg, MsgProps, Unacked, {Batch, I, {_, _, 0}, {Curr, Len}, T}) ->
+ {[{Msg, MsgProps, Unacked} | Batch], I, {0, 0, 0}, {Curr + 1, Len}, T};
+append_to_acc(Msg, MsgProps, Unacked, {Batch, I, {TotalBytes, LastCheck, SyncThroughput}, {Curr, Len}, T}) ->
+ {[{Msg, MsgProps, Unacked} | Batch], I, {TotalBytes + rabbit_basic:msg_size(Msg), LastCheck, SyncThroughput}, {Curr + 1, Len}, T}.
master_send_receive(SyncMsg, NewAcc, Syncer, Ref, Parent) ->
receive
@@ -131,11 +142,44 @@ master_send_receive(SyncMsg, NewAcc, Syncer, Ref, Parent) ->
gen_server2:reply(From, ok),
{stop, cancelled};
{next, Ref} -> Syncer ! SyncMsg,
- {cont, NewAcc};
+                         {Msgs, I, {TotalBytes, LastCheck, SyncThroughput}, {Curr, Len}, T} = NewAcc,
+ {NewTotalBytes, NewLastCheck} = maybe_throttle_sync_throughput(TotalBytes, LastCheck, SyncThroughput),
+ {cont, {Msgs, I, {NewTotalBytes, NewLastCheck, SyncThroughput}, {Curr, Len}, T}};
{'EXIT', Parent, Reason} -> {stop, {shutdown, Reason}};
{'EXIT', Syncer, Reason} -> {stop, {sync_died, Reason}}
end.
+maybe_throttle_sync_throughput(_, _, 0) ->
+ {0, erlang:monotonic_time()};
+maybe_throttle_sync_throughput(TotalBytes, LastCheck, SyncThroughput) ->
+ Interval = erlang:convert_time_unit(erlang:monotonic_time() - LastCheck, native, milli_seconds),
+ case Interval > ?SYNC_THROUGHPUT_EVAL_INTERVAL_MILLIS of
+ true -> maybe_pause_sync(TotalBytes, Interval, SyncThroughput),
+                {0, erlang:monotonic_time()}; %% reset the TotalBytes counter and LastCheck
+ false -> {TotalBytes, LastCheck}
+ end.
+
+maybe_pause_sync(TotalBytes, Interval, SyncThroughput) ->
+ Delta = get_time_diff(TotalBytes, Interval, SyncThroughput),
+ pause_queue_sync(Delta).
+
+pause_queue_sync(0) ->
+ rabbit_log_mirroring:debug("Sync throughput is ok.");
+pause_queue_sync(Delta) ->
+ rabbit_log_mirroring:debug("Sync throughput exceeds threshold. Pause queue sync for ~p ms", [Delta]),
+ timer:sleep(Delta).
+
+%% Sync throughput computation:
+%% - TotalBytes: bytes sent since the last check
+%% - Interval: time elapsed since the last check, in milliseconds
+%% - Effective throughput in bytes/s: TotalBytes / Interval * 1000.
+%% - When the effective throughput exceeds SyncThroughput, we slow down to compensate for the excess rate.
+%% The time to pause queue sync for is the difference between the time needed to broadcast TotalBytes
+%% at the maximum throughput and the elapsed time (Interval).
+get_time_diff(TotalBytes, Interval, SyncThroughput) ->
+    rabbit_log_mirroring:debug("~p bytes have been sent over the last ~p ms. Effective sync throughput: ~p bytes/s", [TotalBytes, Interval, round(TotalBytes * 1000 / Interval)]),
+ max(round(TotalBytes/SyncThroughput * 1000 - Interval), 0).
+
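%% Worked example for the formula above (numbers are illustrative): if
%% TotalBytes = 10000000 bytes were sent during an Interval of 50 ms against a
%% SyncThroughput budget of 50000000 bytes/s, sending those bytes at the budgeted
%% rate would take 10000000 / 50000000 * 1000 = 200 ms, so the sync is paused for
%% max(200 - 50, 0) = 150 ms:
%%
%%   150 = get_time_diff(10000000, 50, 50000000).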
master_done({Syncer, Ref, _Log, _HandleInfo, _EmitStats, Parent}, BQS) ->
receive
{'$gen_call', From,
@@ -288,6 +332,9 @@ wait_for_credit(SPids) ->
end.
wait_for_resources(Ref, SPids) ->
+ erlang:garbage_collect(),
+    %% bump_reduce_memory_use messages should probably be handled here as well,
+    %% otherwise the BQ does not push messages to disk
receive
{conserve_resources, memory, false} ->
SPids;
@@ -367,7 +414,11 @@ slave_sync_loop(Args = {Ref, MRef, Syncer, BQ, UpdateRamDuration, Parent},
%% If the master throws an exception
{'$gen_cast', {gm, {delete_and_terminate, Reason}}} ->
BQ:delete_and_terminate(Reason, BQS),
- {stop, Reason, {[], TRef, undefined}}
+ {stop, Reason, {[], TRef, undefined}};
+ bump_reduce_memory_use ->
+ BQS1 = BQ:handle_info(bump_reduce_memory_use, BQS),
+ BQS2 = BQ:resume(BQS1),
+ slave_sync_loop(Args, {MA, TRef, BQS2})
end.
%% We are partitioning messages by the Unacked element in the tuple.
diff --git a/deps/rabbit/src/rabbit_mnesia.erl b/deps/rabbit/src/rabbit_mnesia.erl
index 070c6a8205..216055103f 100644
--- a/deps/rabbit/src/rabbit_mnesia.erl
+++ b/deps/rabbit/src/rabbit_mnesia.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mnesia).
@@ -69,7 +69,7 @@ init() ->
case is_virgin_node() of
true ->
rabbit_log:info("Node database directory at ~ts is empty. "
- "Assuming we need to join an existing cluster or initialise from scratch...~n",
+ "Assuming we need to join an existing cluster or initialise from scratch...",
[dir()]),
rabbit_peer_discovery:log_configured_backend(),
rabbit_peer_discovery:maybe_init(),
@@ -105,21 +105,18 @@ init_with_lock(Retries, Timeout, RunPeerDiscovery) ->
rabbit_log:debug("rabbit_peer_discovery:lock returned ~p", [LockResult]),
case LockResult of
not_supported ->
- rabbit_log:info("Peer discovery backend does not support locking, falling back to randomized delay"),
- %% See rabbitmq/rabbitmq-server#1202 for details.
- rabbit_peer_discovery:maybe_inject_randomized_delay(),
RunPeerDiscovery(),
rabbit_peer_discovery:maybe_register();
- {error, _Reason} ->
- timer:sleep(Timeout),
- init_with_lock(Retries - 1, Timeout, RunPeerDiscovery);
{ok, Data} ->
try
RunPeerDiscovery(),
rabbit_peer_discovery:maybe_register()
after
rabbit_peer_discovery:unlock(Data)
- end
+ end;
+ {error, _Reason} ->
+ timer:sleep(Timeout),
+ init_with_lock(Retries - 1, Timeout, RunPeerDiscovery)
end.
-spec run_peer_discovery() -> ok | {[node()], node_type()}.
@@ -155,7 +152,7 @@ run_peer_discovery_with_retries(RetriesLeft, DelayInterval) ->
e(invalid_cluster_nodes_conf)
end,
DiscoveredNodes = lists:usort(DiscoveredNodes0),
- rabbit_log:info("All discovered existing cluster peers: ~s~n",
+ rabbit_log:info("All discovered existing cluster peers: ~s",
[rabbit_peer_discovery:format_discovered_nodes(DiscoveredNodes)]),
Peers = nodes_excl_me(DiscoveredNodes),
case Peers of
@@ -165,7 +162,7 @@ run_peer_discovery_with_retries(RetriesLeft, DelayInterval) ->
"Enabling debug logging might help troubleshoot."),
init_db_and_upgrade([node()], disc, false, _Retry = true);
_ ->
- rabbit_log:info("Peer nodes we can cluster with: ~s~n",
+ rabbit_log:info("Peer nodes we can cluster with: ~s",
[rabbit_peer_discovery:format_discovered_nodes(Peers)]),
join_discovered_peers(Peers, NodeType)
end.
@@ -178,22 +175,22 @@ join_discovered_peers(TryNodes, NodeType) ->
join_discovered_peers_with_retries(TryNodes, NodeType, RetriesLeft, DelayInterval).
join_discovered_peers_with_retries(TryNodes, _NodeType, 0, _DelayInterval) ->
- rabbit_log:warning(
+ rabbit_log:info(
"Could not successfully contact any node of: ~s (as in Erlang distribution). "
- "Starting as a blank standalone node...~n",
+ "Starting as a blank standalone node...",
[string:join(lists:map(fun atom_to_list/1, TryNodes), ",")]),
init_db_and_upgrade([node()], disc, false, _Retry = true);
join_discovered_peers_with_retries(TryNodes, NodeType, RetriesLeft, DelayInterval) ->
case find_reachable_peer_to_cluster_with(nodes_excl_me(TryNodes)) of
{ok, Node} ->
- rabbit_log:info("Node '~s' selected for auto-clustering~n", [Node]),
+ rabbit_log:info("Node '~s' selected for auto-clustering", [Node]),
{ok, {_, DiscNodes, _}} = discover_cluster0(Node),
init_db_and_upgrade(DiscNodes, NodeType, true, _Retry = true),
rabbit_connection_tracking:boot(),
rabbit_node_monitor:notify_joined_cluster();
none ->
RetriesLeft1 = RetriesLeft - 1,
- rabbit_log:error("Trying to join discovered peers failed. Will retry after a delay of ~b ms, ~b retries left...",
+ rabbit_log:info("Trying to join discovered peers failed. Will retry after a delay of ~b ms, ~b retries left...",
[DelayInterval, RetriesLeft1]),
timer:sleep(DelayInterval),
join_discovered_peers_with_retries(TryNodes, NodeType, RetriesLeft1, DelayInterval)
@@ -237,7 +234,7 @@ join_cluster(DiscoveryNode, NodeType) ->
reset_gracefully(),
%% Join the cluster
- rabbit_log:info("Clustering with ~p as ~p node~n",
+ rabbit_log:info("Clustering with ~p as ~p node",
[ClusterNodes, NodeType]),
ok = init_db_with_mnesia(ClusterNodes, NodeType,
true, true, _Retry = true),
@@ -252,7 +249,7 @@ join_cluster(DiscoveryNode, NodeType) ->
%% do we think so ourselves?
case are_we_clustered_with(DiscoveryNode) of
true ->
- rabbit_log:info("Asked to join a cluster but already a member of it: ~p~n", [ClusterNodes]),
+ rabbit_log:info("Asked to join a cluster but already a member of it: ~p", [ClusterNodes]),
{ok, already_member};
false ->
Msg = format_inconsistent_cluster_message(DiscoveryNode, node()),
@@ -269,14 +266,14 @@ join_cluster(DiscoveryNode, NodeType) ->
reset() ->
ensure_mnesia_not_running(),
- rabbit_log:info("Resetting Rabbit~n", []),
+ rabbit_log:info("Resetting Rabbit", []),
reset_gracefully().
-spec force_reset() -> 'ok'.
force_reset() ->
ensure_mnesia_not_running(),
- rabbit_log:info("Resetting Rabbit forcefully~n", []),
+ rabbit_log:info("Resetting Rabbit forcefully", []),
wipe().
reset_gracefully() ->
@@ -336,7 +333,7 @@ update_cluster_nodes(DiscoveryNode) ->
%% nodes
mnesia:delete_schema([node()]),
rabbit_node_monitor:write_cluster_status(Status),
- rabbit_log:info("Updating cluster nodes from ~p~n",
+ rabbit_log:info("Updating cluster nodes from ~p",
[DiscoveryNode]),
init_db_with_mnesia(AllNodes, node_type(), true, true, _Retry = false);
false ->
@@ -367,7 +364,7 @@ forget_cluster_node(Node, RemoveWhenOffline, EmitNodeDeletedEvent) ->
{true, true} -> e(online_node_offline_flag);
{false, false} -> e(offline_node_no_offline_flag);
{false, true} -> rabbit_log:info(
- "Removing node ~p from cluster~n", [Node]),
+ "Removing node ~p from cluster", [Node]),
case remove_node_if_mnesia_running(Node) of
ok when EmitNodeDeletedEvent ->
rabbit_event:notify(node_deleted, [{node, Node}]),
@@ -814,7 +811,7 @@ schema_ok_or_move() ->
%% started yet
rabbit_log:warning("schema integrity check failed: ~p~n"
"moving database to backup location "
- "and recreating schema from scratch~n",
+ "and recreating schema from scratch",
[Reason]),
ok = move_db(),
ok = create_schema()
@@ -848,7 +845,7 @@ move_db() ->
ok ->
%% NB: we cannot use rabbit_log here since it may not have
%% been started yet
- rabbit_log:warning("moved database from ~s to ~s~n",
+ rabbit_log:warning("moved database from ~s to ~s",
[MnesiaDir, BackupDir]),
ok;
{error, Reason} -> throw({error, {cannot_backup_mnesia,
@@ -895,7 +892,7 @@ leave_cluster(Node) ->
end.
wait_for(Condition) ->
- rabbit_log:info("Waiting for ~p...~n", [Condition]),
+ rabbit_log:info("Waiting for ~p...", [Condition]),
timer:sleep(1000).
start_mnesia(CheckConsistency) ->
@@ -1019,6 +1016,7 @@ is_virgin_node() ->
IgnoredFiles0 =
[rabbit_node_monitor:cluster_status_filename(),
rabbit_node_monitor:running_nodes_filename(),
+ rabbit_node_monitor:coordination_filename(),
rabbit_node_monitor:default_quorum_filename(),
rabbit_node_monitor:quorum_filename(),
rabbit_feature_flags:enabled_feature_flags_list_file()],
@@ -1040,15 +1038,15 @@ find_reachable_peer_to_cluster_with([Node | Nodes]) ->
end,
case remote_node_info(Node) of
{badrpc, _} = Reason ->
- Fail("~p~n", [Reason]);
+ Fail("~p", [Reason]);
%% old delegate hash check
{_OTP, RMQ, Hash, _} when is_binary(Hash) ->
- Fail("version ~s~n", [RMQ]);
+ Fail("version ~s", [RMQ]);
{_OTP, _RMQ, _Protocol, {error, _} = E} ->
- Fail("~p~n", [E]);
+ Fail("~p", [E]);
{OTP, RMQ, Protocol, _} ->
case check_consistency(Node, OTP, RMQ, Protocol) of
- {error, _} -> Fail("versions ~p~n",
+ {error, _} -> Fail("versions ~p",
[{OTP, RMQ}]);
ok -> {ok, Node}
end
diff --git a/deps/rabbit/src/rabbit_mnesia_rename.erl b/deps/rabbit/src/rabbit_mnesia_rename.erl
index e0d88c0f5e..842662db42 100644
--- a/deps/rabbit/src/rabbit_mnesia_rename.erl
+++ b/deps/rabbit/src/rabbit_mnesia_rename.erl
@@ -2,11 +2,11 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mnesia_rename).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-export([rename/2]).
-export([maybe_finish/1]).
@@ -100,7 +100,7 @@ prepare(Node, NodeMapList) ->
%% Check that we are in the cluster, all old nodes are in the
%% cluster, and no new nodes are.
- Nodes = rabbit_mnesia:cluster_nodes(all),
+ Nodes = rabbit_nodes:all(),
case {FromNodes -- Nodes, ToNodes -- (ToNodes -- Nodes),
lists:member(Node, Nodes ++ ToNodes)} of
{[], [], true} -> ok;
@@ -144,7 +144,7 @@ finish(FromNode, ToNode, AllNodes) ->
end;
FromNode ->
rabbit_log:info(
- "Abandoning rename from ~s to ~s since we are still ~s~n",
+ "Abandoning rename from ~s to ~s since we are still ~s",
[FromNode, ToNode, FromNode]),
[{ok, _} = file:copy(backup_of_conf(F), F) || F <- config_files()],
ok = rabbit_file:recursive_delete([rabbit_mnesia:dir()]),
@@ -155,18 +155,18 @@ finish(FromNode, ToNode, AllNodes) ->
%% Boot will almost certainly fail but we might as
%% well just log this
rabbit_log:info(
- "Rename attempted from ~s to ~s but we are ~s - ignoring.~n",
+ "Rename attempted from ~s to ~s but we are ~s - ignoring.",
[FromNode, ToNode, node()])
end.
finish_primary(FromNode, ToNode) ->
- rabbit_log:info("Restarting as primary after rename from ~s to ~s~n",
+ rabbit_log:info("Restarting as primary after rename from ~s to ~s",
[FromNode, ToNode]),
delete_rename_files(),
ok.
finish_secondary(FromNode, ToNode, AllNodes) ->
- rabbit_log:info("Restarting as secondary after rename from ~s to ~s~n",
+ rabbit_log:info("Restarting as secondary after rename from ~s to ~s",
[FromNode, ToNode]),
rabbit_upgrade:secondary_upgrade(AllNodes),
rename_in_running_mnesia(FromNode, ToNode),
@@ -232,7 +232,7 @@ update_term(_NodeMap, Term) ->
Term.
rename_in_running_mnesia(FromNode, ToNode) ->
- All = rabbit_mnesia:cluster_nodes(all),
+ All = rabbit_nodes:all(),
Running = rabbit_nodes:all_running(),
case {lists:member(FromNode, Running), lists:member(ToNode, All)} of
{false, true} -> ok;
diff --git a/deps/rabbit/src/rabbit_msg_file.erl b/deps/rabbit/src/rabbit_msg_file.erl
index 1a24f690a0..f87c88f849 100644
--- a/deps/rabbit/src/rabbit_msg_file.erl
+++ b/deps/rabbit/src/rabbit_msg_file.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_msg_file).
@@ -11,7 +11,7 @@
%%----------------------------------------------------------------------------
--include("rabbit_msg_store.hrl").
+-include_lib("rabbit_common/include/rabbit_msg_store.hrl").
-define(INTEGER_SIZE_BYTES, 8).
-define(INTEGER_SIZE_BITS, (8 * ?INTEGER_SIZE_BYTES)).
diff --git a/deps/rabbit/src/rabbit_msg_record.erl b/deps/rabbit/src/rabbit_msg_record.erl
index 3ebe14cb9f..7520788d28 100644
--- a/deps/rabbit/src/rabbit_msg_record.erl
+++ b/deps/rabbit/src/rabbit_msg_record.erl
@@ -10,8 +10,8 @@
message_annotation/3
]).
--include("rabbit.hrl").
--include("rabbit_framing.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit_framing.hrl").
-include_lib("amqp10_common/include/amqp10_framing.hrl").
-type maybe(T) :: T | undefined.
@@ -195,7 +195,7 @@ from_amqp091(#'P_basic'{message_id = MsgId,
<- case Headers of
undefined -> [];
_ -> Headers
- end],
+ end, not unsupported_header_value_type(T)],
%% properties that do not map directly to AMQP 1.0 properties are stored
%% in application properties
APC = map_add(utf8, <<"x-basic-type">>, utf8, Type,
@@ -395,6 +395,13 @@ message_id({utf8, S}, HKey, H0) ->
message_id(MsgId, _, H) ->
{H, unwrap(MsgId)}.
+unsupported_header_value_type(array) ->
+    true;
+unsupported_header_value_type(table) ->
+    true;
+unsupported_header_value_type(_) ->
+    false.
+
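%% Illustrative note (the example header below is made up): with this filter, a
%% 0-9-1 header whose value is an array or a nested table, e.g.
%%   {<<"x-colours">>, array, [{longstr, <<"red">>}, {longstr, <<"blue">>}]},
%% is skipped during conversion, while headers with scalar value types
%% (longstr, long, bool, ...) are still mapped.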
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
diff --git a/deps/rabbit/src/rabbit_msg_store.erl b/deps/rabbit/src/rabbit_msg_store.erl
index 4851e56248..3e4c70e5b2 100644
--- a/deps/rabbit/src/rabbit_msg_store.erl
+++ b/deps/rabbit/src/rabbit_msg_store.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_msg_store).
@@ -27,7 +27,7 @@
%%----------------------------------------------------------------------------
--include("rabbit_msg_store.hrl").
+-include_lib("rabbit_common/include/rabbit_msg_store.hrl").
-define(SYNC_INTERVAL, 25). %% milliseconds
-define(CLEAN_FILENAME, "clean.dot").
@@ -686,6 +686,13 @@ client_update_flying(Diff, MsgId, #client_msstate { flying_ets = FlyingEts,
false -> try ets:update_counter(FlyingEts, Key, {2, Diff}) of
0 -> ok;
Diff -> ok;
+ Err when Err >= 2 ->
+ %% The message must be referenced twice in the queue
+ %% index. There is a bug somewhere, but we don't want
+ %% to take down anything just because of this. Let's
+ %% process the message as if the copies were in
+ %% different queues (fan-out).
+ ok;
Err -> throw({bad_flying_ets_update, Diff, Err, Key})
catch error:badarg ->
%% this is guaranteed to succeed since the
@@ -717,7 +724,7 @@ init([Type, BaseDir, ClientRefs, StartupFunState]) ->
Name = filename:join(filename:basename(BaseDir), atom_to_list(Type)),
{ok, IndexModule} = application:get_env(rabbit, msg_store_index_module),
- rabbit_log:info("Message store ~tp: using ~p to provide index~n", [Name, IndexModule]),
+ rabbit_log:info("Message store ~tp: using ~p to provide index", [Name, IndexModule]),
AttemptFileSummaryRecovery =
case ClientRefs of
@@ -794,11 +801,11 @@ init([Type, BaseDir, ClientRefs, StartupFunState]) ->
true -> "clean";
false -> "unclean"
end,
- rabbit_log:debug("Rebuilding message location index after ~s shutdown...~n",
+ rabbit_log:debug("Rebuilding message location index after ~s shutdown...",
[Cleanliness]),
{Offset, State1 = #msstate { current_file = CurFile }} =
build_index(CleanShutdown, StartupFunState, State),
- rabbit_log:debug("Finished rebuilding index~n", []),
+ rabbit_log:debug("Finished rebuilding index", []),
%% read is only needed so that we can seek
{ok, CurHdl} = open_file(Dir, filenum_to_name(CurFile),
[read | ?WRITE_MODE]),
@@ -999,7 +1006,7 @@ terminate(_Reason, State = #msstate { index_state = IndexState,
{error, FSErr} ->
rabbit_log:error("Unable to store file summary"
" for vhost message store for directory ~p~n"
- "Error: ~p~n",
+ "Error: ~p",
[Dir, FSErr])
end,
[true = ets:delete(T) || T <- [FileSummaryEts, FileHandlesEts,
@@ -1012,7 +1019,7 @@ terminate(_Reason, State = #msstate { index_state = IndexState,
ok;
{error, RTErr} ->
rabbit_log:error("Unable to save message store recovery terms"
- " for directory ~p~nError: ~p~n",
+ " for directory ~p~nError: ~p",
[Dir, RTErr])
end,
State3 #msstate { index_state = undefined,
@@ -1082,6 +1089,14 @@ update_flying(Diff, MsgId, CRef, #msstate { flying_ets = FlyingEts }) ->
process;
[{_, 0}] -> true = ets:delete_object(FlyingEts, {Key, 0}),
ignore;
+ [{_, Err}] when Err >= 2 ->
+ %% The message must be referenced twice in the queue index. There
+ %% is a bug somewhere, but we don't want to take down anything
+ %% just because of this. Let's process the message as if the
+ %% copies were in different queues (fan-out).
+ ets:update_counter(FlyingEts, Key, {2, Diff}),
+ true = ets:delete_object(FlyingEts, {Key, 0}),
+ process;
[{_, Err}] -> throw({bad_flying_ets_record, Diff, Err, Key})
end.
%% [1] We can get here, for example, in the following scenario: There
@@ -1574,12 +1589,12 @@ index_clean_up_temporary_reference_count_entries(
recover_index_and_client_refs(IndexModule, _Recover, undefined, Dir, _Name) ->
{false, IndexModule:new(Dir), []};
recover_index_and_client_refs(IndexModule, false, _ClientRefs, Dir, Name) ->
- rabbit_log:warning("Message store ~tp: rebuilding indices from scratch~n", [Name]),
+ rabbit_log:warning("Message store ~tp: rebuilding indices from scratch", [Name]),
{false, IndexModule:new(Dir), []};
recover_index_and_client_refs(IndexModule, true, ClientRefs, Dir, Name) ->
Fresh = fun (ErrorMsg, ErrorArgs) ->
rabbit_log:warning("Message store ~tp : " ++ ErrorMsg ++ "~n"
- "rebuilding indices from scratch~n",
+ "rebuilding indices from scratch",
[Name | ErrorArgs]),
{false, IndexModule:new(Dir), []}
end,
@@ -1741,9 +1756,9 @@ build_index(true, _StartupFunState,
end, {0, State}, FileSummaryEts);
build_index(false, {MsgRefDeltaGen, MsgRefDeltaGenInit},
State = #msstate { dir = Dir }) ->
- rabbit_log:debug("Rebuilding message refcount...~n", []),
+ rabbit_log:debug("Rebuilding message refcount...", []),
ok = count_msg_refs(MsgRefDeltaGen, MsgRefDeltaGenInit, State),
- rabbit_log:debug("Done rebuilding message refcount~n", []),
+ rabbit_log:debug("Done rebuilding message refcount", []),
{ok, Pid} = gatherer:start_link(),
case [filename_to_num(FileName) ||
FileName <- list_sorted_filenames(Dir, ?FILE_EXTENSION)] of
@@ -1757,7 +1772,7 @@ build_index(false, {MsgRefDeltaGen, MsgRefDeltaGenInit},
build_index_worker(Gatherer, State = #msstate { dir = Dir },
Left, File, Files) ->
FileName = filenum_to_name(File),
- rabbit_log:debug("Rebuilding message location index from ~p (~B file(s) remaining)~n",
+ rabbit_log:debug("Rebuilding message location index from ~p (~B file(s) remaining)",
[form_filename(Dir, FileName), length(Files)]),
{ok, Messages, FileSize} =
scan_file_for_valid_messages(Dir, FileName),
diff --git a/deps/rabbit/src/rabbit_msg_store_ets_index.erl b/deps/rabbit/src/rabbit_msg_store_ets_index.erl
index 294417b5ba..d3e4ac31ce 100644
--- a/deps/rabbit/src/rabbit_msg_store_ets_index.erl
+++ b/deps/rabbit/src/rabbit_msg_store_ets_index.erl
@@ -2,12 +2,12 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_msg_store_ets_index).
--include("rabbit_msg_store.hrl").
+-include_lib("rabbit_common/include/rabbit_msg_store.hrl").
-behaviour(rabbit_msg_store_index).
@@ -70,7 +70,7 @@ terminate(#state { table = MsgLocations, dir = Dir }) ->
ok -> ok;
{error, Err} ->
rabbit_log:error("Unable to save message store index"
- " for directory ~p.~nError: ~p~n",
+ " for directory ~p.~nError: ~p",
[Dir, Err])
end,
ets:delete(MsgLocations).
diff --git a/deps/rabbit/src/rabbit_msg_store_gc.erl b/deps/rabbit/src/rabbit_msg_store_gc.erl
index 41addc5fa6..5430f33530 100644
--- a/deps/rabbit/src/rabbit_msg_store_gc.erl
+++ b/deps/rabbit/src/rabbit_msg_store_gc.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_msg_store_gc).
@@ -22,7 +22,7 @@
msg_store_state
}).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
%%----------------------------------------------------------------------------
diff --git a/deps/rabbit/src/rabbit_networking.erl b/deps/rabbit/src/rabbit_networking.erl
index 433b1d7540..da4d542c29 100644
--- a/deps/rabbit/src/rabbit_networking.erl
+++ b/deps/rabbit/src/rabbit_networking.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_networking).
@@ -19,7 +19,8 @@
%%
%% See also tcp_listener_sup and tcp_listener.
--export([boot/0, start_tcp_listener/2, start_ssl_listener/3,
+-export([boot/0, start_tcp_listener/2, start_tcp_listener/3,
+ start_ssl_listener/3, start_ssl_listener/4,
stop_tcp_listener/1, on_node_down/1, active_listeners/0,
node_listeners/1, node_client_listeners/1,
register_connection/1, unregister_connection/1,
@@ -29,13 +30,15 @@
connection_info_all/0, connection_info_all/1,
emit_connection_info_all/4, emit_connection_info_local/3,
close_connection/2, close_connections/2, close_all_connections/1,
+ close_all_user_connections/2,
force_connection_event_refresh/1, force_non_amqp_connection_event_refresh/1,
handshake/2, tcp_host/1,
ranch_ref/1, ranch_ref/2, ranch_ref_of_protocol/1,
listener_of_protocol/1, stop_ranch_listener_of_protocol/1]).
%% Used by TCP-based transports, e.g. STOMP adapter
--export([tcp_listener_addresses/1, tcp_listener_spec/9,
+-export([tcp_listener_addresses/1,
+ tcp_listener_spec/9, tcp_listener_spec/10,
ensure_ssl/0, fix_ssl_options/1, poodle_check/1]).
-export([tcp_listener_started/4, tcp_listener_stopped/4]).
@@ -49,8 +52,8 @@
connections_local/0
]).
--include("rabbit.hrl").
--include("rabbit_misc.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit_misc.hrl").
%% IANA-suggested ephemeral port range is 49152 to 65535
-define(FIRST_TEST_BIND_PORT, 49152).
@@ -78,12 +81,14 @@ boot() ->
_ = application:start(ranch),
rabbit_log:debug("Started Ranch"),
%% Failures will throw exceptions
- _ = boot_listeners(fun boot_tcp/1, application:get_env(rabbit, num_tcp_acceptors, 10), "TCP"),
- _ = boot_listeners(fun boot_tls/1, application:get_env(rabbit, num_ssl_acceptors, 10), "TLS"),
+ _ = boot_listeners(fun boot_tcp/2, application:get_env(rabbit, num_tcp_acceptors, 10),
+ application:get_env(rabbit, num_conns_sups, 1), "TCP"),
+ _ = boot_listeners(fun boot_tls/2, application:get_env(rabbit, num_ssl_acceptors, 10),
+ application:get_env(rabbit, num_conns_sups, 1), "TLS"),
ok.
-boot_listeners(Fun, NumAcceptors, Type) ->
- case Fun(NumAcceptors) of
+boot_listeners(Fun, NumAcceptors, ConcurrentConnsSupsCount, Type) ->
+ case Fun(NumAcceptors, ConcurrentConnsSupsCount) of
ok ->
ok;
{error, {could_not_start_listener, Address, Port, Details}} = Error ->
@@ -92,10 +97,10 @@ boot_listeners(Fun, NumAcceptors, Type) ->
throw(Error)
end.
-boot_tcp(NumAcceptors) ->
+boot_tcp(NumAcceptors, ConcurrentConnsSupsCount) ->
{ok, TcpListeners} = application:get_env(tcp_listeners),
case lists:foldl(fun(Listener, ok) ->
- start_tcp_listener(Listener, NumAcceptors);
+ start_tcp_listener(Listener, NumAcceptors, ConcurrentConnsSupsCount);
(_Listener, Error) ->
Error
end,
@@ -104,14 +109,15 @@ boot_tcp(NumAcceptors) ->
{error, _} = Error -> Error
end.
-boot_tls(NumAcceptors) ->
+boot_tls(NumAcceptors, ConcurrentConnsSupsCount) ->
case application:get_env(ssl_listeners) of
{ok, []} ->
ok;
{ok, SslListeners} ->
SslOpts = ensure_ssl(),
case poodle_check('AMQP') of
- ok -> [start_ssl_listener(L, SslOpts, NumAcceptors) || L <- SslListeners];
+ ok -> [start_ssl_listener(L, SslOpts, NumAcceptors, ConcurrentConnsSupsCount)
+ || L <- SslListeners];
danger -> ok
end,
ok
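%% Illustrative advanced.config sketch (not part of this patch; the value 4 is an
%% arbitrary example). These are the application environment keys read by boot/0
%% above, with defaults of 10 acceptors and 1 connection supervisor per listener:
%%
%%   [{rabbit, [{num_tcp_acceptors, 10},
%%              {num_conns_sups,    4}]}].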
@@ -149,7 +155,7 @@ log_poodle_fail(Context) ->
"better.~n~n"
"If you cannot upgrade now and want to re-enable SSL listeners, you can~n"
"set the config item 'ssl_allow_poodle_attack' to 'true' in the~n"
- "'rabbit' section of your configuration file.~n",
+ "'rabbit' section of your configuration file.",
[rabbit_misc:otp_release(), Context]).
fix_ssl_options(Config) ->
@@ -170,24 +176,30 @@ tcp_listener_addresses({Host, Port, Family0})
[{IPAddress, Port, Family} ||
{IPAddress, Family} <- getaddr(Host, Family0)];
tcp_listener_addresses({_Host, Port, _Family0}) ->
- rabbit_log:error("invalid port ~p - not 0..65535~n", [Port]),
+ rabbit_log:error("invalid port ~p - not 0..65535", [Port]),
throw({error, {invalid_port, Port}}).
tcp_listener_addresses_auto(Port) ->
lists:append([tcp_listener_addresses(Listener) ||
Listener <- port_to_listeners(Port)]).
+tcp_listener_spec(NamePrefix, Address, SocketOpts, Transport, ProtoSup, ProtoOpts,
+ Protocol, NumAcceptors, Label) ->
+ tcp_listener_spec(NamePrefix, Address, SocketOpts, Transport, ProtoSup, ProtoOpts,
+ Protocol, NumAcceptors, 1, Label).
+
-spec tcp_listener_spec
(name_prefix(), address(), [gen_tcp:listen_option()], module(), module(),
- any(), protocol(), non_neg_integer(), label()) ->
+ any(), protocol(), non_neg_integer(), non_neg_integer(), label()) ->
supervisor:child_spec().
tcp_listener_spec(NamePrefix, {IPAddress, Port, Family}, SocketOpts,
- Transport, ProtoSup, ProtoOpts, Protocol, NumAcceptors, Label) ->
+ Transport, ProtoSup, ProtoOpts, Protocol, NumAcceptors,
+ ConcurrentConnsSupsCount, Label) ->
Args = [IPAddress, Port, Transport, [Family | SocketOpts], ProtoSup, ProtoOpts,
{?MODULE, tcp_listener_started, [Protocol, SocketOpts]},
{?MODULE, tcp_listener_stopped, [Protocol, SocketOpts]},
- NumAcceptors, Label],
+ NumAcceptors, ConcurrentConnsSupsCount, Label],
{rabbit_misc:tcp_name(NamePrefix, IPAddress, Port),
{tcp_listener_sup, start_link, Args},
transient, infinity, supervisor, [tcp_listener_sup]}.
@@ -241,29 +253,44 @@ stop_ranch_listener_of_protocol(Protocol) ->
listener_config(), integer()) -> 'ok' | {'error', term()}.
start_tcp_listener(Listener, NumAcceptors) ->
- start_listener(Listener, NumAcceptors, amqp, "TCP listener", tcp_opts()).
+ start_tcp_listener(Listener, NumAcceptors, 1).
+
+-spec start_tcp_listener(
+ listener_config(), integer(), integer()) -> 'ok' | {'error', term()}.
+
+start_tcp_listener(Listener, NumAcceptors, ConcurrentConnsSupsCount) ->
+ start_listener(Listener, NumAcceptors, ConcurrentConnsSupsCount, amqp,
+ "TCP listener", tcp_opts()).
-spec start_ssl_listener(
listener_config(), rabbit_types:infos(), integer()) -> 'ok' | {'error', term()}.
start_ssl_listener(Listener, SslOpts, NumAcceptors) ->
- start_listener(Listener, NumAcceptors, 'amqp/ssl', "TLS (SSL) listener", tcp_opts() ++ SslOpts).
+ start_ssl_listener(Listener, SslOpts, NumAcceptors, 1).
+-spec start_ssl_listener(
+ listener_config(), rabbit_types:infos(), integer(), integer()) -> 'ok' | {'error', term()}.
+
+start_ssl_listener(Listener, SslOpts, NumAcceptors, ConcurrentConnsSupsCount) ->
+ start_listener(Listener, NumAcceptors, ConcurrentConnsSupsCount, 'amqp/ssl',
+ "TLS (SSL) listener", tcp_opts() ++ SslOpts).
-spec start_listener(
- listener_config(), integer(), protocol(), label(), list()) -> 'ok' | {'error', term()}.
-start_listener(Listener, NumAcceptors, Protocol, Label, Opts) ->
+ listener_config(), integer(), integer(), protocol(), label(), list()) ->
+ 'ok' | {'error', term()}.
+start_listener(Listener, NumAcceptors, ConcurrentConnsSupsCount, Protocol, Label, Opts) ->
lists:foldl(fun (Address, ok) ->
- start_listener0(Address, NumAcceptors, Protocol, Label, Opts);
+ start_listener0(Address, NumAcceptors, ConcurrentConnsSupsCount, Protocol,
+ Label, Opts);
(_Address, {error, _} = Error) ->
Error
end, ok, tcp_listener_addresses(Listener)).
-start_listener0(Address, NumAcceptors, Protocol, Label, Opts) ->
+start_listener0(Address, NumAcceptors, ConcurrentConnsSupsCount, Protocol, Label, Opts) ->
Transport = transport(Protocol),
Spec = tcp_listener_spec(rabbit_tcp_listener_sup, Address, Opts,
Transport, rabbit_connection_sup, [], Protocol,
- NumAcceptors, Label),
+ NumAcceptors, ConcurrentConnsSupsCount, Label),
case supervisor:start_child(rabbit_sup, Spec) of
{ok, _} -> ok;
{error, {{shutdown, {failed_to_start_child, _,
@@ -370,11 +397,11 @@ on_node_down(Node) ->
case lists:member(Node, nodes()) of
false ->
rabbit_log:info(
- "Node ~s is down, deleting its listeners~n", [Node]),
+ "Node ~s is down, deleting its listeners", [Node]),
ok = mnesia:dirty_delete(rabbit_listener, Node);
true ->
rabbit_log:info(
- "Keeping ~s listeners: the node is already back~n", [Node])
+ "Keeping ~s listeners: the node is already back", [Node])
end.
-spec register_connection(pid()) -> ok.
@@ -456,11 +483,11 @@ close_connection(Pid, Explanation) ->
case lists:member(Pid, connections()) of
true ->
Res = rabbit_reader:shutdown(Pid, Explanation),
- rabbit_log:info("Closing connection ~p because ~p~n", [Pid, Explanation]),
+ rabbit_log:info("Closing connection ~p because ~p", [Pid, Explanation]),
Res;
false ->
rabbit_log:warning("Asked to close connection ~p (reason: ~p) "
- "but no running cluster node reported it as an active connection. Was it already closed? ~n",
+ "but no running cluster node reported it as an active connection. Was it already closed? ",
[Pid, Explanation]),
ok
end.
@@ -470,6 +497,12 @@ close_connections(Pids, Explanation) ->
[close_connection(Pid, Explanation) || Pid <- Pids],
ok.
+-spec close_all_user_connections(rabbit_types:username(), string()) -> 'ok'.
+close_all_user_connections(Username, Explanation) ->
+ Pids = [Pid || #tracked_connection{pid = Pid} <- rabbit_connection_tracking:list_of_user(Username)],
+ [close_connection(Pid, Explanation) || Pid <- Pids],
+ ok.
+
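%% Hypothetical usage sketch (the username and reason below are made-up examples,
%% not taken from this patch):
%%
%%   ok = rabbit_networking:close_all_user_connections(
%%          <<"app-user">>, "user deleted; closing all of its connections").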
%% Meant to be used by tests only
-spec close_all_connections(string()) -> 'ok'.
close_all_connections(Explanation) ->
@@ -571,7 +604,7 @@ gethostaddr(Host, Family) ->
-spec host_lookup_error(_, _) -> no_return().
host_lookup_error(Host, Reason) ->
- rabbit_log:error("invalid host ~p - ~p~n", [Host, Reason]),
+ rabbit_log:error("invalid host ~p - ~p", [Host, Reason]),
throw({error, {invalid_host, Host, Reason}}).
resolve_family({_,_,_,_}, auto) -> inet;
diff --git a/deps/rabbit/src/rabbit_node_monitor.erl b/deps/rabbit/src/rabbit_node_monitor.erl
index b56180c54c..55b3bcb9eb 100644
--- a/deps/rabbit/src/rabbit_node_monitor.erl
+++ b/deps/rabbit/src/rabbit_node_monitor.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_node_monitor).
@@ -15,7 +15,8 @@
-export([start_link/0]).
-export([running_nodes_filename/0,
- cluster_status_filename/0, quorum_filename/0, default_quorum_filename/0,
+ cluster_status_filename/0, coordination_filename/0,
+ quorum_filename/0, default_quorum_filename/0,
prepare_cluster_status_files/0,
write_cluster_status/1, read_cluster_status/0,
update_cluster_status/0, reset_cluster_status/0]).
@@ -72,6 +73,9 @@ running_nodes_filename() ->
cluster_status_filename() ->
filename:join(rabbit_mnesia:dir(), "cluster_nodes.config").
+coordination_filename() ->
+ filename:join(rabbit_mnesia:dir(), "coordination").
+
quorum_filename() ->
ra_env:data_dir().
@@ -162,9 +166,11 @@ notify_node_up() ->
-spec notify_joined_cluster() -> 'ok'.
notify_joined_cluster() ->
- Nodes = rabbit_nodes:all_running() -- [node()],
+ NewMember = node(),
+ Nodes = rabbit_nodes:all_running() -- [NewMember],
gen_server:abcast(Nodes, ?SERVER,
{joined_cluster, node(), rabbit_mnesia:node_type()}),
+
ok.
-spec notify_left_cluster(node()) -> 'ok'.
@@ -322,11 +328,11 @@ find_blocked_global_peers1([], _) ->
unblock_global_peer(PeerNode) ->
ThisNode = node(),
PeerState = rpc:call(PeerNode, sys, get_status, [global_name_server]),
- error_logger:info_msg(
+ logger:info(
"Global hang workaround: global state on ~s seems broken~n"
" * Peer global state: ~p~n"
" * Local global state: ~p~n"
- "Faking nodedown/nodeup between ~s and ~s~n",
+ "Faking nodedown/nodeup between ~s and ~s",
[PeerNode, PeerState, sys:get_status(global_name_server),
PeerNode, ThisNode]),
{global_name_server, ThisNode} ! {nodedown, PeerNode},
@@ -360,7 +366,7 @@ init([]) ->
{ok, ensure_keepalive_timer(#state{monitors = Monitors,
subscribers = pmon:new(),
partitions = [],
- guid = rabbit_guid:gen(),
+ guid = erlang:system_info(creation),
node_guids = maps:new(),
autoheal = rabbit_autoheal:init()})}.
@@ -410,6 +416,13 @@ handle_cast(notify_node_up, State = #state{guid = GUID}) ->
%% disconnected, it would become a minority, pause, realise it's not
%% in a minority any more, and come back, still partitioned (albeit no
%% longer partially).
+%%
+%% UPDATE: The GUID is actually not a GUID anymore - it is the value
+%% returned by erlang:system_info(creation). This prevents false positives
+%% in situations where a node has been restarted (the Erlang VM is up) but the
+%% rabbit app is not yet up. The GUID was only generated and announced upon
+%% rabbit startup; the creation value is available immediately, so we can tell
+%% that the node was restarted before it announces a new value.
%% ----------------------------------------------------------------------------
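%% Illustrative sketch (assumed variable names, not part of the patch): since the
%% "GUID" is now the VM's creation value, a peer restart can be detected by
%% comparing the cached value with a fresh remote lookup, as
%% check_partial_partition below does:
%%
%%   Restarted = CachedCreation =/= rpc:call(Node, erlang, system_info, [creation]).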
handle_cast({node_up, Node, NodeType, GUID},
@@ -429,15 +442,18 @@ handle_cast({check_partial_partition, Node, Rep, NodeGUID, MyGUID, RepGUID},
maps:find(Node, GUIDs) =:= {ok, NodeGUID} of
true -> spawn_link( %%[1]
fun () ->
- case rpc:call(Node, rabbit, is_running, []) of
+ case rpc:call(Node, erlang, system_info, [creation]) of
{badrpc, _} -> ok;
- _ ->
+ NodeGUID ->
rabbit_log:warning("Received a 'DOWN' message"
" from ~p but still can"
- " communicate with it ~n",
+ " communicate with it ",
[Node]),
cast(Rep, {partial_partition,
- Node, node(), RepGUID})
+ Node, node(), RepGUID});
+ _ ->
+ rabbit_log:warning("Node ~p was restarted", [Node]),
+ ok
end
end);
false -> ok
@@ -468,7 +484,7 @@ handle_cast({partial_partition, NotReallyDown, Proxy, MyGUID},
{ok, pause_minority} ->
rabbit_log:error(
FmtBase ++ " * pause_minority mode enabled~n"
- "We will therefore pause until the *entire* cluster recovers~n",
+ "We will therefore pause until the *entire* cluster recovers",
ArgsBase),
await_cluster_recovery(fun all_nodes_up/0),
{noreply, State};
@@ -476,16 +492,16 @@ handle_cast({partial_partition, NotReallyDown, Proxy, MyGUID},
case in_preferred_partition(PreferredNodes) of
true -> rabbit_log:error(
FmtBase ++ "We will therefore intentionally "
- "disconnect from ~s~n", ArgsBase ++ [Proxy]),
+ "disconnect from ~s", ArgsBase ++ [Proxy]),
upgrade_to_full_partition(Proxy);
false -> rabbit_log:info(
FmtBase ++ "We are about to pause, no need "
- "for further actions~n", ArgsBase)
+ "for further actions", ArgsBase)
end,
{noreply, State};
{ok, _} ->
rabbit_log:error(
- FmtBase ++ "We will therefore intentionally disconnect from ~s~n",
+ FmtBase ++ "We will therefore intentionally disconnect from ~s",
ArgsBase ++ [Proxy]),
upgrade_to_full_partition(Proxy),
{noreply, State}
@@ -498,7 +514,7 @@ handle_cast({partial_partition, _GUID, _Reporter, _Proxy}, State) ->
%% messages reliably when another node disconnects from us. Therefore
%% we are told just before the disconnection so we can reciprocate.
handle_cast({partial_partition_disconnect, Other}, State) ->
- rabbit_log:error("Partial partition disconnect from ~s~n", [Other]),
+ rabbit_log:error("Partial partition disconnect from ~s", [Other]),
disconnect(Other),
{noreply, State};
@@ -507,7 +523,7 @@ handle_cast({partial_partition_disconnect, Other}, State) ->
%% mnesia propagation.
handle_cast({node_up, Node, NodeType},
State = #state{monitors = Monitors}) ->
- rabbit_log:info("rabbit on node ~p up~n", [Node]),
+ rabbit_log:info("rabbit on node ~p up", [Node]),
{AllNodes, DiscNodes, RunningNodes} = read_cluster_status(),
write_cluster_status({add_node(Node, AllNodes),
case NodeType of
@@ -532,6 +548,8 @@ handle_cast({joined_cluster, Node, NodeType}, State) ->
ram -> DiscNodes
end,
RunningNodes}),
+ rabbit_log:debug("Node '~p' has joined the cluster", [Node]),
+ rabbit_event:notify(node_added, [{node, Node}]),
{noreply, State};
handle_cast({left_cluster, Node}, State) ->
@@ -551,7 +569,7 @@ handle_cast(_Msg, State) ->
handle_info({'DOWN', _MRef, process, {rabbit, Node}, _Reason},
State = #state{monitors = Monitors, subscribers = Subscribers}) ->
- rabbit_log:info("rabbit on node ~p down~n", [Node]),
+ rabbit_log:info("rabbit on node ~p down", [Node]),
{AllNodes, DiscNodes, RunningNodes} = read_cluster_status(),
write_cluster_status({AllNodes, DiscNodes, del_node(Node, RunningNodes)}),
[P ! {node_down, Node} || P <- pmon:monitored(Subscribers)],
@@ -565,7 +583,7 @@ handle_info({'DOWN', _MRef, process, Pid, _Reason},
handle_info({nodedown, Node, Info}, State = #state{guid = MyGUID,
node_guids = GUIDs}) ->
- rabbit_log:info("node ~p down: ~p~n",
+ rabbit_log:info("node ~p down: ~p",
[Node, proplists:get_value(nodedown_reason, Info)]),
Check = fun (N, CheckGUID, DownGUID) ->
cast(N, {check_partial_partition,
@@ -583,7 +601,7 @@ handle_info({nodedown, Node, Info}, State = #state{guid = MyGUID,
{noreply, handle_dead_node(Node, State)};
handle_info({nodeup, Node, _Info}, State) ->
- rabbit_log:info("node ~p up~n", [Node]),
+ rabbit_log:info("node ~p up", [Node]),
{noreply, State};
handle_info({mnesia_system_event,
@@ -687,13 +705,13 @@ handle_dead_node(Node, State = #state{autoheal = Autoheal}) ->
State#state{autoheal = rabbit_autoheal:node_down(Node, Autoheal)};
{ok, Term} ->
rabbit_log:warning("cluster_partition_handling ~p unrecognised, "
- "assuming 'ignore'~n", [Term]),
+ "assuming 'ignore'", [Term]),
State
end.
await_cluster_recovery(Condition) ->
rabbit_log:warning("Cluster minority/secondary status detected - "
- "awaiting recovery~n", []),
+ "awaiting recovery", []),
run_outside_applications(fun () ->
rabbit:stop(),
wait_for_cluster_recovery(Condition)
@@ -744,7 +762,7 @@ do_run_outside_app_fun(Fun) ->
Fun()
catch _:E:Stacktrace ->
rabbit_log:error(
- "rabbit_outside_app_process:~n~p~n~p~n",
+ "rabbit_outside_app_process:~n~p~n~p",
[E, Stacktrace])
end.
@@ -868,7 +886,7 @@ majority() ->
majority([]).
majority(NodesDown) ->
- Nodes = rabbit_mnesia:cluster_nodes(all),
+ Nodes = rabbit_nodes:all(),
AliveNodes = alive_nodes(Nodes) -- NodesDown,
length(AliveNodes) / length(Nodes) > 0.5.
@@ -881,29 +899,30 @@ in_preferred_partition(PreferredNodes) ->
in_preferred_partition(PreferredNodes, []).
in_preferred_partition(PreferredNodes, NodesDown) ->
- Nodes = rabbit_mnesia:cluster_nodes(all),
+ Nodes = rabbit_nodes:all(),
RealPreferredNodes = [N || N <- PreferredNodes, lists:member(N, Nodes)],
AliveNodes = alive_nodes(RealPreferredNodes) -- NodesDown,
RealPreferredNodes =:= [] orelse AliveNodes =/= [].
all_nodes_up() ->
- Nodes = rabbit_mnesia:cluster_nodes(all),
+ Nodes = rabbit_nodes:all(),
length(alive_nodes(Nodes)) =:= length(Nodes).
-spec all_rabbit_nodes_up() -> boolean().
all_rabbit_nodes_up() ->
- Nodes = rabbit_mnesia:cluster_nodes(all),
+ Nodes = rabbit_nodes:all(),
length(alive_rabbit_nodes(Nodes)) =:= length(Nodes).
+alive_nodes() -> alive_nodes(rabbit_nodes:all()).
+
-spec alive_nodes([node()]) -> [node()].
-alive_nodes() -> alive_nodes(rabbit_mnesia:cluster_nodes(all)).
alive_nodes(Nodes) -> [N || N <- Nodes, lists:member(N, [node()|nodes()])].
--spec alive_rabbit_nodes([node()]) -> [node()].
+alive_rabbit_nodes() -> alive_rabbit_nodes(rabbit_nodes:all()).
-alive_rabbit_nodes() -> alive_rabbit_nodes(rabbit_mnesia:cluster_nodes(all)).
+-spec alive_rabbit_nodes([node()]) -> [node()].
alive_rabbit_nodes(Nodes) ->
[N || N <- alive_nodes(Nodes), rabbit:is_running(N)].
@@ -913,14 +932,14 @@ alive_rabbit_nodes(Nodes) ->
-spec ping_all() -> 'ok'.
ping_all() ->
- [net_adm:ping(N) || N <- rabbit_mnesia:cluster_nodes(all)],
+ [net_adm:ping(N) || N <- rabbit_nodes:all()],
ok.
possibly_partitioned_nodes() ->
alive_rabbit_nodes() -- rabbit_nodes:all_running().
startup_log([]) ->
- rabbit_log:info("Starting rabbit_node_monitor~n", []);
+ rabbit_log:info("Starting rabbit_node_monitor", []);
startup_log(Nodes) ->
- rabbit_log:info("Starting rabbit_node_monitor, might be partitioned from ~p~n",
+ rabbit_log:info("Starting rabbit_node_monitor, might be partitioned from ~p",
[Nodes]).
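
The UPDATE comment in the node monitor hunk above boils down to comparing a peer's current erlang:system_info(creation) value with the value recorded when that peer last announced itself. A minimal sketch of that comparison (not taken from the changeset itself; NodeGuids and the result atoms are invented for illustration):

    %% Illustrative sketch only; NodeGuids and the result atoms are made up.
    KnownCreation = maps:get(Node, NodeGuids),
    case rpc:call(Node, erlang, system_info, [creation]) of
        {badrpc, _}   -> down;                  %% peer unreachable: a genuine 'DOWN'
        KnownCreation -> partially_partitioned; %% same VM instance is still reachable
        _NewCreation  -> restarted              %% the VM was restarted since the value was recorded
    end.
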
diff --git a/deps/rabbit/src/rabbit_nodes.erl b/deps/rabbit/src/rabbit_nodes.erl
index 3034a4d513..092e8dc912 100644
--- a/deps/rabbit/src/rabbit_nodes.erl
+++ b/deps/rabbit/src/rabbit_nodes.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_nodes).
@@ -14,6 +14,8 @@
await_running_count/2, is_single_node_cluster/0,
boot/0]).
-export([persistent_cluster_id/0, seed_internal_cluster_id/0, seed_user_provided_cluster_name/0]).
+-export([all/0, all_running_with_hashes/0, target_cluster_size_hint/0, reached_target_cluster_size/0]).
+-export([lock_id/1, lock_retries/0]).
-include_lib("kernel/include/inet.hrl").
-include_lib("rabbit_common/include/rabbit.hrl").
@@ -22,6 +24,14 @@
-define(INTERNAL_CLUSTER_ID_PARAM_NAME, internal_cluster_id).
+% Retries as passed to https://erlang.org/doc/man/global.html#set_lock-3
+% To understand how retries map to the timeout, read
+% https://github.com/erlang/otp/blob/d256ae477014158a49bb860b283df9c040011197/lib/kernel/src/global.erl#L2062-L2075
+% 80 corresponds to a timeout of approximately 300 seconds.
+-define(DEFAULT_LOCK_RETRIES, 80).
+
+-define(DEFAULT_TARGET_CLUSTER_SIZE, 1).
+
%%----------------------------------------------------------------------------
%% API
%%----------------------------------------------------------------------------
@@ -72,8 +82,10 @@ is_process_running(Node, Process) ->
-spec cluster_name() -> binary().
cluster_name() ->
- rabbit_runtime_parameters:value_global(
- cluster_name, cluster_name_default()).
+ case rabbit_runtime_parameters:value_global(cluster_name) of
+ not_found -> cluster_name_default();
+ Name -> Name
+ end.
cluster_name_default() ->
{ID, _} = parts(node()),
@@ -127,6 +139,9 @@ set_cluster_name(Name, Username) ->
ensure_epmd() ->
rabbit_nodes_common:ensure_epmd().
+-spec all() -> [node()].
+all() -> rabbit_mnesia:cluster_nodes(all).
+
-spec all_running() -> [node()].
all_running() -> rabbit_mnesia:cluster_nodes(running).
@@ -134,7 +149,7 @@ all_running() -> rabbit_mnesia:cluster_nodes(running).
running_count() -> length(all_running()).
-spec total_count() -> integer().
-total_count() -> length(rabbit_mnesia:cluster_nodes(all)).
+total_count() -> length(rabbit_nodes:all()).
-spec is_single_node_cluster() -> boolean().
is_single_node_cluster() ->
@@ -155,3 +170,37 @@ await_running_count_with_retries(TargetCount, Retries) ->
timer:sleep(?SAMPLING_INTERVAL),
await_running_count_with_retries(TargetCount, Retries - 1)
end.
+
+-spec all_running_with_hashes() -> #{non_neg_integer() => node()}.
+all_running_with_hashes() ->
+ maps:from_list([{erlang:phash2(Node), Node} || Node <- all_running()]).
+
+-spec target_cluster_size_hint() -> non_neg_integer().
+target_cluster_size_hint() ->
+ cluster_formation_key_or_default(target_cluster_size_hint, ?DEFAULT_TARGET_CLUSTER_SIZE).
+
+-spec reached_target_cluster_size() -> boolean().
+reached_target_cluster_size() ->
+ running_count() >= target_cluster_size_hint().
+
+
+-spec lock_id(Node :: node()) -> {ResourceId :: string(), LockRequesterId :: node()}.
+lock_id(Node) ->
+ {cookie_hash(), Node}.
+
+-spec lock_retries() -> integer().
+lock_retries() ->
+ cluster_formation_key_or_default(internal_lock_retries, ?DEFAULT_LOCK_RETRIES).
+
+
+%%
+%% Implementation
+%%
+
+cluster_formation_key_or_default(Key, Default) ->
+ case application:get_env(rabbit, cluster_formation) of
+ {ok, PropList} ->
+ proplists:get_value(Key, PropList, Default);
+ undefined ->
+ Default
+    end.
\ No newline at end of file
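
The new cluster_formation_key_or_default/2 helper reads optional keys from the rabbit application's cluster_formation settings; target_cluster_size_hint and internal_lock_retries above are resolved through it. A minimal advanced.config sketch exercising those two keys (the values shown are arbitrary examples, not recommendations):

    %% advanced.config sketch; values are illustrative only.
    [
     {rabbit, [
       {cluster_formation, [
         {target_cluster_size_hint, 3},
         {internal_lock_retries, 80}
       ]}
     ]}
    ].
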
diff --git a/deps/rabbit/src/rabbit_osiris_metrics.erl b/deps/rabbit/src/rabbit_osiris_metrics.erl
index 7b2574c7e1..710ce1b65e 100644
--- a/deps/rabbit/src/rabbit_osiris_metrics.erl
+++ b/deps/rabbit/src/rabbit_osiris_metrics.erl
@@ -1,16 +1,8 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at https://www.mozilla.org/MPL/
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% Copyright (c) 2012-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_osiris_metrics).
@@ -32,7 +24,9 @@
state,
leader,
online,
- members
+ members,
+ memory,
+ readers
]).
-record(state, {timeout :: non_neg_integer()}).
@@ -63,32 +57,34 @@ handle_info(tick, #state{timeout = Timeout} = State) ->
maps:map(
fun ({osiris_writer, QName}, #{offset := Offs,
first_offset := FstOffs}) ->
- COffs = Offs + 1 - FstOffs,
- rabbit_core_metrics:queue_stats(QName, COffs, 0, COffs, 0),
- Infos = try
- %% TODO complete stats!
- case rabbit_amqqueue:lookup(QName) of
- {ok, Q} ->
- rabbit_stream_queue:info(Q, ?STATISTICS_KEYS);
- _ ->
- []
- end
- catch
- _:_ ->
- %% It's possible that the writer has died but
- %% it's still on the amqqueue record, so the
- %% `erlang:process_info/2` calls will return
- %% `undefined` and crash with a badmatch.
- %% At least for now, skipping the metrics might
- %% be the best option. Otherwise this brings
- %% down `rabbit_sup` and the whole `rabbit` app.
- []
- end,
- rabbit_core_metrics:queue_stats(QName, Infos),
- rabbit_event:notify(queue_stats, Infos ++ [{name, QName},
- {messages, COffs},
- {messages_ready, COffs},
- {messages_unacknowledged, 0}]),
+ COffs = Offs + 1 - FstOffs,
+ rabbit_core_metrics:queue_stats(QName, COffs, 0, COffs, 0),
+ Infos = try
+ %% TODO complete stats!
+ case rabbit_amqqueue:lookup(QName) of
+ {ok, Q} ->
+ rabbit_stream_queue:info(Q, ?STATISTICS_KEYS);
+ _ ->
+ []
+ end
+ catch
+ _:_ ->
+ %% It's possible that the writer has died but
+ %% it's still on the amqqueue record, so the
+ %% `erlang:process_info/2` calls will return
+ %% `undefined` and crash with a badmatch.
+ %% At least for now, skipping the metrics might
+ %% be the best option. Otherwise this brings
+ %% down `rabbit_sup` and the whole `rabbit` app.
+ []
+ end,
+
+
+ rabbit_core_metrics:queue_stats(QName, Infos),
+ rabbit_event:notify(queue_stats, Infos ++ [{name, QName},
+ {messages, COffs},
+ {messages_ready, COffs},
+ {messages_unacknowledged, 0}]),
ok;
(_, _V) ->
ok
diff --git a/deps/rabbit/src/rabbit_parameter_validation.erl b/deps/rabbit/src/rabbit_parameter_validation.erl
index 66287ec799..997c6483cd 100644
--- a/deps/rabbit/src/rabbit_parameter_validation.erl
+++ b/deps/rabbit/src/rabbit_parameter_validation.erl
@@ -2,12 +2,13 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_parameter_validation).
--export([number/2, integer/2, binary/2, boolean/2, list/2, regex/2, proplist/3, enum/1]).
+-export([number/2, integer/2, binary/2, amqp091_queue_name/2,
+ boolean/2, list/2, regex/2, proplist/3, enum/1]).
number(_Name, Term) when is_number(Term) ->
ok;
@@ -27,6 +28,16 @@ binary(_Name, Term) when is_binary(Term) ->
binary(Name, Term) ->
{error, "~s should be binary, actually was ~p", [Name, Term]}.
+amqp091_queue_name(Name, S) when is_binary(S) ->
+ case size(S) of
+ Len when Len =< 255 -> ok;
+ _ -> {error, "~s should be less than 255 bytes, actually was ~p", [Name, size(S)]}
+ end;
+
+amqp091_queue_name(Name, Term) ->
+ {error, "~s should be binary, actually was ~p", [Name, Term]}.
+
+
boolean(_Name, Term) when is_boolean(Term) ->
ok;
boolean(Name, Term) ->
@@ -50,7 +61,20 @@ regex(Name, Term) ->
proplist(Name, Constraints, Term) when is_list(Term) ->
{Results, Remainder}
= lists:foldl(
- fun ({Key, Fun, Needed}, {Results0, Term0}) ->
+ %% if the optional/mandatory flag is not provided in a constraint tuple,
+ %% assume 'optional'
+ fun ({Key, Fun}, {Results0, Term0}) ->
+ case lists:keytake(Key, 1, Term0) of
+ {value, {Key, Value}, Term1} ->
+ {[Fun(Key, Value) | Results0],
+ Term1};
+ {value, {Key, Type, Value}, Term1} ->
+ {[Fun(Key, Type, Value) | Results0],
+ Term1};
+ false ->
+ {Results0, Term0}
+ end;
+ ({Key, Fun, Needed}, {Results0, Term0}) ->
case {lists:keytake(Key, 1, Term0), Needed} of
{{value, {Key, Value}, Term1}, _} ->
{[Fun(Key, Value) | Results0],
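
With the new two-tuple clause above, a constraint without an explicit flag is treated as optional. A hedged usage sketch, assuming the usual mandatory/optional atoms and using an invented parameter name and keys:

    %% Sketch only; the parameter name and keys are invented.
    Constraints = [{<<"queue">>,    fun rabbit_parameter_validation:amqp091_queue_name/2, mandatory},
                   {<<"prefetch">>, fun rabbit_parameter_validation:number/2}],  %% no flag: optional
    rabbit_parameter_validation:proplist(<<"my-parameter">>, Constraints,
                                         [{<<"queue">>, <<"orders">>}]).
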
diff --git a/deps/rabbit/src/rabbit_password.erl b/deps/rabbit/src/rabbit_password.erl
index 6a5254b707..872c490db2 100644
--- a/deps/rabbit/src/rabbit_password.erl
+++ b/deps/rabbit/src/rabbit_password.erl
@@ -2,11 +2,11 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_password).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-define(DEFAULT_HASHING_MODULE, rabbit_password_hashing_sha256).
diff --git a/deps/rabbit/src/rabbit_password_hashing_md5.erl b/deps/rabbit/src/rabbit_password_hashing_md5.erl
index 1e306673ca..9e3ea81490 100644
--- a/deps/rabbit/src/rabbit_password_hashing_md5.erl
+++ b/deps/rabbit/src/rabbit_password_hashing_md5.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% Legacy hashing implementation, only used as a last resort when
diff --git a/deps/rabbit/src/rabbit_password_hashing_sha256.erl b/deps/rabbit/src/rabbit_password_hashing_sha256.erl
index 3ccc298efd..6bc59bd92a 100644
--- a/deps/rabbit/src/rabbit_password_hashing_sha256.erl
+++ b/deps/rabbit/src/rabbit_password_hashing_sha256.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_password_hashing_sha256).
diff --git a/deps/rabbit/src/rabbit_password_hashing_sha512.erl b/deps/rabbit/src/rabbit_password_hashing_sha512.erl
index c5edf8888a..dab6f2e252 100644
--- a/deps/rabbit/src/rabbit_password_hashing_sha512.erl
+++ b/deps/rabbit/src/rabbit_password_hashing_sha512.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_password_hashing_sha512).
diff --git a/deps/rabbit/src/rabbit_peer_discovery.erl b/deps/rabbit/src/rabbit_peer_discovery.erl
index 1688579450..f386f36210 100644
--- a/deps/rabbit/src/rabbit_peer_discovery.erl
+++ b/deps/rabbit/src/rabbit_peer_discovery.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_peer_discovery).
@@ -14,8 +14,7 @@
-export([maybe_init/0, discover_cluster_nodes/0, backend/0, node_type/0,
normalize/1, format_discovered_nodes/1, log_configured_backend/0,
register/0, unregister/0, maybe_register/0, maybe_unregister/0,
- maybe_inject_randomized_delay/0, lock/0, unlock/1,
- discovery_retries/0]).
+ lock/0, unlock/1, discovery_retries/0]).
-export([append_node_prefix/1, node_prefix/0, locking_retry_timeout/0,
lock_acquisition_failure_mode/0]).
@@ -28,9 +27,6 @@
%% default node prefix to attach to discovered hostnames
-define(DEFAULT_PREFIX, "rabbit").
-%% default randomized delay range, in seconds
--define(DEFAULT_STARTUP_RANDOMIZED_DELAY, {5, 60}).
-
%% default discovery retries and interval.
-define(DEFAULT_DISCOVERY_RETRY_COUNT, 10).
-define(DEFAULT_DISCOVERY_RETRY_INTERVAL_MS, 500).
@@ -84,7 +80,7 @@ lock_acquisition_failure_mode() ->
-spec log_configured_backend() -> ok.
log_configured_backend() ->
- rabbit_log:info("Configured peer discovery backend: ~s~n", [backend()]).
+ rabbit_log:info("Configured peer discovery backend: ~s", [backend()]).
maybe_init() ->
Backend = backend(),
@@ -159,61 +155,6 @@ discovery_retries() ->
{?DEFAULT_DISCOVERY_RETRY_COUNT, ?DEFAULT_DISCOVERY_RETRY_INTERVAL_MS}
end.
-
--spec maybe_inject_randomized_delay() -> ok.
-maybe_inject_randomized_delay() ->
- Backend = backend(),
- case Backend:supports_registration() of
- true ->
- rabbit_log:info("Peer discovery backend ~s supports registration.", [Backend]),
- inject_randomized_delay();
- false ->
- rabbit_log:info("Peer discovery backend ~s does not support registration, skipping randomized startup delay.", [Backend]),
- ok
- end.
-
--spec inject_randomized_delay() -> ok.
-
-inject_randomized_delay() ->
- {Min, Max} = randomized_delay_range_in_ms(),
- case {Min, Max} of
- %% When the max value is set to 0, consider the delay to be disabled.
- %% In addition, `rand:uniform/1` will fail with a "no function clause"
- %% when the argument is 0.
- {_, 0} ->
- rabbit_log:info("Randomized delay range's upper bound is set to 0. Considering it disabled."),
- ok;
- {_, N} when is_number(N) ->
- rand:seed(exsplus),
- RandomVal = rand:uniform(round(N)),
- rabbit_log:debug("Randomized startup delay: configured range is from ~p to ~p milliseconds, PRNG pick: ~p...",
- [Min, Max, RandomVal]),
- Effective = case RandomVal < Min of
- true -> Min;
- false -> RandomVal
- end,
- rabbit_log:info("Will wait for ~p milliseconds before proceeding with registration...", [Effective]),
- timer:sleep(Effective),
- ok
- end.
-
--spec randomized_delay_range_in_ms() -> {integer(), integer()}.
-
-randomized_delay_range_in_ms() ->
- Backend = backend(),
- Default = case erlang:function_exported(Backend, randomized_startup_delay_range, 0) of
- true -> Backend:randomized_startup_delay_range();
- false -> ?DEFAULT_STARTUP_RANDOMIZED_DELAY
- end,
- {Min, Max} = case application:get_env(rabbit, cluster_formation) of
- {ok, Proplist} ->
- proplists:get_value(randomized_startup_delay_range, Proplist, Default);
- undefined ->
- Default
- end,
- {Min * 1000, Max * 1000}.
-
-
-spec register() -> ok.
register() ->
diff --git a/deps/rabbit/src/rabbit_peer_discovery_classic_config.erl b/deps/rabbit/src/rabbit_peer_discovery_classic_config.erl
index 8bc7382a75..b8965f0511 100644
--- a/deps/rabbit/src/rabbit_peer_discovery_classic_config.erl
+++ b/deps/rabbit/src/rabbit_peer_discovery_classic_config.erl
@@ -2,13 +2,13 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_peer_discovery_classic_config).
-behaviour(rabbit_peer_discovery_backend).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-export([list_nodes/0, supports_registration/0, register/0, unregister/0,
post_registration/0, lock/1, unlock/1]).
@@ -26,12 +26,37 @@ list_nodes() ->
Nodes when is_list(Nodes) -> {ok, {Nodes, disc}}
end.
+-spec lock(Node :: node()) -> {ok, {{ResourceId :: string(), LockRequesterId :: node()}, Nodes :: [node()]}} |
+ {error, Reason :: string()}.
+
+lock(Node) ->
+ {ok, {Nodes, _NodeType}} = list_nodes(),
+ case lists:member(Node, Nodes) of
+ false when Nodes =/= [] ->
+ rabbit_log:warning("Local node ~s is not part of configured nodes ~p. "
+ "This might lead to incorrect cluster formation.", [Node, Nodes]);
+ _ -> ok
+ end,
+ LockId = rabbit_nodes:lock_id(Node),
+ Retries = rabbit_nodes:lock_retries(),
+ case global:set_lock(LockId, Nodes, Retries) of
+ true ->
+ {ok, {LockId, Nodes}};
+ false ->
+ {error, io_lib:format("Acquiring lock taking too long, bailing out after ~b retries", [Retries])}
+ end.
+
+-spec unlock({{ResourceId :: string(), LockRequesterId :: node()}, Nodes :: [node()]}) ->
+ ok.
+
+unlock({LockId, Nodes}) ->
+ global:del_lock(LockId, Nodes),
+ ok.
+
-spec supports_registration() -> boolean().
supports_registration() ->
- %% If we don't have any nodes configured, skip randomized delay and similar operations
- %% as we don't want to delay startup for no reason. MK.
- has_any_peer_nodes_configured().
+ false.
-spec register() -> ok.
@@ -47,29 +72,3 @@ unregister() ->
post_registration() ->
ok.
-
--spec lock(Node :: atom()) -> not_supported.
-
-lock(_Node) ->
- not_supported.
-
--spec unlock(Data :: term()) -> ok.
-
-unlock(_Data) ->
- ok.
-
-%%
-%% Helpers
-%%
-
-has_any_peer_nodes_configured() ->
- case application:get_env(rabbit, cluster_nodes, []) of
- {[], _NodeType} ->
- false;
- {Nodes, _NodeType} when is_list(Nodes) ->
- true;
- [] ->
- false;
- Nodes when is_list(Nodes) ->
- true
- end.
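
For completeness, a sketch of how a caller might use the new lock/unlock pair, following the usual acquire/work/release pattern; do_clustering_work/0 is a placeholder, not a real function:

    case rabbit_peer_discovery_classic_config:lock(node()) of
        {ok, {LockId, Nodes}} ->
            try
                do_clustering_work()   %% placeholder for whatever runs under the lock
            after
                ok = rabbit_peer_discovery_classic_config:unlock({LockId, Nodes})
            end;
        {error, Reason} ->
            {error, Reason}
    end.
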
diff --git a/deps/rabbit/src/rabbit_peer_discovery_dns.erl b/deps/rabbit/src/rabbit_peer_discovery_dns.erl
index 6e343a6e2d..3d30ac43be 100644
--- a/deps/rabbit/src/rabbit_peer_discovery_dns.erl
+++ b/deps/rabbit/src/rabbit_peer_discovery_dns.erl
@@ -2,13 +2,13 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_peer_discovery_dns).
-behaviour(rabbit_peer_discovery_backend).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-export([list_nodes/0, supports_registration/0, register/0, unregister/0,
post_registration/0, lock/1, unlock/1]).
diff --git a/deps/rabbit/src/rabbit_plugins.erl b/deps/rabbit/src/rabbit_plugins.erl
index 5697ffc29a..a631f07884 100644
--- a/deps/rabbit/src/rabbit_plugins.erl
+++ b/deps/rabbit/src/rabbit_plugins.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_plugins).
@@ -56,13 +56,13 @@ ensure1(FileJustChanged0) ->
{[], []} ->
ok;
{[], _} ->
- rabbit_log:info("Plugins changed; disabled ~p~n",
+ rabbit_log:info("Plugins changed; disabled ~p",
[Stop]);
{_, []} ->
- rabbit_log:info("Plugins changed; enabled ~p~n",
+ rabbit_log:info("Plugins changed; enabled ~p",
[Start]);
{_, _} ->
- rabbit_log:info("Plugins changed; enabled ~p, disabled ~p~n",
+ rabbit_log:info("Plugins changed; enabled ~p, disabled ~p",
[Start, Stop])
end,
{ok, Start, Stop};
@@ -429,7 +429,7 @@ prepare_dir_plugin(PluginAppDescPath) ->
rabbit_log:error("Failed to enable plugin \"~s\": "
"it may have been built with an "
"incompatible (more recent?) "
- "version of Erlang~n", [Plugin]),
+ "version of Erlang", [Plugin]),
throw({plugin_built_with_incompatible_erlang, Plugin});
Error ->
throw({plugin_module_unloadable, Plugin, Error})
@@ -459,11 +459,11 @@ prepare_plugin(#plugin{type = ez, name = Name, location = Location}, ExpandDir)
[PluginAppDescPath|_] ->
prepare_dir_plugin(PluginAppDescPath);
_ ->
- rabbit_log:error("Plugin archive '~s' doesn't contain an .app file~n", [Location]),
+ rabbit_log:error("Plugin archive '~s' doesn't contain an .app file", [Location]),
throw({app_file_missing, Name, Location})
end;
{error, Reason} ->
- rabbit_log:error("Could not unzip plugin archive '~s': ~p~n", [Location, Reason]),
+ rabbit_log:error("Could not unzip plugin archive '~s': ~p", [Location, Reason]),
throw({failed_to_unzip_plugin, Name, Location, Reason})
end;
prepare_plugin(#plugin{type = dir, location = Location, name = Name},
@@ -472,7 +472,7 @@ prepare_plugin(#plugin{type = dir, location = Location, name = Name},
[PluginAppDescPath|_] ->
prepare_dir_plugin(PluginAppDescPath);
_ ->
- rabbit_log:error("Plugin directory '~s' doesn't contain an .app file~n", [Location]),
+ rabbit_log:error("Plugin directory '~s' doesn't contain an .app file", [Location]),
throw({app_file_missing, Name, Location})
end.
@@ -548,7 +548,7 @@ split_path(PathString) ->
{unix, _} -> ":";
{win32, _} -> ";"
end,
- string:tokens(PathString, Delimiters).
+ lists:usort(string:tokens(PathString, Delimiters)).
%% Search for files using glob in a given dir. Returns full filenames of those files.
full_path_wildcard(Glob, Dir) ->
diff --git a/deps/rabbit/src/rabbit_policies.erl b/deps/rabbit/src/rabbit_policies.erl
index 54e4d2c03e..e23d12d81a 100644
--- a/deps/rabbit/src/rabbit_policies.erl
+++ b/deps/rabbit/src/rabbit_policies.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_policies).
@@ -13,7 +13,7 @@
-behaviour(rabbit_policy_validator).
-behaviour(rabbit_policy_merge_strategy).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-export([register/0, validate_policy/1, merge_policy_value/3]).
@@ -40,7 +40,7 @@ register() ->
{policy_validator, <<"overflow">>},
{policy_validator, <<"delivery-limit">>},
{policy_validator, <<"max-age">>},
- {policy_validator, <<"max-segment-size">>},
+ {policy_validator, <<"stream-max-segment-size-bytes">>},
{policy_validator, <<"queue-leader-locator">>},
{policy_validator, <<"initial-cluster-size">>},
{operator_policy_validator, <<"expires">>},
@@ -164,10 +164,10 @@ validate_policy0(<<"initial-cluster-size">>, Value)
validate_policy0(<<"initial-cluster-size">>, Value) ->
{error, "~p is not a valid cluster size", [Value]};
-validate_policy0(<<"max-segment-size">>, Value)
+validate_policy0(<<"stream-max-segment-size-bytes">>, Value)
when is_integer(Value), Value >= 0 ->
ok;
-validate_policy0(<<"max-segment-size">>, Value) ->
+validate_policy0(<<"stream-max-segment-size-bytes">>, Value) ->
{error, "~p is not a valid segment size", [Value]}.
merge_policy_value(<<"message-ttl">>, Val, OpVal) -> min(Val, OpVal);
@@ -176,4 +176,6 @@ merge_policy_value(<<"max-length-bytes">>, Val, OpVal) -> min(Val, OpVal);
merge_policy_value(<<"max-in-memory-length">>, Val, OpVal) -> min(Val, OpVal);
merge_policy_value(<<"max-in-memory-bytes">>, Val, OpVal) -> min(Val, OpVal);
merge_policy_value(<<"expires">>, Val, OpVal) -> min(Val, OpVal);
-merge_policy_value(<<"delivery-limit">>, Val, OpVal) -> min(Val, OpVal).
+merge_policy_value(<<"delivery-limit">>, Val, OpVal) -> min(Val, OpVal);
+%% use operator policy value for booleans
+merge_policy_value(_Key, Val, OpVal) when is_boolean(Val) andalso is_boolean(OpVal) -> OpVal.
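
The new catch-all clause above means that when a regular policy and an operator policy both define the same boolean key, the operator value is taken. A quick sketch with an invented key name:

    %% Both policies define the (invented) boolean key; the operator value wins.
    true  = rabbit_policies:merge_policy_value(<<"some-boolean-flag">>, false, true),
    false = rabbit_policies:merge_policy_value(<<"some-boolean-flag">>, true, false).
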
diff --git a/deps/rabbit/src/rabbit_policy.erl b/deps/rabbit/src/rabbit_policy.erl
index 44807de97d..c60dd75ab1 100644
--- a/deps/rabbit/src/rabbit_policy.erl
+++ b/deps/rabbit/src/rabbit_policy.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_policy).
@@ -38,8 +38,12 @@
-export([validate/5, notify/5, notify_clear/4]).
-export([parse_set/7, set/7, delete/3, lookup/2, list/0, list/1,
list_formatted/1, list_formatted/3, info_keys/0]).
--export([parse_set_op/7, set_op/7, delete_op/3, lookup_op/2, list_op/0, list_op/1,
- list_formatted_op/1, list_formatted_op/3]).
+-export([parse_set_op/7, set_op/7, delete_op/3, lookup_op/2, list_op/0, list_op/1, list_op/2,
+ list_formatted_op/1, list_formatted_op/3,
+ match_all/2, match_as_map/1, match_op_as_map/1, definition_keys/1,
+ list_in/1, list_in/2, list_as_maps/0, list_as_maps/1, list_op_as_maps/1
+ ]).
+-export([sort_by_priority/1]).
-rabbit_boot_step({?MODULE,
[{description, "policy parameters"},
@@ -98,12 +102,63 @@ set(Q0) when ?is_amqqueue(Q0) ->
set(X = #exchange{name = Name}) ->
X#exchange{policy = match(Name), operator_policy = match_op(Name)}.
+
+list() ->
+ list('_').
+
+list(VHost) ->
+ list0(VHost, fun ident/1).
+
+list_in(VHost) ->
+ list(VHost).
+
+list_in(VHost, DefinitionKeys) ->
+ [P || P <- list_in(VHost), keys_overlap(definition_keys(P), DefinitionKeys)].
+
+list_as_maps() ->
+ list_as_maps('_').
+
+list_as_maps(VHost) ->
+ [maps:from_list(PL) || PL <- sort_by_priority(list0(VHost, fun maps:from_list/1))].
+
+list_op_as_maps(VHost) ->
+ [maps:from_list(PL) || PL <- sort_by_priority(list0_op(VHost, fun maps:from_list/1))].
+
+list_formatted(VHost) ->
+ sort_by_priority(list0(VHost, fun rabbit_json:encode/1)).
+
+list_formatted(VHost, Ref, AggregatorPid) ->
+ rabbit_control_misc:emitting_map(AggregatorPid, Ref,
+ fun(P) -> P end, list_formatted(VHost)).
+
+list_op() ->
+ list_op('_').
+
+list_op(VHost) ->
+ list0_op(VHost, fun ident/1).
+
+list_op(VHost, DefinitionKeys) ->
+ [P || P <- list_op(VHost), keys_overlap(definition_keys(P), DefinitionKeys)].
+
+list_formatted_op(VHost) ->
+ sort_by_priority(list0_op(VHost, fun rabbit_json:encode/1)).
+
+list_formatted_op(VHost, Ref, AggregatorPid) ->
+ rabbit_control_misc:emitting_map(AggregatorPid, Ref,
+ fun(P) -> P end, list_formatted_op(VHost)).
+
match(Name = #resource{virtual_host = VHost}) ->
match(Name, list(VHost)).
match_op(Name = #resource{virtual_host = VHost}) ->
match(Name, list_op(VHost)).
+match_as_map(Name = #resource{virtual_host = VHost}) ->
+ [maps:from_list(PL) || PL <- match(Name, list(VHost))].
+
+match_op_as_map(Name = #resource{virtual_host = VHost}) ->
+ [maps:from_list(PL) || PL <- match(Name, list_op(VHost))].
+
get(Name, Q) when ?is_amqqueue(Q) ->
Policy = amqqueue:get_policy(Q),
OpPolicy = amqqueue:get_operator_policy(Q),
@@ -117,6 +172,21 @@ get(Name, EntityName = #resource{virtual_host = VHost}) ->
match(EntityName, list(VHost)),
match(EntityName, list_op(VHost))).
+match(Name, Policies) ->
+ case match_all(Name, Policies) of
+ [] -> undefined;
+ [Policy | _] -> Policy
+ end.
+
+match_all(Name, Policies) ->
+ lists:sort(fun priority_comparator/2, [P || P <- Policies, matches(Name, P)]).
+
+matches(#resource{name = Name, kind = Kind, virtual_host = VHost} = Resource, Policy) ->
+ matches_type(Kind, pget('apply-to', Policy)) andalso
+ is_applicable(Resource, pget(definition, Policy)) andalso
+ match =:= re:run(Name, pget(pattern, Policy), [{capture, none}]) andalso
+ VHost =:= pget(vhost, Policy).
+
get0(_Name, undefined, undefined) -> undefined;
get0(Name, undefined, OpPolicy) -> pget(Name, pget(definition, OpPolicy, []));
get0(Name, Policy, undefined) -> pget(Name, pget(definition, Policy, []));
@@ -291,42 +361,15 @@ lookup(VHost, Name) ->
P -> p(P, fun ident/1)
end.
-list_op() ->
- list_op('_').
-
-list_op(VHost) ->
- list0_op(VHost, fun ident/1).
-
-list_formatted_op(VHost) ->
- order_policies(list0_op(VHost, fun rabbit_json:encode/1)).
-
-list_formatted_op(VHost, Ref, AggregatorPid) ->
- rabbit_control_misc:emitting_map(AggregatorPid, Ref,
- fun(P) -> P end, list_formatted_op(VHost)).
-
list0_op(VHost, DefnFun) ->
[p(P, DefnFun)
|| P <- rabbit_runtime_parameters:list(VHost, <<"operator_policy">>)].
-
-list() ->
- list('_').
-
-list(VHost) ->
- list0(VHost, fun ident/1).
-
-list_formatted(VHost) ->
- order_policies(list0(VHost, fun rabbit_json:encode/1)).
-
-list_formatted(VHost, Ref, AggregatorPid) ->
- rabbit_control_misc:emitting_map(AggregatorPid, Ref,
- fun(P) -> P end, list_formatted(VHost)).
-
list0(VHost, DefnFun) ->
[p(P, DefnFun) || P <- rabbit_runtime_parameters:list(VHost, <<"policy">>)].
-order_policies(PropList) ->
- lists:sort(fun (A, B) -> not sort_pred(A, B) end, PropList).
+sort_by_priority(PropList) ->
+ lists:sort(fun (A, B) -> not priority_comparator(A, B) end, PropList).
p(Parameter, DefnFun) ->
Value = pget(value, Parameter),
@@ -341,6 +384,16 @@ ident(X) -> X.
info_keys() -> [vhost, name, 'apply-to', pattern, definition, priority].
+definition_keys(Policy) ->
+ case rabbit_data_coercion:to_map(Policy) of
+ #{definition := Def} ->
+ maps:keys(rabbit_data_coercion:to_map(Def));
+ _ -> []
+ end.
+
+keys_overlap(A, B) ->
+ lists:any(fun(Item) -> lists:member(Item, B) end, A).
+
%%----------------------------------------------------------------------------
validate(_VHost, <<"policy">>, Name, Term, _User) ->
@@ -350,24 +403,26 @@ validate(_VHost, <<"operator_policy">>, Name, Term, _User) ->
rabbit_parameter_validation:proplist(
Name, operator_policy_validation(), Term).
-notify(VHost, <<"policy">>, Name, Term, ActingUser) ->
+notify(VHost, <<"policy">>, Name, Term0, ActingUser) ->
+ Term = rabbit_data_coercion:atomize_keys(Term0),
+ update_matched_objects(VHost, Term, ActingUser),
rabbit_event:notify(policy_set, [{name, Name}, {vhost, VHost},
- {user_who_performed_action, ActingUser} | Term]),
- update_policies(VHost);
-notify(VHost, <<"operator_policy">>, Name, Term, ActingUser) ->
+ {user_who_performed_action, ActingUser} | Term]);
+notify(VHost, <<"operator_policy">>, Name, Term0, ActingUser) ->
+ Term = rabbit_data_coercion:atomize_keys(Term0),
+ update_matched_objects(VHost, Term, ActingUser),
rabbit_event:notify(policy_set, [{name, Name}, {vhost, VHost},
- {user_who_performed_action, ActingUser} | Term]),
- update_policies(VHost).
+ {user_who_performed_action, ActingUser} | Term]).
notify_clear(VHost, <<"policy">>, Name, ActingUser) ->
+ update_matched_objects(VHost, undefined, ActingUser),
rabbit_event:notify(policy_cleared, [{name, Name}, {vhost, VHost},
- {user_who_performed_action, ActingUser}]),
- update_policies(VHost);
+ {user_who_performed_action, ActingUser}]);
notify_clear(VHost, <<"operator_policy">>, Name, ActingUser) ->
+ update_matched_objects(VHost, undefined, ActingUser),
rabbit_event:notify(operator_policy_cleared,
[{name, Name}, {vhost, VHost},
- {user_who_performed_action, ActingUser}]),
- update_policies(VHost).
+ {user_who_performed_action, ActingUser}]).
%%----------------------------------------------------------------------------
@@ -376,10 +431,10 @@ notify_clear(VHost, <<"operator_policy">>, Name, ActingUser) ->
%% the comment in rabbit_binding:lock_route_tables/0 for more rationale.
%% [2] We could be here in a post-tx fun after the vhost has been
%% deleted; in which case it's fine to do nothing.
-update_policies(VHost) ->
+update_matched_objects(VHost, PolicyDef, ActingUser) ->
Tabs = [rabbit_queue, rabbit_durable_queue,
rabbit_exchange, rabbit_durable_exchange],
- {Xs, Qs} = rabbit_misc:execute_mnesia_transaction(
+ {XUpdateResults, QUpdateResults} = rabbit_misc:execute_mnesia_transaction(
fun() ->
[mnesia:lock({table, T}, write) || T <- Tabs], %% [1]
case catch {list(VHost), list_op(VHost)} of
@@ -394,8 +449,8 @@ update_policies(VHost) ->
Q <- rabbit_amqqueue:list(VHost)]}
end
end),
- [catch notify(X) || X <- Xs],
- [catch notify(Q) || Q <- Qs],
+ [catch maybe_notify_of_policy_change(XRes, PolicyDef, ActingUser) || XRes <- XUpdateResults],
+ [catch maybe_notify_of_policy_change(QRes, PolicyDef, ActingUser) || QRes <- QUpdateResults],
ok.
update_exchange(X = #exchange{name = XName,
@@ -441,46 +496,42 @@ update_queue(Q0, Policies, OpPolicies) when ?is_amqqueue(Q0) ->
end
end.
-notify(no_change)->
+maybe_notify_of_policy_change(no_change, _PolicyDef, _ActingUser)->
ok;
-notify({X1 = #exchange{}, X2 = #exchange{}}) ->
+maybe_notify_of_policy_change({X1 = #exchange{}, X2 = #exchange{}}, _PolicyDef, _ActingUser) ->
rabbit_exchange:policy_changed(X1, X2);
-notify({Q1, Q2}) when ?is_amqqueue(Q1), ?is_amqqueue(Q2) ->
+%% policy has been cleared
+maybe_notify_of_policy_change({Q1, Q2}, undefined, ActingUser) when ?is_amqqueue(Q1), ?is_amqqueue(Q2) ->
+ rabbit_event:notify(queue_policy_cleared, [
+ {name, amqqueue:get_name(Q2)},
+ {vhost, amqqueue:get_vhost(Q2)},
+ {type, amqqueue:get_type(Q2)},
+ {user_who_performed_action, ActingUser}
+ ]),
+ rabbit_amqqueue:policy_changed(Q1, Q2);
+%% policy has been added or updated
+maybe_notify_of_policy_change({Q1, Q2}, PolicyDef, ActingUser) when ?is_amqqueue(Q1), ?is_amqqueue(Q2) ->
+ rabbit_event:notify(queue_policy_updated, [
+ {name, amqqueue:get_name(Q2)},
+ {vhost, amqqueue:get_vhost(Q2)},
+ {type, amqqueue:get_type(Q2)},
+ {user_who_performed_action, ActingUser} | PolicyDef
+ ]),
rabbit_amqqueue:policy_changed(Q1, Q2).
-match(Name, Policies) ->
- case match_all(Name, Policies) of
- [] -> undefined;
- [Policy | _] -> Policy
- end.
-
-match_all(Name, Policies) ->
- lists:sort(fun sort_pred/2, [P || P <- Policies, matches(Name, P)]).
-
-matches(#resource{name = Name, kind = Kind, virtual_host = VHost} = Resource, Policy) ->
- matches_type(Kind, pget('apply-to', Policy)) andalso
- is_applicable(Resource, pget(definition, Policy)) andalso
- match =:= re:run(Name, pget(pattern, Policy), [{capture, none}]) andalso
- VHost =:= pget(vhost, Policy).
-
matches_type(exchange, <<"exchanges">>) -> true;
matches_type(queue, <<"queues">>) -> true;
matches_type(exchange, <<"all">>) -> true;
matches_type(queue, <<"all">>) -> true;
matches_type(_, _) -> false.
-sort_pred(A, B) -> pget(priority, A) >= pget(priority, B).
+priority_comparator(A, B) -> pget(priority, A) >= pget(priority, B).
is_applicable(#resource{kind = queue} = Resource, Policy) ->
- rabbit_amqqueue:is_policy_applicable(Resource, to_list(Policy));
+ rabbit_amqqueue:is_policy_applicable(Resource, rabbit_data_coercion:to_list(Policy));
is_applicable(_, _) ->
true.
-to_list(L) when is_list(L) ->
- L;
-to_list(M) when is_map(M) ->
- maps:to_list(M).
-
%%----------------------------------------------------------------------------
operator_policy_validation() ->
diff --git a/deps/rabbit/src/rabbit_policy_merge_strategy.erl b/deps/rabbit/src/rabbit_policy_merge_strategy.erl
index f2b79e5862..ab598af267 100644
--- a/deps/rabbit/src/rabbit_policy_merge_strategy.erl
+++ b/deps/rabbit/src/rabbit_policy_merge_strategy.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_policy_merge_strategy).
diff --git a/deps/rabbit/src/rabbit_prelaunch_cluster.erl b/deps/rabbit/src/rabbit_prelaunch_cluster.erl
index 9d3cda99e3..2456aefa9f 100644
--- a/deps/rabbit/src/rabbit_prelaunch_cluster.erl
+++ b/deps/rabbit/src/rabbit_prelaunch_cluster.erl
@@ -1,15 +1,24 @@
-module(rabbit_prelaunch_cluster).
+-include_lib("kernel/include/logger.hrl").
+
+-include_lib("rabbit_common/include/logging.hrl").
+
-export([setup/1]).
setup(Context) ->
- rabbit_log_prelaunch:debug(""),
- rabbit_log_prelaunch:debug("== Clustering =="),
- rabbit_log_prelaunch:debug("Preparing cluster status files"),
+ ?LOG_DEBUG(
+ "~n== Clustering ==", [],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
+ ?LOG_DEBUG(
+ "Preparing cluster status files", [],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
rabbit_node_monitor:prepare_cluster_status_files(),
case Context of
#{initial_pass := true} ->
- rabbit_log_prelaunch:debug("Upgrading Mnesia schema"),
+ ?LOG_DEBUG(
+ "Upgrading Mnesia schema", [],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
ok = rabbit_upgrade:maybe_upgrade_mnesia();
_ ->
ok
@@ -17,6 +26,8 @@ setup(Context) ->
%% It's important that the consistency check happens after
%% the upgrade, since if we are a secondary node the
%% primary node will have forgotten us
- rabbit_log_prelaunch:debug("Checking cluster consistency"),
+ ?LOG_DEBUG(
+ "Checking cluster consistency", [],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
rabbit_mnesia:check_cluster_consistency(),
ok.
diff --git a/deps/rabbit/src/rabbit_prelaunch_enabled_plugins_file.erl b/deps/rabbit/src/rabbit_prelaunch_enabled_plugins_file.erl
index 57fe32f8e6..660db4a6d1 100644
--- a/deps/rabbit/src/rabbit_prelaunch_enabled_plugins_file.erl
+++ b/deps/rabbit/src/rabbit_prelaunch_enabled_plugins_file.erl
@@ -2,18 +2,22 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_prelaunch_enabled_plugins_file).
+-include_lib("kernel/include/logger.hrl").
+
-include_lib("rabbit_common/include/rabbit.hrl").
+-include_lib("rabbit_common/include/logging.hrl").
-export([setup/1]).
setup(Context) ->
- rabbit_log_prelaunch:debug(""),
- rabbit_log_prelaunch:debug("== Enabled plugins file =="),
+ ?LOG_DEBUG(
+ "~n== Enabled plugins file ==", [],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
update_enabled_plugins_file(Context).
%% -------------------------------------------------------------------
@@ -33,21 +37,28 @@ do_update_enabled_plugins_file(#{enabled_plugins_file := File}, List) ->
SortedList = lists:usort(List),
case SortedList of
[] ->
- rabbit_log_prelaunch:debug("Marking all plugins as disabled");
+ ?LOG_DEBUG(
+ "Marking all plugins as disabled", [],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH});
_ ->
- rabbit_log_prelaunch:debug(
- "Marking the following plugins as enabled:"),
- [rabbit_log_prelaunch:debug(" - ~s", [P]) || P <- SortedList]
+ ?LOG_DEBUG(
+ lists:flatten(["Marking the following plugins as enabled:",
+ ["~n - ~s" || _ <- SortedList]]),
+ SortedList,
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH})
end,
Content = io_lib:format("~p.~n", [SortedList]),
case file:write_file(File, Content) of
ok ->
- rabbit_log_prelaunch:debug("Wrote plugins file: ~ts", [File]),
+ ?LOG_DEBUG(
+ "Wrote plugins file: ~ts", [File],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
ok;
{error, Reason} ->
- rabbit_log_prelaunch:error(
+ ?LOG_ERROR(
"Failed to update enabled plugins file \"~ts\" "
"from $RABBITMQ_ENABLED_PLUGINS: ~ts",
- [File, file:format_error(Reason)]),
+ [File, file:format_error(Reason)],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
throw({error, failed_to_update_enabled_plugins_file})
end.
diff --git a/deps/rabbit/src/rabbit_prelaunch_feature_flags.erl b/deps/rabbit/src/rabbit_prelaunch_feature_flags.erl
index cd7b276f4c..061e25468e 100644
--- a/deps/rabbit/src/rabbit_prelaunch_feature_flags.erl
+++ b/deps/rabbit/src/rabbit_prelaunch_feature_flags.erl
@@ -2,31 +2,40 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_prelaunch_feature_flags).
+-include_lib("kernel/include/logger.hrl").
+
+-include_lib("rabbit_common/include/logging.hrl").
+
-export([setup/1]).
setup(#{feature_flags_file := FFFile}) ->
- rabbit_log_prelaunch:debug(""),
- rabbit_log_prelaunch:debug("== Feature flags =="),
+ ?LOG_DEBUG(
+ "~n== Feature flags ==", [],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
case filelib:ensure_dir(FFFile) of
ok ->
- rabbit_log_prelaunch:debug("Initializing feature flags registry"),
+ ?LOG_DEBUG(
+ "Initializing feature flags registry", [],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
case rabbit_feature_flags:initialize_registry() of
ok ->
ok;
{error, Reason} ->
- rabbit_log_prelaunch:error(
+ ?LOG_ERROR(
"Failed to initialize feature flags registry: ~p",
- [Reason]),
+ [Reason],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
throw({error, failed_to_initialize_feature_flags_registry})
end;
{error, Reason} ->
- rabbit_log_prelaunch:error(
+ ?LOG_ERROR(
"Failed to create feature flags file \"~ts\" directory: ~ts",
- [FFFile, file:format_error(Reason)]),
+ [FFFile, file:format_error(Reason)],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
throw({error, failed_to_create_feature_flags_file_directory})
end.
diff --git a/deps/rabbit/src/rabbit_prelaunch_logging.erl b/deps/rabbit/src/rabbit_prelaunch_logging.erl
index 6e3f040ec5..421280c34d 100644
--- a/deps/rabbit/src/rabbit_prelaunch_logging.erl
+++ b/deps/rabbit/src/rabbit_prelaunch_logging.erl
@@ -2,74 +2,1719 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2019-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
+%% @author The RabbitMQ team
+%% @copyright 2019-2021 VMware, Inc. or its affiliates.
+%%
+%% @doc
+%% This module manages the configuration of the Erlang Logger facility. In
+%% other words, it translates the RabbitMQ logging configuration (in the
+%% Cuttlefish format or classic Erlang-term-based configuration) into Erlang
+%% Logger handler setups.
+%%
+%% Configuring the Erlang Logger is done in two steps:
+%% <ol>
+%% <li>Logger handler configurations are created based on the configuration
+%% and the context (see {@link //rabbit_common/rabbit_env}).</li>
+%% <li>Created handlers are installed (i.e. they become active). Any handlers
+%% previously installed by this module are removed.</li>
+%% </ol>
+%%
+%% It also takes care of setting the `$ERL_CRASH_DUMP' variable to enable
+%% Erlang core dumps.
+%%
+%% Note that before this module handles the Erlang Logger, {@link
+%% //rabbitmq_prelaunch/rabbit_prelaunch_early_logging} configures basic
+%% logging to have messages logged as soon as possible during RabbitMQ
+%% startup.
+%%
+%% == How to configure RabbitMQ logging ==
+%%
+%% RabbitMQ supports a main/default logging output and per-category outputs.
+%% An output is a combination of a destination (a text file or stdout for
+%% example) and a message format (e.g. plain text or JSON).
+%%
+%% Here is the Erlang-term-based configuration expected and supported by this
+%% module:
+%%
+%% ```
+%% {rabbit, [
+%% {log_root, string()},
+%% {log, [
+%% {categories, [
+%% {default, [
+%% {level, Level}
+%% ]},
+%% {CategoryName, [
+%% {level, Level},
+%% {file, Filename}
+%% ]}
+%% ]},
+%%
+%% {console, [
+%% {level, Level},
+%% {enabled, boolean()}
+%% ]},
+%%
+%% {exchange, [
+%% {level, Level},
+%% {enabled, boolean()}
+%% ]},
+%%
+%% {file, [
+%% {level, Level},
+%% {file, Filename | false},
+%% {date, RotationDateSpec},
+%% {size, RotationSize},
+%% {count, RotationCount},
+%% ]},
+%%
+%% {journald, [
+%% {level, Level},
+%% {enabled, boolean()},
+%% {fields, proplists:proplist()}
+%% ]}
+%%
+%% {syslog, [
+%% {level, Level},
+%% {enabled, boolean()}
+%% ]}
+%% ]}
+%% ]}.
+%%
+%% Level = logger:level().
+%% Filename = file:filename().
+%% RotationDateSpec = string(). % Pattern format used by newsyslog.conf(5).
+%% RotationSize = non_neg_integer() | infinity.
+%% RotationCount = non_neg_integer().
+%% '''
+%%
+%% See `priv/schema/rabbit.schema' for the definition of the Cuttlefish
+%% configuration schema.
+
-module(rabbit_prelaunch_logging).
--export([setup/1]).
+-include_lib("kernel/include/logger.hrl").
+-include_lib("rabbit_common/include/logging.hrl").
+
+-export([setup/1,
+ set_log_level/1,
+ log_locations/0]).
+
+-ifdef(TEST).
+-export([clear_config_run_number/0,
+ get_less_severe_level/2]).
+-endif.
+
+-export_type([log_location/0]).
+
+-type log_location() :: file:filename() | string().
+%% A short description of an output.
+%%
+%% If the output is the console, the location is either `"<stdout>"' or
+%% `"<stderr>"'.
+%%
+%% If the output is an exchange, the location is the string `"exchange:"' with
+%% the exchange name appended.
+%%
+%% If the output is a file, the location is the absolute filename.
+%%
+%% If the output is journald, the location is `"<journald>"'.
+%%
+%% If the output is syslog, the location is the string `"syslog:"' with the
+%% syslog server hostname appended.
+
+-type category_name() :: atom().
+%% The name of a log category.
+%% Erlang Logger uses the concept of "domain" which is an ordered list of
+%% atoms. A category is mapped to the domain `[?RMQLOG_SUPER_DOMAIN_NAME,
+%% Category]'. In other words, a category is a subdomain of the `rabbitmq'
+%% domain.
+
+-type console_props() :: [{level, logger:level()} |
+ {enabled, boolean()} |
+ {stdio, stdout | stderr} |
+ {formatter, {atom(), term()}}].
+%% Console properties are the parameters in the configuration file for a
+%% console-based handler.
+
+-type exchange_props() :: [{level, logger:level()} |
+ {enabled, boolean()} |
+ {formatter, {atom(), term()}}].
+%% Exchange properties are the parameters in the configuration file for an
+%% exchange-based handler.
+
+-type file_props() :: [{level, logger:level()} |
+ {file, file:filename() | false} |
+ {date, string()} |
+ {size, non_neg_integer()} |
+ {count, non_neg_integer()} |
+ {formatter, {atom(), term()}}].
+%% File properties are the parameters in the configuration file for a
+%% file-based handler.
+
+-type journald_props() :: [{level, logger:level()} |
+ {enabled, boolean()} |
+ {fields, proplists:proplist()}].
+%% journald properties are the parameters in the configuration file for a
+%% journald-based handler.
+
+-type syslog_props() :: [{level, logger:level()} |
+ {enabled, boolean()} |
+ {formatter, {atom(), term()}}].
+%% Syslog properties are the parameters in the configuration file for a
+%% syslog-based handler.
+
+-type main_log_env() :: [{console, console_props()} |
+ {exchange, exchange_props()} |
+ {file, file_props()} |
+ {journald, journald_props()} |
+ {syslog, syslog_props()}].
+%% The main log environment is the parameters in the configuration file for
+%% the main log handler (i.e. where all messages go by default).
+
+-type per_cat_env() :: [{level, logger:level()} |
+ {file, file:filename()}].
+%% A per-category log environment is the parameters in the configuration file
+%% for a specific category log handler. There can be one per category.
+
+-type default_cat_env() :: [{level, logger:level()}].
+%% The `default' category log environment is special (read: awkward) in the
+%% configuration file. It is used to change the log level of the main log
+%% handler.
+
+-type log_app_env() :: [main_log_env() |
+ {categories, [{default, default_cat_env()} |
+ {category_name(), per_cat_env()}]}].
+%% The value for the `log' key in the `rabbit' application environment.
+
+-type global_log_config() :: #{level => logger:level() | all | none,
+ outputs := [logger:handler_config()]}.
+%% This is the internal structure used to prepare the handlers for the
+%% main/global messages (i.e. not marked with a specific category).
+
+-type per_cat_log_config() :: global_log_config().
+%% This is the internal structure used to prepare the handlers for
+%% category-specific messages.
+
+-type log_config() :: #{global := global_log_config(),
+ per_category := #{
+ category_name() => per_cat_log_config()}}.
+%% This is the internal structure to store the global and per-category
+%% configurations, to prepare the final handlers.
+
+-type handler_key() :: atom().
+%% Key used to deduplicate handlers before they are installed in Logger.
+
+-type id_assignment_state() :: #{config_run_number := pos_integer(),
+ next_file := pos_integer()}.
+%% State used while assigning IDs to handlers.
+
+-spec setup(rabbit_env:context()) -> ok.
+%% @doc
+%% Configures or reconfigures logging.
+%%
+%% The logging framework is the builtin Erlang Logger API. The configuration
+%% is based on the configuration file and the environment.
+%%
+%% In addition to logging, it sets the `$ERL_CRASH_DUMP' environment variable
+%% to enable Erlang crash dumps.
+%%
+%% @param Context the RabbitMQ context (see {@link
+%% //rabbitmq_prelaunch/rabbit_prelaunch:get_context/0}).
setup(Context) ->
- rabbit_log_prelaunch:debug(""),
- rabbit_log_prelaunch:debug("== Logging =="),
+ ?LOG_DEBUG("\n== Logging ==",
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
+ ok = compute_config_run_number(),
ok = set_ERL_CRASH_DUMP_envvar(Context),
- ok = configure_lager(Context).
+ ok = configure_logger(Context).
+
+-spec set_log_level(logger:level()) -> ok | {error, term()}.
+%% @doc
+%% Changes the log level.
+%%
+%% The log level is changed for the following layers of filtering:
+%% <ul>
+%% <li>the primary log level</li>
+%% <li>the module log level(s)</li>
+%% <li>the handler log level(s)</li>
+%% </ul>
+%%
+%% @param Level the new log level.
+
+set_log_level(Level) ->
+ %% Primary log level.
+ ?LOG_DEBUG(
+ "Logging: changing primary log level to ~s", [Level],
+ #{domain => ?RMQLOG_DOMAIN_GLOBAL}),
+ logger:set_primary_config(level, Level),
+
+ %% Per-module log level.
+ lists:foreach(
+ fun({Module, _}) ->
+ ?LOG_DEBUG(
+ "Logging: changing '~s' module log level to ~s",
+ [Module, Level],
+ #{domain => ?RMQLOG_DOMAIN_GLOBAL}),
+ _ = logger:set_module_level(Module, Level)
+ end, logger:get_module_level()),
+
+ %% Per-handler log level.
+ %% In the handler, we have the "top-level" log level for that handler, plus
+ %% per-domain (per-category) levels inside a filter. We need to change all
+ %% of them to the new level.
+ lists:foreach(
+ fun
+ (#{id := Id, filters := Filters, config := Config}) ->
+ ?LOG_DEBUG(
+ "Logging: changing '~s' handler log level to ~s",
+ [Id, Level],
+ #{domain => ?RMQLOG_DOMAIN_GLOBAL}),
+ Filters1 = lists:map(
+ fun
+ %% We only modify the filter we know about.
+ %% The argument to that filter is of the form:
+ %% #{CatName => Level}
+ %% (where CatName is an atom())
+ %%
+ %% We don't change the level for a category if
+ %% it is set to 'none' because it means the
+ %% category is filtered out by this filter.
+ ({?FILTER_NAME, {Fun, Arg}}) when is_map(Arg) ->
+ Arg1 = maps:map(
+ fun
+ (_, none) -> none;
+ (_, _) -> Level
+ end, Arg),
+ {?FILTER_NAME, {Fun, Arg1}};
+ %% We also change what we do with Erlang
+ %% progress reports.
+ ({progress_reports, {Fun, _}}) ->
+ Action = case Level of
+ debug -> log;
+ _ -> stop
+ end,
+ {progress_reports, {Fun, Action}};
+ %% Other filters are left untouched.
+ (Filter) ->
+ Filter
+ end, Filters),
+ %% If the log level is set to `debug', we turn off burst limit to
+ %% make sure all debug messages make it.
+ Config1 = adjust_burst_limit(Config, Level),
+ logger:set_handler_config(Id, filters, Filters1),
+ logger:set_handler_config(Id, config, Config1),
+ logger:set_handler_config(Id, level, Level),
+ ok;
+ (#{id := Id, config := Config}) ->
+ ?LOG_DEBUG(
+ "Logging: changing '~s' handler log level to ~s",
+ [Id, Level],
+ #{domain => ?RMQLOG_DOMAIN_GLOBAL}),
+ %% If the log level is set to `debug', we turn off burst limit to
+ %% make sure all debug messages make it.
+ Config1 = adjust_burst_limit(Config, Level),
+ logger:set_handler_config(Id, config, Config1),
+ logger:set_handler_config(Id, level, Level),
+ ok
+ end, logger:get_handler_config()),
+ ok.
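+%% As an illustration only, a caller could switch the whole node to debug
+%% logging across all three layers of filtering with:
+%%   set_log_level(debug)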
+
+-spec log_locations() -> [file:filename() | string()].
+%% @doc
+%% Returns the list of output locations.
+%%
+%% For file-based handlers, the absolute filename is returned.
+%%
+%% For console-based handlers, a string literal is returned; either
+%% `"<stdout>"' or `"<stderr>"'.
+%%
+%% For exchange-based handlers, a string of the form `"exchange:Exchange"' is
+%% returned, where `Exchange' is the name of the exchange.
+%%
+%% For journald-based handlers, a string literal is returned; `"<journald>"'.
+%%
+%% For syslog-based handlers, a string of the form `"syslog:Hostname"' is
+%% returned, where `Hostname' is either the hostname of the remote syslog
+%% server, or an empty string if none is configured (which means logging to
+%% localhost).
+%%
+%% @returns the list of output locations.
+%%
+%% @see log_location()
+
+log_locations() ->
+ Handlers = logger:get_handler_config(),
+ log_locations(Handlers, []).
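+%% As an illustration only (paths are hypothetical), the returned list could
+%% look like:
+%%   ["/var/log/rabbitmq/rabbit@myhost.log", "<stdout>"]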
+
+log_locations([#{module := Mod,
+ config := #{type := file,
+ file := Filename}} | Rest],
+ Locations)
+ when ?IS_STD_H_COMPAT(Mod) ->
+ Locations1 = add_once(Locations, Filename),
+ log_locations(Rest, Locations1);
+log_locations([#{module := Mod,
+ config := #{type := standard_io}} | Rest],
+ Locations)
+ when ?IS_STD_H_COMPAT(Mod) ->
+ Locations1 = add_once(Locations, "<stdout>"),
+ log_locations(Rest, Locations1);
+log_locations([#{module := Mod,
+ config := #{type := standard_error}} | Rest],
+ Locations)
+ when ?IS_STD_H_COMPAT(Mod) ->
+ Locations1 = add_once(Locations, "<stderr>"),
+ log_locations(Rest, Locations1);
+log_locations([#{module := systemd_journal_h} | Rest],
+ Locations) ->
+ Locations1 = add_once(Locations, "<journald>"),
+ log_locations(Rest, Locations1);
+log_locations([#{module := syslog_logger_h} | Rest],
+ Locations) ->
+ Host = application:get_env(syslog, dest_host, ""),
+ Locations1 = add_once(
+ Locations,
+ rabbit_misc:format("syslog:~s", [Host])),
+ log_locations(Rest, Locations1);
+log_locations([#{module := rabbit_logger_exchange_h,
+ config := #{exchange := Exchange}} | Rest],
+ Locations) ->
+ Locations1 = add_once(
+ Locations,
+ rabbit_misc:format("exchange:~p", [Exchange])),
+ log_locations(Rest, Locations1);
+log_locations([_ | Rest], Locations) ->
+ log_locations(Rest, Locations);
+log_locations([], Locations) ->
+ lists:sort(Locations).
+
+add_once(Locations, Location) ->
+ case lists:member(Location, Locations) of
+ false -> [Location | Locations];
+ true -> Locations
+ end.
+
+%% -------------------------------------------------------------------
+%% ERL_CRASH_DUMP setting.
+%% -------------------------------------------------------------------
-set_ERL_CRASH_DUMP_envvar(#{log_base_dir := LogBaseDir}) ->
+-spec set_ERL_CRASH_DUMP_envvar(rabbit_env:context()) -> ok.
+
+set_ERL_CRASH_DUMP_envvar(Context) ->
case os:getenv("ERL_CRASH_DUMP") of
false ->
+ LogBaseDir = get_log_base_dir(Context),
ErlCrashDump = filename:join(LogBaseDir, "erl_crash.dump"),
- rabbit_log_prelaunch:debug(
+ ?LOG_DEBUG(
"Setting $ERL_CRASH_DUMP environment variable to \"~ts\"",
- [ErlCrashDump]),
+ [ErlCrashDump],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
os:putenv("ERL_CRASH_DUMP", ErlCrashDump),
ok;
ErlCrashDump ->
- rabbit_log_prelaunch:debug(
+ ?LOG_DEBUG(
"$ERL_CRASH_DUMP environment variable already set to \"~ts\"",
- [ErlCrashDump]),
+ [ErlCrashDump],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
+ ok
+ end.
+
+-spec get_log_base_dir(rabbit_env:context()) -> file:filename().
+%% @doc
+%% Returns the log base directory.
+%%
+%% The precedence is:
+%% <ol>
+%% <li>the $RABBITMQ_LOG_BASE variable if overridden in the environment</li>
+%% <li>the value of `log_root' in the application environment</li>
+%% <li>the default value</li>
+%% </ol>
+%%
+%% @param Context the RabbitMQ context (see {@link
+%% //rabbitmq_prelaunch/rabbit_prelaunch:get_context/0}).
+%% @returns an absolute path to a directory.
+
+get_log_base_dir(#{log_base_dir := LogBaseDirFromEnv} = Context) ->
+ case rabbit_env:has_var_been_overridden(Context, log_base_dir) of
+ false -> application:get_env(rabbit, log_root, LogBaseDirFromEnv);
+ true -> LogBaseDirFromEnv
+ end.
+
+%% -------------------------------------------------------------------
+%% Logger's handlers configuration.
+%% -------------------------------------------------------------------
+
+-define(CONFIG_RUN_NUMBER_KEY, {?MODULE, config_run_number}).
+
+-spec compute_config_run_number() -> ok.
+%% @doc
+%% Computes the next configuration run number.
+%%
+%% We use a "configuration run number" to distinguish previously installed
+%% handlers (if any) from the ones we want to install in a subsequent call of
+%% {@link setup/1}.
+%%
+%% The configuration run number appears in the generated IDs for handlers.
+%% This is how we can filter old ones.
+%%
+%% @returns `ok' (the new run number is stored in a persistent term).
+
+compute_config_run_number() ->
+ RunNum = persistent_term:get(?CONFIG_RUN_NUMBER_KEY, 0),
+ ok = persistent_term:put(?CONFIG_RUN_NUMBER_KEY, RunNum + 1).
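+%% As an illustration only: the first call to setup/1 uses run number 1, so
+%% handlers get IDs such as 'rmq_1_file_1'; a later reconfiguration uses run
+%% number 2 and installs 'rmq_2_file_1', after which remove_old_handlers()
+%% drops the handlers from run 1.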
+
+-spec get_config_run_number() -> pos_integer().
+
+get_config_run_number() ->
+ persistent_term:get(?CONFIG_RUN_NUMBER_KEY).
+
+-ifdef(TEST).
+-spec clear_config_run_number() -> ok.
+%% @doc
+%% Clears the recorded configuration run number.
+%%
+%% In testsuites, we want to be able to reset that number so that the next
+%% configuration run starts at 1 again, because testcases have expectations on
+%% handler IDs.
+
+clear_config_run_number() ->
+ _ = persistent_term:erase(?CONFIG_RUN_NUMBER_KEY),
+ ok.
+-endif.
+
+-spec configure_logger(rabbit_env:context()) -> ok.
+
+configure_logger(Context) ->
+ %% Configure main handlers.
+ %% We distinguish them by their type and possibly other
+ %% parameters (file name, syslog settings, etc.).
+ LogConfig0 = get_log_configuration_from_app_env(),
+ LogConfig1 = handle_default_and_overridden_outputs(LogConfig0, Context),
+ LogConfig2 = apply_log_levels_from_env(LogConfig1, Context),
+ LogConfig3 = make_filenames_absolute(LogConfig2, Context),
+ LogConfig4 = configure_formatters(LogConfig3, Context),
+
+ %% At this point, the log configuration is complete: we know the global
+ %% parameters as well as the per-category settings.
+ %%
+ %% Now, we turn that into a map of handlers. We use a map to deduplicate
+ %% handlers, for instance when the same file is used for global logging
+ %% and for a specific category.
+ %%
+ %% The map is then converted to a list, once the deduplication is done and
+ %% IDs are assigned to handlers.
+ Handlers = create_logger_handlers_conf(LogConfig4),
+ ?LOG_DEBUG(
+ "Logging: logger handlers:~n ~p", [Handlers],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
+
+ %% We can now install the new handlers. The function takes care of
+ %% removing previously configured handlers (after installing the new
+ %% ones to ensure we don't lose a message).
+ ok = install_handlers(Handlers),
+
+ %% Let's log a message per log level (if debug logging is enabled). This
+ %% is handy if the user wants to verify that the configuration matches
+ %% their expectations.
+ ok = maybe_log_test_messages(LogConfig3).
+
+-spec get_log_configuration_from_app_env() -> log_config().
+
+get_log_configuration_from_app_env() ->
+ %% The log configuration in the Cuttlefish configuration file or the
+ %% application environment is not structured logically. This function is
+ %% responsible for extracting the configuration and organizing it. If one day
+ %% we decide to fix the configuration structure, we just have to modify
+ %% this function and normalize_*().
+ Env = get_log_app_env(),
+ EnvWithoutCats = proplists:delete(categories, Env),
+ DefaultAndCatProps = proplists:get_value(categories, Env, []),
+ DefaultProps = proplists:get_value(default, DefaultAndCatProps, []),
+ CatProps = proplists:delete(default, DefaultAndCatProps),
+
+ %% This "normalization" turns the RabbitMQ-specific configuration into a
+ %% structure which stores Logger handler configurations. That structure is
+ %% later modified to reach the final handler configurations.
+ PerCatConfig = maps:from_list(
+ [{Cat, normalize_per_cat_log_config(Props)}
+ || {Cat, Props} <- CatProps]),
+ GlobalConfig = normalize_main_log_config(EnvWithoutCats, DefaultProps),
+ #{global => GlobalConfig,
+ per_category => PerCatConfig}.
+
+-spec get_log_app_env() -> log_app_env().
+
+get_log_app_env() ->
+ application:get_env(rabbit, log, []).
+
+-spec normalize_main_log_config(main_log_env(), default_cat_env()) ->
+ global_log_config().
+
+normalize_main_log_config(Props, DefaultProps) ->
+ Outputs = case proplists:get_value(level, DefaultProps) of
+ undefined -> #{outputs => []};
+ Level -> #{outputs => [],
+ level => Level}
+ end,
+ Props1 = compute_implicitly_enabled_output(Props),
+ normalize_main_log_config1(Props1, Outputs).
+
+compute_implicitly_enabled_output(Props) ->
+ {ConsoleEnabled, Props1} = compute_implicitly_enabled_output(
+ console, Props),
+ {ExchangeEnabled, Props2} = compute_implicitly_enabled_output(
+ exchange, Props1),
+ {JournaldEnabled, Props3} = compute_implicitly_enabled_output(
+ journald, Props2),
+ {SyslogEnabled, Props4} = compute_implicitly_enabled_output(
+ syslog, Props3),
+ FileDisabledByDefault =
+ ConsoleEnabled orelse
+ ExchangeEnabled orelse
+ JournaldEnabled orelse
+ SyslogEnabled,
+
+ FileProps = proplists:get_value(file, Props4, []),
+ case is_output_explicitely_enabled(FileProps) of
+ true ->
+ Props4;
+ false ->
+ case FileDisabledByDefault of
+ true ->
+ FileProps1 = lists:keystore(
+ file, 1, FileProps, {file, false}),
+ lists:keystore(
+ file, 1, Props4, {file, FileProps1});
+ false ->
+ Props4
+ end
+ end.
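+%% As an illustration only: with `[{console, [{level, info}]}]' in the
+%% configuration, the console output is implicitly enabled; since no `file'
+%% section explicitly enables the file output, `{file, false}' is added to its
+%% properties and the file output ends up disabled.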
+
+compute_implicitly_enabled_output(PropName, Props) ->
+ SubProps = proplists:get_value(PropName, Props, []),
+ {Enabled, SubProps1} = compute_implicitly_enabled_output1(SubProps),
+ {Enabled,
+ lists:keystore(PropName, 1, Props, {PropName, SubProps1})}.
+
+compute_implicitly_enabled_output1(SubProps) ->
+ %% We consider the output enabled or disabled if:
+ %% * it is explicitly marked as such, or
+ %% * the level is set to a log level (enabled) or `none' (disabled)
+ Enabled = proplists:get_value(
+ enabled, SubProps,
+ proplists:get_value(level, SubProps, none) =/= none),
+ {Enabled,
+ lists:keystore(enabled, 1, SubProps, {enabled, Enabled})}.
+
+is_output_explicitely_enabled(FileProps) ->
+ %% We consider the file output explicitly enabled if:
+ %% * the file is explicitly set to a filename, or
+ %% * the level is set to an actual log level (i.e. not `none')
+ File = proplists:get_value(file, FileProps),
+ Level = proplists:get_value(level, FileProps),
+ is_list(File) orelse (Level =/= undefined andalso Level =/= none).
+
+normalize_main_log_config1([{Type, Props} | Rest],
+ #{outputs := Outputs} = LogConfig) ->
+ Outputs1 = normalize_main_output(Type, Props, Outputs),
+ LogConfig1 = LogConfig#{outputs => Outputs1},
+ normalize_main_log_config1(Rest, LogConfig1);
+normalize_main_log_config1([], LogConfig) ->
+ LogConfig.
+
+-spec normalize_main_output
+(console, console_props(), [logger:handler_config()]) ->
+ [logger:handler_config()];
+(exchange, exchange_props(), [logger:handler_config()]) ->
+ [logger:handler_config()];
+(file, file_props(), [logger:handler_config()]) ->
+ [logger:handler_config()];
+(journald, journald_props(), [logger:handler_config()]) ->
+ [logger:handler_config()];
+(syslog, syslog_props(), [logger:handler_config()]) ->
+ [logger:handler_config()].
+
+normalize_main_output(console, Props, Outputs) ->
+ normalize_main_console_output(
+ Props,
+ #{module => rabbit_logger_std_h,
+ config => #{type => standard_io}},
+ Outputs);
+normalize_main_output(exchange, Props, Outputs) ->
+ normalize_main_exchange_output(
+ Props,
+ #{module => rabbit_logger_exchange_h,
+ config => #{}},
+ Outputs);
+normalize_main_output(file, Props, Outputs) ->
+ normalize_main_file_output(
+ Props,
+ #{module => rabbit_logger_std_h,
+ config => #{type => file}},
+ Outputs);
+normalize_main_output(journald, Props, Outputs) ->
+ normalize_main_journald_output(
+ Props,
+ #{module => systemd_journal_h,
+ config => #{}},
+ Outputs);
+normalize_main_output(syslog, Props, Outputs) ->
+ normalize_main_syslog_output(
+ Props,
+ #{module => syslog_logger_h,
+ config => #{}},
+ Outputs).
+
+-spec normalize_main_file_output(file_props(), logger:handler_config(),
+ [logger:handler_config()]) ->
+ [logger:handler_config()].
+
+normalize_main_file_output(Props, Output, Outputs) ->
+ Enabled = case proplists:get_value(file, Props) of
+ false -> false;
+ _ -> true
+ end,
+ case Enabled of
+ true -> normalize_main_file_output1(Props, Output, Outputs);
+ false -> remove_main_file_output(Outputs)
+ end.
+
+normalize_main_file_output1(
+ [{file, Filename} | Rest],
+ #{config := Config} = Output, Outputs) ->
+ Output1 = Output#{config => Config#{file => Filename}},
+ normalize_main_file_output1(Rest, Output1, Outputs);
+normalize_main_file_output1(
+ [{level, Level} | Rest],
+ Output, Outputs) ->
+ Output1 = Output#{level => Level},
+ normalize_main_file_output1(Rest, Output1, Outputs);
+normalize_main_file_output1(
+ [{date, DateSpec} | Rest],
+ #{config := Config} = Output, Outputs) ->
+ Output1 = Output#{config => Config#{rotate_on_date => DateSpec}},
+ normalize_main_file_output1(Rest, Output1, Outputs);
+normalize_main_file_output1(
+ [{compress, Compress} | Rest],
+ #{config := Config} = Output, Outputs) ->
+ Output1 = Output#{config => Config#{compress_on_rotate => Compress}},
+ normalize_main_file_output1(Rest, Output1, Outputs);
+normalize_main_file_output1(
+ [{size, Size} | Rest],
+ #{config := Config} = Output, Outputs) ->
+ Output1 = Output#{config => Config#{max_no_bytes => Size}},
+ normalize_main_file_output1(Rest, Output1, Outputs);
+normalize_main_file_output1(
+ [{count, Count} | Rest],
+ #{config := Config} = Output, Outputs) ->
+ Output1 = Output#{config => Config#{max_no_files => Count}},
+ normalize_main_file_output1(Rest, Output1, Outputs);
+normalize_main_file_output1(
+ [{formatter, undefined} | Rest],
+ Output, Outputs) ->
+ normalize_main_file_output1(Rest, Output, Outputs);
+normalize_main_file_output1(
+ [{formatter, Formatter} | Rest],
+ Output, Outputs) ->
+ Output1 = Output#{formatter => Formatter},
+ normalize_main_file_output1(Rest, Output1, Outputs);
+normalize_main_file_output1([], Output, Outputs) ->
+ [Output | Outputs].
+
+remove_main_file_output(Outputs) ->
+ lists:filter(
+ fun
+ (#{module := rabbit_logger_std_h,
+ config := #{type := file}}) -> false;
+ (_) -> true
+ end, Outputs).
+
+-spec normalize_main_console_output(console_props(), logger:handler_config(),
+ [logger:handler_config()]) ->
+ [logger:handler_config()].
+
+normalize_main_console_output(Props, Output, Outputs) ->
+ Enabled = proplists:get_value(enabled, Props),
+ case Enabled of
+ true -> normalize_main_console_output1(Props, Output, Outputs);
+ false -> remove_main_console_output(Output, Outputs)
+ end.
+
+normalize_main_console_output1(
+ [{enabled, true} | Rest],
+ Output, Outputs) ->
+ normalize_main_console_output1(Rest, Output, Outputs);
+normalize_main_console_output1(
+ [{level, Level} | Rest],
+ Output, Outputs) ->
+ Output1 = Output#{level => Level},
+ normalize_main_console_output1(Rest, Output1, Outputs);
+normalize_main_console_output1(
+ [{stdio, stdout} | Rest],
+ #{config := Config} = Output, Outputs) ->
+ Config1 = Config#{type => standard_io},
+ Output1 = Output#{config => Config1},
+ normalize_main_console_output1(Rest, Output1, Outputs);
+normalize_main_console_output1(
+ [{stdio, stderr} | Rest],
+ #{config := Config} = Output, Outputs) ->
+ Config1 = Config#{type => standard_error},
+ Output1 = Output#{config => Config1},
+ normalize_main_console_output1(Rest, Output1, Outputs);
+normalize_main_console_output1(
+ [{formatter, undefined} | Rest],
+ Output, Outputs) ->
+ normalize_main_console_output1(Rest, Output, Outputs);
+normalize_main_console_output1(
+ [{formatter, Formatter} | Rest],
+ Output, Outputs) ->
+ Output1 = Output#{formatter => Formatter},
+ normalize_main_console_output1(Rest, Output1, Outputs);
+normalize_main_console_output1([], Output, Outputs) ->
+ [Output | Outputs].
+
+remove_main_console_output(
+ #{module := Mod1, config := #{type := Stddev}},
+ Outputs)
+ when ?IS_STD_H_COMPAT(Mod1) andalso
+ ?IS_STDDEV(Stddev) ->
+ lists:filter(
+ fun
+ (#{module := Mod2,
+ config := #{type := standard_io}})
+ when ?IS_STD_H_COMPAT(Mod2) ->
+ false;
+ (#{module := Mod2,
+ config := #{type := standard_error}})
+ when ?IS_STD_H_COMPAT(Mod2) ->
+ false;
+ (_) ->
+ true
+ end, Outputs).
+
+-spec normalize_main_exchange_output(
+ exchange_props(), logger:handler_config(),
+ [logger:handler_config()]) ->
+ [logger:handler_config()].
+
+normalize_main_exchange_output(Props, Output, Outputs) ->
+ Enabled = proplists:get_value(enabled, Props),
+ case Enabled of
+ true -> normalize_main_exchange_output1(Props, Output, Outputs);
+ false -> remove_main_exchange_output(Output, Outputs)
+ end.
+
+normalize_main_exchange_output1(
+ [{enabled, true} | Rest],
+ Output, Outputs) ->
+ normalize_main_exchange_output1(Rest, Output, Outputs);
+normalize_main_exchange_output1(
+ [{level, Level} | Rest],
+ Output, Outputs) ->
+ Output1 = Output#{level => Level},
+ normalize_main_exchange_output1(Rest, Output1, Outputs);
+normalize_main_exchange_output1(
+ [{formatter, undefined} | Rest],
+ Output, Outputs) ->
+ normalize_main_exchange_output1(Rest, Output, Outputs);
+normalize_main_exchange_output1(
+ [{formatter, Formatter} | Rest],
+ Output, Outputs) ->
+ Output1 = Output#{formatter => Formatter},
+ normalize_main_exchange_output1(Rest, Output1, Outputs);
+normalize_main_exchange_output1([], Output, Outputs) ->
+ [Output | Outputs].
+
+remove_main_exchange_output(
+ #{module := rabbit_logger_exchange_h}, Outputs) ->
+ lists:filter(
+ fun
+ (#{module := rabbit_logger_exchange_h}) -> false;
+ (_) -> true
+ end, Outputs).
+
+-spec normalize_main_journald_output(journald_props(), logger:handler_config(),
+ [logger:handler_config()]) ->
+ [logger:handler_config()].
+
+normalize_main_journald_output(Props, Output, Outputs) ->
+ Enabled = proplists:get_value(enabled, Props),
+ case Enabled of
+ true -> normalize_main_journald_output1(Props, Output, Outputs);
+ false -> remove_main_journald_output(Output, Outputs)
+ end.
+
+normalize_main_journald_output1(
+ [{enabled, true} | Rest],
+ Output, Outputs) ->
+ normalize_main_journald_output1(Rest, Output, Outputs);
+normalize_main_journald_output1(
+ [{level, Level} | Rest],
+ Output, Outputs) ->
+ Output1 = Output#{level => Level},
+ normalize_main_journald_output1(Rest, Output1, Outputs);
+normalize_main_journald_output1(
+ [{fields, FieldMapping} | Rest],
+ #{config := Config} = Output, Outputs) ->
+ Config1 = Config#{fields => FieldMapping},
+ Output1 = Output#{config => Config1},
+ normalize_main_journald_output1(Rest, Output1, Outputs);
+normalize_main_journald_output1(
+ [{formatter, undefined} | Rest],
+ Output, Outputs) ->
+ normalize_main_journald_output1(Rest, Output, Outputs);
+normalize_main_journald_output1(
+ [{formatter, Formatter} | Rest],
+ Output, Outputs) ->
+ Output1 = Output#{formatter => Formatter},
+ normalize_main_journald_output1(Rest, Output1, Outputs);
+normalize_main_journald_output1([], Output, Outputs) ->
+ [Output | Outputs].
+
+remove_main_journald_output(
+ #{module := systemd_journal_h},
+ Outputs) ->
+ lists:filter(
+ fun
+ (#{module := systemd_journal_h}) -> false;
+ (_) -> true
+ end, Outputs).
+
+-spec normalize_main_syslog_output(
+ syslog_props(), logger:handler_config(),
+ [logger:handler_config()]) ->
+ [logger:handler_config()].
+
+normalize_main_syslog_output(Props, Output, Outputs) ->
+ Enabled = proplists:get_value(enabled, Props),
+ case Enabled of
+ true -> normalize_main_syslog_output1(Props, Output, Outputs);
+ false -> remove_main_syslog_output(Output, Outputs)
+ end.
+
+normalize_main_syslog_output1(
+ [{enabled, true} | Rest],
+ Output, Outputs) ->
+ normalize_main_syslog_output1(Rest, Output, Outputs);
+normalize_main_syslog_output1(
+ [{level, Level} | Rest],
+ Output, Outputs) ->
+ Output1 = Output#{level => Level},
+ normalize_main_syslog_output1(Rest, Output1, Outputs);
+normalize_main_syslog_output1(
+ [{formatter, undefined} | Rest],
+ Output, Outputs) ->
+ normalize_main_syslog_output1(Rest, Output, Outputs);
+normalize_main_syslog_output1(
+ [{formatter, Formatter} | Rest],
+ Output, Outputs) ->
+ Output1 = Output#{formatter => Formatter},
+ normalize_main_syslog_output1(Rest, Output1, Outputs);
+normalize_main_syslog_output1([], Output, Outputs) ->
+ [Output | Outputs].
+
+remove_main_syslog_output(
+ #{module := syslog_logger_h}, Outputs) ->
+ lists:filter(
+ fun
+ (#{module := syslog_logger_h}) -> false;
+ (_) -> true
+ end, Outputs).
+
+-spec normalize_per_cat_log_config(per_cat_env()) -> per_cat_log_config().
+
+normalize_per_cat_log_config(Props) ->
+ normalize_per_cat_log_config(Props, #{outputs => []}).
+
+normalize_per_cat_log_config([{level, Level} | Rest], LogConfig) ->
+ LogConfig1 = LogConfig#{level => Level},
+ normalize_per_cat_log_config(Rest, LogConfig1);
+normalize_per_cat_log_config([{file, Filename} | Rest],
+ #{outputs := Outputs} = LogConfig) ->
+ %% Caution: The `file' property in the per-category configuration only
+ %% accepts a filename. It doesn't support the sub-properties that the
+ %% global `file' output accepts.
+ Output = #{module => rabbit_logger_std_h,
+ config => #{type => file,
+ file => Filename}},
+ LogConfig1 = LogConfig#{outputs => [Output | Outputs]},
+ normalize_per_cat_log_config(Rest, LogConfig1);
+normalize_per_cat_log_config([], LogConfig) ->
+ LogConfig.
+
+-spec handle_default_and_overridden_outputs(log_config(),
+ rabbit_env:context()) ->
+ log_config().
+
+handle_default_and_overridden_outputs(LogConfig, Context) ->
+ LogConfig1 = handle_default_main_output(LogConfig, Context),
+ LogConfig2 = handle_default_upgrade_cat_output(LogConfig1, Context),
+ LogConfig2.
+
+-spec handle_default_main_output(log_config(), rabbit_env:context()) ->
+ log_config().
+
+handle_default_main_output(
+ #{global := #{outputs := Outputs} = GlobalConfig} = LogConfig,
+ #{main_log_file := MainLogFile} = Context) ->
+ NoOutputsConfigured = Outputs =:= [],
+ Overridden = rabbit_env:has_var_been_overridden(Context, main_log_file),
+ Outputs1 = if
+ NoOutputsConfigured orelse Overridden ->
+ Output0 = log_file_var_to_output(MainLogFile),
+ Output1 = keep_log_level_from_equivalent_output(
+ Output0, Outputs),
+ [Output1];
+ true ->
+ [case Output of
+ #{module := Mod,
+ config := #{type := file, file := _}}
+ when ?IS_STD_H_COMPAT(Mod) ->
+ Output;
+ #{module := Mod,
+ config := #{type := file} = Config}
+ when ?IS_STD_H_COMPAT(Mod) ->
+ Output#{config =>
+ Config#{file => MainLogFile}};
+ _ ->
+ Output
+ end || Output <- Outputs]
+ end,
+ case Outputs1 of
+ Outputs -> LogConfig;
+ _ -> LogConfig#{
+ global => GlobalConfig#{
+ outputs => Outputs1}}
+ end.
+
+-spec handle_default_upgrade_cat_output(log_config(), rabbit_env:context()) ->
+ log_config().
+
+handle_default_upgrade_cat_output(
+ #{per_category := PerCatConfig} = LogConfig,
+ #{upgrade_log_file := UpgLogFile} = Context) ->
+ UpgCatConfig = case PerCatConfig of
+ #{upgrade := CatConfig} -> CatConfig;
+ _ -> #{outputs => []}
+ end,
+ #{outputs := Outputs} = UpgCatConfig,
+ NoOutputsConfigured = Outputs =:= [],
+ Overridden = rabbit_env:has_var_been_overridden(
+ Context, upgrade_log_file),
+ Outputs1 = if
+ NoOutputsConfigured orelse Overridden ->
+ Output0 = log_file_var_to_output(UpgLogFile),
+ Output1 = keep_log_level_from_equivalent_output(
+ Output0, Outputs),
+ [Output1];
+ true ->
+ Outputs
+ end,
+ case Outputs1 of
+ Outputs -> LogConfig;
+ _ -> LogConfig#{
+ per_category => PerCatConfig#{
+ upgrade => UpgCatConfig#{
+ outputs => Outputs1}}}
+ end.
+
+-spec log_file_var_to_output(file:filename() | string()) ->
+ logger:handler_config().
+
+log_file_var_to_output("-") ->
+ #{module => rabbit_logger_std_h,
+ config => #{type => standard_io}};
+log_file_var_to_output("-stderr") ->
+ #{module => rabbit_logger_std_h,
+ config => #{type => standard_error}};
+log_file_var_to_output("exchange:" ++ _) ->
+ #{module => rabbit_logger_exchange_h,
+ config => #{}};
+log_file_var_to_output("journald:" ++ _) ->
+ #{module => systemd_journal_h,
+ config => #{}};
+log_file_var_to_output("syslog:" ++ _) ->
+ #{module => syslog_logger_h,
+ config => #{}};
+log_file_var_to_output(Filename) ->
+ #{module => rabbit_logger_std_h,
+ config => #{type => file,
+ file => Filename}}.
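+%% As an illustration only (the values are hypothetical):
+%%   log_file_var_to_output("-")          %% console output on standard_io
+%%   log_file_var_to_output("syslog:")    %% syslog output
+%%   log_file_var_to_output("rabbit.log") %% file output to "rabbit.log"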
+
+-spec keep_log_level_from_equivalent_output(
+ logger:handler_config(), [logger:handler_config()]) ->
+ logger:handler_config().
+%% @doc
+%% Keeps the log level from the equivalent output if found in the given list of
+%% outputs.
+%%
+%% If the output is overridden from the environment, or if no output is
+%% configured at all (and the default output is used), we should still keep the
+%% log level set in the configuration. The idea is that the $RABBITMQ_LOGS
+%% environment variable only overrides the output, not its log level (which
+%% would be set in $RABBITMQ_LOG).
+%%
+%% Here is an example of when it is used:
+%% * "$RABBITMQ_LOGS=-" is set in the environment
+%% * "log.console.level = debug" is set in the configuration file
+
+keep_log_level_from_equivalent_output(
+ #{module := Mod, config := #{type := Type}} = Output,
+ [#{module := Mod, config := #{type := Type}} = OverridenOutput | _])
+ when ?IS_STD_H_COMPAT(Mod) ->
+ keep_log_level_from_equivalent_output1(Output, OverridenOutput);
+keep_log_level_from_equivalent_output(
+ #{module := Mod} = Output,
+ [#{module := Mod} = OverridenOutput | _]) ->
+ keep_log_level_from_equivalent_output1(Output, OverridenOutput);
+keep_log_level_from_equivalent_output(Output, [_ | Rest]) ->
+ keep_log_level_from_equivalent_output(Output, Rest);
+keep_log_level_from_equivalent_output(Output, []) ->
+ Output.
+
+-spec keep_log_level_from_equivalent_output1(
+ logger:handler_config(), logger:handler_config()) ->
+ logger:handler_config().
+
+keep_log_level_from_equivalent_output1(Output, #{level := Level}) ->
+ Output#{level => Level};
+keep_log_level_from_equivalent_output1(Output, _) ->
+ Output.
+
+-spec apply_log_levels_from_env(log_config(), rabbit_env:context()) ->
+ log_config().
+
+apply_log_levels_from_env(LogConfig, #{log_levels := LogLevels})
+ when is_map(LogLevels) ->
+ %% `LogLevels' comes from the `$RABBITMQ_LOG' environment variable. It has
+ %% the following form:
+ %% RABBITMQ_LOG=$cat1=$level1,$cat2=$level2,+color,-json
+ %% I.e. it contains either `category=level' or `+flag'/`-flag'.
+ %%
+ %% Here we want to apply the log levels set from that variable, but we
+ %% need to filter out any flags.
+ maps:fold(
+ fun
+ (_, Value, LC) when is_boolean(Value) ->
+ %% Ignore flags such as '+color' and '+json'.
+ LC;
+ (global, Level, #{global := GlobalConfig} = LC) ->
+ GlobalConfig1 = GlobalConfig#{level => Level},
+ LC#{global => GlobalConfig1};
+ (CatString, Level, #{per_category := PerCatConfig} = LC) ->
+ CatAtom = list_to_atom(CatString),
+ CatConfig0 = maps:get(CatAtom, PerCatConfig, #{outputs => []}),
+ CatConfig1 = CatConfig0#{level => Level},
+ PerCatConfig1 = PerCatConfig#{CatAtom => CatConfig1},
+ LC#{per_category => PerCatConfig1}
+ end, LogConfig, LogLevels);
+apply_log_levels_from_env(LogConfig, _) ->
+ LogConfig.
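+%% As an illustration only (the exact map shape comes from rabbit_env): a map
+%% such as `#{global => info, "connection" => debug, color => true}' would set
+%% the global level to `info' and the `connection' category level to `debug',
+%% while the boolean flag is ignored by this function.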
+
+-spec make_filenames_absolute(log_config(), rabbit_env:context()) ->
+ log_config().
+
+make_filenames_absolute(
+ #{global := GlobalConfig, per_category := PerCatConfig} = LogConfig,
+ Context) ->
+ LogBaseDir = get_log_base_dir(Context),
+ GlobalConfig1 = make_filenames_absolute1(GlobalConfig, LogBaseDir),
+ PerCatConfig1 = maps:map(
+ fun(_, CatConfig) ->
+ make_filenames_absolute1(CatConfig, LogBaseDir)
+ end, PerCatConfig),
+ LogConfig#{global => GlobalConfig1, per_category => PerCatConfig1}.
+
+make_filenames_absolute1(#{outputs := Outputs} = Config, LogBaseDir) ->
+ Outputs1 = lists:map(
+ fun
+ (#{module := Mod,
+ config := #{type := file,
+ file := Filename} = Cfg} = Output)
+ when ?IS_STD_H_COMPAT(Mod) ->
+ Cfg1 = Cfg#{file => filename:absname(
+ Filename, LogBaseDir)},
+ Output#{config => Cfg1};
+ (Output) ->
+ Output
+ end, Outputs),
+ Config#{outputs => Outputs1}.
+
+-spec configure_formatters(log_config(), rabbit_env:context()) ->
+ log_config().
+
+configure_formatters(
+ #{global := GlobalConfig, per_category := PerCatConfig} = LogConfig,
+ Context) ->
+ GlobalConfig1 = configure_formatters1(GlobalConfig, Context),
+ PerCatConfig1 = maps:map(
+ fun(_, CatConfig) ->
+ configure_formatters1(CatConfig, Context)
+ end, PerCatConfig),
+ LogConfig#{global => GlobalConfig1, per_category => PerCatConfig1}.
+
+configure_formatters1(#{outputs := Outputs} = Config, Context) ->
+ %% TODO: Add ability to configure formatters from the Cuttlefish
+ %% configuration file. For now, it is only possible from the
+ %% `$RABBITMQ_LOG' environment variable.
+ ConsFormatter =
+ rabbit_prelaunch_early_logging:default_console_formatter(Context),
+ FileFormatter =
+ rabbit_prelaunch_early_logging:default_file_formatter(Context),
+ JournaldFormatter =
+ rabbit_prelaunch_early_logging:default_journald_formatter(Context),
+ SyslogFormatter =
+ rabbit_prelaunch_early_logging:default_syslog_formatter(Context),
+ Outputs1 = lists:map(
+ fun
+ (#{module := Mod,
+ config := #{type := Stddev}} = Output)
+ when ?IS_STD_H_COMPAT(Mod) andalso
+ ?IS_STDDEV(Stddev) ->
+ case maps:is_key(formatter, Output) of
+ true -> Output;
+ false -> Output#{formatter => ConsFormatter}
+ end;
+ (#{module := systemd_journal_h} = Output) ->
+ case maps:is_key(formatter, Output) of
+ true -> Output;
+ false -> Output#{formatter => JournaldFormatter}
+ end;
+ (#{module := syslog_logger_h} = Output) ->
+ case maps:is_key(formatter, Output) of
+ true -> Output;
+ false -> Output#{formatter => SyslogFormatter}
+ end;
+ (Output) ->
+ case maps:is_key(formatter, Output) of
+ true -> Output;
+ false -> Output#{formatter => FileFormatter}
+ end
+ end, Outputs),
+ Config#{outputs => Outputs1}.
+
+-spec create_logger_handlers_conf(log_config()) ->
+ [logger:handler_config()].
+
+create_logger_handlers_conf(
+ #{global := GlobalConfig, per_category := PerCatConfig}) ->
+ Handlers0 = create_global_handlers_conf(GlobalConfig),
+ Handlers1 = create_per_cat_handlers_conf(PerCatConfig, Handlers0),
+ Handlers2 = adjust_log_levels(Handlers1),
+
+ %% assign_handler_ids/1 is also responsible for transforming the map of
+ %% handlers into a list. The map was only used to deduplicate handlers.
+ assign_handler_ids(Handlers2).
+
+-spec create_global_handlers_conf(global_log_config()) ->
+ #{handler_key() := logger:handler_config()}.
+
+create_global_handlers_conf(#{outputs := Outputs} = GlobalConfig) ->
+ Handlers = ensure_handlers_conf(Outputs, global, GlobalConfig, #{}),
+ maps:map(
+ fun(_, Handler) ->
+ add_erlang_specific_filters(Handler)
+ end, Handlers).
+
+-spec add_erlang_specific_filters(logger:handler_config()) ->
+ logger:handler_config().
+
+add_erlang_specific_filters(#{filters := Filters} = Handler) ->
+ %% We log progress reports (from the application master and supervisors)
+ %% only if the handler level is set to debug.
+ Action = case Handler of
+ #{level := debug} -> log;
+ _ -> stop
+ end,
+ Filters1 = [{progress_reports, {fun logger_filters:progress/2, Action}}
+ | Filters],
+ Handler#{filters => Filters1}.
+
+-spec create_per_cat_handlers_conf(
+ #{category_name() => per_cat_log_config()},
+ #{handler_key() => logger:handler_config()}) ->
+ #{handler_key() => logger:handler_config()}.
+
+create_per_cat_handlers_conf(PerCatConfig, Handlers) ->
+ maps:fold(
+ fun
+ (CatName, #{outputs := []} = CatConfig, Hdls) ->
+ %% That category has no outputs defined. It means its messages
+ %% will go to the global handlers. We still need to update
+ %% will go to the global handlers. We still need to update the
+ %% global handlers so they filter messages from that category at
+ %% its configured level.
+ (CatName, #{outputs := Outputs} = CatConfig, Hdls) ->
+ %% That category has specific outputs (i.e. in addition to the
+ %% global handlers).
+ Hdls1 = ensure_handlers_conf(Outputs, CatName, CatConfig, Hdls),
+ %% We need to filter the messages from that category out in the
+ %% global handlers.
+ filter_out_cat_in_global_handlers(Hdls1, CatName)
+ end, Handlers, PerCatConfig).
+
+-spec ensure_handlers_conf(
+ [logger:handler_config()], global | category_name(),
+ global_log_config() | per_cat_log_config(),
+ #{handler_key() => logger:handler_config()}) ->
+ #{handler_key() => logger:handler_config()}.
+
+ensure_handlers_conf([Output | Rest], CatName, Config, Handlers) ->
+ Key = create_handler_key(Output),
+ %% This is where the deduplication happens: either we update the existing
+ %% handler (based on the key computed above) or we create a new one.
+ Handler = case maps:is_key(Key, Handlers) of
+ false -> create_handler_conf(Output, CatName, Config);
+ true -> update_handler_conf(maps:get(Key, Handlers),
+ CatName, Output)
+ end,
+ Handlers1 = Handlers#{Key => Handler},
+ ensure_handlers_conf(Rest, CatName, Config, Handlers1);
+ensure_handlers_conf([], _, _, Handlers) ->
+ Handlers.
+
+-spec create_handler_key(logger:handler_config()) -> handler_key().
+
+create_handler_key(
+ #{module := Mod, config := #{type := file, file := Filename}})
+ when ?IS_STD_H_COMPAT(Mod) ->
+ {file, Filename};
+create_handler_key(
+ #{module := Mod, config := #{type := standard_io}})
+ when ?IS_STD_H_COMPAT(Mod) ->
+ {console, standard_io};
+create_handler_key(
+ #{module := Mod, config := #{type := standard_error}})
+ when ?IS_STD_H_COMPAT(Mod) ->
+ {console, standard_error};
+create_handler_key(
+ #{module := rabbit_logger_exchange_h}) ->
+ exchange;
+create_handler_key(
+ #{module := systemd_journal_h}) ->
+ journald;
+create_handler_key(
+ #{module := syslog_logger_h}) ->
+ syslog.
+
+-spec create_handler_conf(
+ logger:handler_config(), global | category_name(),
+ global_log_config() | per_cat_log_config()) ->
+ logger:handler_config().
+
+%% The difference between a global handler and a category handler is the value
+%% of `filter_default'. In a global handler, if a message was not stopped or
+%% explicitly accepted by a filter, the message is logged. In a category
+%% handler, it is dropped.
+
+create_handler_conf(Output, global, Config) ->
+ Level = compute_level_from_config_and_output(Config, Output),
+ Output#{level => Level,
+ filter_default => log,
+ filters => [{?FILTER_NAME,
+ {fun filter_log_event/2, #{global => Level}}}]};
+create_handler_conf(Output, CatName, Config) ->
+ Level = compute_level_from_config_and_output(Config, Output),
+ Output#{level => Level,
+ filter_default => stop,
+ filters => [{?FILTER_NAME,
+ {fun filter_log_event/2, #{CatName => Level}}}]}.
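+%% As an illustration only: a global handler created at level `info' carries
+%%   filter_default => log,
+%%   filters => [{?FILTER_NAME, {fun filter_log_event/2, #{global => info}}}]
+%% while a handler created for a hypothetical `connection' category at level
+%% `debug' carries `filter_default => stop' and a filter argument of
+%% `#{connection => debug}'.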
+
+-spec update_handler_conf(
+ logger:handler_config(), global | category_name(),
+ logger:handler_config()) ->
+ logger:handler_config().
+
+update_handler_conf(
+ #{level := ConfiguredLevel} = Handler, global, Output) ->
+ case Output of
+ #{level := NewLevel} ->
+ Handler#{level =>
+ get_less_severe_level(NewLevel, ConfiguredLevel)};
+ _ ->
+ Handler
+ end;
+update_handler_conf(Handler, CatName, Output) ->
+ add_cat_filter(Handler, CatName, Output).
+
+-spec compute_level_from_config_and_output(
+ global_log_config() | per_cat_log_config(),
+ logger:handler_config()) ->
+ logger:level().
+%% @doc
+%% Computes the log level for a handler.
+%%
+%% The precedence is:
+%% <ol>
+%% <li>the level of the output</li>
+%% <li>the level of the category (or the global level)</li>
+%% <li>the default value</li>
+%% </ol>
+
+compute_level_from_config_and_output(Config, Output) ->
+ case Output of
+ #{level := Level} ->
+ Level;
+ _ ->
+ case Config of
+ #{level := Level} -> Level;
+ _ -> ?DEFAULT_LOG_LEVEL
+ end
+ end.
+
+-spec filter_cat_in_global_handlers(
+ #{handler_key() => logger:handler_config()}, category_name(),
+ per_cat_log_config()) ->
+ #{handler_key() => logger:handler_config()}.
+
+filter_cat_in_global_handlers(Handlers, CatName, CatConfig) ->
+ maps:map(
+ fun
+ (_, #{filter_default := log} = Handler) ->
+ add_cat_filter(Handler, CatName, CatConfig);
+ (_, Handler) ->
+ Handler
+ end, Handlers).
+
+-spec filter_out_cat_in_global_handlers(
+ #{handler_key() => logger:handler_config()}, category_name()) ->
+ #{handler_key() => logger:handler_config()}.
+
+filter_out_cat_in_global_handlers(Handlers, CatName) ->
+ maps:map(
+ fun
+ (_, #{filter_default := log, filters := Filters} = Handler) ->
+ {_, FilterConfig} = proplists:get_value(?FILTER_NAME, Filters),
+ case maps:is_key(CatName, FilterConfig) of
+ true -> Handler;
+ false -> add_cat_filter(Handler, CatName, #{level => none,
+ outputs => []})
+ end;
+ (_, Handler) ->
+ Handler
+ end, Handlers).
+
+-spec add_cat_filter(
+ logger:handler_config(), category_name(),
+ per_cat_log_config() | logger:handler_config()) ->
+ logger:handler_config().
+
+add_cat_filter(Handler, CatName, CatConfigOrOutput) ->
+ Level = case CatConfigOrOutput of
+ #{level := L} -> L;
+ _ -> maps:get(level, Handler)
+ end,
+ do_add_cat_filter(Handler, CatName, Level).
+
+do_add_cat_filter(#{filters := Filters} = Handler, CatName, Level) ->
+ {Fun, FilterConfig} = proplists:get_value(?FILTER_NAME, Filters),
+ FilterConfig1 = FilterConfig#{CatName => Level},
+ Filters1 = lists:keystore(?FILTER_NAME, 1, Filters,
+ {?FILTER_NAME, {Fun, FilterConfig1}}),
+ Handler#{filters => Filters1}.
+
+-spec filter_log_event(logger:log_event(), term()) -> logger:filter_return().
+
+filter_log_event(LogEvent, FilterConfig) ->
+ rabbit_prelaunch_early_logging:filter_log_event(LogEvent, FilterConfig).
+
+-spec adjust_log_levels(#{handler_key() => logger:handler_config()}) ->
+ #{handler_key() => logger:handler_config()}.
+%% @doc
+%% Adjusts the handler log level based on the filters' levels.
+%%
+%% If a filter is more permissive, we need to adapt the handler log level so
+%% the message makes it to the filter.
+%%
+%% Also, if the log level is set to `debug', we turn off burst limit to make
+%% sure all debug messages make it.
+
+adjust_log_levels(Handlers) ->
+ maps:map(
+ fun(_, #{level := GeneralLevel, filters := Filters} = Handler) ->
+ {_, FilterConfig} = proplists:get_value(?FILTER_NAME, Filters),
+ Level = maps:fold(
+ fun(_, LvlA, LvlB) ->
+ get_less_severe_level(LvlA, LvlB)
+ end, GeneralLevel, FilterConfig),
+ Handler1 = Handler#{level => Level},
+ adjust_burst_limit(Handler1)
+ end, Handlers).
+
+adjust_burst_limit(#{config := #{burst_limit_enable := _}} = Handler) ->
+ Handler;
+adjust_burst_limit(#{level := debug, config := Config} = Handler) ->
+ Config1 = Config#{burst_limit_enable => false},
+ Handler#{config => Config1};
+adjust_burst_limit(Handler) when is_map(Handler) ->
+ Handler.
+
+adjust_burst_limit(Config, Level) ->
+ Config#{burst_limit_enable => Level =/= debug}.
+
+-spec assign_handler_ids(#{handler_key() => logger:handler_config()}) ->
+ [logger:handler_config()].
+
+assign_handler_ids(Handlers) ->
+ Handlers1 = [maps:get(Key, Handlers)
+ || Key <- lists:sort(maps:keys(Handlers))],
+ assign_handler_ids(Handlers1,
+ #{config_run_number => get_config_run_number(),
+ next_file => 1},
+ []).
+
+-spec assign_handler_ids(
+ [logger:handler_config()], id_assignment_state(),
+ [logger:handler_config()]) ->
+ [logger:handler_config()].
+
+assign_handler_ids(
+ [#{module := Mod, config := #{type := file}} = Handler | Rest],
+ #{next_file := NextFile} = State,
+ Result)
+ when ?IS_STD_H_COMPAT(Mod) ->
+ Id = format_id("file_~b", [NextFile], State),
+ Handler1 = Handler#{id => Id},
+ assign_handler_ids(
+ Rest, State#{next_file => NextFile + 1}, [Handler1 | Result]);
+assign_handler_ids(
+ [#{module := Mod, config := #{type := standard_io}} = Handler | Rest],
+ State,
+ Result)
+ when ?IS_STD_H_COMPAT(Mod) ->
+ Id = format_id("stdout", [], State),
+ Handler1 = Handler#{id => Id},
+ assign_handler_ids(Rest, State, [Handler1 | Result]);
+assign_handler_ids(
+ [#{module := Mod, config := #{type := standard_error}} = Handler | Rest],
+ State,
+ Result)
+ when ?IS_STD_H_COMPAT(Mod) ->
+ Id = format_id("stderr", [], State),
+ Handler1 = Handler#{id => Id},
+ assign_handler_ids(Rest, State, [Handler1 | Result]);
+assign_handler_ids(
+ [#{module := rabbit_logger_exchange_h} = Handler
+ | Rest],
+ State,
+ Result) ->
+ Id = format_id("exchange", [], State),
+ Handler1 = Handler#{id => Id},
+ assign_handler_ids(Rest, State, [Handler1 | Result]);
+assign_handler_ids(
+ [#{module := systemd_journal_h} = Handler
+ | Rest],
+ State,
+ Result) ->
+ Id = format_id("journald", [], State),
+ Handler1 = Handler#{id => Id},
+ assign_handler_ids(Rest, State, [Handler1 | Result]);
+assign_handler_ids(
+ [#{module := syslog_logger_h} = Handler
+ | Rest],
+ State,
+ Result) ->
+ Id = format_id("syslog", [], State),
+ Handler1 = Handler#{id => Id},
+ assign_handler_ids(Rest, State, [Handler1 | Result]);
+assign_handler_ids([], _, Result) ->
+ lists:reverse(Result).
+
+-spec format_id(io:format(), [term()], id_assignment_state()) ->
+ logger:handler_id().
+
+format_id(Format, Args, #{config_run_number := RunNum}) ->
+ list_to_atom(rabbit_misc:format("rmq_~b_" ++ Format, [RunNum | Args])).
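+%% As an illustration only:
+%%   format_id("file_~b", [1], #{config_run_number => 2, next_file => 2})
+%%   %% => 'rmq_2_file_1'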
+
+-spec install_handlers([logger:handler_config()]) -> ok | no_return().
+
+install_handlers([]) ->
+ ok;
+install_handlers(Handlers) ->
+ case adjust_running_dependencies(Handlers) of
+ ok ->
+ ?LOG_NOTICE(
+ "Logging: switching to configured handler(s); following "
+ "messages may not be visible in this log output",
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
+ ok = do_install_handlers(Handlers),
+ ok = remove_old_handlers(),
+ ok = define_primary_level(Handlers),
+ ?LOG_NOTICE(
+ "Logging: configured log handlers are now ACTIVE",
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH});
+ _ ->
+ ?LOG_NOTICE(
+ "Logging: failed to configure log handlers; keeping existing "
+ "handlers",
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
ok
end.
-configure_lager(#{log_base_dir := LogBaseDir,
- main_log_file := MainLog,
- upgrade_log_file := UpgradeLog} = Context) ->
- {SaslErrorLogger,
- MainLagerHandler,
- UpgradeLagerHandler} = case MainLog of
- "-" ->
- %% Log to STDOUT.
- rabbit_log_prelaunch:debug(
- "Logging to stdout"),
- {tty,
- tty,
- tty};
- _ ->
- rabbit_log_prelaunch:debug(
- "Logging to:"),
- [rabbit_log_prelaunch:debug(
- " - ~ts", [Log])
- || Log <- [MainLog, UpgradeLog]],
- %% Log to file.
- {false,
- MainLog,
- UpgradeLog}
- end,
-
- ok = application:set_env(lager, crash_log, "log/crash.log"),
-
- Fun = fun({App, Var, Value}) ->
- case application:get_env(App, Var) of
- undefined -> ok = application:set_env(App, Var, Value);
- _ -> ok
- end
- end,
- Vars = [{sasl, sasl_error_logger, SaslErrorLogger},
- {rabbit, lager_log_root, LogBaseDir},
- {rabbit, lager_default_file, MainLagerHandler},
- {rabbit, lager_upgrade_file, UpgradeLagerHandler}],
- lists:foreach(Fun, Vars),
-
- ok = rabbit_lager:start_logger(),
-
- ok = rabbit_prelaunch_early_logging:setup_early_logging(Context, false).
+-spec adjust_running_dependencies([logger:handler_config()]) -> ok | error.
+
+adjust_running_dependencies(Handlers) ->
+ %% Based on the log handlers' module, we determine the list of applications
+ %% they depend on.
+ %%
+ %% The DefaultDeps map lists all possible dependencies and marks them as
+ %% unneeded. Then, if we have a log handler which depends on one of them,
+ %% it is marked as needed. This way, we know what needs to be started AND
+ %% stopped.
+ %%
+ %% DefaultDeps is of the form `#{ApplicationName => Needed}'.
+ DefaultDeps = #{syslog => false},
+ Deps = lists:foldl(
+ fun
+ (#{module := syslog_logger_h}, Acc) -> Acc#{syslog => true};
+ (_, Acc) -> Acc
+ end, DefaultDeps, Handlers),
+ adjust_running_dependencies1(maps:to_list(Deps)).
+
+-spec adjust_running_dependencies1([{atom(), boolean()}]) -> ok | error.
+
+adjust_running_dependencies1([{App, true} | Rest]) ->
+ ?LOG_DEBUG(
+ "Logging: ensure log handler dependency '~s' is started", [App],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
+ case application:ensure_all_started(App) of
+ {ok, _} ->
+ adjust_running_dependencies1(Rest);
+ {error, Reason} ->
+ ?LOG_ERROR(
+ "Failed to start log handler dependency '~s': ~p",
+ [App, Reason],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
+ error
+ end;
+adjust_running_dependencies1([{App, false} | Rest]) ->
+ ?LOG_DEBUG(
+ "Logging: ensure log handler dependency '~s' is stopped", [App],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
+ case application:stop(App) of
+ ok ->
+ ok;
+ {error, Reason} ->
+ ?LOG_NOTICE(
+ "Logging: failed to stop log handler dependency '~s': ~p",
+ [App, Reason],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH})
+ end,
+ adjust_running_dependencies1(Rest);
+adjust_running_dependencies1([]) ->
+ ok.
+
+-spec do_install_handlers([logger:handler_config()]) -> ok | no_return().
+
+do_install_handlers([#{id := Id, module := Module} = Handler | Rest]) ->
+ case logger:add_handler(Id, Module, Handler) of
+ ok ->
+ ok = remove_syslog_logger_h_hardcoded_filters(Handler),
+ do_install_handlers(Rest);
+ {error, {handler_not_added, {open_failed, Filename, Reason}}} ->
+ throw({error, {cannot_log_to_file, Filename, Reason}});
+ {error, {handler_not_added, Reason}} ->
+ throw({error, {cannot_log_to_file, unknown, Reason}})
+ end;
+do_install_handlers([]) ->
+ ok.
+
+remove_syslog_logger_h_hardcoded_filters(
+ #{id := Id, module := syslog_logger_h}) ->
+ _ = logger:remove_handler_filter(Id, progress),
+ _ = logger:remove_handler_filter(Id, remote_gl),
+ ok;
+remove_syslog_logger_h_hardcoded_filters(_) ->
+ ok.
+
+-spec remove_old_handlers() -> ok.
+
+remove_old_handlers() ->
+ _ = logger:remove_handler(default),
+ RunNum = get_config_run_number(),
+ lists:foreach(
+ fun(Id) ->
+ Ret = re:run(atom_to_list(Id), "^rmq_([0-9]+)_",
+ [{capture, all_but_first, list}]),
+ case Ret of
+ {match, [NumStr]} ->
+ Num = erlang:list_to_integer(NumStr),
+ if
+ Num < RunNum ->
+ ?LOG_DEBUG(
+ "Logging: removing old logger handler ~s",
+ [Id],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
+ ok = logger:remove_handler(Id);
+ true ->
+ ok
+ end;
+ _ ->
+ ok
+ end
+ end, lists:sort(logger:get_handler_ids())),
+ ok.
+
+-spec define_primary_level([logger:handler_config()]) ->
+ ok | {error, term()}.
+
+define_primary_level(Handlers) ->
+ define_primary_level(Handlers, emergency).
+
+-spec define_primary_level([logger:handler_config()], logger:level()) ->
+ ok | {error, term()}.
+
+define_primary_level([#{level := Level} | Rest], PrimaryLevel) ->
+ NewLevel = get_less_severe_level(Level, PrimaryLevel),
+ define_primary_level(Rest, NewLevel);
+define_primary_level([], PrimaryLevel) ->
+ logger:set_primary_config(level, PrimaryLevel).
+
+-spec get_less_severe_level(logger:level(), logger:level()) -> logger:level().
+%% @doc
+%% Compares two log levels and returns the less severe one.
+%%
+%% @param LevelA the log level to compare to LevelB.
+%% @param LevelB the log level to compare to LevelA.
+%%
+%% @returns the less severe log level.
+
+get_less_severe_level(LevelA, LevelB) ->
+ case logger:compare_levels(LevelA, LevelB) of
+ lt -> LevelA;
+ _ -> LevelB
+ end.
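+%% As an illustration only: get_less_severe_level(debug, notice) returns
+%% `debug', because `debug' is less severe than `notice'.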
+
+-spec maybe_log_test_messages(log_config()) -> ok.
+
+maybe_log_test_messages(
+ #{per_category := #{prelaunch := #{level := debug}}}) ->
+ log_test_messages();
+maybe_log_test_messages(
+ #{global := #{level := debug}}) ->
+ log_test_messages();
+maybe_log_test_messages(_) ->
+ ok.
+
+-spec log_test_messages() -> ok.
+
+log_test_messages() ->
+ ?LOG_DEBUG("Logging: testing debug log level",
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
+ ?LOG_INFO("Logging: testing info log level",
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
+ ?LOG_NOTICE("Logging: testing notice log level",
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
+ ?LOG_WARNING("Logging: testing warning log level",
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
+ ?LOG_ERROR("Logging: testing error log level",
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
+ ?LOG_CRITICAL("Logging: testing critical log level",
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
+ ?LOG_ALERT("Logging: testing alert log level",
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
+ ?LOG_EMERGENCY("Logging: testing emergency log level",
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}).
diff --git a/deps/rabbit/src/rabbit_prequeue.erl b/deps/rabbit/src/rabbit_prequeue.erl
index b5af8927c7..fd368fc9d2 100644
--- a/deps/rabbit/src/rabbit_prequeue.erl
+++ b/deps/rabbit/src/rabbit_prequeue.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2010-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2010-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_prequeue).
@@ -79,7 +79,7 @@ init(Q0, restart) when ?is_amqqueue(Q0) ->
crash_restart(Q0) when ?is_amqqueue(Q0) ->
QueueName = amqqueue:get_name(Q0),
- rabbit_log:error("Restarting crashed ~s.~n", [rabbit_misc:rs(QueueName)]),
+ rabbit_log:error("Restarting crashed ~s.", [rabbit_misc:rs(QueueName)]),
gen_server2:cast(self(), init),
Q1 = amqqueue:set_pid(Q0, self()),
rabbit_amqqueue_process:init(Q1).
diff --git a/deps/rabbit/src/rabbit_priority_queue.erl b/deps/rabbit/src/rabbit_priority_queue.erl
index 4b41b8dfbd..355fadcfc3 100644
--- a/deps/rabbit/src/rabbit_priority_queue.erl
+++ b/deps/rabbit/src/rabbit_priority_queue.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2015-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2015-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_priority_queue).
@@ -67,7 +67,7 @@ enable() ->
{ok, RealBQ} = application:get_env(rabbit, backing_queue_module),
case RealBQ of
?MODULE -> ok;
- _ -> rabbit_log:info("Priority queues enabled, real BQ is ~s~n",
+ _ -> rabbit_log:info("Priority queues enabled, real BQ is ~s",
[RealBQ]),
application:set_env(
rabbitmq_priority_queue, backing_queue_module, RealBQ),
diff --git a/deps/rabbit/src/rabbit_queue_consumers.erl b/deps/rabbit/src/rabbit_queue_consumers.erl
index 4f826f72e8..39c759aa5e 100644
--- a/deps/rabbit/src/rabbit_queue_consumers.erl
+++ b/deps/rabbit/src/rabbit_queue_consumers.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_queue_consumers).
@@ -12,9 +12,11 @@
send_drained/0, deliver/5, record_ack/3, subtract_acks/3,
possibly_unblock/3,
resume_fun/0, notify_sent_fun/1, activate_limit_fun/0,
- credit/6, utilisation/1, is_same/3, get_consumer/1, get/3,
+ credit/6, utilisation/1, capacity/1, is_same/3, get_consumer/1, get/3,
consumer_tag/1, get_infos/1]).
+-export([deactivate_limit_fun/0]).
+
%%----------------------------------------------------------------------------
-define(QUEUE, lqueue).
@@ -221,7 +223,6 @@ deliver(FetchFun, QName, false, State = #state{consumers = Consumers}, true, Sin
{delivered, R} ->
{delivered, false, R, State};
undelivered ->
- {ChPid, Consumer} = SingleActiveConsumer,
Consumers1 = remove_consumer(ChPid, Consumer#consumer.tag, Consumers),
{undelivered, true,
State#state{consumers = Consumers1, use = update_use(State#state.use, inactive)}}
@@ -386,6 +387,13 @@ activate_limit_fun() ->
C#cr{limiter = rabbit_limiter:activate(Limiter)}
end.
+-spec deactivate_limit_fun() -> cr_fun().
+
+deactivate_limit_fun() ->
+ fun (C = #cr{limiter = Limiter}) ->
+ C#cr{limiter = rabbit_limiter:deactivate(Limiter)}
+ end.
+
-spec credit(boolean(), integer(), boolean(), ch(), rabbit_types:ctag(),
state()) -> 'unchanged' | {'unblocked', state()}.
@@ -409,10 +417,13 @@ drain_mode(true) -> drain;
drain_mode(false) -> manual.
-spec utilisation(state()) -> ratio().
+utilisation(State) ->
+ capacity(State).
-utilisation(#state{use = {active, Since, Avg}}) ->
+-spec capacity(state()) -> ratio().
+capacity(#state{use = {active, Since, Avg}}) ->
use_avg(erlang:monotonic_time(micro_seconds) - Since, 0, Avg);
-utilisation(#state{use = {inactive, Since, Active, Avg}}) ->
+capacity(#state{use = {inactive, Since, Active, Avg}}) ->
use_avg(Active, erlang:monotonic_time(micro_seconds) - Since, Avg).
is_same(ChPid, ConsumerTag, {ChPid, #consumer{tag = ConsumerTag}}) ->
diff --git a/deps/rabbit/src/rabbit_queue_decorator.erl b/deps/rabbit/src/rabbit_queue_decorator.erl
index cbb50456c1..1d4854cd2f 100644
--- a/deps/rabbit/src/rabbit_queue_decorator.erl
+++ b/deps/rabbit/src/rabbit_queue_decorator.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_queue_decorator).
diff --git a/deps/rabbit/src/rabbit_queue_index.erl b/deps/rabbit/src/rabbit_queue_index.erl
index faab4380b5..e8c573c08f 100644
--- a/deps/rabbit/src/rabbit_queue_index.erl
+++ b/deps/rabbit/src/rabbit_queue_index.erl
@@ -2,11 +2,13 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_queue_index).
+-compile({inline, [segment_entry_count/0]}).
+
-export([erase/1, init/3, reset_state/1, recover/6,
terminate/3, delete_and_terminate/1,
pre_publish/7, flush_pre_publish_cache/2,
@@ -22,6 +24,9 @@
read_global_recovery_terms/1,
cleanup_global_recovery_terms/0]).
+%% Used by rabbit_vhost to set the segment_entry_count.
+-export([all_queue_directory_names/1]).
+
-define(CLEAN_FILENAME, "clean.dot").
%%----------------------------------------------------------------------------
@@ -43,13 +48,13 @@
%% then delivered, then ack'd.
%%
%% In order to be able to clean up ack'd messages, we write to segment
-%% files. These files have a fixed number of entries: ?SEGMENT_ENTRY_COUNT
+%% files. These files have a fixed number of entries: segment_entry_count()
%% publishes, delivers and acknowledgements. They are numbered, and so
%% it is known that the 0th segment contains messages 0 ->
-%% ?SEGMENT_ENTRY_COUNT - 1, the 1st segment contains messages
-%% ?SEGMENT_ENTRY_COUNT -> 2*?SEGMENT_ENTRY_COUNT - 1 and so on. As
+%% segment_entry_count() - 1, the 1st segment contains messages
+%% segment_entry_count() -> 2*segment_entry_count() - 1 and so on. As
%% such, in the segment files, we only refer to message sequence ids
-%% by the LSBs as SeqId rem ?SEGMENT_ENTRY_COUNT. This gives them a
+%% by the LSBs as SeqId rem segment_entry_count(). This gives them a
%% fixed size.
%%
%% However, transient messages which are not sent to disk at any point
@@ -127,8 +132,6 @@
%% binary generation/matching with constant vs variable lengths.
-define(REL_SEQ_BITS, 14).
-%% calculated as trunc(math:pow(2,?REL_SEQ_BITS))).
--define(SEGMENT_ENTRY_COUNT, 16384).
%% seq only is binary 01 followed by 14 bits of rel seq id
%% (range: 0 - 16383)
@@ -212,7 +215,7 @@
unacked
}).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
%%----------------------------------------------------------------------------
@@ -280,6 +283,8 @@ reset_state(#qistate{ queue_name = Name,
on_sync_fun(), on_sync_fun()) -> qistate().
init(#resource{ virtual_host = VHost } = Name, OnSyncFun, OnSyncMsgFun) ->
+ #{segment_entry_count := SegmentEntryCount} = rabbit_vhost:read_config(VHost),
+ put(segment_entry_count, SegmentEntryCount),
VHostDir = rabbit_vhost:msg_store_dir_path(VHost),
State = #qistate { dir = Dir } = blank_state(VHostDir, Name),
false = rabbit_file:is_file(Dir), %% is_file == is file or dir
@@ -294,14 +299,18 @@ init(#resource{ virtual_host = VHost } = Name, OnSyncFun, OnSyncMsgFun) ->
recover(#resource{ virtual_host = VHost } = Name, Terms, MsgStoreRecovered,
ContainsCheckFun, OnSyncFun, OnSyncMsgFun) ->
+ #{segment_entry_count := SegmentEntryCount} = rabbit_vhost:read_config(VHost),
+ put(segment_entry_count, SegmentEntryCount),
VHostDir = rabbit_vhost:msg_store_dir_path(VHost),
State = blank_state(VHostDir, Name),
State1 = State #qistate{on_sync = OnSyncFun,
on_sync_msg = OnSyncMsgFun},
CleanShutdown = Terms /= non_clean_shutdown,
case CleanShutdown andalso MsgStoreRecovered of
- true -> RecoveredCounts = proplists:get_value(segments, Terms, []),
- init_clean(RecoveredCounts, State1);
+ true -> case proplists:get_value(segments, Terms, non_clean_shutdown) of
+ non_clean_shutdown -> init_dirty(false, ContainsCheckFun, State1);
+ RecoveredCounts -> init_clean(RecoveredCounts, State1)
+ end;
false -> init_dirty(CleanShutdown, ContainsCheckFun, State1)
end.
@@ -352,11 +361,11 @@ pre_publish(MsgOrId, SeqId, MsgProps, IsPersistent, IsDelivered, JournalSizeHint
%% pre_publish_cache is the entry with most elements when compared to
%% delivered_cache so we only check the former in the guard.
maybe_flush_pre_publish_cache(JournalSizeHint,
- #qistate{pre_publish_cache = PPC} = State)
- when length(PPC) >= ?SEGMENT_ENTRY_COUNT ->
- flush_pre_publish_cache(JournalSizeHint, State);
-maybe_flush_pre_publish_cache(_JournalSizeHint, State) ->
- State.
+ #qistate{pre_publish_cache = PPC} = State) ->
+ case length(PPC) >= segment_entry_count() of
+ true -> flush_pre_publish_cache(JournalSizeHint, State);
+ false -> State
+ end.
flush_pre_publish_cache(JournalSizeHint, State) ->
State1 = flush_pre_publish_cache(State),
@@ -728,6 +737,9 @@ queue_index_walker_reader(QueueName, Gatherer) ->
ok = gatherer:finish(Gatherer).
scan_queue_segments(Fun, Acc, #resource{ virtual_host = VHost } = QueueName) ->
+ %% Set the segment_entry_count for this worker process.
+ #{segment_entry_count := SegmentEntryCount} = rabbit_vhost:read_config(VHost),
+ put(segment_entry_count, SegmentEntryCount),
VHostDir = rabbit_vhost:msg_store_dir_path(VHost),
scan_queue_segments(Fun, Acc, VHostDir, QueueName).
@@ -991,10 +1003,11 @@ notify_sync(State = #qistate{unconfirmed = UC,
%%----------------------------------------------------------------------------
seq_id_to_seg_and_rel_seq_id(SeqId) ->
- { SeqId div ?SEGMENT_ENTRY_COUNT, SeqId rem ?SEGMENT_ENTRY_COUNT }.
+ SegmentEntryCount = segment_entry_count(),
+ { SeqId div SegmentEntryCount, SeqId rem SegmentEntryCount }.
reconstruct_seq_id(Seg, RelSeq) ->
- (Seg * ?SEGMENT_ENTRY_COUNT) + RelSeq.
+ (Seg * segment_entry_count()) + RelSeq.
all_segment_nums(#qistate { dir = Dir, segments = Segments }) ->
lists:sort(
@@ -1163,7 +1176,10 @@ array_new() ->
array_new(undefined).
array_new(Default) ->
- array:new([{default, Default}, fixed, {size, ?SEGMENT_ENTRY_COUNT}]).
+ array:new([{default, Default}, fixed, {size, segment_entry_count()}]).
+
+segment_entry_count() ->
+ get(segment_entry_count).
bool_to_int(true ) -> 1;
bool_to_int(false) -> 0.
@@ -1478,7 +1494,7 @@ move_to_per_vhost_stores(#resource{virtual_host = VHost} = QueueName) ->
ok = rabbit_file:rename(OldQueueDir, NewQueueDir),
ok = ensure_queue_name_stub_file(NewQueueDir, QueueName);
false ->
- Msg = "Queue index directory '~s' not found for ~s~n",
+ Msg = "Queue index directory '~s' not found for ~s",
Args = [OldQueueDir, rabbit_misc:rs(QueueName)],
rabbit_log_upgrade:error(Msg, Args),
rabbit_log:error(Msg, Args)
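The rabbit_queue_index changes above replace the compile-time ?SEGMENT_ENTRY_COUNT with a per-vhost value read from rabbit_vhost:read_config/1 and cached in each worker's process dictionary. A self-contained sketch of the resulting arithmetic; the default of 16384 below simply restates the removed macro and the helper names are illustrative:

-module(segment_count_sketch).
-export([set_segment_entry_count/1,
         seq_id_to_seg_and_rel_seq_id/1,
         reconstruct_seq_id/2]).

set_segment_entry_count(Count) when is_integer(Count), Count > 0 ->
    put(segment_entry_count, Count),
    ok.

segment_entry_count() ->
    %% Falling back to the removed macro's value is an assumption made for
    %% this sketch; the patched module always calls put/2 during init/recover.
    case get(segment_entry_count) of
        undefined -> 16384;
        Count     -> Count
    end.

seq_id_to_seg_and_rel_seq_id(SeqId) ->
    Count = segment_entry_count(),
    {SeqId div Count, SeqId rem Count}.

reconstruct_seq_id(Seg, RelSeq) ->
    (Seg * segment_entry_count()) + RelSeq.

With a per-vhost count of 1024, for instance, seq_id_to_seg_and_rel_seq_id(2500) returns {2, 452} and reconstruct_seq_id(2, 452) gives 2500 back.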
diff --git a/deps/rabbit/src/rabbit_queue_location_client_local.erl b/deps/rabbit/src/rabbit_queue_location_client_local.erl
index 2df1608534..812dc4e9e0 100644
--- a/deps/rabbit/src/rabbit_queue_location_client_local.erl
+++ b/deps/rabbit/src/rabbit_queue_location_client_local.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_queue_location_client_local).
diff --git a/deps/rabbit/src/rabbit_queue_location_min_masters.erl b/deps/rabbit/src/rabbit_queue_location_min_masters.erl
index 6535f082fe..1147c03806 100644
--- a/deps/rabbit/src/rabbit_queue_location_min_masters.erl
+++ b/deps/rabbit/src/rabbit_queue_location_min_masters.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_queue_location_min_masters).
diff --git a/deps/rabbit/src/rabbit_queue_location_random.erl b/deps/rabbit/src/rabbit_queue_location_random.erl
index 7232fc6703..3ec8c6d585 100644
--- a/deps/rabbit/src/rabbit_queue_location_random.erl
+++ b/deps/rabbit/src/rabbit_queue_location_random.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_queue_location_random).
diff --git a/deps/rabbit/src/rabbit_queue_location_validator.erl b/deps/rabbit/src/rabbit_queue_location_validator.erl
index bf41be622c..0c55b1bc9e 100644
--- a/deps/rabbit/src/rabbit_queue_location_validator.erl
+++ b/deps/rabbit/src/rabbit_queue_location_validator.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_queue_location_validator).
diff --git a/deps/rabbit/src/rabbit_queue_master_location_misc.erl b/deps/rabbit/src/rabbit_queue_master_location_misc.erl
index 37698e184f..e58890ede1 100644
--- a/deps/rabbit/src/rabbit_queue_master_location_misc.erl
+++ b/deps/rabbit/src/rabbit_queue_master_location_misc.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_queue_master_location_misc).
diff --git a/deps/rabbit/src/rabbit_queue_master_locator.erl b/deps/rabbit/src/rabbit_queue_master_locator.erl
index ff2e30f587..dbe0fc7401 100644
--- a/deps/rabbit/src/rabbit_queue_master_locator.erl
+++ b/deps/rabbit/src/rabbit_queue_master_locator.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_queue_master_locator).
diff --git a/deps/rabbit/src/rabbit_queue_type.erl b/deps/rabbit/src/rabbit_queue_type.erl
index 4e59b6a7c0..128d0a2bd0 100644
--- a/deps/rabbit/src/rabbit_queue_type.erl
+++ b/deps/rabbit/src/rabbit_queue_type.erl
@@ -33,10 +33,10 @@
dequeue/5,
fold_state/3,
is_policy_applicable/2,
- is_server_named_allowed/1
+ is_server_named_allowed/1,
+ notify_decorators/1
]).
-%% gah what is a good identity of a classic queue including all replicas
-type queue_name() :: rabbit_types:r(queue).
-type queue_ref() :: queue_name() | atom().
-type queue_state() :: term().
@@ -140,7 +140,7 @@
%% stateful
%% initialise and return a queue type specific session context
--callback init(amqqueue:amqqueue()) -> queue_state().
+-callback init(amqqueue:amqqueue()) -> {ok, queue_state()} | {error, Reason :: term()}.
-callback close(queue_state()) -> ok.
%% update the queue type state from amqqrecord
@@ -199,6 +199,9 @@
-callback capabilities() ->
#{atom() := term()}.
+-callback notify_decorators(amqqueue:amqqueue()) ->
+ ok.
+
%% TODO: this should be controlled by a registry that is populated on boot
discover(<<"quorum">>) ->
rabbit_quorum_queue;
@@ -218,7 +221,8 @@ is_enabled(Type) ->
{'new' | 'existing' | 'owner_died', amqqueue:amqqueue()} |
{'absent', amqqueue:amqqueue(), absent_reason()} |
{protocol_error, Type :: atom(), Reason :: string(), Args :: term()}.
-declare(Q, Node) ->
+declare(Q0, Node) ->
+ Q = rabbit_queue_decorator:set(rabbit_policy:set(Q0)),
Mod = amqqueue:get_type(Q),
Mod:declare(Q, Node).
@@ -299,15 +303,19 @@ i_down(_K, _Q, _DownReason) -> ''.
is_policy_applicable(Q, Policy) ->
Mod = amqqueue:get_type(Q),
Capabilities = Mod:capabilities(),
- Applicable = maps:get(policies, Capabilities, []),
+ NotApplicable = maps:get(unsupported_policies, Capabilities, []),
lists:all(fun({P, _}) ->
- lists:member(P, Applicable)
+ not lists:member(P, NotApplicable)
end, Policy).
is_server_named_allowed(Type) ->
Capabilities = Type:capabilities(),
maps:get(server_named, Capabilities, false).
+notify_decorators(Q) ->
+ Mod = amqqueue:get_type(Q),
+ Mod:notify_decorators(Q).
+
-spec init() -> state().
init() ->
#?STATE{}.
@@ -376,12 +384,14 @@ recover(VHost, Qs) ->
rabbit_quorum_queue => [],
rabbit_stream_queue => []}, Qs),
maps:fold(fun (Mod, Queues, {R0, F0}) ->
- {R, F} = Mod:recover(VHost, Queues),
+ {Taken, {R, F}} = timer:tc(Mod, recover, [VHost, Queues]),
+ rabbit_log:info("Recovering ~b queues of type ~s took ~bms",
+ [length(Queues), Mod, Taken div 1000]),
{R0 ++ R, F0 ++ F}
end, {[], []}, ByType).
-spec handle_down(pid(), term(), state()) ->
- {ok, state(), actions()} | {eol, queue_ref()} | {error, term()}.
+ {ok, state(), actions()} | {eol, state(), queue_ref()} | {error, term()}.
handle_down(Pid, Info, #?STATE{monitor_registry = Reg0} = State0) ->
%% lookup queue ref in monitor registry
case maps:take(Pid, Reg0) of
@@ -390,7 +400,7 @@ handle_down(Pid, Info, #?STATE{monitor_registry = Reg0} = State0) ->
{ok, State, Actions} ->
{ok, State#?STATE{monitor_registry = Reg}, Actions};
eol ->
- {eol, QRef};
+ {eol, State0#?STATE{monitor_registry = Reg}, QRef};
Err ->
Err
end;
@@ -432,14 +442,23 @@ module(QRef, Ctxs) ->
-spec deliver([amqqueue:amqqueue()], Delivery :: term(),
stateless | state()) ->
- {ok, state(), actions()}.
-deliver(Qs, Delivery, stateless) ->
+ {ok, state(), actions()} | {error, Reason :: term()}.
+deliver(Qs, Delivery, State) ->
+ try
+ deliver0(Qs, Delivery, State)
+ catch
+ exit:Reason ->
+ {error, Reason}
+ end.
+
+deliver0(Qs, Delivery, stateless) ->
_ = lists:map(fun(Q) ->
Mod = amqqueue:get_type(Q),
_ = Mod:deliver([{Q, stateless}], Delivery)
end, Qs),
{ok, stateless, []};
-deliver(Qs, Delivery, #?STATE{} = State0) ->
+deliver0(Qs, Delivery, #?STATE{} = State0) ->
+ %% TODO: optimise single queue case?
%% sort by queue type - then dispatch each group
ByType = lists:foldl(
fun (Q, Acc) ->
@@ -457,7 +476,7 @@ deliver(Qs, Delivery, #?STATE{} = State0) ->
end, {[], []}, ByType),
State = lists:foldl(
fun({Q, S}, Acc) ->
- Ctx = get_ctx(Q, Acc),
+ Ctx = get_ctx_with(Q, Acc, S),
set_ctx(qref(Q), Ctx#ctx{state = S}, Acc)
end, State0, Xs),
return_ok(State, Actions).
@@ -511,21 +530,37 @@ dequeue(Q, NoAck, LimiterPid, CTag, Ctxs) ->
Err
end.
-get_ctx(Q, #?STATE{ctxs = Contexts}) when ?is_amqqueue(Q) ->
+get_ctx(QOrQref, State) ->
+ get_ctx_with(QOrQref, State, undefined).
+
+get_ctx_with(Q, #?STATE{ctxs = Contexts}, InitState)
+ when ?is_amqqueue(Q) ->
Ref = qref(Q),
case Contexts of
#{Ref := #ctx{module = Mod,
state = State} = Ctx} ->
Ctx#ctx{state = Mod:update(Q, State)};
- _ ->
- %% not found - initialize
+ _ when InitState == undefined ->
+ %% not found and no initial state passed - initialize new state
+ Mod = amqqueue:get_type(Q),
+ Name = amqqueue:get_name(Q),
+ case Mod:init(Q) of
+ {error, Reason} ->
+ exit({Reason, Ref});
+ {ok, QState} ->
+ #ctx{module = Mod,
+ name = Name,
+ state = QState}
+ end;
+ _ ->
+ %% not found - initialize with supplied initial state
Mod = amqqueue:get_type(Q),
Name = amqqueue:get_name(Q),
#ctx{module = Mod,
name = Name,
- state = Mod:init(Q)}
+ state = InitState}
end;
-get_ctx(QRef, Contexts) when ?QREF(QRef) ->
+get_ctx_with(QRef, Contexts, undefined) when ?QREF(QRef) ->
case get_ctx(QRef, Contexts, undefined) of
undefined ->
exit({queue_context_not_found, QRef});
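is_policy_applicable/2 above flips from an allow-list (policies) to a deny-list (unsupported_policies) in each queue type's capabilities/0 map. A sketch of the check with the capability map passed in directly so it stays self-contained; the example keys further below are made up:

-module(policy_check_sketch).
-export([is_policy_applicable/2]).

%% Policy is a list of {PolicyName, Value} pairs; a policy definition is
%% applicable only if none of its keys appear on the queue type's deny-list.
is_policy_applicable(Capabilities, Policy) ->
    NotApplicable = maps:get(unsupported_policies, Capabilities, []),
    lists:all(fun({Name, _Value}) ->
                      not lists:member(Name, NotApplicable)
              end, Policy).

For example, with #{unsupported_policies => [<<"ha-mode">>]} a [{<<"max-length">>, 100}] definition passes while [{<<"ha-mode">>, <<"all">>}] does not; policy keys the queue type has never heard of are applicable by default, which is the point of inverting the list.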
diff --git a/deps/rabbit/src/rabbit_queue_type_util.erl b/deps/rabbit/src/rabbit_queue_type_util.erl
index e417cb13c4..6edcfa0c44 100644
--- a/deps/rabbit/src/rabbit_queue_type_util.erl
+++ b/deps/rabbit/src/rabbit_queue_type_util.erl
@@ -1,17 +1,8 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at https://www.mozilla.org/MPL/
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2018-2020 Pivotal Software, Inc. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_queue_type_util).
@@ -23,7 +14,7 @@
check_non_durable/1,
run_checks/2]).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-include("amqqueue.hrl").
args_policy_lookup(Name, Resolve, Q) when ?is_amqqueue(Q) ->
@@ -36,11 +27,18 @@ args_policy_lookup(Name, Resolve, Q) when ?is_amqqueue(Q) ->
{PolVal, {_Type, ArgVal}} -> Resolve(PolVal, ArgVal)
end.
-%% TODO escape hack
-qname_to_internal_name(#resource{virtual_host = <<"/">>, name = Name}) ->
- erlang:binary_to_atom(<<"%2F_", Name/binary>>, utf8);
-qname_to_internal_name(#resource{virtual_host = VHost, name = Name}) ->
- erlang:binary_to_atom(<<VHost/binary, "_", Name/binary>>, utf8).
+qname_to_internal_name(QName) ->
+ case name_concat(QName) of
+ Name when byte_size(Name) =< 255 ->
+ {ok, erlang:binary_to_atom(Name)};
+ Name ->
+ {error, {too_long, Name}}
+ end.
+
+name_concat(#resource{virtual_host = <<"/">>, name = Name}) ->
+ <<"%2F_", Name/binary>>;
+name_concat(#resource{virtual_host = VHost, name = Name}) ->
+ <<VHost/binary, "_", Name/binary>>.
check_auto_delete(Q) when ?amqqueue_is_auto_delete(Q) ->
Name = amqqueue:get_name(Q),
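qname_to_internal_name/1 above now fails gracefully instead of creating an over-long atom, since Erlang atom names are capped at 255 characters; rabbit_quorum_queue, later in this diff, reacts to {error, {too_long, _}} by asking ra:new_uid/1 for a generated name. A standalone sketch of the same guard (module and function names here are illustrative):

-module(qname_sketch).
-export([internal_name/2]).

internal_name(VHost, QName) when is_binary(VHost), is_binary(QName) ->
    Concat = case VHost of
                 <<"/">> -> <<"%2F_", QName/binary>>;   %% default vhost is percent-encoded
                 _       -> <<VHost/binary, "_", QName/binary>>
             end,
    case byte_size(Concat) =< 255 of
        true  -> {ok, binary_to_atom(Concat, utf8)};
        false -> {error, {too_long, Concat}}
    end.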
diff --git a/deps/rabbit/src/rabbit_quorum_memory_manager.erl b/deps/rabbit/src/rabbit_quorum_memory_manager.erl
index 94c2ef6b4b..6ff14090f7 100644
--- a/deps/rabbit/src/rabbit_quorum_memory_manager.erl
+++ b/deps/rabbit/src/rabbit_quorum_memory_manager.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2018-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2018-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_quorum_memory_manager).
@@ -60,7 +60,7 @@ code_change(_OldVsn, State, _Extra) ->
{ok, State}.
force_roll_over(State) ->
- ra_log_wal:force_roll_over(ra_log_wal),
+ rabbit_quorum_queue:wal_force_roll_over(node()),
State#state{last_roll_over = erlang:system_time(millisecond)}.
interval() ->
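The one-line change above routes WAL roll-overs through rabbit_quorum_queue, so knowledge of which ra system (and which registered WAL process) backs quorum queues stays in a single module. The shape of the delegated call, with the WAL name taken from the rabbit_quorum_queue hunk further down:

-module(wal_rollover_sketch).
-export([force_roll_over/1]).

%% Mirrors the wal_force_roll_over/1 added to rabbit_quorum_queue in this
%% diff; ?RA_WAL_NAME there expands to ra_log_wal.
force_roll_over(Node) when is_atom(Node) ->
    ra_log_wal:force_roll_over({ra_log_wal, Node}).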
diff --git a/deps/rabbit/src/rabbit_quorum_queue.erl b/deps/rabbit/src/rabbit_quorum_queue.erl
index 95cc93d728..a4c6d5dd5f 100644
--- a/deps/rabbit/src/rabbit_quorum_queue.erl
+++ b/deps/rabbit/src/rabbit_quorum_queue.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2018-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2018-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_quorum_queue).
@@ -13,7 +13,14 @@
close/1,
update/2,
handle_event/2]).
--export([is_recoverable/1, recover/2, stop/1, delete/4, delete_immediately/2]).
+-export([is_recoverable/1,
+ recover/2,
+ stop/1,
+ start_server/1,
+ restart_server/1,
+ stop_server/1,
+ delete/4,
+ delete_immediately/2]).
-export([state_info/1, info/2, stat/1, infos/1]).
-export([settle/4, dequeue/4, consume/3, cancel/5]).
-export([credit/4]).
@@ -38,16 +45,23 @@
-export([shrink_all/1,
grow/4]).
-export([transfer_leadership/2, get_replicas/1, queue_length/1]).
--export([file_handle_leader_reservation/1, file_handle_other_reservation/0]).
+-export([file_handle_leader_reservation/1,
+ file_handle_other_reservation/0]).
-export([file_handle_release_reservation/0]).
--export([list_with_minimum_quorum/0, list_with_minimum_quorum_for_cli/0,
- filter_quorum_critical/1, filter_quorum_critical/2,
+-export([list_with_minimum_quorum/0,
+ list_with_minimum_quorum_for_cli/0,
+ filter_quorum_critical/1,
+ filter_quorum_critical/2,
all_replica_states/0]).
-export([capabilities/0]).
-export([repair_amqqueue_nodes/1,
repair_amqqueue_nodes/2
]).
--export([reclaim_memory/2]).
+-export([reclaim_memory/2,
+ wal_force_roll_over/1]).
+-export([notify_decorators/1,
+ notify_decorators/3,
+ spawn_notify_decorators/3]).
-export([is_enabled/0,
declare/2]).
@@ -56,12 +70,15 @@
qname_to_internal_name/1]).
-include_lib("stdlib/include/qlc.hrl").
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-include("amqqueue.hrl").
-type msg_id() :: non_neg_integer().
-type qmsg() :: {rabbit_types:r('queue'), pid(), msg_id(), boolean(), rabbit_types:message()}.
+-define(RA_SYSTEM, quorum_queues).
+-define(RA_WAL_NAME, ra_log_wal).
+
-define(STATISTICS_KEYS,
[policy,
operator_policy,
@@ -96,7 +113,7 @@ is_enabled() ->
%%----------------------------------------------------------------------------
--spec init(amqqueue:amqqueue()) -> rabbit_fifo_client:state().
+-spec init(amqqueue:amqqueue()) -> {ok, rabbit_fifo_client:state()}.
init(Q) when ?is_amqqueue(Q) ->
{ok, SoftLimit} = application:get_env(rabbit, quorum_commands_soft_limit),
%% This lookup could potentially return an {error, not_found}, but we do not
@@ -107,9 +124,9 @@ init(Q) when ?is_amqqueue(Q) ->
%% Ensure the leader is listed first
Servers0 = [{Name, N} || N <- Nodes],
Servers = [Leader | lists:delete(Leader, Servers0)],
- rabbit_fifo_client:init(QName, Servers, SoftLimit,
- fun() -> credit_flow:block(Name) end,
- fun() -> credit_flow:unblock(Name), ok end).
+ {ok, rabbit_fifo_client:init(QName, Servers, SoftLimit,
+ fun() -> credit_flow:block(Name) end,
+ fun() -> credit_flow:unblock(Name), ok end)}.
-spec close(rabbit_fifo_client:state()) -> ok.
close(_State) ->
@@ -152,28 +169,42 @@ start_cluster(Q) ->
Opts = amqqueue:get_options(Q),
ActingUser = maps:get(user, Opts, ?UNKNOWN_USER),
QuorumSize = get_default_quorum_initial_group_size(Arguments),
- RaName = qname_to_internal_name(QName),
+ RaName = case qname_to_internal_name(QName) of
+ {ok, A} ->
+ A;
+ {error, {too_long, N}} ->
+ rabbit_data_coercion:to_atom(ra:new_uid(N))
+ end,
Id = {RaName, node()},
- Nodes = select_quorum_nodes(QuorumSize, rabbit_mnesia:cluster_nodes(all)),
+ Nodes = select_quorum_nodes(QuorumSize, rabbit_nodes:all()),
NewQ0 = amqqueue:set_pid(Q, Id),
NewQ1 = amqqueue:set_type_state(NewQ0, #{nodes => Nodes}),
+
+ rabbit_log:debug("Will start up to ~w replicas for quorum queue ~s",
+ [QuorumSize, rabbit_misc:rs(QName)]),
case rabbit_amqqueue:internal_declare(NewQ1, false) of
{created, NewQ} ->
- TickTimeout = application:get_env(rabbit, quorum_tick_interval, ?TICK_TIMEOUT),
+ TickTimeout = application:get_env(rabbit, quorum_tick_interval,
+ ?TICK_TIMEOUT),
RaConfs = [make_ra_conf(NewQ, ServerId, TickTimeout)
|| ServerId <- members(NewQ)],
- case ra:start_cluster(RaConfs) of
+ case ra:start_cluster(?RA_SYSTEM, RaConfs) of
{ok, _, _} ->
+ %% ensure the latest config is evaluated properly
+ %% even when running the machine version from 0
+ %% as earlier versions may not understand all the config
+ %% keys
%% TODO: handle error - what should be done if the
%% config cannot be updated
ok = rabbit_fifo_client:update_machine_state(Id,
ra_machine_config(NewQ)),
- %% force a policy change to ensure the latest config is
- %% updated even when running the machine version from 0
+ notify_decorators(QName, startup),
rabbit_event:notify(queue_created,
[{name, QName},
{durable, Durable},
{auto_delete, AutoDelete},
+ {exclusive, false},
+ {type, amqqueue:get_type(Q)},
{arguments, Arguments},
{user_who_performed_action,
ActingUser}]),
@@ -344,9 +375,15 @@ filter_quorum_critical(Queues, ReplicaStates) ->
end, Queues).
capabilities() ->
- #{policies => [<<"max-length">>, <<"max-length-bytes">>, <<"overflow">>,
- <<"expires">>, <<"max-in-memory-length">>, <<"max-in-memory-bytes">>,
- <<"delivery-limit">>, <<"dead-letter-exchange">>, <<"dead-letter-routing-key">>],
+ #{unsupported_policies =>
+ [ %% Classic policies
+ <<"message-ttl">>, <<"max-priority">>, <<"queue-mode">>,
+ <<"single-active-consumer">>, <<"ha-mode">>, <<"ha-params">>,
+ <<"ha-sync-mode">>, <<"ha-promote-on-shutdown">>, <<"ha-promote-on-failure">>,
+ <<"queue-master-locator">>,
+ %% Stream policies
+ <<"max-age">>, <<"stream-max-segment-size-bytes">>,
+ <<"queue-leader-locator">>, <<"initial-cluster-size">>],
queue_arguments => [<<"x-expires">>, <<"x-dead-letter-exchange">>,
<<"x-dead-letter-routing-key">>, <<"x-max-length">>,
<<"x-max-length-bytes">>, <<"x-max-in-memory-length">>,
@@ -367,13 +404,20 @@ spawn_deleter(QName) ->
delete(Q, false, false, <<"expired">>)
end).
+spawn_notify_decorators(QName, Fun, Args) ->
+ spawn(fun () ->
+ notify_decorators(QName, Fun, Args)
+ end).
+
handle_tick(QName,
{Name, MR, MU, M, C, MsgBytesReady, MsgBytesUnack},
Nodes) ->
%% this makes calls to remote processes so cannot be run inside the
%% ra server
Self = self(),
- _ = spawn(fun() ->
+ _ = spawn(
+ fun() ->
+ try
R = reductions(Name),
rabbit_core_metrics:queue_stats(QName, MR, MU, M, R),
Util = case C of
@@ -381,6 +425,7 @@ handle_tick(QName,
_ -> rabbit_fifo:usage(Name)
end,
Infos = [{consumers, C},
+ {consumer_capacity, Util},
{consumer_utilisation, Util},
{message_bytes_ready, MsgBytesReady},
{message_bytes_unacknowledged, MsgBytesUnack},
@@ -397,12 +442,12 @@ handle_tick(QName,
{messages_unacknowledged, MU},
{reductions, R}]),
ok = repair_leader_record(QName, Self),
- ExpectedNodes = rabbit_mnesia:cluster_nodes(all),
+ ExpectedNodes = rabbit_nodes:all(),
case Nodes -- ExpectedNodes of
[] ->
ok;
Stale ->
- rabbit_log:info("~s: stale nodes detected. Purging ~w~n",
+ rabbit_log:info("~s: stale nodes detected. Purging ~w",
[rabbit_misc:rs(QName), Stale]),
%% pipeline purge command
{ok, Q} = rabbit_amqqueue:lookup(QName),
@@ -411,7 +456,11 @@ handle_tick(QName,
ok
end
- end),
+ catch
+ _:_ ->
+ ok
+ end
+ end),
ok.
repair_leader_record(QName, Self) ->
@@ -485,7 +534,7 @@ recover(_Vhost, Queues) ->
QName = amqqueue:get_name(Q0),
Nodes = get_nodes(Q0),
Formatter = {?MODULE, format_ra_event, [QName]},
- Res = case ra:restart_server({Name, node()},
+ Res = case ra:restart_server(?RA_SYSTEM, {Name, node()},
#{ra_event_formatter => Formatter}) of
ok ->
% queue was restarted, good
@@ -497,7 +546,8 @@ recover(_Vhost, Queues) ->
% so needs to be started from scratch.
Machine = ra_machine(Q0),
RaNodes = [{Name, Node} || Node <- Nodes],
- case ra:start_server(Name, {Name, node()}, Machine, RaNodes) of
+ case ra:start_server(?RA_SYSTEM, Name, {Name, node()},
+ Machine, RaNodes) of
ok -> ok;
Err2 ->
rabbit_log:warning("recover: quorum queue ~w could not"
@@ -519,7 +569,8 @@ recover(_Vhost, Queues) ->
%% present in the rabbit_queue table and not just in
%% rabbit_durable_queue
%% So many code paths are dependent on this.
- {ok, Q} = rabbit_amqqueue:ensure_rabbit_queue_record_is_initialized(Q0),
+ ok = rabbit_amqqueue:store_queue_ram_dirty(Q0),
+ Q = Q0,
case Res of
ok ->
{[Q | R0], F0};
@@ -532,10 +583,22 @@ recover(_Vhost, Queues) ->
stop(VHost) ->
_ = [begin
Pid = amqqueue:get_pid(Q),
- ra:stop_server(Pid)
+ ra:stop_server(?RA_SYSTEM, Pid)
end || Q <- find_quorum_queues(VHost)],
ok.
+-spec stop_server({atom(), node()}) -> ok | {error, term()}.
+stop_server({_, _} = Ref) ->
+ ra:stop_server(?RA_SYSTEM, Ref).
+
+-spec start_server(map()) -> ok | {error, term()}.
+start_server(Conf) when is_map(Conf) ->
+ ra:start_server(?RA_SYSTEM, Conf).
+
+-spec restart_server({atom(), node()}) -> ok | {error, term()}.
+restart_server({_, _} = Ref) ->
+ ra:restart_server(?RA_SYSTEM, Ref).
+
-spec delete(amqqueue:amqqueue(),
boolean(), boolean(),
rabbit_types:username()) ->
@@ -566,6 +629,7 @@ delete(Q, _IfUnused, _IfEmpty, ActingUser) when ?amqqueue_is_quorum(Q) ->
after Timeout ->
ok = force_delete_queue(Servers)
end,
+ notify_decorators(QName, shutdown),
ok = delete_queue_data(QName, ActingUser),
rpc:call(LeaderNode, rabbit_core_metrics, queue_deleted, [QName],
?RPC_TIMEOUT),
@@ -587,6 +651,7 @@ delete(Q, _IfUnused, _IfEmpty, ActingUser) when ?amqqueue_is_quorum(Q) ->
" Attempting force delete.",
[rabbit_misc:rs(QName), Errs]),
ok = force_delete_queue(Servers),
+ notify_decorators(QName, shutdown),
delete_queue_data(QName, ActingUser),
{ok, ReadyMsgs}
end
@@ -594,12 +659,12 @@ delete(Q, _IfUnused, _IfEmpty, ActingUser) when ?amqqueue_is_quorum(Q) ->
force_delete_queue(Servers) ->
[begin
- case catch(ra:force_delete_server(S)) of
+ case catch(ra:force_delete_server(?RA_SYSTEM, S)) of
ok -> ok;
Err ->
rabbit_log:warning(
"Force delete of ~w failed with: ~w"
- "This may require manual data clean up~n",
+ "This may require manual data clean up",
[S, Err]),
ok
end
@@ -646,7 +711,7 @@ dequeue(NoAck, _LimiterPid, CTag0, QState0) ->
rabbit_queue_type:consume_spec(),
rabbit_fifo_client:state()) ->
{ok, rabbit_fifo_client:state(), rabbit_queue_type:actions()} |
- {error, global_qos_not_supported_for_queue_type}.
+ {error, global_qos_not_supported_for_queue_type | timeout}.
consume(Q, #{limiter_active := true}, _State)
when ?amqqueue_is_quorum(Q) ->
{error, global_qos_not_supported_for_queue_type};
@@ -662,7 +727,6 @@ consume(Q, Spec, QState0) when ?amqqueue_is_quorum(Q) ->
%% TODO: validate consumer arguments
%% currently quorum queues do not support any arguments
QName = amqqueue:get_name(Q),
- QPid = amqqueue:get_pid(Q),
maybe_send_reply(ChPid, OkMsg),
ConsumerTag = quorum_ctag(ConsumerTag0),
%% A prefetch count of 0 means no limitation,
@@ -694,36 +758,43 @@ consume(Q, Spec, QState0) when ?amqqueue_is_quorum(Q) ->
QState1);
_ -> QState1
end,
- case ra:local_query(QPid,
- fun rabbit_fifo:query_single_active_consumer/1) of
- {ok, {_, SacResult}, _} ->
- SingleActiveConsumerOn = single_active_consumer_on(Q),
- {IsSingleActiveConsumer, ActivityStatus} = case {SingleActiveConsumerOn, SacResult} of
- {false, _} ->
- {true, up};
- {true, {value, {ConsumerTag, ChPid}}} ->
- {true, single_active};
- _ ->
- {false, waiting}
- end,
+ case single_active_consumer_on(Q) of
+ true ->
+ %% get the leader from state
+ case rabbit_fifo_client:query_single_active_consumer(QState) of
+ {ok, SacResult} ->
+ ActivityStatus = case SacResult of
+ {value, {ConsumerTag, ChPid}} ->
+ single_active;
+ _ ->
+ waiting
+ end,
+ rabbit_core_metrics:consumer_created(
+ ChPid, ConsumerTag, ExclusiveConsume,
+ AckRequired, QName,
+ ConsumerPrefetchCount, ActivityStatus == single_active, %% Active
+ ActivityStatus, Args),
+ emit_consumer_created(ChPid, ConsumerTag, ExclusiveConsume,
+ AckRequired, QName, Prefetch,
+ Args, none, ActingUser),
+ {ok, QState, []};
+ {error, Error} ->
+ Error;
+ {timeout, _} ->
+ {error, timeout}
+ end;
+ false ->
rabbit_core_metrics:consumer_created(
- ChPid, ConsumerTag, ExclusiveConsume,
- AckRequired, QName,
- ConsumerPrefetchCount, IsSingleActiveConsumer,
- ActivityStatus, Args),
+ ChPid, ConsumerTag, ExclusiveConsume,
+ AckRequired, QName,
+ ConsumerPrefetchCount, true, %% Active
+ up, Args),
emit_consumer_created(ChPid, ConsumerTag, ExclusiveConsume,
- AckRequired, QName, Prefetch,
- Args, none, ActingUser),
- {ok, QState, []};
- {error, Error} ->
- Error;
- {timeout, _} ->
- {error, timeout}
+ AckRequired, QName, Prefetch,
+ Args, none, ActingUser),
+ {ok, QState, []}
end.
-% -spec basic_cancel(rabbit_types:ctag(), ChPid :: pid(), any(), rabbit_fifo_client:state()) ->
-% {'ok', rabbit_fifo_client:state()}.
-
cancel(_Q, ConsumerTag, OkMsg, _ActingUser, State) ->
maybe_send_reply(self(), OkMsg),
rabbit_fifo_client:cancel_checkout(quorum_ctag(ConsumerTag), State).
@@ -768,7 +839,8 @@ deliver(true, Delivery, QState0) ->
rabbit_fifo_client:enqueue(Delivery#delivery.msg_seq_no,
Delivery#delivery.message, QState0).
-deliver(QSs, #delivery{confirm = Confirm} = Delivery) ->
+deliver(QSs, #delivery{confirm = Confirm} = Delivery0) ->
+ Delivery = clean_delivery(Delivery0),
lists:foldl(
fun({Q, stateless}, {Qs, Actions}) ->
QRef = amqqueue:get_pid(Q),
@@ -840,8 +912,8 @@ stat(Q, Timeout) when ?is_amqqueue(Q) ->
-spec purge(amqqueue:amqqueue()) ->
{ok, non_neg_integer()}.
purge(Q) when ?is_amqqueue(Q) ->
- Node = amqqueue:get_pid(Q),
- rabbit_fifo_client:purge(Node).
+ Server = amqqueue:get_pid(Q),
+ rabbit_fifo_client:purge(Server).
requeue(ConsumerTag, MsgIds, QState) ->
rabbit_fifo_client:return(quorum_ctag(ConsumerTag), MsgIds, QState).
@@ -854,19 +926,19 @@ cleanup_data_dir() ->
|| Q <- rabbit_amqqueue:list_by_type(?MODULE),
lists:member(node(), get_nodes(Q))],
NoQQClusters = rabbit_ra_registry:list_not_quorum_clusters(),
- Registered = ra_directory:list_registered(),
+ Registered = ra_directory:list_registered(?RA_SYSTEM),
Running = Names ++ NoQQClusters,
_ = [maybe_delete_data_dir(UId) || {Name, UId} <- Registered,
not lists:member(Name, Running)],
ok.
maybe_delete_data_dir(UId) ->
- Dir = ra_env:server_data_dir(UId),
+ Dir = ra_env:server_data_dir(?RA_SYSTEM, UId),
{ok, Config} = ra_log:read_config(Dir),
case maps:get(machine, Config) of
{module, rabbit_fifo, _} ->
ra_lib:recursive_delete(Dir),
- ra_directory:unregister_name(UId);
+ ra_directory:unregister_name(?RA_SYSTEM, UId);
_ ->
ok
end.
@@ -893,11 +965,11 @@ cluster_state(Name) ->
status(Vhost, QueueName) ->
%% Handle not found queues
QName = #resource{virtual_host = Vhost, name = QueueName, kind = queue},
- RName = qname_to_internal_name(QName),
case rabbit_amqqueue:lookup(QName) of
{ok, Q} when ?amqqueue_is_classic(Q) ->
{error, classic_queue_not_supported};
{ok, Q} when ?amqqueue_is_quorum(Q) ->
+ {RName, _} = amqqueue:get_pid(Q),
Nodes = get_nodes(Q),
[begin
case get_sys_status({RName, N}) of
@@ -976,7 +1048,7 @@ add_member(Q, Node, Timeout) when ?amqqueue_is_quorum(Q) ->
TickTimeout = application:get_env(rabbit, quorum_tick_interval,
?TICK_TIMEOUT),
Conf = make_ra_conf(Q, ServerId, TickTimeout),
- case ra:start_server(Conf) of
+ case ra:start_server(?RA_SYSTEM, Conf) of
ok ->
case ra:add_member(Members, ServerId, Timeout) of
{ok, _, Leader} ->
@@ -991,11 +1063,11 @@ add_member(Q, Node, Timeout) when ?amqqueue_is_quorum(Q) ->
fun() -> rabbit_amqqueue:update(QName, Fun) end),
ok;
{timeout, _} ->
- _ = ra:force_delete_server(ServerId),
+ _ = ra:force_delete_server(?RA_SYSTEM, ServerId),
_ = ra:remove_member(Members, ServerId),
{error, timeout};
E ->
- _ = ra:force_delete_server(ServerId),
+ _ = ra:force_delete_server(?RA_SYSTEM, ServerId),
E
end;
E ->
@@ -1042,7 +1114,7 @@ delete_member(Q, Node) when ?amqqueue_is_quorum(Q) ->
end,
rabbit_misc:execute_mnesia_transaction(
fun() -> rabbit_amqqueue:update(QName, Fun) end),
- case ra:force_delete_server(ServerId) of
+ case ra:force_delete_server(?RA_SYSTEM, ServerId) of
ok ->
ok;
{error, {badrpc, nodedown}} ->
@@ -1176,6 +1248,10 @@ reclaim_memory(Vhost, QueueName) ->
E
end.
+-spec wal_force_roll_over(node()) -> ok.
+wal_force_roll_over(Node) ->
+ ra_log_wal:force_roll_over({?RA_WAL_NAME, Node}).
+
%%----------------------------------------------------------------------------
dlx_mfa(Q) ->
DLX = init_dlx(args_policy_lookup(<<"dead-letter-exchange">>,
@@ -1444,8 +1520,10 @@ queue_name(RaFifoState) ->
get_default_quorum_initial_group_size(Arguments) ->
case rabbit_misc:table_lookup(Arguments, <<"x-quorum-initial-group-size">>) of
- undefined -> application:get_env(rabbit, default_quorum_initial_group_size);
- {_Type, Val} -> Val
+ undefined ->
+ application:get_env(rabbit, quorum_cluster_size, 3);
+ {_Type, Val} ->
+ Val
end.
select_quorum_nodes(Size, All) when length(All) =< Size ->
@@ -1521,3 +1599,46 @@ parse_credit_args(Default, Args) ->
undefined ->
{simple_prefetch, Default, false}
end.
+
+-spec notify_decorators(amqqueue:amqqueue()) -> 'ok'.
+notify_decorators(Q) when ?is_amqqueue(Q) ->
+ QName = amqqueue:get_name(Q),
+ QPid = amqqueue:get_pid(Q),
+ case ra:local_query(QPid, fun rabbit_fifo:query_notify_decorators_info/1) of
+ {ok, {_, {MaxActivePriority, IsEmpty}}, _} ->
+ notify_decorators(QName, consumer_state_changed,
+ [MaxActivePriority, IsEmpty]);
+ _ -> ok
+ end.
+
+notify_decorators(QName, Event) ->
+ notify_decorators(QName, Event, []).
+
+notify_decorators(QName, F, A) ->
+ %% Look up again in case policy and hence decorators have changed
+ case rabbit_amqqueue:lookup(QName) of
+ {ok, Q} ->
+ Ds = amqqueue:get_decorators(Q),
+ [ok = apply(M, F, [Q|A]) || M <- rabbit_queue_decorator:select(Ds)],
+ ok;
+ {error, not_found} ->
+ ok
+ end.
+
+%% remove any data that a quorum queue doesn't need
+clean_delivery(#delivery{message =
+ #basic_message{content = Content0} = Msg} = Delivery) ->
+ Content = case Content0 of
+ #content{properties = none} ->
+ Content0;
+ #content{protocol = none} ->
+ Content0;
+ #content{properties = Props,
+ protocol = Proto} ->
+ Content0#content{properties = none,
+ properties_bin = Proto:encode_properties(Props)}
+ end,
+
+ %% TODO: we could also consider clearing out the message id here
+ Delivery#delivery{message = Msg#basic_message{content = Content}}.
+
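Most of the rabbit_quorum_queue changes above thread the new ?RA_SYSTEM name through every ra call and add start_server/1, restart_server/1 and stop_server/1 wrappers. A hedged usage sketch of the restart wrapper, following the -spec in the hunk; the helper below is hypothetical and only shows that callers no longer need to know which ra system backs quorum queues:

-module(qq_restart_sketch).
-export([restart_local_member/1]).

%% Q is an amqqueue record; for a quorum queue, get_pid/1 returns the
%% {RaName, LeaderNode} server id, from which the local member id is built.
restart_local_member(Q) ->
    {RaName, _LeaderNode} = amqqueue:get_pid(Q),
    case rabbit_quorum_queue:restart_server({RaName, node()}) of
        ok              -> ok;
        {error, Reason} -> {error, Reason}
    end.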
diff --git a/deps/rabbit/src/rabbit_ra_registry.erl b/deps/rabbit/src/rabbit_ra_registry.erl
index b02d89eda5..b3a8e490dd 100644
--- a/deps/rabbit/src/rabbit_ra_registry.erl
+++ b/deps/rabbit/src/rabbit_ra_registry.erl
@@ -1,17 +1,8 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at https://www.mozilla.org/MPL/
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_ra_registry).
diff --git a/deps/rabbit/src/rabbit_reader.erl b/deps/rabbit/src/rabbit_reader.erl
index c91dbbc105..2a4383dd3a 100644
--- a/deps/rabbit/src/rabbit_reader.erl
+++ b/deps/rabbit/src/rabbit_reader.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_reader).
@@ -45,8 +45,8 @@
%%
%% Reader processes are special processes (in the OTP sense).
--include("rabbit_framing.hrl").
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit_framing.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-export([start_link/2, info_keys/0, info/1, info/2, force_event_refresh/2,
shutdown/2]).
@@ -272,19 +272,16 @@ server_capabilities(_) ->
%%--------------------------------------------------------------------------
socket_error(Reason) when is_atom(Reason) ->
- rabbit_log_connection:error("Error on AMQP connection ~p: ~s~n",
+ rabbit_log_connection:error("Error on AMQP connection ~p: ~s",
[self(), rabbit_misc:format_inet_error(Reason)]);
socket_error(Reason) ->
- Fmt = "Error on AMQP connection ~p:~n~p~n",
+ Fmt = "Error on AMQP connection ~p:~n~p",
Args = [self(), Reason],
case Reason of
%% The socket was closed while upgrading to SSL.
%% This is presumably a TCP healthcheck, so don't log
%% it unless specified otherwise.
{ssl_upgrade_error, closed} ->
- %% Lager sinks (rabbit_log_connection)
- %% are handled by the lager parse_transform.
- %% Hence have to define the loglevel as a function call.
rabbit_log_connection:debug(Fmt, Args);
_ ->
rabbit_log_connection:error(Fmt, Args)
@@ -365,11 +362,11 @@ start_connection(Parent, HelperSup, Deb, Sock) ->
%% connection was closed cleanly by the client
#v1{connection = #connection{user = #user{username = Username},
vhost = VHost}} ->
- rabbit_log_connection:info("closing AMQP connection ~p (~s, vhost: '~s', user: '~s')~n",
+ rabbit_log_connection:info("closing AMQP connection ~p (~s, vhost: '~s', user: '~s')",
[self(), dynamic_connection_name(Name), VHost, Username]);
%% just to be more defensive
_ ->
- rabbit_log_connection:info("closing AMQP connection ~p (~s)~n",
+ rabbit_log_connection:info("closing AMQP connection ~p (~s)",
[self(), dynamic_connection_name(Name)])
end
catch
@@ -419,36 +416,36 @@ log_connection_exception(Severity, Name, {heartbeat_timeout, TimeoutSec}) ->
%% Long line to avoid extra spaces and line breaks in log
log_connection_exception_with_severity(Severity,
"closing AMQP connection ~p (~s):~n"
- "missed heartbeats from client, timeout: ~ps~n",
+ "missed heartbeats from client, timeout: ~ps",
[self(), Name, TimeoutSec]);
log_connection_exception(Severity, Name, {connection_closed_abruptly,
#v1{connection = #connection{user = #user{username = Username},
vhost = VHost}}}) ->
log_connection_exception_with_severity(Severity,
- "closing AMQP connection ~p (~s, vhost: '~s', user: '~s'):~nclient unexpectedly closed TCP connection~n",
+ "closing AMQP connection ~p (~s, vhost: '~s', user: '~s'):~nclient unexpectedly closed TCP connection",
[self(), Name, VHost, Username]);
%% when client abruptly closes connection before connection.open/authentication/authorization
%% succeeded, don't log username and vhost as 'none'
log_connection_exception(Severity, Name, {connection_closed_abruptly, _}) ->
log_connection_exception_with_severity(Severity,
- "closing AMQP connection ~p (~s):~nclient unexpectedly closed TCP connection~n",
+ "closing AMQP connection ~p (~s):~nclient unexpectedly closed TCP connection",
[self(), Name]);
%% failed connection.tune negotiations
log_connection_exception(Severity, Name, {handshake_error, tuning, _Channel,
{exit, #amqp_error{explanation = Explanation},
_Method, _Stacktrace}}) ->
log_connection_exception_with_severity(Severity,
- "closing AMQP connection ~p (~s):~nfailed to negotiate connection parameters: ~s~n",
+ "closing AMQP connection ~p (~s):~nfailed to negotiate connection parameters: ~s",
[self(), Name, Explanation]);
%% old exception structure
log_connection_exception(Severity, Name, connection_closed_abruptly) ->
log_connection_exception_with_severity(Severity,
"closing AMQP connection ~p (~s):~n"
- "client unexpectedly closed TCP connection~n",
+ "client unexpectedly closed TCP connection",
[self(), Name]);
log_connection_exception(Severity, Name, Ex) ->
log_connection_exception_with_severity(Severity,
- "closing AMQP connection ~p (~s):~n~p~n",
+ "closing AMQP connection ~p (~s):~n~p",
[self(), Name, Ex]).
log_connection_exception_with_severity(Severity, Fmt, Args) ->
@@ -508,7 +505,7 @@ mainloop(Deb, Buf, BufLen, State = #v1{sock = Sock,
%%
%% The goal is to not log TCP healthchecks (a connection
%% with no data received) unless specified otherwise.
- Fmt = "accepting AMQP connection ~p (~s)~n",
+ Fmt = "accepting AMQP connection ~p (~s)",
Args = [self(), ConnName],
case Recv of
closed -> rabbit_log_connection:debug(Fmt, Args);
@@ -756,7 +753,7 @@ wait_for_channel_termination(N, TimerRef,
rabbit_log_connection:error(
"Error on AMQP connection ~p (~s, vhost: '~s',"
" user: '~s', state: ~p), channel ~p:"
- "error while terminating:~n~p~n",
+ "error while terminating:~n~p",
[self(), ConnName, VHost, User#user.username,
CS, Channel, Reason]),
handle_uncontrolled_channel_close(ChPid),
@@ -797,7 +794,7 @@ log_hard_error(#v1{connection_state = CS,
vhost = VHost}}, Channel, Reason) ->
rabbit_log_connection:error(
"Error on AMQP connection ~p (~s, vhost: '~s',"
- " user: '~s', state: ~p), channel ~p:~n ~s~n",
+ " user: '~s', state: ~p), channel ~p:~n ~s",
[self(), ConnName, VHost, User#user.username, CS, Channel, format_hard_error(Reason)]).
handle_exception(State = #v1{connection_state = closed}, Channel, Reason) ->
@@ -816,7 +813,7 @@ handle_exception(State = #v1{connection = #connection{protocol = Protocol,
Channel, Reason = #amqp_error{name = access_refused,
explanation = ErrMsg}) ->
rabbit_log_connection:error(
- "Error on AMQP connection ~p (~s, state: ~p):~n~s~n",
+ "Error on AMQP connection ~p (~s, state: ~p):~n~s",
[self(), ConnName, starting, ErrMsg]),
%% respect authentication failure notification capability
case rabbit_misc:table_lookup(Capabilities,
@@ -835,7 +832,7 @@ handle_exception(State = #v1{connection = #connection{protocol = Protocol,
Channel, Reason = #amqp_error{name = not_allowed,
explanation = ErrMsg}) ->
rabbit_log_connection:error(
- "Error on AMQP connection ~p (~s, user: '~s', state: ~p):~n~s~n",
+ "Error on AMQP connection ~p (~s, user: '~s', state: ~p):~n~s",
[self(), ConnName, User#user.username, opening, ErrMsg]),
send_error_on_channel0_and_close(Channel, Protocol, Reason, State);
handle_exception(State = #v1{connection = #connection{protocol = Protocol},
@@ -853,7 +850,7 @@ handle_exception(State = #v1{connection = #connection{protocol = Protocol,
explanation = ErrMsg}) ->
rabbit_log_connection:error(
"Error on AMQP connection ~p (~s,"
- " user: '~s', state: ~p):~n~s~n",
+ " user: '~s', state: ~p):~n~s",
[self(), ConnName, User#user.username, tuning, ErrMsg]),
send_error_on_channel0_and_close(Channel, Protocol, Reason, State);
handle_exception(State, Channel, Reason) ->
@@ -1256,7 +1253,7 @@ handle_method0(#'connection.open'{virtual_host = VHost},
maybe_emit_stats(State1),
rabbit_log_connection:info(
"connection ~p (~s): "
- "user '~s' authenticated and granted access to vhost '~s'~n",
+ "user '~s' authenticated and granted access to vhost '~s'",
[self(), dynamic_connection_name(ConnName), Username, VHost]),
State1;
handle_method0(#'connection.close'{}, State) when ?IS_RUNNING(State) ->
@@ -1282,7 +1279,7 @@ handle_method0(#'connection.update_secret'{new_secret = NewSecret, reason = Reas
sock = Sock}) when ?IS_RUNNING(State) ->
rabbit_log_connection:debug(
"connection ~p (~s) of user '~s': "
- "asked to update secret, reason: ~s~n",
+ "asked to update secret, reason: ~s",
[self(), dynamic_connection_name(ConnName), Username, Reason]),
case rabbit_access_control:update_state(User, NewSecret) of
{ok, User1} ->
@@ -1299,15 +1296,15 @@ handle_method0(#'connection.update_secret'{new_secret = NewSecret, reason = Reas
ok = send_on_channel0(Sock, #'connection.update_secret_ok'{}, Protocol),
rabbit_log_connection:info(
"connection ~p (~s): "
- "user '~s' updated secret, reason: ~s~n",
+ "user '~s' updated secret, reason: ~s",
[self(), dynamic_connection_name(ConnName), Username, Reason]),
State#v1{connection = Conn#connection{user = User1}};
{refused, Message} ->
- rabbit_log_connection:error("Secret update was refused for user '~p': ~p",
+ rabbit_log_connection:error("Secret update was refused for user '~s': ~p",
[Username, Message]),
rabbit_misc:protocol_error(not_allowed, "New secret was refused by one of the backends", []);
{error, Message} ->
- rabbit_log_connection:error("Secret update for user '~p' failed: ~p",
+ rabbit_log_connection:error("Secret update for user '~s' failed: ~p",
[Username, Message]),
rabbit_misc:protocol_error(not_allowed,
"Secret update failed", [])
@@ -1413,7 +1410,16 @@ auth_phase(Response,
auth_mechanism = {Name, AuthMechanism},
auth_state = AuthState},
sock = Sock}) ->
- RemoteAddress = list_to_binary(inet:ntoa(Connection#connection.host)),
+ rabbit_log:debug("Raw client connection hostname during authN phase: ~p", [Connection#connection.host]),
+ RemoteAddress = case Connection#connection.host of
+ %% the hostname was already resolved, e.g. by reverse DNS lookups
+ Bin when is_binary(Bin) -> Bin;
+ %% the hostname is an IP address
+ Tuple when is_tuple(Tuple) ->
+ rabbit_data_coercion:to_binary(inet:ntoa(Connection#connection.host));
+ Other -> rabbit_data_coercion:to_binary(Other)
+ end,
+ rabbit_log:debug("Resolved client hostname during authN phase: ~s", [RemoteAddress]),
case AuthMechanism:handle_response(Response, AuthState) of
{refused, Username, Msg, Args} ->
rabbit_core_metrics:auth_attempt_failed(RemoteAddress, Username, amqp091),
@@ -1500,7 +1506,8 @@ i(SockStat, S) when SockStat =:= recv_oct;
SockStat =:= send_pend ->
socket_info(fun (Sock) -> rabbit_net:getstat(Sock, [SockStat]) end,
fun ([{_, I}]) -> I end, S);
-i(ssl, #v1{sock = Sock}) -> rabbit_net:is_ssl(Sock);
+i(ssl, #v1{sock = Sock, proxy_socket = ProxySock}) ->
+ rabbit_net:proxy_ssl_info(Sock, ProxySock) /= nossl;
i(ssl_protocol, S) -> ssl_info(fun ({P, _}) -> P end, S);
i(ssl_key_exchange, S) -> ssl_info(fun ({_, {K, _, _}}) -> K end, S);
i(ssl_cipher, S) -> ssl_info(fun ({_, {_, C, _}}) -> C end, S);
@@ -1570,8 +1577,8 @@ socket_info(Get, Select, #v1{sock = Sock}) ->
{error, _} -> 0
end.
-ssl_info(F, #v1{sock = Sock}) ->
- case rabbit_net:ssl_info(Sock) of
+ssl_info(F, #v1{sock = Sock, proxy_socket = ProxySock}) ->
+ case rabbit_net:proxy_ssl_info(Sock, ProxySock) of
nossl -> '';
{error, _} -> '';
{ok, Items} ->
@@ -1763,7 +1770,7 @@ augment_connection_log_name(#connection{name = Name} = Connection) ->
Connection;
UserSpecifiedName ->
LogName = <<Name/binary, " - ", UserSpecifiedName/binary>>,
- rabbit_log_connection:info("Connection ~p (~s) has a client-provided name: ~s~n", [self(), Name, UserSpecifiedName]),
+ rabbit_log_connection:info("Connection ~p (~s) has a client-provided name: ~s", [self(), Name, UserSpecifiedName]),
?store_proc_name(LogName),
Connection#connection{log_name = LogName}
end.
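The auth_phase/2 change above copes with Connection#connection.host being either an already-resolved hostname (a binary, e.g. after a reverse DNS lookup) or still an inet IP tuple, normalising both to a binary for the auth-attempt metrics. A standalone sketch of that normalisation (module and function names are illustrative):

-module(remote_address_sketch).
-export([to_binary/1]).

-spec to_binary(binary() | inet:ip_address() | term()) -> binary().
to_binary(Host) when is_binary(Host) ->
    Host;                                    %% hostname already resolved
to_binary(Host) when is_tuple(Host) ->
    list_to_binary(inet:ntoa(Host));         %% IPv4/IPv6 address tuple
to_binary(Host) ->
    rabbit_data_coercion:to_binary(Host).    %% best-effort fallback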
diff --git a/deps/rabbit/src/rabbit_recovery_terms.erl b/deps/rabbit/src/rabbit_recovery_terms.erl
index d89de9ece3..5d4546c481 100644
--- a/deps/rabbit/src/rabbit_recovery_terms.erl
+++ b/deps/rabbit/src/rabbit_recovery_terms.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% We use a gen_server simply so that during the terminate/2 call
@@ -26,7 +26,7 @@
-rabbit_upgrade({upgrade_recovery_terms, local, []}).
-rabbit_upgrade({persistent_bytes, local, [upgrade_recovery_terms]}).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
%%----------------------------------------------------------------------------
diff --git a/deps/rabbit/src/rabbit_restartable_sup.erl b/deps/rabbit/src/rabbit_restartable_sup.erl
index 46fcace99f..0acfc611f8 100644
--- a/deps/rabbit/src/rabbit_restartable_sup.erl
+++ b/deps/rabbit/src/rabbit_restartable_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_restartable_sup).
@@ -13,7 +13,7 @@
-export([init/1]).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-define(DELAY, 2).
diff --git a/deps/rabbit/src/rabbit_router.erl b/deps/rabbit/src/rabbit_router.erl
index ed170bcd8e..cce6093c92 100644
--- a/deps/rabbit/src/rabbit_router.erl
+++ b/deps/rabbit/src/rabbit_router.erl
@@ -2,12 +2,12 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_router).
-include_lib("stdlib/include/qlc.hrl").
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-export([match_bindings/2, match_routing_key/2]).
@@ -18,14 +18,11 @@
-type routing_key() :: binary().
-type match_result() :: [rabbit_types:binding_destination()].
+%%----------------------------------------------------------------------------
+
-spec match_bindings(rabbit_types:binding_source(),
fun ((rabbit_types:binding()) -> boolean())) ->
match_result().
--spec match_routing_key(rabbit_types:binding_source(),
- [routing_key()] | ['_']) ->
- match_result().
-
-%%----------------------------------------------------------------------------
match_bindings(SrcName, Match) ->
MatchHead = #route{binding = #binding{source = SrcName,
@@ -34,6 +31,10 @@ match_bindings(SrcName, Match) ->
[Dest || [#route{binding = Binding = #binding{destination = Dest}}] <-
Routes, Match(Binding)].
+-spec match_routing_key(rabbit_types:binding_source(),
+ [routing_key()] | ['_']) ->
+ match_result().
+
match_routing_key(SrcName, [RoutingKey]) ->
find_routes(#route{binding = #binding{source = SrcName,
destination = '$1',
diff --git a/deps/rabbit/src/rabbit_runtime_parameters.erl b/deps/rabbit/src/rabbit_runtime_parameters.erl
index 1870b5dfa5..60f37b9dc7 100644
--- a/deps/rabbit/src/rabbit_runtime_parameters.erl
+++ b/deps/rabbit/src/rabbit_runtime_parameters.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_runtime_parameters).
@@ -40,7 +40,7 @@
%% * rabbit_registry
%% * rabbit_event
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-export([parse_set/5, set/5, set_any/5, clear/4, clear_any/4, list/0, list/1,
list_component/1, list/2, list_formatted/1, list_formatted/3,
@@ -55,35 +55,6 @@
-type ok_or_error_string() :: 'ok' | {'error_string', string()}.
-type ok_thunk_or_error_string() :: ok_or_error_string() | fun(() -> 'ok').
--spec parse_set(rabbit_types:vhost(), binary(), binary(), string(),
- rabbit_types:user() | rabbit_types:username() | 'none')
- -> ok_or_error_string().
--spec set(rabbit_types:vhost(), binary(), binary(), term(),
- rabbit_types:user() | rabbit_types:username() | 'none')
- -> ok_or_error_string().
--spec set_any(rabbit_types:vhost(), binary(), binary(), term(),
- rabbit_types:user() | rabbit_types:username() | 'none')
- -> ok_or_error_string().
--spec set_global(atom(), term(), rabbit_types:username()) -> 'ok'.
--spec clear(rabbit_types:vhost(), binary(), binary(), rabbit_types:username())
- -> ok_thunk_or_error_string().
--spec clear_any(rabbit_types:vhost(), binary(), binary(), rabbit_types:username())
- -> ok_thunk_or_error_string().
--spec list() -> [rabbit_types:infos()].
--spec list(rabbit_types:vhost() | '_') -> [rabbit_types:infos()].
--spec list_component(binary()) -> [rabbit_types:infos()].
--spec list(rabbit_types:vhost() | '_', binary() | '_')
- -> [rabbit_types:infos()].
--spec list_formatted(rabbit_types:vhost()) -> [rabbit_types:infos()].
--spec list_formatted(rabbit_types:vhost(), reference(), pid()) -> 'ok'.
--spec lookup(rabbit_types:vhost(), binary(), binary())
- -> rabbit_types:infos() | 'not_found'.
--spec value(rabbit_types:vhost(), binary(), binary()) -> term().
--spec value(rabbit_types:vhost(), binary(), binary(), term()) -> term().
--spec value_global(atom()) -> term() | 'not_found'.
--spec value_global(atom(), term()) -> term().
--spec info_keys() -> rabbit_types:info_keys().
-
%%---------------------------------------------------------------------------
-import(rabbit_misc, [pget/2]).
@@ -92,6 +63,10 @@
%%---------------------------------------------------------------------------
+-spec parse_set(rabbit_types:vhost(), binary(), binary(), string(),
+ rabbit_types:user() | rabbit_types:username() | 'none')
+ -> ok_or_error_string().
+
parse_set(_, <<"policy">>, _, _, _) ->
{error_string, "policies may not be set using this method"};
parse_set(VHost, Component, Name, String, User) ->
@@ -104,6 +79,10 @@ parse_set(VHost, Component, Name, String, User) ->
rabbit_misc:format("JSON decoding error. Reason: ~ts", [Reason])}
end.
+-spec set(rabbit_types:vhost(), binary(), binary(), term(),
+ rabbit_types:user() | rabbit_types:username() | 'none')
+ -> ok_or_error_string().
+
set(_, <<"policy">>, _, _, _) ->
{error_string, "policies may not be set using this method"};
set(VHost, Component, Name, Term, User) ->
@@ -119,6 +98,8 @@ parse_set_global(Name, String, ActingUser) ->
rabbit_misc:format("JSON decoding error. Reason: ~ts", [Reason])}
end.
+-spec set_global(atom(), term(), rabbit_types:username()) -> 'ok'.
+
set_global(Name, Term, ActingUser) ->
NameAsAtom = rabbit_data_coercion:to_atom(Name),
rabbit_log:debug("Setting global parameter '~s' to ~p", [NameAsAtom, Term]),
@@ -131,6 +112,10 @@ set_global(Name, Term, ActingUser) ->
format_error(L) ->
{error_string, rabbit_misc:format_many([{"Validation failed~n", []} | L])}.
+-spec set_any(rabbit_types:vhost(), binary(), binary(), term(),
+ rabbit_types:user() | rabbit_types:username() | 'none')
+ -> ok_or_error_string().
+
set_any(VHost, Component, Name, Term, User) ->
case set_any0(VHost, Component, Name, Term, User) of
ok -> ok;
@@ -196,6 +181,9 @@ mnesia_update_fun(Key, Term) ->
Res
end.
+-spec clear(rabbit_types:vhost(), binary(), binary(), rabbit_types:username())
+ -> ok_thunk_or_error_string().
+
clear(_, <<"policy">> , _, _) ->
{error_string, "policies may not be cleared using this method"};
clear(VHost, Component, Name, ActingUser) ->
@@ -235,6 +223,9 @@ clear_component(Component, ActingUser) ->
ok
end.
+-spec clear_any(rabbit_types:vhost(), binary(), binary(), rabbit_types:username())
+ -> ok_thunk_or_error_string().
+
clear_any(VHost, Component, Name, ActingUser) ->
Notify = fun () ->
case lookup_component(Component) of
@@ -269,15 +260,25 @@ event_notify(Event, VHost, Component, Props) ->
rabbit_event:notify(Event, [{vhost, VHost},
{component, Component} | Props]).
+-spec list() -> [rabbit_types:infos()].
+
list() ->
[p(P) || #runtime_parameters{ key = {_VHost, Comp, _Name}} = P <-
rabbit_misc:dirty_read_all(?TABLE), Comp /= <<"policy">>].
-list(VHost) -> list(VHost, '_').
+-spec list(rabbit_types:vhost() | '_') -> [rabbit_types:infos()].
+
+list(VHost) -> list(VHost, '_').
+
+-spec list_component(binary()) -> [rabbit_types:infos()].
+
list_component(Component) -> list('_', Component).
%% Not dirty_match_object since that would not be transactional when used in a
%% tx context
+-spec list(rabbit_types:vhost() | '_', binary() | '_')
+ -> [rabbit_types:infos()].
+
list(VHost, Component) ->
mnesia:async_dirty(
fun () ->
@@ -301,6 +302,8 @@ list_global() ->
is_atom(P#runtime_parameters.key)]
end).
+-spec list_formatted(rabbit_types:vhost()) -> [rabbit_types:infos()].
+
list_formatted(VHost) ->
[ format_parameter(info_keys(), P) || P <- list(VHost) ].
@@ -316,6 +319,8 @@ format_parameter(InfoKeys, P) ->
end,
[], InfoKeys).
+-spec list_formatted(rabbit_types:vhost(), reference(), pid()) -> 'ok'.
+
list_formatted(VHost, Ref, AggregatorPid) ->
rabbit_control_misc:emitting_map(
AggregatorPid, Ref,
@@ -329,6 +334,9 @@ list_global_formatted(Ref, AggregatorPid) ->
AggregatorPid, Ref,
fun(P) -> format_parameter(global_info_keys(), P) end, list_global()).
+-spec lookup(rabbit_types:vhost(), binary(), binary())
+ -> rabbit_types:infos() | 'not_found'.
+
lookup(VHost, Component, Name) ->
case lookup0({VHost, Component, Name}, rabbit_misc:const(not_found)) of
not_found -> not_found;
@@ -341,12 +349,21 @@ lookup_global(Name) ->
Params -> p(Params)
end.
-value(VHost, Comp, Name) -> value0({VHost, Comp, Name}).
+-spec value(rabbit_types:vhost(), binary(), binary()) -> term().
+
+value(VHost, Comp, Name) -> value0({VHost, Comp, Name}).
+
+-spec value(rabbit_types:vhost(), binary(), binary(), term()) -> term().
+
value(VHost, Comp, Name, Def) -> value0({VHost, Comp, Name}, Def).
+-spec value_global(atom()) -> term() | 'not_found'.
+
value_global(Key) ->
value0(Key).
+-spec value_global(atom(), term()) -> term().
+
value_global(Key, Default) ->
value0(Key, Default).
@@ -391,6 +408,8 @@ p(#runtime_parameters{key = Key, value = Value}) when is_atom(Key) ->
[{name, Key},
{value, Value}].
+-spec info_keys() -> rabbit_types:info_keys().
+
info_keys() -> [component, name, value].
global_info_keys() -> [name, value].
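
The hunks above relocate the -spec attributes out of the consolidated block at the top of rabbit_runtime_parameters.erl so that each spec sits directly above the function it describes. A minimal sketch of that convention follows; the module and function names (params_example, value/2) are illustrative only and are not part of the patch:

-module(params_example).
-export([value/2]).

%% Spec placed immediately above the function it describes, mirroring the
%% hunks above.
-spec value(atom(), term()) -> term().

value(Key, Default) ->
    %% look the key up in the application environment, falling back to Default
    case application:get_env(rabbit, Key) of
        {ok, V}   -> V;
        undefined -> Default
    end.
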
diff --git a/deps/rabbit/src/rabbit_ssl.erl b/deps/rabbit/src/rabbit_ssl.erl
index 84670b0a19..9570059373 100644
--- a/deps/rabbit/src/rabbit_ssl.erl
+++ b/deps/rabbit/src/rabbit_ssl.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_ssl).
@@ -28,6 +28,8 @@
{ssl_cipher_format, suite_map_to_openssl_str, 1},
{ssl_cipher_format, suite_map_to_bin, 1}]).
+-dialyzer({nowarn_function, peer_cert_auth_name/2}).
+
-type certificate() :: rabbit_cert_info:certificate().
-type cipher_suites_mode() :: default | all | anonymous.
@@ -71,34 +73,11 @@ cipher_suites_openssl(Mode, Version) ->
format_cipher_erlang(Cipher) ->
- case erlang:function_exported(ssl_cipher_format, suite_map_to_bin, 1) of
- true ->
- format_cipher_erlang22(Cipher);
- false ->
- format_cipher_erlang21(Cipher)
- end.
-
-format_cipher_erlang22(Cipher) ->
ssl_cipher_format:suite_legacy(ssl_cipher_format:suite_map_to_bin(Cipher)).
-format_cipher_erlang21(Cipher) ->
- ssl_cipher_format:erl_suite_definition(ssl_cipher_format:suite(Cipher)).
-
-
format_cipher_openssl(Cipher) ->
- case erlang:function_exported(ssl_cipher_format, suite_map_to_bin, 1) of
- true ->
- format_cipher_openssl22(Cipher);
- false ->
- format_cipher_openssl21(Cipher)
- end.
-
-format_cipher_openssl22(Cipher) ->
ssl_cipher_format:suite_map_to_openssl_str(Cipher).
-format_cipher_openssl21(Cipher) ->
- ssl_cipher_format:suite_to_str(Cipher).
-
-spec get_highest_protocol_version() -> tls_record:tls_atom_version().
get_highest_protocol_version() ->
tls_record:protocol_version(
@@ -130,13 +109,12 @@ peer_cert_validity(Cert) ->
rabbit_cert_info:validity(Cert).
%% Extract a username from the certificate
--spec peer_cert_auth_name
- (certificate()) -> binary() | 'not_found' | 'unsafe'.
-
+-spec peer_cert_auth_name(certificate()) -> binary() | 'not_found' | 'unsafe'.
peer_cert_auth_name(Cert) ->
{ok, Mode} = application:get_env(rabbit, ssl_cert_login_from),
peer_cert_auth_name(Mode, Cert).
+-spec peer_cert_auth_name(atom(), certificate()) -> binary() | 'not_found' | 'unsafe'.
peer_cert_auth_name(distinguished_name, Cert) ->
case auth_config_sane() of
true -> iolist_to_binary(peer_cert_subject(Cert));
@@ -158,8 +136,17 @@ peer_cert_auth_name(subject_alternative_name, Cert) ->
0 -> not_found;
N when N < Index -> not_found;
N when N >= Index ->
- {_, Value} = lists:nth(Index, OfType),
- rabbit_data_coercion:to_binary(Value)
+ Nth = lists:nth(Index, OfType),
+ case Nth of
+                              %% this is a SAN of type otherName; it can be anything, so we simply extract the value
+                              %% as best we can and return it. There aren't really any conventions or widely held
+                              %% expectations about the format :(
+ {otherName, {'AnotherName', _, Value}} ->
+ rabbit_cert_info:sanitize_other_name(rabbit_data_coercion:to_binary(Value));
+ %% most SAN types return a pair: DNS, email, URI
+ {_, Value} ->
+ rabbit_data_coercion:to_binary(Value)
+ end
end;
false -> unsafe
end;
@@ -193,3 +180,4 @@ otp_san_type(email) -> rfc822Name;
otp_san_type(uri) -> uniformResourceIdentifier;
otp_san_type(other_name) -> otherName;
otp_san_type(Other) -> Other.
+
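
The rabbit_ssl.erl hunk above extends peer_cert_auth_name(subject_alternative_name, Cert) so that otherName SAN entries, which carry an 'AnotherName' record, are sanitized before being returned, while the common pair-shaped SAN types (DNS, email, URI) are coerced to a binary as before. A hedged sketch of that branch, wrapped in a hypothetical helper module (san_example and san_value/1 are not part of the patch):

-module(san_example).
-export([san_value/1]).

%% otherName entries wrap their value in an 'AnotherName' record whose format
%% is not standardised, so it is sanitized on a best-effort basis; the common
%% SAN types (DNS, email, URI) arrive as plain {Type, Value} pairs.
san_value({otherName, {'AnotherName', _TypeId, Value}}) ->
    rabbit_cert_info:sanitize_other_name(rabbit_data_coercion:to_binary(Value));
san_value({_Type, Value}) ->
    rabbit_data_coercion:to_binary(Value).
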
diff --git a/deps/rabbit/src/rabbit_stream_coordinator.erl b/deps/rabbit/src/rabbit_stream_coordinator.erl
index 9e4890c894..f88822a54f 100644
--- a/deps/rabbit/src/rabbit_stream_coordinator.erl
+++ b/deps/rabbit/src/rabbit_stream_coordinator.erl
@@ -1,22 +1,14 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at https://www.mozilla.org/MPL/
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% Copyright (c) 2012-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
+
-module(rabbit_stream_coordinator).
-behaviour(ra_machine).
--export([start/0]).
-export([format_ra_event/2]).
-export([init/1,
@@ -27,106 +19,244 @@
tick/2]).
-export([recover/0,
- start_cluster/1,
- delete_cluster/2,
add_replica/2,
- delete_replica/2]).
+ delete_replica/2,
+ register_listener/1]).
+
+-export([new_stream/2,
+ delete_stream/2]).
-export([policy_changed/1]).
--export([phase_repair_mnesia/2,
- phase_start_cluster/1,
- phase_delete_cluster/2,
- phase_check_quorum/1,
- phase_start_new_leader/1,
- phase_stop_replicas/1,
- phase_start_replica/3,
- phase_delete_replica/2]).
+-export([local_pid/1,
+ members/1]).
+-export([query_local_pid/3,
+ query_members/2]).
+
-export([log_overview/1]).
+-export([replay/1]).
--define(STREAM_COORDINATOR_STARTUP, {stream_coordinator_startup, self()}).
--define(TICK_TIMEOUT, 60000).
--define(RESTART_TIMEOUT, 1000).
--define(PHASE_RETRY_TIMEOUT, 10000).
--define(CMD_TIMEOUT, 30000).
-
--record(?MODULE, {streams, monitors}).
-
-start() ->
- Nodes = rabbit_mnesia:cluster_nodes(all),
- ServerId = {?MODULE, node()},
- case ra:restart_server(ServerId) of
- {error, Reason} when Reason == not_started orelse
- Reason == name_not_registered ->
- case ra:start_server(make_ra_conf(node(), Nodes)) of
- ok ->
- global:set_lock(?STREAM_COORDINATOR_STARTUP),
- case find_members(Nodes) of
- [] ->
- %% We're the first (and maybe only) one
- ra:trigger_election(ServerId);
- Members ->
- %% What to do if we get a timeout?
- {ok, _, _} = ra:add_member(Members, ServerId, 30000)
- end,
- global:del_lock(?STREAM_COORDINATOR_STARTUP),
- _ = ra:members(ServerId),
+-rabbit_boot_step({?MODULE,
+ [{description, "Restart stream coordinator"},
+ {mfa, {?MODULE, recover, []}},
+ {requires, core_initialized},
+ {enables, recovery}]}).
+
+%% exported for unit tests only
+-ifdef(TEST).
+-export([update_stream/3,
+ evaluate_stream/3]).
+-endif.
+
+-include("rabbit_stream_coordinator.hrl").
+-include("amqqueue.hrl").
+
+-define(REPLICA_FRESHNESS_LIMIT_MS, 10 * 1000). %% 10s
+
+-type state() :: #?MODULE{}.
+-type args() :: #{index := ra:index(),
+ node := node(),
+ epoch := osiris:epoch()}.
+
+-type command() :: {new_stream, stream_id(), #{leader_node := node(),
+ queue := amqqueue:amqqueue()}} |
+ {delete_stream, stream_id(), #{}} |
+ {add_replica, stream_id(), #{node := node()}} |
+ {delete_replica, stream_id(), #{node := node()}} |
+ {policy_changed, stream_id(), #{queue := amqqueue:amqqueue()}} |
+ {register_listener, #{pid := pid(),
+ stream_id := stream_id(),
+ queue_ref := queue_ref()}} |
+ {action_failed, stream_id(), #{index := ra:index(),
+ node := node(),
+ epoch := osiris:epoch(),
+ action := atom(), %% TODO: refine
+ term() => term()}} |
+ {member_started, stream_id(), #{index := ra:index(),
+ node := node(),
+ epoch := osiris:epoch(),
+ pid := pid()}} |
+ {member_stopped, stream_id(), args()} |
+ {retention_updated, stream_id(), args()} |
+ {mnesia_updated, stream_id(), args()} |
+ ra_machine:effect().
+
+-export_type([command/0]).
+
+recover() ->
+ case erlang:whereis(?MODULE) of
+ undefined ->
+ case ra:restart_server(?RA_SYSTEM, {?MODULE, node()}) of
+ {error, Reason} when Reason == not_started;
+ Reason == name_not_registered ->
+ %% First boot, do nothing and wait until the first `declare`
ok;
- Error ->
- exit(Error)
+ _ ->
+ ok
end;
- ok ->
- ok;
- Error ->
- exit(Error)
+ _ ->
+ ok
end.
-find_members([]) ->
- [];
-find_members([Node | Nodes]) ->
- case ra:members({?MODULE, Node}) of
- {_, Members, _} ->
- Members;
- {error, noproc} ->
- find_members(Nodes);
- {timeout, _} ->
- %% not sure what to do here
- find_members(Nodes)
+%% new api
+
+new_stream(Q, LeaderNode)
+ when ?is_amqqueue(Q) andalso is_atom(LeaderNode) ->
+ #{name := StreamId,
+ nodes := Nodes} = amqqueue:get_type_state(Q),
+    %% assert that the leader is in the nodes configuration
+ true = lists:member(LeaderNode, Nodes),
+ process_command({new_stream, StreamId,
+ #{leader_node => LeaderNode,
+ queue => Q}}).
+
+delete_stream(Q, ActingUser)
+ when ?is_amqqueue(Q) ->
+ #{name := StreamId} = amqqueue:get_type_state(Q),
+ case process_command({delete_stream, StreamId, #{}}) of
+ {ok, ok, _} ->
+ QName = amqqueue:get_name(Q),
+ _ = rabbit_amqqueue:internal_delete(QName, ActingUser),
+ {ok, {ok, 0}};
+ Err ->
+ Err
end.
-recover() ->
- ra:restart_server({?MODULE, node()}).
+-spec add_replica(amqqueue:amqqueue(), node()) ->
+ ok | {error, term()}.
+add_replica(Q, Node) when ?is_amqqueue(Q) ->
+    %% perform a safety check first: if any replica is stale,
+    %% we should not allow further replicas to be added
+ Pid = amqqueue:get_pid(Q),
+ try
+ ReplState0 = osiris_writer:query_replication_state(Pid),
+ {{_, InitTs}, ReplState} = maps:take(node(Pid), ReplState0),
+ {MaxTs, MinTs} = maps:fold(fun (_, {_, Ts}, {Max, Min}) ->
+ {max(Ts, Max), min(Ts, Min)}
+ end, {InitTs, InitTs}, ReplState),
+ case (MaxTs - MinTs) > ?REPLICA_FRESHNESS_LIMIT_MS of
+ true ->
+ {error, {disallowed, out_of_sync_replica}};
+ false ->
+ Name = rabbit_misc:rs(amqqueue:get_name(Q)),
+ rabbit_log:info("~s : adding replica ~s to ~s Replication State: ~w",
+ [?MODULE, Node, Name, ReplState0]),
+ StreamId = maps:get(name, amqqueue:get_type_state(Q)),
+ case process_command({add_replica, StreamId, #{node => Node}}) of
+ {ok, Result, _} ->
+ Result;
+ Err ->
+ Err
+ end
+ end
+ catch
+ _:Error ->
+ {error, Error}
+ end.
-start_cluster(Q) ->
- process_command({start_cluster, #{queue => Q}}).
+delete_replica(StreamId, Node) ->
+ process_command({delete_replica, StreamId, #{node => Node}}).
-delete_cluster(StreamId, ActingUser) ->
- process_command({delete_cluster, #{stream_id => StreamId, acting_user => ActingUser}}).
+policy_changed(Q) when ?is_amqqueue(Q) ->
+ StreamId = maps:get(name, amqqueue:get_type_state(Q)),
+ process_command({policy_changed, StreamId, #{queue => Q}}).
-add_replica(StreamId, Node) ->
- process_command({start_replica, #{stream_id => StreamId, node => Node,
- retries => 1}}).
+local_pid(StreamId) when is_list(StreamId) ->
+ MFA = {?MODULE, query_local_pid, [StreamId, node()]},
+ case ra:local_query({?MODULE, node()}, MFA) of
+ {ok, {_, {ok, Pid}}, _} ->
+ case is_process_alive(Pid) of
+ true ->
+ {ok, Pid};
+ false ->
+ case ra:consistent_query({?MODULE, node()}, MFA) of
+ {ok, Result, _} ->
+ Result;
+ {error, _} = Err ->
+ Err;
+ {timeout, _} ->
+ {error, timeout}
+ end
+ end;
+ {ok, {_, Result}, _} ->
+ Result;
+ {error, _} = Err ->
+ Err;
+ {timeout, _} ->
+ {error, timeout}
+ end.
-policy_changed(StreamId) ->
- process_command({policy_changed, #{stream_id => StreamId}}).
+-spec members(stream_id()) ->
+ {ok, #{node() := {pid() | undefined, writer | replica}}} |
+ {error, not_found}.
+members(StreamId) when is_list(StreamId) ->
+ MFA = {?MODULE, query_members, [StreamId]},
+ case ra:local_query({?MODULE, node()}, MFA) of
+ {ok, {_, {ok, _} = Result}, _} ->
+ Result;
+ {ok, {_, {error, not_found}}, _} ->
+ %% fall back to consistent query
+ case ra:consistent_query({?MODULE, node()}, MFA) of
+ {ok, Result, _} ->
+ Result;
+ {error, _} = Err ->
+ Err;
+ {timeout, _} ->
+ {error, timeout}
+ end;
+ {ok, {_, Result}, _} ->
+ Result;
+ {error, _} = Err ->
+ Err;
+ {timeout, _} ->
+ {error, timeout}
+ end.
-delete_replica(StreamId, Node) ->
- process_command({delete_replica, #{stream_id => StreamId, node => Node}}).
+query_members(StreamId, #?MODULE{streams = Streams}) ->
+ case Streams of
+ #{StreamId := #stream{members = Members}} ->
+ {ok, maps:map(
+ fun (_, #member{state = {running, _, Pid},
+ role = {Role, _}}) ->
+ {Pid, Role};
+ (_, #member{role = {Role, _}}) ->
+ {undefined, Role}
+ end, Members)};
+ _ ->
+ {error, not_found}
+ end.
+
+query_local_pid(StreamId, Node, #?MODULE{streams = Streams}) ->
+ case Streams of
+ #{StreamId := #stream{members =
+ #{Node := #member{state =
+ {running, _, Pid}}}}} ->
+ {ok, Pid};
+ _ ->
+ {error, not_found}
+ end.
+
+-spec register_listener(amqqueue:amqqueue()) ->
+ {error, term()} | {ok, ok | stream_not_found, atom() | {atom(), atom()}}.
+register_listener(Q) when ?is_amqqueue(Q)->
+ #{name := StreamId} = amqqueue:get_type_state(Q),
+ process_command({register_listener,
+ #{pid => self(),
+ stream_id => StreamId}}).
process_command(Cmd) ->
- global:set_lock(?STREAM_COORDINATOR_STARTUP),
Servers = ensure_coordinator_started(),
- global:del_lock(?STREAM_COORDINATOR_STARTUP),
process_command(Servers, Cmd).
process_command([], _Cmd) ->
{error, coordinator_unavailable};
-process_command([Server | Servers], {CmdName, _} = Cmd) ->
+process_command([Server | Servers], Cmd) ->
case ra:process_command(Server, Cmd, ?CMD_TIMEOUT) of
{timeout, _} ->
- rabbit_log:warning("Coordinator timeout on server ~p when processing command ~p",
- [Server, CmdName]),
+ rabbit_log:warning("Coordinator timeout on server ~w when processing command ~W",
+ [element(2, Server), element(1, Cmd), 10]),
process_command(Servers, Cmd);
{error, noproc} ->
process_command(Servers, Cmd);
@@ -136,422 +266,205 @@ process_command([Server | Servers], {CmdName, _} = Cmd) ->
ensure_coordinator_started() ->
Local = {?MODULE, node()},
- AllNodes = all_nodes(),
- case ra:restart_server(Local) of
- {error, Reason} when Reason == not_started orelse
- Reason == name_not_registered ->
- OtherNodes = all_nodes() -- [Local],
- %% We can't use find_members/0 here as a process that timeouts means the cluster is up
- case lists:filter(fun(N) -> global:whereis_name(N) =/= undefined end, OtherNodes) of
- [] ->
- start_coordinator_cluster();
+ AllNodes = all_coord_members(),
+ case whereis(?MODULE) of
+ undefined ->
+ global:set_lock(?STREAM_COORDINATOR_STARTUP),
+ Nodes = case ra:restart_server(?RA_SYSTEM, Local) of
+ {error, Reason} when Reason == not_started orelse
+ Reason == name_not_registered ->
+ OtherNodes = all_coord_members() -- [Local],
+                            %% We can't use find_members/0 here: a process that times out implies the cluster is up
+ case lists:filter(fun(N) -> global:whereis_name(N) =/= undefined end, OtherNodes) of
+ [] ->
+ start_coordinator_cluster();
+ _ ->
+ OtherNodes
+ end;
+ ok ->
+ AllNodes;
+ {error, {already_started, _}} ->
+ AllNodes;
_ ->
- OtherNodes
- end;
- ok ->
- AllNodes;
- {error, {already_started, _}} ->
- AllNodes;
+ AllNodes
+ end,
+ global:del_lock(?STREAM_COORDINATOR_STARTUP),
+ Nodes;
_ ->
AllNodes
end.
start_coordinator_cluster() ->
Nodes = rabbit_mnesia:cluster_nodes(running),
- case ra:start_cluster([make_ra_conf(Node, Nodes) || Node <- Nodes]) of
+ rabbit_log:debug("Starting stream coordinator on nodes: ~w", [Nodes]),
+ case ra:start_cluster(?RA_SYSTEM, [make_ra_conf(Node, Nodes) || Node <- Nodes]) of
{ok, Started, _} ->
+ rabbit_log:debug("Started stream coordinator on ~w", [Started]),
Started;
{error, cluster_not_formed} ->
- rabbit_log:warning("Stream coordinator cluster not formed", []),
+ rabbit_log:warning("Stream coordinator could not be started on nodes ~w",
+ [Nodes]),
[]
end.
-all_nodes() ->
+all_coord_members() ->
Nodes = rabbit_mnesia:cluster_nodes(running) -- [node()],
[{?MODULE, Node} || Node <- [node() | Nodes]].
init(_Conf) ->
- #?MODULE{streams = #{},
- monitors = #{}}.
-
-apply(#{from := From}, {policy_changed, #{stream_id := StreamId}} = Cmd,
- #?MODULE{streams = Streams0} = State) ->
- case maps:get(StreamId, Streams0, undefined) of
- undefined ->
- {State, ok, []};
- #{conf := Conf,
- state := running} ->
- case rabbit_stream_queue:update_stream_conf(Conf) of
- Conf ->
- %% No changes, ensure we only trigger an election if it's a must
- {State, ok, []};
- _ ->
- {State, ok, [{mod_call, osiris_writer, stop, [Conf]}]}
- end;
- SState0 ->
- Streams = maps:put(StreamId, add_pending_cmd(From, Cmd, SState0), Streams0),
- {State#?MODULE{streams = Streams}, '$ra_no_reply', []}
+ #?MODULE{}.
- end;
-apply(#{from := From}, {start_cluster, #{queue := Q}}, #?MODULE{streams = Streams} = State) ->
- #{name := StreamId} = Conf0 = amqqueue:get_type_state(Q),
- Conf = apply_leader_locator_strategy(Conf0, Streams),
- case maps:is_key(StreamId, Streams) of
- true ->
- {State, '$ra_no_reply', wrap_reply(From, {error, already_started})};
- false ->
- Phase = phase_start_cluster,
- PhaseArgs = [amqqueue:set_type_state(Q, Conf)],
- SState = #{state => start_cluster,
- phase => Phase,
- phase_args => PhaseArgs,
- conf => Conf,
- reply_to => From,
- pending_cmds => [],
- pending_replicas => []},
- rabbit_log:debug("rabbit_stream_coordinator: ~p entering phase_start_cluster", [StreamId]),
- {State#?MODULE{streams = maps:put(StreamId, SState, Streams)}, '$ra_no_reply',
- [{aux, {phase, StreamId, Phase, PhaseArgs}}]}
- end;
-apply(_Meta, {start_cluster_reply, Q}, #?MODULE{streams = Streams,
- monitors = Monitors0} = State) ->
- #{name := StreamId,
- leader_pid := LeaderPid,
- replica_pids := ReplicaPids} = Conf = amqqueue:get_type_state(Q),
- SState0 = maps:get(StreamId, Streams),
- Phase = phase_repair_mnesia,
- PhaseArgs = [new, Q],
- SState = SState0#{conf => Conf,
- phase => Phase,
- phase_args => PhaseArgs},
- Monitors = lists:foldl(fun(Pid, M) ->
- maps:put(Pid, {StreamId, follower}, M)
- end, maps:put(LeaderPid, {StreamId, leader}, Monitors0), ReplicaPids),
- MonitorActions = [{monitor, process, Pid} || Pid <- ReplicaPids ++ [LeaderPid]],
- rabbit_log:debug("rabbit_stream_coordinator: ~p entering ~p "
- "after start_cluster_reply", [StreamId, Phase]),
- {State#?MODULE{streams = maps:put(StreamId, SState, Streams),
- monitors = Monitors}, ok,
- MonitorActions ++ [{aux, {phase, StreamId, Phase, PhaseArgs}}]};
-apply(_Meta, {start_replica_failed, StreamId, Node, Retries, Reply},
- #?MODULE{streams = Streams0} = State) ->
- rabbit_log:debug("rabbit_stream_coordinator: ~p start replica failed", [StreamId]),
- case maps:get(StreamId, Streams0, undefined) of
- undefined ->
- {State, {error, not_found}, []};
- #{pending_replicas := Pending,
- reply_to := From} = SState ->
- Streams = Streams0#{StreamId => clear_stream_state(SState#{pending_replicas =>
- add_unique(Node, Pending)})},
- reply_and_run_pending(
- From, StreamId, ok, Reply,
- [{timer, {pipeline,
- [{start_replica, #{stream_id => StreamId,
- node => Node,
- from => undefined,
- retries => Retries + 1}}]},
- ?RESTART_TIMEOUT * Retries}],
- State#?MODULE{streams = Streams})
- end;
-apply(_Meta, {phase_finished, StreamId, Reply}, #?MODULE{streams = Streams0} = State) ->
- rabbit_log:debug("rabbit_stream_coordinator: ~p phase finished", [StreamId]),
- case maps:get(StreamId, Streams0, undefined) of
- undefined ->
- {State, {error, not_found}, []};
- #{reply_to := From} = SState ->
- Streams = Streams0#{StreamId => clear_stream_state(SState)},
- reply_and_run_pending(From, StreamId, ok, Reply, [], State#?MODULE{streams = Streams})
- end;
-apply(#{from := From}, {start_replica, #{stream_id := StreamId, node := Node,
- retries := Retries}} = Cmd,
- #?MODULE{streams = Streams0} = State) ->
- case maps:get(StreamId, Streams0, undefined) of
- undefined ->
- case From of
+-spec apply(map(), command(), state()) ->
+ {state(), term(), ra_machine:effects()}.
+apply(#{index := _Idx} = Meta0, {_CmdTag, StreamId, #{}} = Cmd,
+ #?MODULE{streams = Streams0,
+ monitors = Monitors0} = State0) ->
+ Stream0 = maps:get(StreamId, Streams0, undefined),
+ Meta = maps:without([term, machine_version], Meta0),
+ case filter_command(Meta, Cmd, Stream0) of
+ ok ->
+ Stream1 = update_stream(Meta, Cmd, Stream0),
+ Reply = case Stream1 of
+ #stream{reply_to = undefined} ->
+ ok;
+ _ ->
+ %% reply_to is set so we'll reply later
+ '$ra_no_reply'
+ end,
+ case Stream1 of
undefined ->
- {State, ok, []};
+ return(Meta, State0#?MODULE{streams = maps:remove(StreamId, Streams0)},
+ Reply, []);
_ ->
- {State, '$ra_no_reply', wrap_reply(From, {error, not_found})}
+ {Stream2, Effects0} = evaluate_stream(Meta, Stream1, []),
+ {Stream3, Effects1} = eval_listeners(Stream2, Effects0),
+ {Stream, Effects2} = eval_retention(Meta, Stream3, Effects1),
+ {Monitors, Effects} = ensure_monitors(Stream, Monitors0, Effects2),
+ return(Meta,
+ State0#?MODULE{streams = Streams0#{StreamId => Stream},
+ monitors = Monitors}, Reply, Effects)
end;
- #{conf := Conf,
- state := running} = SState0 ->
- Phase = phase_start_replica,
- PhaseArgs = [Node, Conf, Retries],
- SState = update_stream_state(From, start_replica, Phase, PhaseArgs, SState0),
- rabbit_log:debug("rabbit_stream_coordinator: ~p entering ~p on node ~p",
- [StreamId, Phase, Node]),
- {State#?MODULE{streams = Streams0#{StreamId => SState}}, '$ra_no_reply',
- [{aux, {phase, StreamId, Phase, PhaseArgs}}]};
- SState0 ->
- Streams = maps:put(StreamId, add_pending_cmd(From, Cmd, SState0), Streams0),
- {State#?MODULE{streams = Streams}, '$ra_no_reply', []}
- end;
-apply(_Meta, {start_replica_reply, StreamId, Pid},
- #?MODULE{streams = Streams, monitors = Monitors0} = State) ->
- case maps:get(StreamId, Streams, undefined) of
- undefined ->
- {State, {error, not_found}, []};
- #{conf := Conf0} = SState0 ->
- #{replica_nodes := Replicas0,
- replica_pids := ReplicaPids0} = Conf0,
- {ReplicaPids, MaybePid} = delete_replica_pid(node(Pid), ReplicaPids0),
- Conf = Conf0#{replica_pids => [Pid | ReplicaPids],
- replica_nodes => add_unique(node(Pid), Replicas0)},
- Phase = phase_repair_mnesia,
- PhaseArgs = [update, Conf],
- rabbit_log:debug("rabbit_stream_coordinator: ~p entering ~p after start replica", [StreamId, Phase]),
- #{pending_replicas := Pending} = SState0 = maps:get(StreamId, Streams),
- SState = SState0#{conf => Conf,
- phase => Phase,
- phase_args => PhaseArgs,
- pending_replicas => lists:delete(node(Pid), Pending)},
- Monitors1 = Monitors0#{Pid => {StreamId, follower}},
- Monitors = case MaybePid of
- [P] -> maps:remove(P, Monitors1);
- _ -> Monitors1
- end,
- {State#?MODULE{streams = Streams#{StreamId => SState},
- monitors = Monitors}, ok,
- [{monitor, process, Pid}, {aux, {phase, StreamId, Phase, PhaseArgs}}]}
+ Reply ->
+ return(Meta, State0, Reply, [])
end;
-apply(#{from := From}, {delete_replica, #{stream_id := StreamId, node := Node}} = Cmd,
+apply(Meta, {down, Pid, Reason} = Cmd,
#?MODULE{streams = Streams0,
+ listeners = Listeners0,
monitors = Monitors0} = State) ->
- case maps:get(StreamId, Streams0, undefined) of
- undefined ->
- {State, '$ra_no_reply', wrap_reply(From, {error, not_found})};
- #{conf := Conf0,
- state := running,
- pending_replicas := Pending0} = SState0 ->
- Replicas0 = maps:get(replica_nodes, Conf0),
- ReplicaPids0 = maps:get(replica_pids, Conf0),
- case lists:member(Node, Replicas0) of
- false ->
- reply_and_run_pending(From, StreamId, '$ra_no_reply', ok, [], State);
- true ->
- [Pid] = lists:filter(fun(P) -> node(P) == Node end, ReplicaPids0),
- ReplicaPids = lists:delete(Pid, ReplicaPids0),
- Replicas = lists:delete(Node, Replicas0),
- Pending = lists:delete(Node, Pending0),
- Conf = Conf0#{replica_pids => ReplicaPids,
- replica_nodes => Replicas},
- Phase = phase_delete_replica,
- PhaseArgs = [Node, Conf],
- SState = update_stream_state(From, delete_replica,
- Phase, PhaseArgs,
- SState0#{conf => Conf0,
- pending_replicas => Pending}),
- Monitors = maps:remove(Pid, Monitors0),
- rabbit_log:debug("rabbit_stream_coordinator: ~p entering ~p on node ~p", [StreamId, Phase, Node]),
- {State#?MODULE{monitors = Monitors,
- streams = Streams0#{StreamId => SState}},
- '$ra_no_reply',
- [{demonitor, process, Pid},
- {aux, {phase, StreamId, Phase, PhaseArgs}}]}
- end;
- SState0 ->
- Streams = maps:put(StreamId, add_pending_cmd(From, Cmd, SState0), Streams0),
- {State#?MODULE{streams = Streams}, '$ra_no_reply', []}
- end;
-apply(#{from := From}, {delete_cluster, #{stream_id := StreamId,
- acting_user := ActingUser}} = Cmd,
- #?MODULE{streams = Streams0, monitors = Monitors0} = State) ->
- case maps:get(StreamId, Streams0, undefined) of
- undefined ->
- {State, '$ra_no_reply', wrap_reply(From, {ok, 0})};
- #{conf := Conf,
- state := running} = SState0 ->
- ReplicaPids = maps:get(replica_pids, Conf),
- LeaderPid = maps:get(leader_pid, Conf),
- Monitors = lists:foldl(fun(Pid, M) ->
- maps:remove(Pid, M)
- end, Monitors0, ReplicaPids ++ [LeaderPid]),
- Phase = phase_delete_cluster,
- PhaseArgs = [Conf, ActingUser],
- SState = update_stream_state(From, delete_cluster, Phase, PhaseArgs, SState0),
- Demonitors = [{demonitor, process, Pid} || Pid <- [LeaderPid | ReplicaPids]],
- rabbit_log:debug("rabbit_stream_coordinator: ~p entering ~p",
- [StreamId, Phase]),
- {State#?MODULE{monitors = Monitors,
- streams = Streams0#{StreamId => SState}}, '$ra_no_reply',
- Demonitors ++ [{aux, {phase, StreamId, Phase, PhaseArgs}}]};
- SState0 ->
- Streams = maps:put(StreamId, add_pending_cmd(From, Cmd, SState0), Streams0),
- {State#?MODULE{streams = Streams}, '$ra_no_reply', []}
- end;
-apply(_Meta, {delete_cluster_reply, StreamId}, #?MODULE{streams = Streams} = State0) ->
- #{reply_to := From,
- pending_cmds := Pending} = maps:get(StreamId, Streams),
- State = State0#?MODULE{streams = maps:remove(StreamId, Streams)},
- rabbit_log:debug("rabbit_stream_coordinator: ~p finished delete_cluster_reply",
- [StreamId]),
- Actions = [{ra, pipeline_command, [{?MODULE, node()}, Cmd]} || Cmd <- Pending],
- {State, ok, Actions ++ wrap_reply(From, {ok, 0})};
-apply(_Meta, {down, Pid, _Reason} = Cmd, #?MODULE{streams = Streams,
- monitors = Monitors0} = State) ->
- case maps:get(Pid, Monitors0, undefined) of
- {StreamId, Role} ->
- Monitors = maps:remove(Pid, Monitors0),
- case maps:get(StreamId, Streams, undefined) of
- #{state := delete_cluster} ->
- {State#?MODULE{monitors = Monitors}, ok, []};
- undefined ->
- {State#?MODULE{monitors = Monitors}, ok, []};
- #{state := running,
- conf := #{replica_pids := Pids} = Conf0,
- pending_cmds := Pending0} = SState0 ->
- case Role of
- leader ->
- rabbit_log:info("rabbit_stream_coordinator: ~p leader is down, starting election", [StreamId]),
- Phase = phase_stop_replicas,
- PhaseArgs = [Conf0],
- SState = update_stream_state(undefined, leader_election, Phase, PhaseArgs, SState0),
- Events = [{demonitor, process, P} || P <- Pids],
- Monitors1 = lists:foldl(fun(P, M) ->
- maps:remove(P, M)
- end, Monitors, Pids),
- rabbit_log:debug("rabbit_stream_coordinator: ~p entering ~p", [StreamId, Phase]),
- {State#?MODULE{monitors = Monitors1,
- streams = Streams#{StreamId => SState}},
- ok, Events ++ [{aux, {phase, StreamId, Phase, PhaseArgs}}]};
- follower ->
- case rabbit_misc:is_process_alive(maps:get(leader_pid, Conf0)) of
- true ->
- Phase = phase_start_replica,
- PhaseArgs = [node(Pid), Conf0, 1],
- SState = update_stream_state(undefined,
- replica_restart,
- Phase, PhaseArgs,
- SState0),
- rabbit_log:debug("rabbit_stream_coordinator: ~p replica on node ~p is down, entering ~p", [StreamId, node(Pid), Phase]),
- {State#?MODULE{monitors = Monitors,
- streams = Streams#{StreamId => SState}},
- ok, [{aux, {phase, StreamId, Phase, PhaseArgs}}]};
- false ->
- SState = SState0#{pending_cmds => Pending0 ++ [Cmd]},
- reply_and_run_pending(undefined, StreamId, ok, ok, [], State#?MODULE{streams = Streams#{StreamId => SState}})
- end
- end;
- #{pending_cmds := Pending0} = SState0 ->
- SState = SState0#{pending_cmds => Pending0 ++ [Cmd]},
- {State#?MODULE{streams = Streams#{StreamId => SState}}, ok, []}
+
+ Effects0 = case Reason of
+ noconnection ->
+ [{monitor, node, node(Pid)}];
+ _ ->
+ []
+ end,
+ case maps:take(Pid, Monitors0) of
+ {{StreamId, listener}, Monitors} ->
+ Listeners = case maps:take(StreamId, Listeners0) of
+ error ->
+ Listeners0;
+ {Pids0, Listeners1} ->
+ case maps:remove(Pid, Pids0) of
+ Pids when map_size(Pids) == 0 ->
+ Listeners1;
+ Pids ->
+ Listeners1#{StreamId => Pids}
+ end
+ end,
+ return(Meta, State#?MODULE{listeners = Listeners,
+ monitors = Monitors}, ok, Effects0);
+ {{StreamId, member}, Monitors1} ->
+ case Streams0 of
+ #{StreamId := Stream0} ->
+ Stream1 = update_stream(Meta, Cmd, Stream0),
+ {Stream, Effects} = evaluate_stream(Meta, Stream1, Effects0),
+ Streams = Streams0#{StreamId => Stream},
+ return(Meta, State#?MODULE{streams = Streams,
+ monitors = Monitors1}, ok,
+ Effects);
+ _ ->
+ %% stream not found, can happen if "late" downs are
+ %% received
+ return(Meta, State#?MODULE{streams = Streams0,
+ monitors = Monitors1}, ok, Effects0)
end;
- undefined ->
- {State, ok, []}
+ error ->
+ return(Meta, State, ok, Effects0)
end;
-apply(_Meta, {start_leader_election, StreamId, NewEpoch, Offsets},
- #?MODULE{streams = Streams} = State) ->
- #{conf := Conf0} = SState0 = maps:get(StreamId, Streams),
- #{leader_node := Leader,
- replica_nodes := Replicas,
- replica_pids := ReplicaPids0} = Conf0,
- NewLeader = find_max_offset(Offsets),
- rabbit_log:info("rabbit_stream_coordinator: ~p starting new leader on node ~p",
- [StreamId, NewLeader]),
- {ReplicaPids, _} = delete_replica_pid(NewLeader, ReplicaPids0),
- Conf = rabbit_stream_queue:update_stream_conf(
- Conf0#{epoch => NewEpoch,
- leader_node => NewLeader,
- replica_nodes => lists:delete(NewLeader, Replicas ++ [Leader]),
- replica_pids => ReplicaPids}),
- Phase = phase_start_new_leader,
- PhaseArgs = [Conf],
- SState = SState0#{conf => Conf,
- phase => Phase,
- phase_args => PhaseArgs},
- rabbit_log:debug("rabbit_stream_coordinator: ~p entering phase_start_new_leader",
- [StreamId]),
- {State#?MODULE{streams = Streams#{StreamId => SState}}, ok,
- [{aux, {phase, StreamId, Phase, PhaseArgs}}]};
-apply(_Meta, {leader_elected, StreamId, NewLeaderPid},
- #?MODULE{streams = Streams, monitors = Monitors0} = State) ->
- rabbit_log:info("rabbit_stream_coordinator: ~p leader elected", [StreamId]),
- #{conf := Conf0,
- pending_cmds := Pending0} = SState0 = maps:get(StreamId, Streams),
- #{leader_pid := LeaderPid,
- replica_nodes := Replicas} = Conf0,
- Conf = Conf0#{leader_pid => NewLeaderPid},
- Phase = phase_repair_mnesia,
- PhaseArgs = [update, Conf],
- Pending = Pending0 ++ [{start_replica, #{stream_id => StreamId, node => R,
- retries => 1, from => undefined}}
- || R <- Replicas],
- SState = SState0#{conf => Conf,
- phase => Phase,
- phase_args => PhaseArgs,
- pending_replicas => Replicas,
- pending_cmds => Pending},
- Monitors = maps:put(NewLeaderPid, {StreamId, leader}, maps:remove(LeaderPid, Monitors0)),
- rabbit_log:debug("rabbit_stream_coordinator: ~p entering ~p after "
- "leader election", [StreamId, Phase]),
- {State#?MODULE{streams = Streams#{StreamId => SState},
- monitors = Monitors}, ok,
- [{monitor, process, NewLeaderPid},
- {aux, {phase, StreamId, Phase, PhaseArgs}}]};
-apply(_Meta, {replicas_stopped, StreamId}, #?MODULE{streams = Streams} = State) ->
- case maps:get(StreamId, Streams, undefined) of
- undefined ->
- {State, {error, not_found}, []};
- #{conf := Conf0} = SState0 ->
- Phase = phase_check_quorum,
- Conf = Conf0#{replica_pids => []},
- PhaseArgs = [Conf],
- SState = SState0#{conf => Conf,
- phase => Phase,
- phase_args => PhaseArgs},
- rabbit_log:info("rabbit_stream_coordinator: ~p all replicas have been stopped, "
- "checking quorum available", [StreamId]),
- {State#?MODULE{streams = Streams#{StreamId => SState}}, ok,
- [{aux, {phase, StreamId, Phase, PhaseArgs}}]}
+apply(Meta, {register_listener, #{pid := Pid,
+ stream_id := StreamId}},
+ #?MODULE{streams = Streams,
+ monitors = Monitors0} = State0) ->
+ case Streams of
+ #{StreamId := #stream{listeners = Listeners0} = Stream0} ->
+ Stream1 = Stream0#stream{listeners = maps:put(Pid, undefined, Listeners0)},
+ {Stream, Effects} = eval_listeners(Stream1, []),
+ Monitors = maps:put(Pid, {StreamId, listener}, Monitors0),
+ return(Meta,
+ State0#?MODULE{streams = maps:put(StreamId, Stream, Streams),
+ monitors = Monitors}, ok,
+ [{monitor, process, Pid} | Effects]);
+ _ ->
+ return(Meta, State0, stream_not_found, [])
end;
-apply(_Meta, {stream_updated, #{name := StreamId} = Conf}, #?MODULE{streams = Streams} = State) ->
- SState0 = maps:get(StreamId, Streams),
- Phase = phase_repair_mnesia,
- PhaseArgs = [update, Conf],
- SState = SState0#{conf => Conf,
- phase => Phase,
- phase_args => PhaseArgs},
- rabbit_log:debug("rabbit_stream_coordinator: ~p entering ~p after"
- " stream_updated", [StreamId, Phase]),
- {State#?MODULE{streams = Streams#{StreamId => SState}}, ok,
- [{aux, {phase, StreamId, Phase, PhaseArgs}}]};
-apply(_, {timeout, {pipeline, Cmds}}, State) ->
- Actions = [{mod_call, ra, pipeline_command, [{?MODULE, node()}, Cmd]} || Cmd <- Cmds],
- {State, ok, Actions};
-apply(_, {timeout, {aux, Cmd}}, State) ->
- {State, ok, [{aux, Cmd}]};
-apply(Meta, {_, #{from := From}} = Cmd, State) ->
- ?MODULE:apply(Meta#{from => From}, Cmd, State).
-
-state_enter(leader, #?MODULE{streams = Streams, monitors = Monitors}) ->
- maps:fold(fun(_, #{conf := #{name := StreamId},
- pending_replicas := Pending,
- state := State,
- phase := Phase,
- phase_args := PhaseArgs}, Acc) ->
- restart_aux_phase(State, Phase, PhaseArgs, StreamId) ++
- pipeline_restart_replica_cmds(StreamId, Pending) ++
- Acc
- end, [{monitor, process, P} || P <- maps:keys(Monitors)], Streams);
-state_enter(follower, #?MODULE{monitors = Monitors}) ->
- [{monitor, process, P} || P <- maps:keys(Monitors)];
+apply(Meta, {nodeup, Node} = Cmd,
+ #?MODULE{monitors = Monitors0,
+ streams = Streams0} = State) ->
+ %% reissue monitors for all disconnected members
+ {Effects0, Monitors} =
+ maps:fold(
+ fun(_, #stream{id = Id,
+ members = M}, {Acc, Mon}) ->
+ case M of
+ #{Node := #member{state = {disconnected, _, P}}} ->
+ {[{monitor, process, P} | Acc],
+ Mon#{P => {Id, member}}};
+ _ ->
+ {Acc, Mon}
+ end
+ end, {[], Monitors0}, Streams0),
+ {Streams, Effects} =
+ maps:fold(fun (Id, S0, {Ss, E0}) ->
+ S1 = update_stream(Meta, Cmd, S0),
+ {S, E} = evaluate_stream(Meta, S1, E0),
+ {Ss#{Id => S}, E}
+ end, {Streams0, Effects0}, Streams0),
+ return(Meta, State#?MODULE{monitors = Monitors,
+ streams = Streams}, ok, Effects);
+apply(Meta, UnkCmd, State) ->
+ rabbit_log:debug("~s: unknown command ~W",
+ [?MODULE, UnkCmd, 10]),
+ return(Meta, State, {error, unknown_command}, []).
+
+return(#{index := Idx}, State, Reply, Effects) ->
+ case Idx rem 4096 == 0 of
+ true ->
+ %% add release cursor effect
+ {State, Reply, [{release_cursor, Idx, State} | Effects]};
+ false ->
+ {State, Reply, Effects}
+ end.
+
state_enter(recover, _) ->
put('$rabbit_vm_category', ?MODULE),
[];
-state_enter(_, _) ->
+state_enter(leader, #?MODULE{streams = Streams,
+ monitors = Monitors}) ->
+ Pids = maps:keys(Monitors),
+ %% monitor all the known nodes
+ Nodes = all_member_nodes(Streams),
+ NodeMons = [{monitor, node, N} || N <- Nodes],
+ NodeMons ++ [{aux, fail_active_actions} |
+ [{monitor, process, P} || P <- Pids]];
+state_enter(_S, _) ->
[].
-restart_aux_phase(running, _, _, _) ->
- [];
-restart_aux_phase(_State, Phase, PhaseArgs, StreamId) ->
- [{aux, {phase, StreamId, Phase, PhaseArgs}}].
-
-pipeline_restart_replica_cmds(StreamId, Pending) ->
- [{timer, {pipeline, [{start_replica, #{stream_id => StreamId,
- node => Node,
- from => undefined,
- retries => 1}}
- || Node <- Pending]}, ?RESTART_TIMEOUT}].
+all_member_nodes(Streams) ->
+ maps:keys(
+ maps:fold(
+ fun (_, #stream{members = M}, Acc) ->
+ maps:merge(Acc, M)
+ end, #{}, Streams)).
tick(_Ts, _State) ->
[{aux, maybe_resize_coordinator_cluster}].
@@ -562,21 +475,22 @@ maybe_resize_coordinator_cluster() ->
{_, Members, _} ->
MemberNodes = [Node || {_, Node} <- Members],
Running = rabbit_mnesia:cluster_nodes(running),
- All = rabbit_mnesia:cluster_nodes(all),
+ All = rabbit_nodes:all(),
case Running -- MemberNodes of
[] ->
ok;
New ->
- rabbit_log:warning("New rabbit node(s) detected, "
- "adding stream coordinator in: ~p", [New]),
+ rabbit_log:info("~s: New rabbit node(s) detected, "
+ "adding : ~w",
+ [?MODULE, New]),
add_members(Members, New)
end,
case MemberNodes -- All of
[] ->
ok;
Old ->
- rabbit_log:warning("Rabbit node(s) removed from the cluster, "
- "deleting stream coordinator in: ~p", [Old]),
+ rabbit_log:info("~s: Rabbit node(s) removed from the cluster, "
+ "deleting: ~w", [?MODULE, Old]),
remove_members(Members, Old)
end;
_ ->
@@ -588,7 +502,7 @@ add_members(_, []) ->
ok;
add_members(Members, [Node | Nodes]) ->
Conf = make_ra_conf(Node, [N || {_, N} <- Members]),
- case ra:start_server(Conf) of
+ case ra:start_server(?RA_SYSTEM, Conf) of
ok ->
case ra:add_member(Members, {?MODULE, Node}) of
{ok, NewMembers, _} ->
@@ -597,8 +511,8 @@ add_members(Members, [Node | Nodes]) ->
add_members(Members, Nodes)
end;
Error ->
- rabbit_log:warning("Stream coordinator failed to start on node ~p : ~p",
- [Node, Error]),
+ rabbit_log:warning("Stream coordinator failed to start on node ~s : ~W",
+ [Node, Error, 10]),
add_members(Members, Nodes)
end.
@@ -612,195 +526,266 @@ remove_members(Members, [Node | Nodes]) ->
remove_members(Members, Nodes)
end.
+-record(aux, {actions = #{} ::
+ #{pid() := {stream_id(), #{node := node(),
+ index := non_neg_integer(),
+ epoch := osiris:epoch()}}},
+ resizer :: undefined | pid()}).
+
init_aux(_Name) ->
- {#{}, undefined}.
+ #aux{}.
%% TODO ensure the dead writer is restarted as a replica at some point in time, increasing timeout?
-handle_aux(leader, _, maybe_resize_coordinator_cluster, {Monitors, undefined}, LogState, _) ->
+handle_aux(leader, _, maybe_resize_coordinator_cluster,
+ #aux{resizer = undefined} = Aux, LogState, _) ->
Pid = maybe_resize_coordinator_cluster(),
- {no_reply, {Monitors, Pid}, LogState, [{monitor, process, aux, Pid}]};
-handle_aux(leader, _, maybe_resize_coordinator_cluster, AuxState, LogState, _) ->
+ {no_reply, Aux#aux{resizer = Pid}, LogState, [{monitor, process, aux, Pid}]};
+handle_aux(leader, _, maybe_resize_coordinator_cluster,
+ AuxState, LogState, _) ->
%% Coordinator resizing is still happening, let's ignore this tick event
{no_reply, AuxState, LogState};
-handle_aux(leader, _, {down, Pid, _}, {Monitors, Pid}, LogState, _) ->
+handle_aux(leader, _, {down, Pid, _},
+ #aux{resizer = Pid} = Aux, LogState, _) ->
%% Coordinator resizing has finished
- {no_reply, {Monitors, undefined}, LogState};
-handle_aux(leader, _, {phase, _, Fun, Args} = Cmd, {Monitors, Coordinator}, LogState, _) ->
- Pid = erlang:apply(?MODULE, Fun, Args),
- Actions = [{monitor, process, aux, Pid}],
- {no_reply, {maps:put(Pid, Cmd, Monitors), Coordinator}, LogState, Actions};
-handle_aux(leader, _, {down, Pid, normal}, {Monitors, Coordinator}, LogState, _) ->
- {no_reply, {maps:remove(Pid, Monitors), Coordinator}, LogState};
-handle_aux(leader, _, {down, Pid, Reason}, {Monitors0, Coordinator}, LogState, _) ->
- %% The phase has failed, let's retry it
- case maps:get(Pid, Monitors0) of
- {phase, StreamId, phase_start_new_leader, Args} ->
- rabbit_log:warning("Error while starting new leader for stream queue ~p, "
- "restarting election: ~p", [StreamId, Reason]),
- Monitors = maps:remove(Pid, Monitors0),
- Cmd = {phase, StreamId, phase_check_quorum, Args},
- {no_reply, {Monitors, Coordinator}, LogState, [{timer, {aux, Cmd}, ?PHASE_RETRY_TIMEOUT}]};
- {phase, StreamId, Fun, _} = Cmd ->
- rabbit_log:warning("Error while executing coordinator phase ~p for stream queue ~p ~p",
- [Fun, StreamId, Reason]),
+ {no_reply, Aux#aux{resizer = undefined}, LogState};
+handle_aux(leader, _, {start_writer, StreamId,
+ #{epoch := Epoch, node := Node} = Args, Conf},
+ Aux, LogState, _) ->
+ rabbit_log:debug("~s: running action: 'start_writer'"
+ " for ~s on node ~w in epoch ~b",
+ [?MODULE, StreamId, Node, Epoch]),
+ ActionFun = phase_start_writer(StreamId, Args, Conf),
+ run_action(starting, StreamId, Args, ActionFun, Aux, LogState);
+handle_aux(leader, _, {start_replica, StreamId,
+ #{epoch := Epoch, node := Node} = Args, Conf},
+ Aux, LogState, _) ->
+ rabbit_log:debug("~s: running action: 'start_replica'"
+ " for ~s on node ~w in epoch ~b",
+ [?MODULE, StreamId, Node, Epoch]),
+ ActionFun = phase_start_replica(StreamId, Args, Conf),
+ run_action(starting, StreamId, Args, ActionFun, Aux, LogState);
+handle_aux(leader, _, {stop, StreamId, #{node := Node,
+ epoch := Epoch} = Args, Conf},
+ Aux, LogState, _) ->
+ rabbit_log:debug("~s: running action: 'stop'"
+ " for ~s on node ~w in epoch ~b",
+ [?MODULE, StreamId, Node, Epoch]),
+ ActionFun = phase_stop_member(StreamId, Args, Conf),
+ run_action(stopping, StreamId, Args, ActionFun, Aux, LogState);
+handle_aux(leader, _, {update_mnesia, StreamId, Args, Conf},
+ #aux{actions = _Monitors} = Aux, LogState,
+ #?MODULE{streams = _Streams}) ->
+ rabbit_log:debug("~s: running action: 'update_mnesia'"
+ " for ~s", [?MODULE, StreamId]),
+ ActionFun = phase_update_mnesia(StreamId, Args, Conf),
+ run_action(updating_mnesia, StreamId, Args, ActionFun, Aux, LogState);
+handle_aux(leader, _, {update_retention, StreamId, Args, _Conf},
+ #aux{actions = _Monitors} = Aux, LogState,
+ #?MODULE{streams = _Streams}) ->
+ rabbit_log:debug("~s: running action: 'update_retention'"
+ " for ~s", [?MODULE, StreamId]),
+ ActionFun = phase_update_retention(StreamId, Args),
+ run_action(update_retention, StreamId, Args, ActionFun, Aux, LogState);
+handle_aux(leader, _, {delete_member, StreamId, #{node := Node} = Args, Conf},
+ #aux{actions = _Monitors} = Aux, LogState,
+ #?MODULE{streams = _Streams}) ->
+ rabbit_log:debug("~s: running action: 'delete_member'"
+ " for ~s ~s", [?MODULE, StreamId, Node]),
+ ActionFun = phase_delete_member(StreamId, Args, Conf),
+ run_action(delete_member, StreamId, Args, ActionFun, Aux, LogState);
+handle_aux(leader, _, fail_active_actions,
+ #aux{actions = Monitors} = Aux, LogState,
+ #?MODULE{streams = Streams}) ->
+ Exclude = maps:from_list([{S, ok}
+ || {P, {S, _, _}} <- maps:to_list(Monitors),
+ not is_process_alive(P)]),
+ rabbit_log:debug("~s: failing actions: ~w", [?MODULE, Exclude]),
+ fail_active_actions(Streams, Exclude),
+ {no_reply, Aux, LogState, []};
+handle_aux(leader, _, {down, Pid, normal},
+ #aux{actions = Monitors} = Aux, LogState, _) ->
+ %% action process finished normally, just remove from actions map
+ {no_reply, Aux#aux{actions = maps:remove(Pid, Monitors)}, LogState, []};
+handle_aux(leader, _, {down, Pid, Reason},
+ #aux{actions = Monitors0} = Aux, LogState, _) ->
+ %% An action has failed - report back to the state machine
+ case maps:get(Pid, Monitors0, undefined) of
+ {StreamId, Action, #{node := Node, epoch := Epoch} = Args} ->
+ rabbit_log:warning("~s: error while executing action for stream queue ~s, "
+ " node ~s, epoch ~b Err: ~w",
+ [?MODULE, StreamId, Node, Epoch, Reason]),
Monitors = maps:remove(Pid, Monitors0),
- {no_reply, {Monitors, Coordinator}, LogState, [{timer, {aux, Cmd}, ?PHASE_RETRY_TIMEOUT}]}
+ Cmd = {action_failed, StreamId, Args#{action => Action}},
+ send_self_command(Cmd),
+ {no_reply, Aux#aux{actions = maps:remove(Pid, Monitors)},
+ LogState, []};
+ undefined ->
+ %% should this ever happen?
+ {no_reply, Aux, LogState, []}
end;
handle_aux(_, _, _, AuxState, LogState, _) ->
{no_reply, AuxState, LogState}.
-reply_and_run_pending(From, StreamId, Reply, WrapReply, Actions0, #?MODULE{streams = Streams} = State) ->
- #{pending_cmds := Pending} = SState0 = maps:get(StreamId, Streams),
- AuxActions = [{mod_call, ra, pipeline_command, [{?MODULE, node()}, Cmd]}
- || Cmd <- Pending],
- SState = maps:put(pending_cmds, [], SState0),
- Actions = case From of
- undefined ->
- AuxActions ++ Actions0;
- _ ->
- wrap_reply(From, WrapReply) ++ AuxActions ++ Actions0
- end,
- {State#?MODULE{streams = Streams#{StreamId => SState}}, Reply, Actions}.
+run_action(Action, StreamId, #{node := _Node,
+ epoch := _Epoch} = Args,
+ ActionFun, #aux{actions = Actions0} = Aux, Log) ->
+ Coordinator = self(),
+ Pid = spawn_link(fun() ->
+ ActionFun(),
+ unlink(Coordinator)
+ end),
+ Effects = [{monitor, process, aux, Pid}],
+ Actions = Actions0#{Pid => {StreamId, Action, Args}},
+ {no_reply, Aux#aux{actions = Actions}, Log, Effects}.
wrap_reply(From, Reply) ->
[{reply, From, {wrap_reply, Reply}}].
-add_pending_cmd(From, {CmdName, CmdMap}, #{pending_cmds := Pending0} = StreamState) ->
- %% Remove from pending the leader election and automatic replica restart when
- %% the command is delete_cluster
- Pending = case CmdName of
- delete_cluster ->
- lists:filter(fun({down, _, _}) ->
- false;
- (_) ->
- true
- end, Pending0);
- _ ->
- Pending0
- end,
- maps:put(pending_cmds, Pending ++ [{CmdName, maps:put(from, From, CmdMap)}],
- StreamState).
-
-clear_stream_state(StreamState) ->
- StreamState#{reply_to => undefined,
- state => running,
- phase => undefined,
- phase_args => undefined}.
-
-update_stream_state(From, State, Phase, PhaseArgs, StreamState) ->
- StreamState#{reply_to => From,
- state => State,
- phase => Phase,
- phase_args => PhaseArgs}.
-
-phase_start_replica(Node, #{name := StreamId} = Conf0,
- Retries) ->
- spawn(
- fun() ->
- %% If a new leader hasn't yet been elected, this will fail with a badmatch
- %% as get_reader_context returns a no proc. An unhandled failure will
- %% crash this monitored process and restart it later.
- %% TODO However, do we want that crash in the log? We might need to try/catch
- %% to provide a log message instead as it's 'expected'. We could try to
- %% verify first that the leader is alive, but there would still be potential
- %% for a race condition in here.
- try
- case osiris_replica:start(Node, Conf0) of
- {ok, Pid} ->
- ra:pipeline_command({?MODULE, node()},
- {start_replica_reply, StreamId, Pid});
- {error, already_present} ->
- ra:pipeline_command({?MODULE, node()}, {phase_finished, StreamId, ok});
- {error, {already_started, _}} ->
- ra:pipeline_command({?MODULE, node()}, {phase_finished, StreamId, ok});
- {error, Reason} = Error ->
- rabbit_log:warning("Error while starting replica for ~p : ~p",
- [maps:get(name, Conf0), Reason]),
- ra:pipeline_command({?MODULE, node()},
- {start_replica_failed, StreamId, Node, Retries, Error})
- end
- catch _:E->
- rabbit_log:warning("Error while starting replica for ~p : ~p",
- [maps:get(name, Conf0), E]),
- ra:pipeline_command({?MODULE, node()},
- {start_replica_failed, StreamId, Node, Retries, {error, E}})
- end
- end).
-
-phase_delete_replica(Node, Conf) ->
- spawn(
- fun() ->
- ok = osiris_replica:delete(Node, Conf),
- ra:pipeline_command({?MODULE, node()}, {stream_updated, Conf})
- end).
-
-phase_stop_replicas(#{replica_nodes := Replicas,
- name := StreamId} = Conf) ->
- spawn(
- fun() ->
- [try
- osiris_replica:stop(Node, Conf)
- catch _:{{nodedown, _}, _} ->
- %% It could be the old leader that is still down, it's normal.
- ok
- end || Node <- Replicas],
- ra:pipeline_command({?MODULE, node()}, {replicas_stopped, StreamId})
- end).
-
-phase_start_new_leader(#{name := StreamId, leader_node := Node, leader_pid := LPid} = Conf) ->
- spawn(fun() ->
- osiris_replica:stop(Node, Conf),
- %% If the start fails, the monitor will capture the crash and restart it
- case osiris_writer:start(Conf) of
- {ok, Pid} ->
- ra:pipeline_command({?MODULE, node()},
- {leader_elected, StreamId, Pid});
- {error, already_present} ->
- ra:pipeline_command({?MODULE, node()},
- {leader_elected, StreamId, LPid});
- {error, {already_started, Pid}} ->
- ra:pipeline_command({?MODULE, node()},
- {leader_elected, StreamId, Pid})
- end
- end).
+phase_start_replica(StreamId, #{epoch := Epoch,
+ node := Node} = Args, Conf0) ->
+ fun() ->
+ try osiris_replica:start(Node, Conf0) of
+ {ok, Pid} ->
+ rabbit_log:info("~s: ~s: replica started on ~s in ~b pid ~w",
+ [?MODULE, StreamId, Node, Epoch, Pid]),
+ send_self_command({member_started, StreamId,
+ Args#{pid => Pid}});
+ {error, already_present} ->
+ %% need to remove child record if this is the case
+ %% can it ever happen?
+ _ = osiris_replica:stop(Node, Conf0),
+ send_action_failed(StreamId, starting, Args);
+ {error, {already_started, Pid}} ->
+ %% TODO: we need to check that the current epoch is the same
+ %% before we can be 100% sure it is started in the correct
+ %% epoch, can this happen? who knows...
+ send_self_command({member_started, StreamId,
+ Args#{pid => Pid}});
+ {error, Reason} ->
+ rabbit_log:warning("~s: Error while starting replica for ~s on node ~s in ~b : ~W",
+ [?MODULE, maps:get(name, Conf0), Node, Epoch, Reason, 10]),
+ maybe_sleep(Reason),
+ send_action_failed(StreamId, starting, Args)
+ catch _:Error ->
+ rabbit_log:warning("~s: Error while starting replica for ~s on node ~s in ~b : ~W",
+ [?MODULE, maps:get(name, Conf0), Node, Epoch, Error, 10]),
+ maybe_sleep(Error),
+ send_action_failed(StreamId, starting, Args)
+ end
+ end.
-phase_check_quorum(#{name := StreamId,
- epoch := Epoch,
- replica_nodes := Nodes} = Conf) ->
- spawn(fun() ->
- Offsets = find_replica_offsets(Conf),
- case is_quorum(length(Nodes) + 1, length(Offsets)) of
- true ->
- ra:pipeline_command({?MODULE, node()},
- {start_leader_election, StreamId, Epoch + 1, Offsets});
- false ->
- %% Let's crash this process so the monitor will restart it
- exit({not_enough_quorum, StreamId})
- end
- end).
+send_action_failed(StreamId, Action, Arg) ->
+ send_self_command({action_failed, StreamId, Arg#{action => Action}}).
-find_replica_offsets(#{replica_nodes := Nodes,
- leader_node := Leader} = Conf) ->
- lists:foldl(
- fun(Node, Acc) ->
- try
- %% osiris_log:overview/1 needs the directory - last item of the list
- case rpc:call(Node, rabbit, is_running, []) of
- false ->
- Acc;
- true ->
- case rpc:call(Node, ?MODULE, log_overview, [Conf]) of
- {badrpc, nodedown} ->
- Acc;
- {_Range, Offsets} ->
- [{Node, select_highest_offset(Offsets)} | Acc]
- end
- end
- catch
- _:_ ->
- Acc
- end
- end, [], Nodes ++ [Leader]).
+send_self_command(Cmd) ->
+ ra:pipeline_command({?MODULE, node()}, Cmd),
+ ok.
+
+
+phase_delete_member(StreamId, #{node := Node} = Arg, Conf) ->
+ fun() ->
+ try osiris_server_sup:delete_child(Node, Conf) of
+ ok ->
+ rabbit_log:info("~s: Member deleted for ~s : on node ~s",
+ [?MODULE, StreamId, Node]),
+ send_self_command({member_deleted, StreamId, Arg});
+ _ ->
+ send_action_failed(StreamId, deleting, Arg)
+ catch _:E ->
+ rabbit_log:warning("~s: Error while deleting member for ~s : on node ~s ~W",
+ [?MODULE, StreamId, Node, E, 10]),
+ maybe_sleep(E),
+ send_action_failed(StreamId, deleting, Arg)
+ end
+ end.
+
+phase_stop_member(StreamId, #{node := Node,
+ epoch := Epoch} = Arg0, Conf) ->
+ fun() ->
+ try osiris_server_sup:stop_child(Node, StreamId) of
+ ok ->
+ %% get tail
+ try get_replica_tail(Node, Conf) of
+ {ok, Tail} ->
+ Arg = Arg0#{tail => Tail},
+ rabbit_log:debug("~s: ~s: member stopped on ~s in ~b Tail ~w",
+ [?MODULE, StreamId, Node, Epoch, Tail]),
+ send_self_command({member_stopped, StreamId, Arg});
+ Err ->
+ rabbit_log:warning("~s: failed to get tail of member ~s on ~s in ~b Error: ~w",
+ [?MODULE, StreamId, Node, Epoch, Err]),
+ maybe_sleep(Err),
+ send_action_failed(StreamId, stopping, Arg0)
+ catch _:Err ->
+ rabbit_log:warning("~s: failed to get tail of member ~s on ~s in ~b Error: ~w",
+ [?MODULE, StreamId, Node, Epoch, Err]),
+ maybe_sleep(Err),
+ send_action_failed(StreamId, stopping, Arg0)
+ end;
+ Err ->
+ rabbit_log:warning("~s: failed to stop "
+ "member ~s ~w Error: ~w",
+ [?MODULE, StreamId, Node, Err]),
+ maybe_sleep(Err),
+ send_action_failed(StreamId, stopping, Arg0)
+ catch _:Err ->
+ rabbit_log:warning("~s: failed to stop member ~s ~w Error: ~w",
+ [?MODULE, StreamId, Node, Err]),
+ maybe_sleep(Err),
+ send_action_failed(StreamId, stopping, Arg0)
+ end
+ end.
+
+phase_start_writer(StreamId, #{epoch := Epoch,
+ node := Node} = Args0, Conf) ->
+ fun() ->
+ try osiris_writer:start(Conf) of
+ {ok, Pid} ->
+ Args = Args0#{epoch => Epoch, pid => Pid},
+ rabbit_log:info("~s: started writer ~s on ~w in ~b",
+ [?MODULE, StreamId, Node, Epoch]),
+ send_self_command({member_started, StreamId, Args});
+ Err ->
+ %% no sleep for writer failures as we want to trigger a new
+ %% election asap
+ rabbit_log:warning("~s: failed to start writer ~s on ~s in ~b Error: ~w",
+ [?MODULE, StreamId, Node, Epoch, Err]),
+ send_action_failed(StreamId, starting, Args0)
+ catch _:Err ->
+ rabbit_log:warning("~s: failed to start writer ~s on ~s in ~b Error: ~w",
+ [?MODULE, StreamId, Node, Epoch, Err]),
+ send_action_failed(StreamId, starting, Args0)
+ end
+ end.
+
+phase_update_retention(StreamId, #{pid := Pid,
+ retention := Retention} = Args) ->
+ fun() ->
+ try osiris:update_retention(Pid, Retention) of
+ ok ->
+ send_self_command({retention_updated, StreamId, Args});
+ {error, Reason} = Err ->
+ rabbit_log:warning("~s: failed to update retention for ~s ~w Reason: ~w",
+ [?MODULE, StreamId, node(Pid), Reason]),
+ maybe_sleep(Err),
+ send_action_failed(StreamId, update_retention, Args)
+ catch _:Err ->
+ rabbit_log:warning("~s: failed to update retention for ~s ~w Error: ~w",
+ [?MODULE, StreamId, node(Pid), Err]),
+ maybe_sleep(Err),
+ send_action_failed(StreamId, update_retention, Args)
+ end
+ end.
+
+get_replica_tail(Node, Conf) ->
+ case rpc:call(Node, ?MODULE, log_overview, [Conf]) of
+ {badrpc, nodedown} ->
+ {error, nodedown};
+ {error, _} = Err ->
+ Err;
+ {_Range, Offsets} ->
+ {ok, select_highest_offset(Offsets)}
+ end.
select_highest_offset([]) ->
empty;
@@ -808,74 +793,83 @@ select_highest_offset(Offsets) ->
lists:last(Offsets).
log_overview(Config) ->
- Dir = osiris_log:directory(Config),
- osiris_log:overview(Dir).
+ case whereis(osiris_sup) of
+ undefined ->
+ {error, app_not_running};
+ _ ->
+ Dir = osiris_log:directory(Config),
+ osiris_log:overview(Dir)
+ end.
-find_max_offset(Offsets) ->
- [{Node, _} | _] = lists:sort(fun({_, {Ao, E}}, {_, {Bo, E}}) ->
- Ao >= Bo;
- ({_, {_, Ae}}, {_, {_, Be}}) ->
- Ae >= Be;
- ({_, empty}, _) ->
- false;
- (_, {_, empty}) ->
- true
- end, Offsets),
- Node.
+
+replay(L) when is_list(L) ->
+ lists:foldl(
+ fun ({M, E}, Acc) ->
+ element(1, ?MODULE:apply(M, E, Acc))
+ end, init(#{}), L).
is_quorum(1, 1) ->
true;
is_quorum(NumReplicas, NumAlive) ->
NumAlive >= ((NumReplicas div 2) + 1).
-phase_repair_mnesia(new, Q) ->
- spawn(fun() ->
- Reply = rabbit_amqqueue:internal_declare(Q, false),
- #{name := StreamId} = amqqueue:get_type_state(Q),
- ra:pipeline_command({?MODULE, node()}, {phase_finished, StreamId, Reply})
- end);
-
-phase_repair_mnesia(update, #{reference := QName,
- leader_pid := LeaderPid,
- name := StreamId} = Conf) ->
- Fun = fun (Q) ->
- amqqueue:set_type_state(amqqueue:set_pid(Q, LeaderPid), Conf)
- end,
- spawn(fun() ->
- case rabbit_misc:execute_mnesia_transaction(
- fun() ->
- rabbit_amqqueue:update(QName, Fun)
- end) of
- not_found ->
- %% This can happen during recovery
- [Q] = mnesia:dirty_read(rabbit_durable_queue, QName),
- rabbit_amqqueue:ensure_rabbit_queue_record_is_initialized(Fun(Q));
- _ ->
- ok
+phase_update_mnesia(StreamId, Args, #{reference := QName,
+ leader_pid := LeaderPid} = Conf) ->
+ fun() ->
+ rabbit_log:debug("~s: running mnesia update for ~s: ~W",
+ [?MODULE, StreamId, Conf, 10]),
+ Fun = fun (Q) ->
+ case amqqueue:get_type_state(Q) of
+ #{name := S} when S == StreamId ->
+ %% the stream id matches so we can update the
+ %% amqqueue record
+ amqqueue:set_type_state(
+ amqqueue:set_pid(Q, LeaderPid), Conf);
+ Ts ->
+ S = maps:get(name, Ts, undefined),
+ rabbit_log:debug("~s: refusing mnesia update for stale stream id ~s, current ~s",
+ [?MODULE, StreamId, S]),
+ %% if the stream id isn't a match this is a stale
+ %% update from a previous stream incarnation for the
+ %% same queue name and we ignore it
+ Q
+ end
end,
- ra:pipeline_command({?MODULE, node()}, {phase_finished, StreamId, ok})
- end).
-
-phase_start_cluster(Q0) ->
- spawn(
- fun() ->
- case osiris:start_cluster(amqqueue:get_type_state(Q0)) of
- {ok, #{leader_pid := Pid} = Conf} ->
- Q = amqqueue:set_type_state(amqqueue:set_pid(Q0, Pid), Conf),
- ra:pipeline_command({?MODULE, node()}, {start_cluster_reply, Q});
- {error, {already_started, _}} ->
- ra:pipeline_command({?MODULE, node()}, {start_cluster_finished, {error, already_started}})
- end
- end).
-
-phase_delete_cluster(#{name := StreamId,
- reference := QName} = Conf, ActingUser) ->
- spawn(
- fun() ->
- ok = osiris:delete_cluster(Conf),
- _ = rabbit_amqqueue:internal_delete(QName, ActingUser),
- ra:pipeline_command({?MODULE, node()}, {delete_cluster_reply, StreamId})
- end).
+ try rabbit_misc:execute_mnesia_transaction(
+ fun() ->
+ rabbit_amqqueue:update(QName, Fun)
+ end) of
+ not_found ->
+ rabbit_log:debug("~s: resource for stream id ~s not found, "
+ "recovering from rabbit_durable_queue",
+ [?MODULE, StreamId]),
+ %% This can happen during recovery
+ %% we need to re-initialise the queue record
+ %% if the stream id is a match
+ case mnesia:dirty_read(rabbit_durable_queue, QName) of
+ [] ->
+ %% queue not found at all, it must have been deleted
+ ok;
+ [Q] ->
+ case amqqueue:get_type_state(Q) of
+ #{name := S} when S == StreamId ->
+ rabbit_log:debug("~s: initializing queue record for stream id ~s",
+ [?MODULE, StreamId]),
+ _ = rabbit_amqqueue:ensure_rabbit_queue_record_is_initialized(Fun(Q)),
+ ok;
+ _ ->
+ ok
+ end,
+ send_self_command({mnesia_updated, StreamId, Args})
+ end;
+ _ ->
+ send_self_command({mnesia_updated, StreamId, Args})
+ catch _:E ->
+ rabbit_log:debug("~s: failed to update mnesia for ~s: ~W",
+ [?MODULE, StreamId, E, 10]),
+ send_action_failed(StreamId, updating_mnesia, Args)
+ end
+ end.
format_ra_event(ServerId, Evt) ->
{stream_coordinator_event, ServerId, Evt}.
@@ -897,53 +891,674 @@ make_ra_conf(Node, Nodes) ->
machine => {module, ?MODULE, #{}},
ra_event_formatter => Formatter}.
-add_unique(Node, Nodes) ->
- case lists:member(Node, Nodes) of
+filter_command(_Meta, {delete_replica, _, #{node := Node}}, #stream{id = StreamId,
+ members = Members0}) ->
+ Members = maps:filter(fun(_, #member{target = S}) when S =/= deleted ->
+ true;
+ (_, _) ->
+ false
+ end, Members0),
+ case maps:size(Members) =< 1 of
true ->
- Nodes;
+ rabbit_log:warning(
+ "~s failed to delete replica on node ~s for stream ~s: refusing to delete the only replica",
+ [?MODULE, Node, StreamId]),
+ {error, last_stream_member};
+ false ->
+ ok
+ end;
+filter_command(_, _, _) ->
+ ok.
+
+update_stream(Meta, Cmd, Stream) ->
+ try
+ update_stream0(Meta, Cmd, Stream)
+ catch
+ _:E:Stacktrace ->
+ rabbit_log:warning(
+ "~s failed to update stream:~n~W~n~W",
+ [?MODULE, E, 10, Stacktrace, 10]),
+ Stream
+ end.
+
+update_stream0(#{system_time := _} = Meta,
+ {new_stream, StreamId, #{leader_node := LeaderNode,
+ queue := Q}}, undefined) ->
+ #{nodes := Nodes} = Conf = amqqueue:get_type_state(Q),
+ %% this jumps straight to the state where all members
+ %% have been stopped and a new writer has been chosen
+ E = 1,
+ QueueRef = amqqueue:get_name(Q),
+ Members = maps:from_list(
+ [{N, #member{role = case LeaderNode of
+ N -> {writer, E};
+ _ -> {replica, E}
+ end,
+ node = N,
+ state = {ready, E},
+ %% no members are running actions
+ current = undefined}
+ } || N <- Nodes]),
+ #stream{id = StreamId,
+ epoch = E,
+ nodes = Nodes,
+ queue_ref = QueueRef,
+ conf = Conf,
+ members = Members,
+ reply_to = maps:get(from, Meta, undefined)};
+update_stream0(#{system_time := _Ts} = _Meta,
+ {delete_stream, _StreamId, #{}},
+ #stream{members = Members0,
+ target = _} = Stream0) ->
+ Members = maps:map(
+ fun (_, M) ->
+ M#member{target = deleted}
+ end, Members0),
+ Stream0#stream{members = Members,
+ %% reset reply_to here to ensure a reply
+ %% is returned as the command has been accepted
+ reply_to = undefined,
+ target = deleted};
+update_stream0(#{system_time := _Ts} = _Meta,
+ {add_replica, _StreamId, #{node := Node}},
+ #stream{members = Members0,
+ epoch = Epoch,
+ nodes = Nodes,
+ target = _} = Stream0) ->
+ case maps:is_key(Node, Members0) of
+ true ->
+ Stream0;
+ false ->
+ Members1 = Members0#{Node => #member{role = {replica, Epoch},
+ node = Node,
+ target = stopped}},
+ Members = set_running_to_stopped(Members1),
+ Stream0#stream{members = Members,
+ nodes = lists:sort([Node | Nodes])}
+ end;
+update_stream0(#{system_time := _Ts} = _Meta,
+ {delete_replica, _StreamId, #{node := Node}},
+ #stream{members = Members0,
+ epoch = _Epoch,
+ nodes = Nodes,
+ target = _} = Stream0) ->
+ case maps:is_key(Node, Members0) of
+ true ->
+            %% TODO: check for duplicates
+ Members = maps:map(
+ fun (K, M) when K == Node ->
+ M#member{target = deleted};
+ (_, #member{target = running} = M) ->
+ M#member{target = stopped};
+ (_, M) ->
+ M
+ end, Members0),
+ Stream0#stream{members = Members,
+ nodes = lists:delete(Node, Nodes)};
+ false ->
+ Stream0
+ end;
+update_stream0(#{system_time := _Ts},
+ {member_started, _StreamId,
+ #{epoch := E,
+ index := Idx,
+ pid := Pid} = Args}, #stream{epoch = E,
+ members = Members} = Stream0) ->
+ Node = node(Pid),
+ case maps:get(Node, Members, undefined) of
+ #member{role = {_, E},
+ current = {starting, Idx},
+ state = _} = Member0 ->
+ %% this is what we expect, leader epoch should match overall
+ %% epoch
+ Member = Member0#member{state = {running, E, Pid},
+ current = undefined},
+ %% TODO: we need to tell the machine to monitor the leader
+ Stream0#stream{members =
+ Members#{Node => Member}};
+ Member ->
+            %% do we just ignore member_started events from unexpected
+            %% epochs?
+ rabbit_log:warning("~s: member started unexpected ~w ~w",
+ [?MODULE, Args, Member]),
+ Stream0
+ end;
+update_stream0(#{system_time := _Ts},
+ {member_deleted, _StreamId, #{node := Node}},
+ #stream{nodes = Nodes,
+ members = Members0} = Stream0) ->
+ case maps:take(Node, Members0) of
+ {_, Members} when map_size(Members) == 0 ->
+ undefined;
+ {#member{state = _}, Members} ->
+            %% the member was deleted; also remove its node from the
+            %% stream's node list
+ Stream0#stream{nodes = lists:delete(Node, Nodes),
+ members = Members};
+ _ ->
+            %% member not known, ignore the member_deleted event
+            %% (nothing to remove)
+ Stream0
+ end;
+update_stream0(#{system_time := _Ts},
+ {member_stopped, _StreamId,
+ #{node := Node,
+ index := Idx,
+ epoch := StoppedEpoch,
+ tail := Tail}}, #stream{epoch = Epoch,
+ target = Target,
+ nodes = Nodes,
+ members = Members0} = Stream0) ->
+ IsLeaderInCurrent = case find_leader(Members0) of
+ {#member{role = {writer, Epoch},
+ target = running,
+ state = {ready, Epoch}}, _} ->
+ true;
+ {#member{role = {writer, Epoch},
+ target = running,
+ state = {running, Epoch, _}}, _} ->
+ true;
+ _ ->
+ false
+ end,
+ case maps:get(Node, Members0) of
+ #member{role = {replica, Epoch},
+ current = {stopping, Idx},
+ state = _} = Member0
+ when IsLeaderInCurrent ->
+ %% A leader has already been selected so skip straight to ready state
+ Member = update_target(Member0#member{state = {ready, Epoch},
+ current = undefined}, Target),
+ Members1 = Members0#{Node => Member},
+ Stream0#stream{members = Members1};
+ #member{role = {_, Epoch},
+ current = {stopping, Idx},
+ state = _} = Member0 ->
+ %% this is what we expect, member epoch should match overall
+ %% epoch
+ Member = case StoppedEpoch of
+ Epoch ->
+ update_target(Member0#member{state = {stopped, StoppedEpoch, Tail},
+ current = undefined}, Target);
+ _ ->
+ %% if stopped epoch is from another epoch
+ %% leave target as is to retry stop in current term
+ Member0#member{state = {stopped, StoppedEpoch, Tail},
+ current = undefined}
+ end,
+
+ Members1 = Members0#{Node => Member},
+
+ Offsets = [{N, T}
+ || #member{state = {stopped, E, T},
+ target = running,
+ node = N} <- maps:values(Members1),
+ E == Epoch],
+ case is_quorum(length(Nodes), length(Offsets)) of
+ true ->
+ %% select leader
+ NewWriterNode = select_leader(Offsets),
+ NextEpoch = Epoch + 1,
+ Members = maps:map(
+ fun (N, #member{state = {stopped, E, _}} = M)
+ when E == Epoch ->
+ case NewWriterNode of
+ N ->
+ %% new leader
+ M#member{role = {writer, NextEpoch},
+ state = {ready, NextEpoch}};
+ _ ->
+ M#member{role = {replica, NextEpoch},
+ state = {ready, NextEpoch}}
+ end;
+ (_N, #member{target = deleted} = M) ->
+ M;
+ (_N, M) ->
+ M#member{role = {replica, NextEpoch}}
+ end, Members1),
+ Stream0#stream{epoch = NextEpoch,
+ members = Members};
+ false ->
+ Stream0#stream{members = Members1}
+ end;
+ _Member ->
+ Stream0
+ end;
+update_stream0(#{system_time := _Ts},
+ {mnesia_updated, _StreamId, #{epoch := E}},
+ Stream0) ->
+ %% reset mnesia state
+ case Stream0 of
+ undefined ->
+ undefined;
_ ->
- [Node | Nodes]
+ Stream0#stream{mnesia = {updated, E}}
+ end;
+update_stream0(#{system_time := _Ts},
+ {retention_updated, _StreamId, #{node := Node}},
+ #stream{members = Members0,
+ conf = Conf} = Stream0) ->
+ Members = maps:update_with(Node, fun (M) ->
+ M#member{current = undefined,
+ conf = Conf}
+ end, Members0),
+ Stream0#stream{members = Members};
+update_stream0(#{system_time := _Ts},
+ {action_failed, _StreamId, #{action := updating_mnesia}},
+ #stream{mnesia = {_, E}} = Stream0) ->
+ Stream0#stream{mnesia = {updated, E}};
+update_stream0(#{system_time := _Ts},
+ {action_failed, _StreamId,
+ #{node := Node,
+ index := Idx,
+ action := Action,
+ epoch := _Epoch}}, #stream{members = Members0} = Stream0) ->
+ Members1 = maps:update_with(Node,
+ fun (#member{current = {C, I}} = M)
+ when C == Action andalso I == Idx ->
+ M#member{current = undefined};
+ (M) ->
+ M
+ end, Members0),
+ case Members0 of
+ #{Node := #member{role = {writer, E},
+ state = {ready, E},
+ current = {starting, Idx}}}
+ when Action == starting ->
+            %% the leader failed to start so we need a new election;
+            %% stop all members
+ Members = set_running_to_stopped(Members1),
+ Stream0#stream{members = Members};
+ _ ->
+ Stream0#stream{members = Members1}
+ end;
+update_stream0(#{system_time := _Ts},
+ {down, Pid, Reason},
+ #stream{epoch = E,
+ members = Members0} = Stream0) ->
+ DownNode = node(Pid),
+ case Members0 of
+ #{DownNode := #member{role = {writer, E},
+ state = {running, E, Pid}} = Member} ->
+ Members1 = Members0#{DownNode => Member#member{state = {down, E}}},
+ %% leader is down, set all members that should be running to stopped
+ Members = maps:map(fun (_, #member{target = running} = M) ->
+ M#member{target = stopped};
+ (_, M) ->
+ M
+ end, Members1),
+ Stream0#stream{members = Members};
+ #{DownNode := #member{role = {replica, _},
+ state = {running, _, Pid}} = Member}
+ when Reason == noconnection ->
+ %% mark process as disconnected such that we don't set it to down until
+ %% the node is back and we can re-monitor
+ Members = Members0#{DownNode =>
+ Member#member{state = {disconnected, E, Pid}}},
+ Stream0#stream{members = Members};
+ #{DownNode := #member{role = {replica, _},
+ state = {S, _, Pid}} = Member}
+ when S == running orelse S == disconnected ->
+ %% the down process is currently running with the correct Pid
+ %% set state to down
+ Members = Members0#{DownNode => Member#member{state = {down, E}}},
+ Stream0#stream{members = Members};
+ _ ->
+ Stream0
+ end;
+update_stream0(#{system_time := _Ts},
+ {down, _Pid, _Reason}, undefined) ->
+ undefined;
+update_stream0(#{system_time := _Ts} = _Meta,
+ {nodeup, Node},
+ #stream{members = Members0} = Stream0) ->
+ Members = maps:map(
+ fun (_, #member{node = N,
+ current = {sleeping, nodeup}} = M)
+ when N == Node ->
+ M#member{current = undefined};
+ (_, M) ->
+ M
+ end, Members0),
+ Stream0#stream{members = Members};
+update_stream0(#{system_time := _Ts},
+ {policy_changed, _StreamId, #{queue := Q}},
+ #stream{conf = Conf0,
+ members = _Members0} = Stream0) ->
+ Conf = rabbit_stream_queue:update_stream_conf(Q, Conf0),
+ Stream0#stream{conf = Conf};
+update_stream0(_Meta, _Cmd, undefined) ->
+ undefined.
+
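+%% When a leader is running, notify every registered listener that has not
+%% yet been told about the current leader pid by emitting a send_msg effect
+%% carrying a stream_leader_change queue event.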
+eval_listeners(#stream{listeners = Listeners0,
+ queue_ref = QRef,
+ members = Members} = Stream, Effects0) ->
+ case find_leader(Members) of
+ {#member{state = {running, _, LeaderPid}}, _} ->
+ %% a leader is running, check all listeners to see if any of them
+ %% has not been notified of the current leader pid
+ {Listeners, Effects} =
+ maps:fold(
+ fun(_, P, Acc) when P == LeaderPid ->
+ Acc;
+ (LPid, _, {L, Acc}) ->
+ {L#{LPid => LeaderPid},
+ [{send_msg, LPid,
+ {queue_event, QRef,
+ {stream_leader_change, LeaderPid}},
+ cast} | Acc]}
+ end, {Listeners0, Effects0}, Listeners0),
+ {Stream#stream{listeners = Listeners}, Effects};
+ _ ->
+ {Stream, Effects0}
end.
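+
+%% Issue update_retention effects for every running member whose cached
+%% config does not yet reflect the stream's current retention settings;
+%% those members are marked as updating until the action completes or fails.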
-delete_replica_pid(Node, ReplicaPids) ->
- lists:partition(fun(P) -> node(P) =/= Node end, ReplicaPids).
-
-apply_leader_locator_strategy(#{leader_locator_strategy := <<"client-local">>} = Conf, _) ->
- Conf;
-apply_leader_locator_strategy(#{leader_node := Leader,
- replica_nodes := Replicas0,
- leader_locator_strategy := <<"random">>,
- name := StreamId} = Conf, _) ->
- Replicas = [Leader | Replicas0],
- ClusterSize = length(Replicas),
- Hash = erlang:phash2(StreamId),
- Pos = (Hash rem ClusterSize) + 1,
- NewLeader = lists:nth(Pos, Replicas),
- NewReplicas = lists:delete(NewLeader, Replicas),
- Conf#{leader_node => NewLeader,
- replica_nodes => NewReplicas};
-apply_leader_locator_strategy(#{leader_node := Leader,
- replica_nodes := Replicas0,
- leader_locator_strategy := <<"least-leaders">>} = Conf,
- Streams) ->
- Replicas = [Leader | Replicas0],
- Counters0 = maps:from_list([{R, 0} || R <- Replicas]),
- Counters = maps:to_list(maps:fold(fun(_Key, #{conf := #{leader_node := L}}, Acc) ->
- maps:update_with(L, fun(V) -> V + 1 end, 0, Acc)
- end, Counters0, Streams)),
- Ordered = lists:sort(fun({_, V1}, {_, V2}) ->
- V1 =< V2
- end, Counters),
- %% We could have potentially introduced nodes that are not in the list of replicas if
- %% initial cluster size is smaller than the cluster size. Let's select the first one
- %% that is on the list of replicas
- NewLeader = select_first_matching_node(Ordered, Replicas),
- NewReplicas = lists:delete(NewLeader, Replicas),
- Conf#{leader_node => NewLeader,
- replica_nodes => NewReplicas}.
-
-select_first_matching_node([{N, _} | Rest], Replicas) ->
- case lists:member(N, Replicas) of
- true -> N;
- false -> select_first_matching_node(Rest, Replicas)
+eval_retention(#{index := Idx} = Meta,
+ #stream{conf = #{retention := Ret} = Conf,
+ id = StreamId,
+ epoch = Epoch,
+ members = Members} = Stream, Effects0) ->
+ NeedUpdate = maps:filter(
+ fun (_, #member{state = {running, _, _},
+ current = undefined,
+ conf = C}) ->
+ Ret =/= maps:get(retention, C, undefined);
+ (_, _) ->
+ false
+ end, Members),
+ Args = Meta#{epoch => Epoch},
+ Effs = [{aux, {update_retention, StreamId,
+ Args#{pid => Pid,
+ node => node(Pid),
+ retention => Ret}, Conf}}
+ || #member{state = {running, _, Pid}} <- maps:values(NeedUpdate)],
+ Updated = maps:map(fun (_, M) -> M#member{current = {updating, Idx}} end,
+ NeedUpdate),
+ {Stream#stream{members = maps:merge(Members, Updated)}, Effs ++ Effects0}.
+
+
+%% This function should be idempotent: it should modify the state such that
+%% it won't issue duplicate actions when called again.
+evaluate_stream(#{index := Idx} = Meta,
+ #stream{id = StreamId,
+ reply_to = From,
+ epoch = Epoch,
+ mnesia = {MnesiaTag, MnesiaEpoch},
+ members = Members0} = Stream0, Effs0) ->
+ case find_leader(Members0) of
+ {#member{state = LState,
+ node = LeaderNode,
+ target = deleted,
+ current = undefined} = Writer0, Replicas}
+ when LState =/= deleted ->
+ Action = {aux, {delete_member, StreamId, LeaderNode,
+ make_writer_conf(Writer0, Stream0)}},
+ Writer = Writer0#member{current = {deleting, Idx}},
+ Effs = [Action | Effs0],
+ Stream = Stream0#stream{reply_to = undefined},
+ eval_replicas(Meta, Writer, Replicas, Stream, Effs);
+ {#member{state = {down, Epoch},
+ target = stopped,
+ node = LeaderNode,
+ current = undefined} = Writer0, Replicas} ->
+ %% leader is down - all replicas need to be stopped
+ %% and tail infos retrieved
+ %% some replicas may already be in stopping or ready state
+ Args = Meta#{epoch => Epoch,
+ node => LeaderNode},
+ Conf = make_writer_conf(Writer0, Stream0),
+ Action = {aux, {stop, StreamId, Args, Conf}},
+ Writer = Writer0#member{current = {stopping, Idx}},
+ eval_replicas(Meta, Writer, Replicas, Stream0, [Action | Effs0]);
+ {#member{state = {ready, Epoch}, %% writer ready in current epoch
+ target = running,
+ node = LeaderNode,
+ current = undefined} = Writer0, _Replicas} ->
+            %% the ready check has completed and a new leader has been chosen;
+            %% time to start the writer. If the leader start fails, revert all
+            %% members to the down state and re-run
+ WConf = make_writer_conf(Writer0, Stream0),
+ Members = Members0#{LeaderNode =>
+ Writer0#member{current = {starting, Idx},
+ conf = WConf}},
+ Args = Meta#{node => LeaderNode, epoch => Epoch},
+ Actions = [{aux, {start_writer, StreamId, Args, WConf}} | Effs0],
+ {Stream0#stream{members = Members}, Actions};
+ {#member{state = {running, Epoch, LeaderPid},
+ target = running} = Writer, Replicas} ->
+ Effs1 = case From of
+ undefined ->
+ Effs0;
+ _ ->
+ %% we need a reply effect here
+ wrap_reply(From, {ok, LeaderPid}) ++ Effs0
+ end,
+ Stream1 = Stream0#stream{reply_to = undefined},
+ case MnesiaTag == updated andalso MnesiaEpoch < Epoch of
+ true ->
+ Args = Meta#{node => node(LeaderPid), epoch => Epoch},
+ Effs = [{aux,
+ {update_mnesia, StreamId, Args,
+ make_replica_conf(LeaderPid, Stream1)}} | Effs1],
+ Stream = Stream1#stream{mnesia = {updating, MnesiaEpoch}},
+ eval_replicas(Meta, Writer, Replicas, Stream, Effs);
+ false ->
+ eval_replicas(Meta, Writer, Replicas, Stream1, Effs1)
+ end;
+ {#member{state = S,
+ target = stopped,
+ node = LeaderNode,
+ current = undefined} = Writer0, Replicas}
+ when element(1, S) =/= stopped ->
+ %% leader should be stopped
+ Args = Meta#{node => LeaderNode, epoch => Epoch},
+ Action = {aux, {stop, StreamId, Args,
+ make_writer_conf(Writer0, Stream0)}},
+ Writer = Writer0#member{current = {stopping, Idx}},
+ eval_replicas(Meta, Writer, Replicas, Stream0, [Action | Effs0]);
+ {Writer, Replicas} ->
+ eval_replicas(Meta, Writer, Replicas, Stream0, Effs0)
+ end.
+
+eval_replicas(Meta, undefined, Replicas, Stream, Actions0) ->
+ {Members, Actions} = lists:foldl(
+ fun (R, Acc) ->
+ eval_replica(Meta, R, deleted, Stream, Acc)
+ end, {#{}, Actions0},
+ Replicas),
+ {Stream#stream{members = Members}, Actions};
+eval_replicas(Meta, #member{state = LeaderState,
+ node = WriterNode} = Writer, Replicas,
+ Stream, Actions0) ->
+ {Members, Actions} = lists:foldl(
+ fun (R, Acc) ->
+ eval_replica(Meta, R, LeaderState,
+ Stream, Acc)
+ end, {#{WriterNode => Writer}, Actions0},
+ Replicas),
+ {Stream#stream{members = Members}, Actions}.
+
+eval_replica(#{index := Idx} = Meta,
+ #member{state = _State,
+ target = stopped,
+ node = Node,
+ current = undefined} = Replica,
+ _LeaderState,
+ #stream{id = StreamId,
+ epoch = Epoch,
+ conf = Conf0},
+ {Replicas, Actions}) ->
+    %% if we're not running anything, aren't stopped and weren't caught by
+    %% a previous clause, we probably should stop
+ Args = Meta#{node => Node, epoch => Epoch},
+
+ Conf = Conf0#{epoch => Epoch},
+ {Replicas#{Node => Replica#member{current = {stopping, Idx}}},
+ [{aux, {stop, StreamId, Args, Conf}} | Actions]};
+eval_replica(#{index := Idx} = Meta, #member{state = _,
+ node = Node,
+ current = Current,
+ target = deleted} = Replica,
+ _LeaderState, #stream{id = StreamId,
+ epoch = Epoch,
+ conf = Conf}, {Replicas, Actions0}) ->
+
+ case Current of
+ undefined ->
+ Args = Meta#{epoch => Epoch, node => Node},
+ Actions = [{aux, {delete_member, StreamId, Args, Conf}} |
+ Actions0],
+ {Replicas#{Node => Replica#member{current = {deleting, Idx}}},
+ Actions};
+ _ ->
+ {Replicas#{Node => Replica}, Actions0}
+ end;
+eval_replica(#{index := Idx} = Meta, #member{state = {State, Epoch},
+ node = Node,
+ target = running,
+ current = undefined} = Replica,
+ {running, Epoch, Pid},
+ #stream{id = StreamId,
+ epoch = Epoch} = Stream,
+ {Replicas, Actions})
+ when State == ready; State == down ->
+ %% replica is down or ready and the leader is running
+ %% time to start it
+ Conf = make_replica_conf(Pid, Stream),
+ Args = Meta#{node => Node, epoch => Epoch},
+ {Replicas#{Node => Replica#member{current = {starting, Idx},
+ conf = Conf}},
+ [{aux, {start_replica, StreamId, Args, Conf}} | Actions]};
+eval_replica(_Meta, #member{state = {running, Epoch, _},
+ target = running,
+ node = Node} = Replica,
+ {running, Epoch, _}, _Stream, {Replicas, Actions}) ->
+ {Replicas#{Node => Replica}, Actions};
+eval_replica(_Meta, #member{state = {stopped, _E, _},
+ node = Node,
+ current = undefined} = Replica,
+ _LeaderState, _Stream,
+ {Replicas, Actions}) ->
+    %% if stopped we should just wait for a quorum to reach stopped;
+    %% update_stream will then move members to the ready state
+ {Replicas#{Node => Replica}, Actions};
+eval_replica(_Meta, #member{state = {ready, E},
+ target = running,
+ node = Node,
+ current = undefined} = Replica,
+ {ready, E}, _Stream,
+ {Replicas, Actions}) ->
+    %% if we're ready and so is the leader we just wait as well
+ {Replicas#{Node => Replica}, Actions};
+eval_replica(_Meta, #member{node = Node} = Replica, _LeaderState, _Stream,
+ {Replicas, Actions}) ->
+ {Replicas#{Node => Replica}, Actions}.
+
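+%% For every stream not present in the Exclude map, send ourselves an
+%% action_failed command for each member that still has an action in flight
+%% (current =/= undefined), so that the stream state is re-evaluated.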
+fail_active_actions(Streams, Exclude) ->
+ maps:map(
+ fun (_, #stream{id = Id, members = Members})
+ when not is_map_key(Id, Exclude) ->
+ _ = maps:map(fun(_, M) ->
+ fail_action(Id, M)
+ end, Members)
+ end, Streams),
+
+ ok.
+
+fail_action(_StreamId, #member{current = undefined}) ->
+ ok;
+fail_action(StreamId, #member{role = {_, E},
+ current = {Action, Idx},
+ node = Node}) ->
+ rabbit_log:debug("~s: failing active action for ~s node ~w Action ~w",
+ [?MODULE, StreamId, Node, Action]),
+ %% if we have an action send failure message
+ send_self_command({action_failed, StreamId,
+ #{action => Action,
+ index => Idx,
+ node => Node,
+ epoch => E}}).
+
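+%% Make sure every running member pid (and its node) is monitored; the
+%% monitors map records {StreamId, member} for each monitored pid.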
+ensure_monitors(#stream{id = StreamId,
+ members = Members}, Monitors, Effects) ->
+ maps:fold(
+ fun
+ (_, #member{state = {running, _, Pid}}, {M, E})
+ when not is_map_key(Pid, M) ->
+ {M#{Pid => {StreamId, member}},
+ [{monitor, process, Pid},
+ %% ensure we're always monitoring the node as well
+ {monitor, node, node(Pid)} | E]};
+ (_, _, Acc) ->
+ Acc
+ end, {Monitors, Effects}, Members).
+
+make_replica_conf(LeaderPid,
+ #stream{epoch = Epoch,
+ nodes = Nodes,
+ conf = Conf}) ->
+ LeaderNode = node(LeaderPid),
+ Conf#{leader_node => LeaderNode,
+ nodes => Nodes,
+ leader_pid => LeaderPid,
+ replica_nodes => lists:delete(LeaderNode, Nodes),
+ epoch => Epoch}.
+
+make_writer_conf(#member{node = Node}, #stream{epoch = Epoch,
+ nodes = Nodes,
+ conf = Conf}) ->
+ Conf#{leader_node => Node,
+ nodes => Nodes,
+ replica_nodes => lists:delete(Node, Nodes),
+ epoch => Epoch}.
+
+
+find_leader(Members) ->
+ case lists:partition(
+ fun (#member{target = deleted}) ->
+ false;
+ (#member{role = {Role, _}}) ->
+ Role == writer
+ end, maps:values(Members)) of
+ {[Writer], Replicas} ->
+ {Writer, Replicas};
+ {[], Replicas} ->
+ {undefined, Replicas}
end.
+
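+%% Choose the member with the most up-to-date tail as the new writer:
+%% the highest epoch wins, ties are broken by the highest offset, and
+%% members with an empty log sort last.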
+select_leader(Offsets) ->
+ [{Node, _} | _] = lists:sort(fun({_, {Ao, E}}, {_, {Bo, E}}) ->
+ Ao >= Bo;
+ ({_, {_, Ae}}, {_, {_, Be}}) ->
+ Ae >= Be;
+ ({_, empty}, _) ->
+ false;
+ (_, {_, empty}) ->
+ true
+ end, Offsets),
+ Node.
+
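+%% Back off before retrying actions whose failure suggests the target node
+%% or process is unavailable (10s after a nodedown exit, 5s for noproc and
+%% error returns) to avoid hot retry loops.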
+maybe_sleep({{nodedown, _}, _}) ->
+ timer:sleep(10000);
+maybe_sleep({noproc, _}) ->
+ timer:sleep(5000);
+maybe_sleep({error, nodedown}) ->
+ timer:sleep(5000);
+maybe_sleep({error, _}) ->
+ timer:sleep(5000);
+maybe_sleep(_) ->
+ ok.
+
+set_running_to_stopped(Members) ->
+ maps:map(fun (_, #member{target = running} = M) ->
+ M#member{target = stopped};
+ (_, M) ->
+ M
+ end, Members).
+
+update_target(#member{target = deleted} = Member, _) ->
+ %% A deleted member can never transition to another state
+ Member;
+update_target(Member, Target) ->
+ Member#member{target = Target}.
diff --git a/deps/rabbit/src/rabbit_stream_coordinator.hrl b/deps/rabbit/src/rabbit_stream_coordinator.hrl
new file mode 100644
index 0000000000..cbf6b69b1e
--- /dev/null
+++ b/deps/rabbit/src/rabbit_stream_coordinator.hrl
@@ -0,0 +1,63 @@
+
+-define(STREAM_COORDINATOR_STARTUP, {stream_coordinator_startup, self()}).
+-define(TICK_TIMEOUT, 30000).
+-define(RESTART_TIMEOUT, 1000).
+-define(PHASE_RETRY_TIMEOUT, 10000).
+-define(CMD_TIMEOUT, 30000).
+-define(RA_SYSTEM, coordination).
+
+-type stream_id() :: string().
+-type stream() :: #{conf := osiris:config(),
+ atom() => term()}.
+-type monitor_role() :: member | listener.
+-type queue_ref() :: rabbit_types:r(queue).
+-type tail() :: {osiris:offset(), osiris:epoch()} | empty.
+
+-record(member,
+ {state = {down, 0} :: {down, osiris:epoch()}
+ | {stopped, osiris:epoch(), tail()}
+ | {ready, osiris:epoch()}
+ %% when a replica disconnects
+ | {running | disconnected, osiris:epoch(), pid()}
+ | deleted,
+ role :: {writer | replica, osiris:epoch()},
+ node :: node(),
+ %% the currently running action, if any
+ current :: undefined |
+ {updating |
+ stopping |
+ starting |
+ deleting, ra:index()} |
+ {sleeping, nodeup | non_neg_integer()},
+ %% record the "current" config used
+ conf :: undefined | osiris:config(),
+ target = running :: running | stopped | deleted}).
+
+%% member lifecycle
+%% down -> stopped(tail) -> running | disconnected -> deleted
+%%
+%% split the handling of incoming events (down, success | fail of operations)
+%% from the actioning of the current state (e.g. member A is down but its
+%% target is `running` - start an action to bring member A back to running)
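+%%
+%% Illustration (with a hypothetical node name): a replica that has just been
+%% elected ready in epoch 2 would be represented as
+%%   #member{role = {replica, 2}, node = 'rabbit@node2',
+%%           state = {ready, 2}, current = undefined, target = running}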
+
+-type from() :: {pid(), reference()}.
+
+-record(stream, {id :: stream_id(),
+ epoch = 0 :: osiris:epoch(),
+ queue_ref :: queue_ref(),
+ conf :: osiris:config(),
+ nodes :: [node()],
+ members = #{} :: #{node() := #member{}},
+ listeners = #{} :: #{pid() := LeaderPid :: pid()},
+ reply_to :: undefined | from(),
+ mnesia = {updated, 0} :: {updated | updating, osiris:epoch()},
+ target = running :: running | deleted
+ }).
+
+-record(?MODULE, {streams = #{} :: #{stream_id() => #stream{}},
+ monitors = #{} :: #{pid() => {stream_id(), monitor_role()}},
+ listeners = #{} :: #{stream_id() =>
+ #{pid() := queue_ref()}},
+ %% future extensibility
+ reserved_1,
+ reserved_2}).
diff --git a/deps/rabbit/src/rabbit_stream_queue.erl b/deps/rabbit/src/rabbit_stream_queue.erl
index d130c7225b..dee8740a29 100644
--- a/deps/rabbit/src/rabbit_stream_queue.erl
+++ b/deps/rabbit/src/rabbit_stream_queue.erl
@@ -1,16 +1,8 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at https://www.mozilla.org/MPL/
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% Copyright (c) 2012-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_stream_queue).
@@ -37,23 +29,33 @@
update/2,
state_info/1,
stat/1,
- capabilities/0]).
+ capabilities/0,
+ notify_decorators/1]).
-export([set_retention_policy/3]).
-export([add_replica/3,
delete_replica/3]).
-export([format_osiris_event/2]).
--export([update_stream_conf/1]).
+-export([update_stream_conf/2]).
+-export([readers/1]).
+
+-export([parse_offset_arg/1]).
--include("rabbit.hrl").
+-export([status/2,
+ tracking_status/2]).
+
+-include_lib("rabbit_common/include/rabbit.hrl").
-include("amqqueue.hrl").
-define(INFO_KEYS, [name, durable, auto_delete, arguments, leader, members, online, state,
messages, messages_ready, messages_unacknowledged, committed_offset,
- policy, operator_policy, effective_policy_definition, type]).
+ policy, operator_policy, effective_policy_definition, type, memory]).
-type appender_seq() :: non_neg_integer().
+-type msg_id() :: non_neg_integer().
+-type msg() :: term(). %% TODO: refine
+
-record(stream, {name :: rabbit_types:r('queue'),
credit :: integer(),
max :: non_neg_integer(),
@@ -61,13 +63,16 @@
listening_offset = 0 :: non_neg_integer(),
log :: undefined | osiris_log:state()}).
--record(stream_client, {name :: term(),
+-record(stream_client, {stream_id :: string(),
+ name :: term(),
leader :: pid(),
+ local_pid :: undefined | pid(),
next_seq = 1 :: non_neg_integer(),
- correlation = #{} :: #{appender_seq() => term()},
+ correlation = #{} :: #{appender_seq() => {msg_id(), msg()}},
soft_limit :: non_neg_integer(),
slow = false :: boolean(),
- readers = #{} :: #{term() => #stream{}}
+ readers = #{} :: #{term() => #stream{}},
+ writer_id :: binary()
}).
-import(rabbit_queue_type_util, [args_policy_lookup/3]).
@@ -88,42 +93,45 @@ declare(Q0, Node) when ?amqqueue_is_stream(Q0) ->
fun rabbit_queue_type_util:check_non_durable/1],
Q0) of
ok ->
- start_cluster(Q0, Node);
+ create_stream(Q0, Node);
Err ->
Err
end.
-start_cluster(Q0, Node) ->
+create_stream(Q0, Node) ->
Arguments = amqqueue:get_arguments(Q0),
QName = amqqueue:get_name(Q0),
Opts = amqqueue:get_options(Q0),
ActingUser = maps:get(user, Opts, ?UNKNOWN_USER),
Conf0 = make_stream_conf(Node, Q0),
- case rabbit_stream_coordinator:start_cluster(
- amqqueue:set_type_state(Q0, Conf0)) of
- {ok, {error, already_started}, _} ->
- {protocol_error, precondition_failed, "safe queue name already in use '~s'",
- [Node]};
- {ok, {created, Q}, _} ->
- rabbit_event:notify(queue_created,
- [{name, QName},
- {durable, true},
- {auto_delete, false},
- {arguments, Arguments},
- {user_who_performed_action,
- ActingUser}]),
- {new, Q};
- {ok, {error, Error}, _} ->
- _ = rabbit_amqqueue:internal_delete(QName, ActingUser),
- {protocol_error, internal_error, "Cannot declare a queue '~s' on node '~s': ~255p",
- [rabbit_misc:rs(QName), node(), Error]};
- {ok, {existing, Q}, _} ->
+ Conf = apply_leader_locator_strategy(Conf0),
+ #{leader_node := LeaderNode} = Conf,
+ Q1 = amqqueue:set_type_state(Q0, Conf),
+ case rabbit_amqqueue:internal_declare(Q1, false) of
+ {created, Q} ->
+ case rabbit_stream_coordinator:new_stream(Q, LeaderNode) of
+ {ok, {ok, LeaderPid}, _} ->
+ %% update record with leader pid
+ set_leader_pid(LeaderPid, amqqueue:get_name(Q)),
+ rabbit_event:notify(queue_created,
+ [{name, QName},
+ {durable, true},
+ {auto_delete, false},
+ {arguments, Arguments},
+ {type, amqqueue:get_type(Q1)},
+ {user_who_performed_action,
+ ActingUser}]),
+ {new, Q};
+ Error ->
+
+ _ = rabbit_amqqueue:internal_delete(QName, ActingUser),
+ {protocol_error, internal_error, "Cannot declare a queue '~s' on node '~s': ~255p",
+ [rabbit_misc:rs(QName), node(), Error]}
+ end;
+ {existing, Q} ->
{existing, Q};
- {error, coordinator_unavailable} ->
- _ = rabbit_amqqueue:internal_delete(QName, ActingUser),
- {protocol_error, internal_error,
- "Cannot declare a queue '~s' on node '~s': coordinator unavailable",
- [rabbit_misc:rs(QName), node()]}
+ {absent, Q, Reason} ->
+ {absent, Q, Reason}
end.
-spec delete(amqqueue:amqqueue(), boolean(),
@@ -131,9 +139,13 @@ start_cluster(Q0, Node) ->
rabbit_types:ok(non_neg_integer()) |
rabbit_types:error(in_use | not_empty).
delete(Q, _IfUnused, _IfEmpty, ActingUser) ->
- Name = maps:get(name, amqqueue:get_type_state(Q)),
- {ok, Reply, _} = rabbit_stream_coordinator:delete_cluster(Name, ActingUser),
- Reply.
+ case rabbit_stream_coordinator:delete_stream(Q, ActingUser) of
+ {ok, Reply} ->
+ Reply;
+ Error ->
+ {protocol_error, internal_error, "Cannot delete queue '~s' on node '~s': ~255p ",
+ [rabbit_misc:rs(amqqueue:get_name(Q)), node(), Error]}
+ end.
-spec purge(amqqueue:amqqueue()) ->
{ok, non_neg_integer()} | {error, term()}.
@@ -142,12 +154,22 @@ purge(_) ->
-spec policy_changed(amqqueue:amqqueue()) -> 'ok'.
policy_changed(Q) ->
- Name = maps:get(name, amqqueue:get_type_state(Q)),
- _ = rabbit_stream_coordinator:policy_changed(Name),
+ _ = rabbit_stream_coordinator:policy_changed(Q),
ok.
-stat(_) ->
- {ok, 0, 0}.
+stat(Q) ->
+ Conf = amqqueue:get_type_state(Q),
+ case maps:get(leader_node, Conf) of
+ Node when Node =/= node() ->
+ case rpc:call(Node, ?MODULE, info, [Q, [messages]]) of
+ {badrpc, _} ->
+ {ok, 0, 0};
+ [{messages, Messages}] ->
+ {ok, Messages, 0}
+ end;
+ _ ->
+ {ok, i(messages, Q), 0}
+ end.
consume(Q, #{prefetch_count := 0}, _)
when ?amqqueue_is_stream(Q) ->
@@ -173,61 +195,99 @@ consume(Q, Spec, QState0) when ?amqqueue_is_stream(Q) ->
args := Args,
ok_msg := OkMsg} = Spec,
QName = amqqueue:get_name(Q),
- Offset = case rabbit_misc:table_lookup(Args, <<"x-stream-offset">>) of
- undefined ->
- next;
- {_, <<"first">>} ->
- first;
- {_, <<"last">>} ->
- last;
- {_, <<"next">>} ->
- next;
- {_, V} ->
- V
- end,
- rabbit_core_metrics:consumer_created(ChPid, ConsumerTag, ExclusiveConsume,
- not NoAck, QName,
- ConsumerPrefetchCount, false,
- up, Args),
- %% FIXME: reply needs to be sent before the stream begins sending
- %% really it should be sent by the stream queue process like classic queues
- %% do
- maybe_send_reply(ChPid, OkMsg),
- QState = begin_stream(QState0, Q, ConsumerTag, Offset,
- ConsumerPrefetchCount),
- {ok, QState, []};
+ case parse_offset_arg(rabbit_misc:table_lookup(Args, <<"x-stream-offset">>)) of
+ {error, _} = Err ->
+ Err;
+ {ok, OffsetSpec} ->
+ rabbit_core_metrics:consumer_created(ChPid, ConsumerTag, ExclusiveConsume,
+ not NoAck, QName,
+ ConsumerPrefetchCount, false,
+ up, Args),
+ %% FIXME: reply needs to be sent before the stream begins sending
+ %% really it should be sent by the stream queue process like classic queues
+ %% do
+ maybe_send_reply(ChPid, OkMsg),
+ begin_stream(QState0, Q, ConsumerTag, OffsetSpec, ConsumerPrefetchCount)
+ end;
Err ->
Err
end.
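+%% Maps the x-stream-offset consumer argument onto an osiris offset spec:
+%% e.g. {longstr, <<"first">>} yields {ok, first}, an AMQP timestamp (in
+%% seconds) is converted to milliseconds, and other long strings such as
+%% <<"10m">> are interpreted as a relative age via
+%% rabbit_amqqueue:check_max_age/1.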
-get_local_pid(#{leader_pid := Pid}) when node(Pid) == node() ->
- Pid;
-get_local_pid(#{replica_pids := ReplicaPids}) ->
- [Local | _] = lists:filter(fun(Pid) ->
- node(Pid) == node()
- end, ReplicaPids),
- Local.
+-spec parse_offset_arg(undefined |
+ osiris:offset() |
+ {longstr, binary()} |
+ {timestamp, non_neg_integer()} |
+ {term(), non_neg_integer()}) ->
+ {ok, osiris:offset_spec()} | {error, term()}.
+parse_offset_arg(undefined) ->
+ {ok, next};
+parse_offset_arg({_, <<"first">>}) ->
+ {ok, first};
+parse_offset_arg({_, <<"last">>}) ->
+ {ok, last};
+parse_offset_arg({_, <<"next">>}) ->
+ {ok, next};
+parse_offset_arg({timestamp, V}) ->
+ {ok, {timestamp, V * 1000}};
+parse_offset_arg({longstr, V}) ->
+ case rabbit_amqqueue:check_max_age(V) of
+ {error, _} = Err ->
+ Err;
+ Ms ->
+ {ok, {timestamp, erlang:system_time(millisecond) - Ms}}
+ end;
+parse_offset_arg({_, V}) ->
+ {ok, V};
+parse_offset_arg(V) ->
+ {error, {invalid_offset_arg, V}}.
+
+get_local_pid(#stream_client{local_pid = Pid} = State)
+ when is_pid(Pid) ->
+ {Pid, State};
+get_local_pid(#stream_client{leader = Pid} = State)
+ when is_pid(Pid) andalso node(Pid) == node() ->
+ {Pid, State#stream_client{local_pid = Pid}};
+get_local_pid(#stream_client{stream_id = StreamId,
+ local_pid = undefined} = State) ->
+ %% query local coordinator to get pid
+ case rabbit_stream_coordinator:local_pid(StreamId) of
+ {ok, Pid} ->
+ {Pid, State#stream_client{local_pid = Pid}};
+ {error, not_found} ->
+ {undefined, State}
+ end.
-begin_stream(#stream_client{readers = Readers0} = State,
+begin_stream(#stream_client{name = QName, readers = Readers0} = State0,
Q, Tag, Offset, Max) ->
- LocalPid = get_local_pid(amqqueue:get_type_state(Q)),
- {ok, Seg0} = osiris:init_reader(LocalPid, Offset),
- NextOffset = osiris_log:next_offset(Seg0) - 1,
- osiris:register_offset_listener(LocalPid, NextOffset),
- %% TODO: avoid double calls to the same process
- StartOffset = case Offset of
- first -> NextOffset;
- last -> NextOffset;
- next -> NextOffset;
- _ -> Offset
- end,
- Str0 = #stream{name = amqqueue:get_name(Q),
- credit = Max,
- start_offset = StartOffset,
- listening_offset = NextOffset,
- log = Seg0,
- max = Max},
- State#stream_client{readers = Readers0#{Tag => Str0}}.
+ {LocalPid, State} = get_local_pid(State0),
+ case LocalPid of
+ undefined ->
+ {error, no_local_stream_replica_available};
+ _ ->
+ CounterSpec = {{?MODULE, QName, self()}, []},
+ {ok, Seg0} = osiris:init_reader(LocalPid, Offset, CounterSpec),
+ NextOffset = osiris_log:next_offset(Seg0) - 1,
+ osiris:register_offset_listener(LocalPid, NextOffset),
+ %% TODO: avoid double calls to the same process
+ StartOffset = case Offset of
+ first -> NextOffset;
+ last -> NextOffset;
+ next -> NextOffset;
+ {timestamp, _} -> NextOffset;
+ _ -> Offset
+ end,
+ Str0 = #stream{name = amqqueue:get_name(Q),
+ credit = Max,
+ start_offset = StartOffset,
+ listening_offset = NextOffset,
+ log = Seg0,
+ max = Max},
+ Actions = [],
+ %% TODO: we need to monitor the local pid in case the stream is
+ %% restarted
+ {ok, State#stream_client{local_pid = LocalPid,
+ readers = Readers0#{Tag => Str0}}, Actions}
+ end.
cancel(_Q, ConsumerTag, OkMsg, ActingUser, #stream_client{readers = Readers0,
name = QName} = State) ->
@@ -242,11 +302,11 @@ cancel(_Q, ConsumerTag, OkMsg, ActingUser, #stream_client{readers = Readers0,
credit(CTag, Credit, Drain, #stream_client{readers = Readers0,
name = Name,
- leader = Leader} = State) ->
+ local_pid = LocalPid} = State) ->
{Readers1, Msgs} = case Readers0 of
#{CTag := #stream{credit = Credit0} = Str0} ->
Str1 = Str0#stream{credit = Credit0 + Credit},
- {Str, Msgs0} = stream_entries(Name, Leader, Str1),
+ {Str, Msgs0} = stream_entries(Name, LocalPid, Str1),
{Readers0#{CTag => Str}, Msgs0};
_ ->
{Readers0, []}
@@ -282,16 +342,17 @@ deliver(QSs, #delivery{confirm = Confirm} = Delivery) ->
deliver(_Confirm, #delivery{message = Msg, msg_seq_no = MsgId},
#stream_client{name = Name,
leader = LeaderPid,
+ writer_id = WriterId,
next_seq = Seq,
correlation = Correlation0,
soft_limit = SftLmt,
slow = Slow0} = State) ->
- ok = osiris:write(LeaderPid, Seq, msg_to_iodata(Msg)),
+ ok = osiris:write(LeaderPid, WriterId, Seq, msg_to_iodata(Msg)),
Correlation = case MsgId of
undefined ->
Correlation0;
_ when is_number(MsgId) ->
- Correlation0#{Seq => MsgId}
+ Correlation0#{Seq => {MsgId, Msg}}
end,
Slow = case maps:size(Correlation) >= SftLmt of
true when not Slow0 ->
@@ -303,16 +364,22 @@ deliver(_Confirm, #delivery{message = Msg, msg_seq_no = MsgId},
State#stream_client{next_seq = Seq + 1,
correlation = Correlation,
slow = Slow}.
+
-spec dequeue(_, _, _, client()) -> no_return().
dequeue(_, _, _, #stream_client{name = Name}) ->
{protocol_error, not_implemented, "basic.get not supported by stream queues ~s",
[rabbit_misc:rs(Name)]}.
-handle_event({osiris_written, From, Corrs}, State = #stream_client{correlation = Correlation0,
- soft_limit = SftLmt,
- slow = Slow0,
- name = Name}) ->
- MsgIds = maps:values(maps:with(Corrs, Correlation0)),
+handle_event({osiris_written, From, _WriterId, Corrs},
+ State = #stream_client{correlation = Correlation0,
+ soft_limit = SftLmt,
+ slow = Slow0,
+ name = Name}) ->
+ MsgIds = lists:sort(maps:fold(
+ fun (_Seq, {I, _M}, Acc) ->
+ [I | Acc]
+ end, [], maps:with(Corrs, Correlation0))),
+
Correlation = maps:without(Corrs, Correlation0),
Slow = case maps:size(Correlation) < SftLmt of
true when Slow0 ->
@@ -323,24 +390,23 @@ handle_event({osiris_written, From, Corrs}, State = #stream_client{correlation =
end,
{ok, State#stream_client{correlation = Correlation,
slow = Slow}, [{settled, From, MsgIds}]};
-handle_event({osiris_offset, _From, _Offs}, State = #stream_client{leader = Leader,
- readers = Readers0,
- name = Name}) ->
+handle_event({osiris_offset, _From, _Offs},
+ State = #stream_client{local_pid = LocalPid,
+ readers = Readers0,
+ name = Name}) ->
%% offset isn't actually needed as we use the atomic to read the
%% current committed
{Readers, TagMsgs} = maps:fold(
fun (Tag, Str0, {Acc, TM}) ->
- {Str, Msgs} = stream_entries(Name, Leader, Str0),
- %% HACK for now, better to just return but
- %% tricky with acks credits
- %% that also evaluate the stream
- % gen_server:cast(self(), {stream_delivery, Tag, Msgs}),
- {Acc#{Tag => Str}, [{Tag, Leader, Msgs} | TM]}
+ {Str, Msgs} = stream_entries(Name, LocalPid, Str0),
+ {Acc#{Tag => Str}, [{Tag, LocalPid, Msgs} | TM]}
end, {#{}, []}, Readers0),
Ack = true,
Deliveries = [{deliver, Tag, Ack, OffsetMsg}
|| {Tag, _LeaderPid, OffsetMsg} <- TagMsgs],
- {ok, State#stream_client{readers = Readers}, Deliveries}.
+ {ok, State#stream_client{readers = Readers}, Deliveries};
+handle_event({stream_leader_change, Pid}, State) ->
+ {ok, update_leader_pid(Pid, State), []}.
is_recoverable(Q) ->
Node = node(),
@@ -356,13 +422,13 @@ recover(_VHost, Queues) ->
end, {[], []}, Queues).
settle(complete, CTag, MsgIds, #stream_client{readers = Readers0,
- name = Name,
- leader = Leader} = State) ->
+ local_pid = LocalPid,
+ name = Name} = State) ->
Credit = length(MsgIds),
{Readers, Msgs} = case Readers0 of
#{CTag := #stream{credit = Credit0} = Str0} ->
Str1 = Str0#stream{credit = Credit0 + Credit},
- {Str, Msgs0} = stream_entries(Name, Leader, Str1),
+ {Str, Msgs0} = stream_entries(Name, LocalPid, Str1),
{Readers0#{CTag => Str}, Msgs0};
_ ->
{Readers0, []}
@@ -385,15 +451,41 @@ i(durable, Q) when ?is_amqqueue(Q) -> amqqueue:is_durable(Q);
i(auto_delete, Q) when ?is_amqqueue(Q) -> amqqueue:is_auto_delete(Q);
i(arguments, Q) when ?is_amqqueue(Q) -> amqqueue:get_arguments(Q);
i(leader, Q) when ?is_amqqueue(Q) ->
- #{leader_node := Leader} = amqqueue:get_type_state(Q),
- Leader;
+ case amqqueue:get_pid(Q) of
+ none ->
+ undefined;
+ Pid -> node(Pid)
+ end;
i(members, Q) when ?is_amqqueue(Q) ->
- #{replica_nodes := Nodes} = amqqueue:get_type_state(Q),
+ #{nodes := Nodes} = amqqueue:get_type_state(Q),
Nodes;
+i(memory, Q) when ?is_amqqueue(Q) ->
+ %% Return writer memory. It's not the full memory usage (we also have replica readers on
+ %% the writer node), but might be good enough
+ case amqqueue:get_pid(Q) of
+ none ->
+ 0;
+ Pid ->
+ try
+ {memory, M} = process_info(Pid, memory),
+ M
+ catch
+ error:badarg ->
+ 0
+ end
+ end;
i(online, Q) ->
- #{replica_pids := ReplicaPids,
- leader_pid := LeaderPid} = amqqueue:get_type_state(Q),
- [node(P) || P <- ReplicaPids ++ [LeaderPid], rabbit_misc:is_process_alive(P)];
+ #{name := StreamId} = amqqueue:get_type_state(Q),
+ case rabbit_stream_coordinator:members(StreamId) of
+ {ok, Members} ->
+ maps:fold(fun(_, {undefined, _}, Acc) ->
+ Acc;
+ (Key, _, Acc) ->
+ [Key | Acc]
+ end, [], Members);
+ {error, not_found} ->
+ []
+ end;
i(state, Q) when ?is_amqqueue(Q) ->
%% TODO the coordinator should answer this, I guess??
running;
@@ -423,9 +515,11 @@ i(messages_unacknowledged, Q) when ?is_amqqueue(Q) ->
end;
i(committed_offset, Q) ->
%% TODO should it be on a metrics table?
+ %% The queue could be removed between the list() and this call
+ %% to retrieve the overview. Let's default to '' if it's gone.
Data = osiris_counters:overview(),
maps:get(committed_offset,
- maps:get({osiris_writer, amqqueue:get_name(Q)}, Data));
+ maps:get({osiris_writer, amqqueue:get_name(Q)}, Data, #{}), '');
i(policy, Q) ->
case rabbit_policy:name(Q) of
none -> '';
@@ -441,17 +535,141 @@ i(effective_policy_definition, Q) ->
undefined -> [];
Def -> Def
end;
+i(readers, Q) ->
+ QName = amqqueue:get_name(Q),
+ Conf = amqqueue:get_type_state(Q),
+ Nodes = [maps:get(leader_node, Conf) | maps:get(replica_nodes, Conf)],
+ {Data, _} = rpc:multicall(Nodes, ?MODULE, readers, [QName]),
+ lists:flatten(Data);
i(type, _) ->
stream;
i(_, _) ->
''.
+-spec status(rabbit_types:vhost(), Name :: rabbit_misc:resource_name()) ->
+ [[{binary(), term()}]] | {error, term()}.
+status(Vhost, QueueName) ->
+ %% Handle not found queues
+ QName = #resource{virtual_host = Vhost, name = QueueName, kind = queue},
+ case rabbit_amqqueue:lookup(QName) of
+ {ok, Q} when ?amqqueue_is_classic(Q) ->
+ {error, classic_queue_not_supported};
+ {ok, Q} when ?amqqueue_is_quorum(Q) ->
+ {error, quorum_queue_not_supported};
+ {ok, Q} when ?amqqueue_is_stream(Q) ->
+ _Pid = amqqueue:get_pid(Q),
+ % Max = maps:get(max_segment_size_bytes, Conf, osiris_log:get_default_max_segment_size_bytes()),
+ [begin
+ [{role, Role},
+ get_key(node, C),
+ get_key(offset, C),
+ get_key(committed_offset, C),
+ get_key(first_offset, C),
+ get_key(readers, C),
+ get_key(segments, C)]
+ end || {Role, C} <- get_counters(Q)];
+ {error, not_found} = E ->
+ E
+ end.
+
+get_key(Key, Cnt) ->
+ {Key, maps:get(Key, Cnt, undefined)}.
+
+get_counters(Q) ->
+ #{name := StreamId} = amqqueue:get_type_state(Q),
+ {ok, Members} = rabbit_stream_coordinator:members(StreamId),
+ QName = amqqueue:get_name(Q),
+ Counters = [begin
+ Data = safe_get_overview(Node),
+ get_counter(QName, Data, #{node => Node})
+ end || Node <- maps:keys(Members)],
+ lists:filter(fun (X) -> X =/= undefined end, Counters).
+
+safe_get_overview(Node) ->
+ case rpc:call(Node, osiris_counters, overview, []) of
+ {badrpc, _} ->
+ #{node => Node};
+ Data ->
+ Data
+ end.
+
+get_counter(QName, Data, Add) ->
+ case maps:get({osiris_writer, QName}, Data, undefined) of
+ undefined ->
+ case maps:get({osiris_replica, QName}, Data, undefined) of
+ undefined ->
+ {undefined, Add};
+ M ->
+ {replica, maps:merge(Add, M)}
+ end;
+ M ->
+ {writer, maps:merge(Add, M)}
+ end.
+
+
+-spec tracking_status(rabbit_types:vhost(), Name :: rabbit_misc:resource_name()) ->
+ [[{atom(), term()}]] | {error, term()}.
+tracking_status(Vhost, QueueName) ->
+ %% Handle not found queues
+ QName = #resource{virtual_host = Vhost, name = QueueName, kind = queue},
+ case rabbit_amqqueue:lookup(QName) of
+ {ok, Q} when ?amqqueue_is_classic(Q) ->
+ {error, classic_queue_not_supported};
+ {ok, Q} when ?amqqueue_is_quorum(Q) ->
+ {error, quorum_queue_not_supported};
+ {ok, Q} when ?amqqueue_is_stream(Q) ->
+ Leader = amqqueue:get_pid(Q),
+ Map = osiris:read_tracking(Leader),
+ maps:fold(fun(Type, Trackings, Acc) ->
+ %% Convert for example 'offsets' to 'offset' or 'sequences' to 'sequence'
+ T = list_to_atom(lists:droplast(atom_to_list(Type))),
+ maps:fold(fun(TrkId, TrkData, Acc0) ->
+ [[{type, T},
+ {reference, TrkId},
+ {value, TrkData}] | Acc0]
+ end, [], Trackings) ++ Acc
+ end, [], Map);
+ {error, not_found} = E->
+ E
+ end.
+
+readers(QName) ->
+ try
+ Data = osiris_counters:overview(),
+ Readers = case maps:get({osiris_writer, QName}, Data, not_found) of
+ not_found ->
+ maps:get(readers, maps:get({osiris_replica, QName}, Data, #{}), 0);
+ Map ->
+ maps:get(readers, Map, 0)
+ end,
+ {node(), Readers}
+ catch
+ _:_ ->
+ {node(), 0}
+ end.
+
init(Q) when ?is_amqqueue(Q) ->
Leader = amqqueue:get_pid(Q),
- {ok, SoftLimit} = application:get_env(rabbit, stream_messages_soft_limit),
- #stream_client{name = amqqueue:get_name(Q),
- leader = Leader,
- soft_limit = SoftLimit}.
+ QName = amqqueue:get_name(Q),
+ #{name := StreamId} = amqqueue:get_type_state(Q),
+ %% tell us about leader changes so we can fail over
+ case rabbit_stream_coordinator:register_listener(Q) of
+ {ok, ok, _} ->
+ Prefix = erlang:pid_to_list(self()) ++ "_",
+ WriterId = rabbit_guid:binary(rabbit_guid:gen(), Prefix),
+ {ok, SoftLimit} = application:get_env(rabbit, stream_messages_soft_limit),
+ {ok, #stream_client{stream_id = StreamId,
+ name = amqqueue:get_name(Q),
+ leader = Leader,
+ writer_id = WriterId,
+ soft_limit = SoftLimit}};
+ {ok, stream_not_found, _} ->
+ {error, stream_not_found};
+ {error, coordinator_unavailable} = E ->
+ rabbit_log:warning("Failed to start stream client ~p: coordinator unavailable",
+ [rabbit_misc:rs(QName)]),
+ E
+ end.
close(#stream_client{readers = Readers}) ->
_ = maps:map(fun (_, #stream{log = Log}) ->
@@ -459,8 +677,16 @@ close(#stream_client{readers = Readers}) ->
end, Readers),
ok.
-update(_, State) ->
- State.
+update(Q, State)
+ when ?is_amqqueue(Q) ->
+ Pid = amqqueue:get_pid(Q),
+ update_leader_pid(Pid, State).
+
+update_leader_pid(Pid, #stream_client{leader = Pid} = State) ->
+ State;
+update_leader_pid(Pid, #stream_client{} = State) ->
+ rabbit_log:debug("stream client: new leader detected ~w", [Pid]),
+ resend_all(State#stream_client{leader = Pid}).
state_info(_) ->
#{}.
@@ -496,9 +722,7 @@ add_replica(VHost, Name, Node) ->
false ->
{error, node_not_running};
true ->
- #{name := StreamId} = amqqueue:get_type_state(Q),
- {ok, Reply, _} = rabbit_stream_coordinator:add_replica(StreamId, Node),
- Reply
+ rabbit_stream_coordinator:add_replica(Q, Node)
end;
E ->
E
@@ -526,30 +750,34 @@ delete_replica(VHost, Name, Node) ->
make_stream_conf(Node, Q) ->
QName = amqqueue:get_name(Q),
- Name = queue_name(QName),
- %% MaxLength = args_policy_lookup(<<"max-length">>, fun min/2, Q),
- MaxBytes = args_policy_lookup(<<"max-length-bytes">>, fun min/2, Q),
- MaxAge = max_age(args_policy_lookup(<<"max-age">>, fun max_age/2, Q)),
- MaxSegmentSize = args_policy_lookup(<<"max-segment-size">>, fun min/2, Q),
+ Name = stream_name(QName),
+ %% MaxLength = args_policy_lookup(<<"max-length">>, policy_precedence/2, Q),
+ MaxBytes = args_policy_lookup(<<"max-length-bytes">>, fun policy_precedence/2, Q),
+ MaxAge = max_age(args_policy_lookup(<<"max-age">>, fun policy_precedence/2, Q)),
+ MaxSegmentSizeBytes = args_policy_lookup(<<"stream-max-segment-size-bytes">>, fun policy_precedence/2, Q),
LeaderLocator = queue_leader_locator(args_policy_lookup(<<"queue-leader-locator">>,
- fun res_arg/2, Q)),
- InitialClusterSize = initial_cluster_size(args_policy_lookup(<<"initial-cluster-size">>,
- fun res_arg/2, Q)),
- Replicas0 = rabbit_mnesia:cluster_nodes(all) -- [Node],
+ fun policy_precedence/2, Q)),
+ InitialClusterSize = initial_cluster_size(
+ args_policy_lookup(<<"initial-cluster-size">>,
+ fun policy_precedence/2, Q)),
+ Replicas0 = rabbit_nodes:all() -- [Node],
+ %% TODO: try to avoid nodes that are not connected
Replicas = select_stream_nodes(InitialClusterSize - 1, Replicas0),
Formatter = {?MODULE, format_osiris_event, [QName]},
Retention = lists:filter(fun({_, R}) ->
R =/= undefined
end, [{max_bytes, MaxBytes},
{max_age, MaxAge}]),
- add_if_defined(max_segment_size, MaxSegmentSize, #{reference => QName,
- name => Name,
- retention => Retention,
- leader_locator_strategy => LeaderLocator,
- leader_node => Node,
- replica_nodes => Replicas,
- event_formatter => Formatter,
- epoch => 1}).
+ add_if_defined(max_segment_size_bytes, MaxSegmentSizeBytes,
+ #{reference => QName,
+ name => Name,
+ retention => Retention,
+ nodes => [Node | Replicas],
+ leader_locator_strategy => LeaderLocator,
+ leader_node => Node,
+ replica_nodes => Replicas,
+ event_formatter => Formatter,
+ epoch => 1}).
select_stream_nodes(Size, All) when length(All) =< Size ->
All;
@@ -568,20 +796,18 @@ select_stream_nodes(Size, Rest, Selected) ->
S = lists:nth(rand:uniform(length(Rest)), Rest),
select_stream_nodes(Size - 1, lists:delete(S, Rest), [S | Selected]).
-update_stream_conf(#{reference := QName} = Conf) ->
- case rabbit_amqqueue:lookup(QName) of
- {ok, Q} ->
- MaxBytes = args_policy_lookup(<<"max-length-bytes">>, fun min/2, Q),
- MaxAge = max_age(args_policy_lookup(<<"max-age">>, fun max_age/2, Q)),
- MaxSegmentSize = args_policy_lookup(<<"max-segment-size">>, fun min/2, Q),
- Retention = lists:filter(fun({_, R}) ->
- R =/= undefined
- end, [{max_bytes, MaxBytes},
- {max_age, MaxAge}]),
- add_if_defined(max_segment_size, MaxSegmentSize, Conf#{retention => Retention});
- _ ->
- Conf
- end.
+update_stream_conf(undefined, #{} = Conf) ->
+ Conf;
+update_stream_conf(Q, #{} = Conf) when ?is_amqqueue(Q) ->
+ MaxBytes = args_policy_lookup(<<"max-length-bytes">>, fun policy_precedence/2, Q),
+ MaxAge = max_age(args_policy_lookup(<<"max-age">>, fun policy_precedence/2, Q)),
+ MaxSegmentSizeBytes = args_policy_lookup(<<"stream-max-segment-size-bytes">>, fun policy_precedence/2, Q),
+ Retention = lists:filter(fun({_, R}) ->
+ R =/= undefined
+ end, [{max_bytes, MaxBytes},
+ {max_age, MaxAge}]),
+ add_if_defined(max_segment_size_bytes, MaxSegmentSizeBytes,
+ Conf#{retention => Retention}).
add_if_defined(_, undefined, Map) ->
Map;
@@ -598,32 +824,29 @@ max_age(Bin) when is_binary(Bin) ->
max_age(Age) ->
Age.
-max_age(Age1, Age2) ->
- min(rabbit_amqqueue:check_max_age(Age1), rabbit_amqqueue:check_max_age(Age2)).
-
queue_leader_locator(undefined) -> <<"client-local">>;
queue_leader_locator(Val) -> Val.
initial_cluster_size(undefined) ->
- length(rabbit_mnesia:cluster_nodes(running));
+ length(rabbit_nodes:all());
initial_cluster_size(Val) ->
Val.
-res_arg(PolVal, undefined) -> PolVal;
-res_arg(_, ArgVal) -> ArgVal.
-
-queue_name(#resource{virtual_host = VHost, name = Name}) ->
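+%% For stream settings the policy value always takes precedence over the
+%% queue argument.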
+policy_precedence(PolVal, _ArgVal) ->
+ PolVal.
+
+stream_name(#resource{virtual_host = VHost, name = Name}) ->
Timestamp = erlang:integer_to_binary(erlang:system_time()),
osiris_util:to_base64uri(erlang:binary_to_list(<<VHost/binary, "_", Name/binary, "_",
Timestamp/binary>>)).
recover(Q) ->
- rabbit_stream_coordinator:recover(),
{ok, Q}.
check_queue_exists_in_local_node(Q) ->
Conf = amqqueue:get_type_state(Q),
- AllNodes = [maps:get(leader_node, Conf) | maps:get(replica_nodes, Conf)],
+ AllNodes = [maps:get(leader_node, Conf) |
+ maps:get(replica_nodes, Conf)],
case lists:member(node(), AllNodes) of
true ->
ok;
@@ -636,10 +859,10 @@ check_queue_exists_in_local_node(Q) ->
maybe_send_reply(_ChPid, undefined) -> ok;
maybe_send_reply(ChPid, Msg) -> ok = rabbit_channel:send_command(ChPid, Msg).
-stream_entries(Name, Id, Str) ->
- stream_entries(Name, Id, Str, []).
+stream_entries(Name, LocalPid, Str) ->
+ stream_entries(Name, LocalPid, Str, []).
-stream_entries(Name, LeaderPid,
+stream_entries(Name, LocalPid,
#stream{name = QName,
credit = Credit,
start_offset = StartOffs,
@@ -651,18 +874,21 @@ stream_entries(Name, LeaderPid,
NextOffset = osiris_log:next_offset(Seg),
case NextOffset > LOffs of
true ->
- osiris:register_offset_listener(LeaderPid, NextOffset),
+ osiris:register_offset_listener(LocalPid, NextOffset),
{Str0#stream{log = Seg,
listening_offset = NextOffset}, MsgIn};
false ->
{Str0#stream{log = Seg}, MsgIn}
end;
+ {error, Err} ->
+ rabbit_log:debug("stream client: error reading chunk ~w", [Err]),
+ exit(Err);
{Records, Seg} ->
Msgs = [begin
Msg0 = binary_to_msg(QName, B),
Msg = rabbit_basic:add_header(<<"x-stream-offset">>,
long, O, Msg0),
- {Name, LeaderPid, O, false, Msg}
+ {Name, LocalPid, O, false, Msg}
end || {O, B} <- Records,
O >= StartOffs],
@@ -677,10 +903,10 @@ stream_entries(Name, LeaderPid,
false ->
%% if there are fewer Msgs than Entries0 it means there were non-events
%% in the log and we should recurse and try again
- stream_entries(Name, LeaderPid, Str, MsgIn ++ Msgs)
+ stream_entries(Name, LocalPid, Str, MsgIn ++ Msgs)
end
end;
-stream_entries(_Name, _Id, Str, Msgs) ->
+stream_entries(_Name, _LocalPid, Str, Msgs) ->
{Str, Msgs}.
binary_to_msg(#resource{virtual_host = VHost,
@@ -720,12 +946,101 @@ msg_to_iodata(#basic_message{exchange_name = #resource{name = Exchange},
rabbit_msg_record:to_iodata(R).
capabilities() ->
- #{policies => [<<"max-length-bytes">>, <<"max-age">>, <<"max-segment-size">>,
- <<"queue-leader-locator">>, <<"initial-cluster-size">>],
+ #{unsupported_policies =>
+ [ %% Classic policies
+ <<"expires">>, <<"message-ttl">>, <<"dead-letter-exchange">>,
+ <<"dead-letter-routing-key">>, <<"max-length">>,
+ <<"max-in-memory-length">>, <<"max-in-memory-bytes">>,
+ <<"max-priority">>, <<"overflow">>, <<"queue-mode">>,
+ <<"single-active-consumer">>, <<"delivery-limit">>,
+ <<"ha-mode">>, <<"ha-params">>, <<"ha-sync-mode">>,
+ <<"ha-promote-on-shutdown">>, <<"ha-promote-on-failure">>,
+ <<"queue-master-locator">>],
queue_arguments => [<<"x-dead-letter-exchange">>, <<"x-dead-letter-routing-key">>,
<<"x-max-length">>, <<"x-max-length-bytes">>,
<<"x-single-active-consumer">>, <<"x-queue-type">>,
- <<"x-max-age">>, <<"x-max-segment-size">>,
+ <<"x-max-age">>, <<"x-stream-max-segment-size-bytes">>,
<<"x-initial-cluster-size">>, <<"x-queue-leader-locator">>],
consumer_arguments => [<<"x-stream-offset">>],
server_named => false}.
+
+notify_decorators(Q) when ?is_amqqueue(Q) ->
+ %% Not supported
+ ok.
+
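+%% After a leader change, re-publish every message still awaiting
+%% confirmation (tracked in the correlation map) to the new leader in
+%% ascending sequence order.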
+resend_all(#stream_client{leader = LeaderPid,
+ writer_id = WriterId,
+ correlation = Corrs} = State) ->
+ Msgs = lists:sort(maps:values(Corrs)),
+ case Msgs of
+ [] -> ok;
+ [{Seq, _} | _] ->
+ rabbit_log:debug("stream client: resending from seq ~w num ~b",
+ [Seq, maps:size(Corrs)])
+ end,
+ [begin
+ ok = osiris:write(LeaderPid, WriterId, Seq, msg_to_iodata(Msg))
+ end || {Seq, Msg} <- Msgs],
+ State.
+
+set_leader_pid(Pid, QName) ->
+ Fun = fun (Q) ->
+ amqqueue:set_pid(Q, Pid)
+ end,
+ case rabbit_misc:execute_mnesia_transaction(
+ fun() ->
+ rabbit_amqqueue:update(QName, Fun)
+ end) of
+ not_found ->
+ %% This can happen during recovery
+ [Q] = mnesia:dirty_read(rabbit_durable_queue, QName),
+ rabbit_amqqueue:ensure_rabbit_queue_record_is_initialized(Fun(Q));
+ _ ->
+ ok
+ end.
+
+apply_leader_locator_strategy(#{leader_locator_strategy := <<"client-local">>} = Conf) ->
+ Conf;
+apply_leader_locator_strategy(#{leader_node := Leader,
+ replica_nodes := Replicas0,
+ leader_locator_strategy := <<"random">>,
+ name := StreamId} = Conf) ->
+ Replicas = [Leader | Replicas0],
+ ClusterSize = length(Replicas),
+ Hash = erlang:phash2(StreamId),
+ Pos = (Hash rem ClusterSize) + 1,
+ NewLeader = lists:nth(Pos, Replicas),
+ NewReplicas = lists:delete(NewLeader, Replicas),
+ Conf#{leader_node => NewLeader,
+ replica_nodes => NewReplicas};
+apply_leader_locator_strategy(#{leader_node := Leader,
+ replica_nodes := Replicas0,
+ leader_locator_strategy := <<"least-leaders">>} = Conf) ->
+ Replicas = [Leader | Replicas0],
+ Counters0 = maps:from_list([{R, 0} || R <- Replicas]),
+ Counters = maps:to_list(
+ lists:foldl(fun(Q, Acc) ->
+ P = amqqueue:get_pid(Q),
+ case amqqueue:get_type(Q) of
+ ?MODULE when is_pid(P) ->
+ maps:update_with(node(P), fun(V) -> V + 1 end, 1, Acc);
+ _ ->
+ Acc
+ end
+ end, Counters0, rabbit_amqqueue:list())),
+ Ordered = lists:sort(fun({_, V1}, {_, V2}) ->
+ V1 =< V2
+ end, Counters),
+ %% We may have introduced nodes that are not in the list of replicas if the
+ %% initial cluster size is smaller than the cluster size. Select the first
+ %% node that is in the list of replicas.
+ NewLeader = select_first_matching_node(Ordered, Replicas),
+ NewReplicas = lists:delete(NewLeader, Replicas),
+ Conf#{leader_node => NewLeader,
+ replica_nodes => NewReplicas}.
+
+select_first_matching_node([{N, _} | Rest], Replicas) ->
+ case lists:member(N, Replicas) of
+ true -> N;
+ false -> select_first_matching_node(Rest, Replicas)
+ end.
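A minimal sketch of how the two non-default leader locator strategies added above behave; pick_random_leader/2 and the node names are illustrative only, not part of the patch:

    %% <<"random">>: the leader is derived deterministically from the stream id,
    %% mirroring apply_leader_locator_strategy/1 above.
    pick_random_leader(StreamId, [_ | _] = Replicas) ->
        Pos = (erlang:phash2(StreamId) rem length(Replicas)) + 1,
        lists:nth(Pos, Replicas).

    %% <<"least-leaders">>: with leader counts sorted ascending, e.g.
    %% [{'rabbit@b', 0}, {'rabbit@c', 1}, {'rabbit@a', 2}], and replicas
    %% ['rabbit@a', 'rabbit@c'], select_first_matching_node/2 skips 'rabbit@b'
    %% (not a replica) and returns 'rabbit@c'.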
diff --git a/deps/rabbit/src/rabbit_sup.erl b/deps/rabbit/src/rabbit_sup.erl
index 06643b155d..b38127b73b 100644
--- a/deps/rabbit/src/rabbit_sup.erl
+++ b/deps/rabbit/src/rabbit_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_sup).
@@ -18,7 +18,7 @@
-export([init/1]).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-define(SERVER, ?MODULE).
diff --git a/deps/rabbit/src/rabbit_sysmon_handler.erl b/deps/rabbit/src/rabbit_sysmon_handler.erl
index 8f7298ed6e..8db36f0f68 100644
--- a/deps/rabbit/src/rabbit_sysmon_handler.erl
+++ b/deps/rabbit/src/rabbit_sysmon_handler.erl
@@ -1,5 +1,5 @@
%% Copyright (c) 2011 Basho Technologies, Inc. All Rights Reserved.
-%% Copyright (c) 2018-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2018-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
diff --git a/deps/rabbit/src/rabbit_sysmon_minder.erl b/deps/rabbit/src/rabbit_sysmon_minder.erl
index a0402e5ebe..c42bd0b905 100644
--- a/deps/rabbit/src/rabbit_sysmon_minder.erl
+++ b/deps/rabbit/src/rabbit_sysmon_minder.erl
@@ -1,6 +1,6 @@
%% -------------------------------------------------------------------
%% Copyright (c) 2007-2010 Basho Technologies, Inc. All Rights Reserved.
-%% Copyright (c) 2018-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2018-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
diff --git a/deps/rabbit/src/rabbit_table.erl b/deps/rabbit/src/rabbit_table.erl
index 77534763d0..291adc740b 100644
--- a/deps/rabbit/src/rabbit_table.erl
+++ b/deps/rabbit/src/rabbit_table.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_table).
@@ -101,16 +101,16 @@ wait(TableNames, Retry) ->
wait(TableNames, Timeout, Retries) ->
%% We might be in ctl here for offline ops, in which case we can't
%% get_env() for the rabbit app.
- rabbit_log:info("Waiting for Mnesia tables for ~p ms, ~p retries left~n",
+ rabbit_log:info("Waiting for Mnesia tables for ~p ms, ~p retries left",
[Timeout, Retries - 1]),
Result = case mnesia:wait_for_tables(TableNames, Timeout) of
ok ->
ok;
{timeout, BadTabs} ->
- AllNodes = rabbit_mnesia:cluster_nodes(all),
+ AllNodes = rabbit_nodes:all(),
{error, {timeout_waiting_for_tables, AllNodes, BadTabs}};
{error, Reason} ->
- AllNodes = rabbit_mnesia:cluster_nodes(all),
+ AllNodes = rabbit_nodes:all(),
{error, {failed_waiting_for_tables, AllNodes, Reason}}
end,
case {Retries, Result} of
@@ -120,7 +120,7 @@ wait(TableNames, Timeout, Retries) ->
{1, {error, _} = Error} ->
throw(Error);
{_, {error, Error}} ->
- rabbit_log:warning("Error while waiting for Mnesia tables: ~p~n", [Error]),
+ rabbit_log:warning("Error while waiting for Mnesia tables: ~p", [Error]),
wait(TableNames, Timeout, Retries - 1)
end.
diff --git a/deps/rabbit/src/rabbit_trace.erl b/deps/rabbit/src/rabbit_trace.erl
index 74b892330e..02d5bd97eb 100644
--- a/deps/rabbit/src/rabbit_trace.erl
+++ b/deps/rabbit/src/rabbit_trace.erl
@@ -2,15 +2,15 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_trace).
-export([init/1, enabled/1, tap_in/6, tap_out/5, start/1, stop/1]).
--include("rabbit.hrl").
--include("rabbit_framing.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit_framing.hrl").
-define(TRACE_VHOSTS, trace_vhosts).
-define(XNAME, <<"amq.rabbitmq.trace">>).
@@ -74,22 +74,43 @@ tap_out({#resource{name = QName, virtual_host = VHost},
-spec start(rabbit_types:vhost()) -> 'ok'.
start(VHost) ->
- rabbit_log:info("Enabling tracing for vhost '~s'~n", [VHost]),
- update_config(fun (VHosts) -> [VHost | VHosts -- [VHost]] end).
+ case lists:member(VHost, vhosts_with_tracing_enabled()) of
+ true ->
+ rabbit_log:info("Tracing is already enabled for vhost '~s'", [VHost]),
+ ok;
+ false ->
+ rabbit_log:info("Enabling tracing for vhost '~s'", [VHost]),
+ update_config(fun (VHosts) ->
+ lists:usort([VHost | VHosts])
+ end)
+ end.
-spec stop(rabbit_types:vhost()) -> 'ok'.
stop(VHost) ->
- rabbit_log:info("Disabling tracing for vhost '~s'~n", [VHost]),
- update_config(fun (VHosts) -> VHosts -- [VHost] end).
+ case lists:member(VHost, vhosts_with_tracing_enabled()) of
+ true ->
+ rabbit_log:info("Disabling tracing for vhost '~s'", [VHost]),
+ update_config(fun (VHosts) -> VHosts -- [VHost] end);
+ false ->
+ rabbit_log:info("Tracing is already disabled for vhost '~s'", [VHost]),
+ ok
+ end.
update_config(Fun) ->
- {ok, VHosts0} = application:get_env(rabbit, ?TRACE_VHOSTS),
+ VHosts0 = vhosts_with_tracing_enabled(),
VHosts = Fun(VHosts0),
application:set_env(rabbit, ?TRACE_VHOSTS, VHosts),
- rabbit_channel:refresh_config_local(),
+ rabbit_log:debug("Will now refresh channel state after virtual host tracing changes"),
+
+ {Time, _} = timer:tc(fun rabbit_channel:refresh_config_local/0),
+ rabbit_log:debug("Refreshed channel state in ~fs", [Time/1000000]),
+
ok.
+vhosts_with_tracing_enabled() ->
+ application:get_env(rabbit, ?TRACE_VHOSTS, []).
+
%%----------------------------------------------------------------------------
trace(#exchange{name = Name}, #basic_message{exchange_name = Name},
diff --git a/deps/rabbit/src/rabbit_tracking.erl b/deps/rabbit/src/rabbit_tracking.erl
index a124d20226..88a825b373 100644
--- a/deps/rabbit/src/rabbit_tracking.erl
+++ b/deps/rabbit/src/rabbit_tracking.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_tracking).
@@ -52,7 +52,7 @@ count_tracked_items(TableNameFun, CountRecPosition, Key, ContextMsg) ->
Acc + N
catch _:Err ->
rabbit_log:error(
- "Failed to fetch number of ~p ~p on node ~p:~n~p~n",
+ "Failed to fetch number of ~p ~p on node ~p:~n~p",
[ContextMsg, Key, Node, Err]),
Acc
end
diff --git a/deps/rabbit/src/rabbit_upgrade.erl b/deps/rabbit/src/rabbit_upgrade.erl
index b1b128fecc..03aa6854de 100644
--- a/deps/rabbit/src/rabbit_upgrade.erl
+++ b/deps/rabbit/src/rabbit_upgrade.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_upgrade).
@@ -11,7 +11,7 @@
maybe_migrate_queues_to_per_vhost_storage/0,
nodes_running/1, secondary_upgrade/1]).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-define(VERSION_FILENAME, "schema_version").
-define(LOCK_FILENAME, "schema_upgrade_lock").
@@ -91,9 +91,9 @@ ensure_backup_taken() ->
take_backup() ->
BackupDir = backup_dir(),
- info("upgrades: Backing up mnesia dir to ~p~n", [BackupDir]),
+ info("upgrades: Backing up mnesia dir to ~p", [BackupDir]),
case rabbit_mnesia:copy_db(BackupDir) of
- ok -> info("upgrades: Mnesia dir backed up to ~p~n",
+ ok -> info("upgrades: Mnesia dir backed up to ~p",
[BackupDir]);
{error, E} -> throw({could_not_back_up_mnesia_dir, E, BackupDir})
end.
@@ -106,12 +106,12 @@ ensure_backup_removed() ->
remove_backup() ->
ok = rabbit_file:recursive_delete([backup_dir()]),
- info("upgrades: Mnesia backup removed~n", []).
+ info("upgrades: Mnesia backup removed", []).
-spec maybe_upgrade_mnesia() -> 'ok'.
maybe_upgrade_mnesia() ->
- AllNodes = rabbit_mnesia:cluster_nodes(all),
+ AllNodes = rabbit_nodes:all(),
ok = rabbit_mnesia_rename:maybe_finish(AllNodes),
%% Mnesia upgrade is the first upgrade scope,
%% so we should create a backup here if there are any upgrades
@@ -216,7 +216,7 @@ primary_upgrade(Upgrades, Nodes) ->
rabbit_table:force_load(),
case Others of
[] -> ok;
- _ -> info("mnesia upgrades: Breaking cluster~n", []),
+ _ -> info("mnesia upgrades: Breaking cluster", []),
[{atomic, ok} = mnesia:del_table_copy(schema, Node)
|| Node <- Others]
end
@@ -280,16 +280,16 @@ maybe_migrate_queues_to_per_vhost_storage() ->
apply_upgrades(Scope, Upgrades, Fun) ->
ok = rabbit_file:lock_file(lock_filename()),
- info("~s upgrades: ~w to apply~n", [Scope, length(Upgrades)]),
+ info("~s upgrades: ~w to apply", [Scope, length(Upgrades)]),
rabbit_misc:ensure_ok(mnesia:start(), cannot_start_mnesia),
Fun(),
[apply_upgrade(Scope, Upgrade) || Upgrade <- Upgrades],
- info("~s upgrades: All upgrades applied successfully~n", [Scope]),
+ info("~s upgrades: All upgrades applied successfully", [Scope]),
ok = rabbit_version:record_desired_for_scope(Scope),
ok = file:delete(lock_filename()).
apply_upgrade(Scope, {M, F}) ->
- info("~s upgrades: Applying ~w:~w~n", [Scope, M, F]),
+ info("~s upgrades: Applying ~w:~w", [Scope, M, F]),
ok = apply(M, F, []).
%% -------------------------------------------------------------------
diff --git a/deps/rabbit/src/rabbit_upgrade_functions.erl b/deps/rabbit/src/rabbit_upgrade_functions.erl
index 59417c72bb..dd856c42f0 100644
--- a/deps/rabbit/src/rabbit_upgrade_functions.erl
+++ b/deps/rabbit/src/rabbit_upgrade_functions.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_upgrade_functions).
@@ -426,9 +426,9 @@ cluster_name_tx() ->
case Tl of
[] -> ok;
_ -> {VHost, _, _} = K,
- error_logger:warning_msg(
+ logger:warning(
"Multiple local-nodenames found, picking '~s' "
- "from '~s' for cluster name~n", [Name, VHost])
+ "from '~s' for cluster name", [Name, VHost])
end
end,
[mnesia:delete(T, K, write) || K <- Ks],
diff --git a/deps/rabbit/src/rabbit_upgrade_preparation.erl b/deps/rabbit/src/rabbit_upgrade_preparation.erl
index fc1de24610..b120ce3679 100644
--- a/deps/rabbit/src/rabbit_upgrade_preparation.erl
+++ b/deps/rabbit/src/rabbit_upgrade_preparation.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_upgrade_preparation).
diff --git a/deps/rabbit/src/rabbit_variable_queue.erl b/deps/rabbit/src/rabbit_variable_queue.erl
index cf6fa4a189..33fb5fefb4 100644
--- a/deps/rabbit/src/rabbit_variable_queue.erl
+++ b/deps/rabbit/src/rabbit_variable_queue.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_variable_queue).
@@ -484,7 +484,7 @@ stop(VHost) ->
ok = rabbit_queue_index:stop(VHost).
start_msg_store(VHost, Refs, StartFunState) when is_list(Refs); Refs == undefined ->
- rabbit_log:info("Starting message stores for vhost '~s'~n", [VHost]),
+ rabbit_log:info("Starting message stores for vhost '~s'", [VHost]),
do_start_msg_store(VHost, ?TRANSIENT_MSG_STORE, undefined, ?EMPTY_START_FUN_STATE),
do_start_msg_store(VHost, ?PERSISTENT_MSG_STORE, Refs, StartFunState),
ok.
@@ -492,13 +492,13 @@ start_msg_store(VHost, Refs, StartFunState) when is_list(Refs); Refs == undefine
do_start_msg_store(VHost, Type, Refs, StartFunState) ->
case rabbit_vhost_msg_store:start(VHost, Type, Refs, StartFunState) of
{ok, _} ->
- rabbit_log:info("Started message store of type ~s for vhost '~s'~n", [abbreviated_type(Type), VHost]);
+ rabbit_log:info("Started message store of type ~s for vhost '~s'", [abbreviated_type(Type), VHost]);
{error, {no_such_vhost, VHost}} = Err ->
- rabbit_log:error("Failed to start message store of type ~s for vhost '~s': the vhost no longer exists!~n",
+ rabbit_log:error("Failed to start message store of type ~s for vhost '~s': the vhost no longer exists!",
[Type, VHost]),
exit(Err);
{error, Error} ->
- rabbit_log:error("Failed to start message store of type ~s for vhost '~s': ~p~n",
+ rabbit_log:error("Failed to start message store of type ~s for vhost '~s': ~p",
[Type, VHost, Error]),
exit({error, Error})
end.
@@ -1110,7 +1110,7 @@ a(State = #vqstate { q1 = Q1, q2 = Q2, delta = Delta, q3 = Q3, q4 = Q4,
%% disk). See push_alphas_to_betas/2.
true = E2 or not ED,
%% if delta has messages then q3 cannot be empty. This is enforced
- %% by paging, where min([?SEGMENT_ENTRY_COUNT, len(q3)]) messages
+ %% by paging, where min([segment_entry_count(), len(q3)]) messages
%% are always kept on RAM.
true = ED or not E3,
%% if the queue length is 0, then q3 and q4 must be empty.
@@ -2302,7 +2302,7 @@ beta_limit(Q) ->
empty -> undefined
end.
-delta_limit(?BLANK_DELTA_PATTERN(_X)) -> undefined;
+delta_limit(?BLANK_DELTA_PATTERN(_)) -> undefined;
delta_limit(#delta { start_seq_id = StartSeqId }) -> StartSeqId.
%%----------------------------------------------------------------------------
@@ -2846,7 +2846,7 @@ move_messages_to_vhost_store(Queues) ->
in_batches(MigrationBatchSize,
{rabbit_variable_queue, migrate_queue, [OldStore, NewMsgStore]},
QueuesWithTerms,
- "message_store upgrades: Migrating batch ~p of ~p queues. Out of total ~p ~n",
+ "message_store upgrades: Migrating batch ~p of ~p queues. Out of total ~p ",
"message_store upgrades: Batch ~p of ~p queues migrated ~n. ~p total left"),
log_upgrade("Message store migration finished"),
@@ -2882,7 +2882,7 @@ migrate_queue({QueueName = #resource{virtual_host = VHost, name = Name},
RecoveryTerm},
OldStore, NewStore) ->
log_upgrade_verbose(
- "Migrating messages in queue ~s in vhost ~s to per-vhost message store~n",
+ "Migrating messages in queue ~s in vhost ~s to per-vhost message store",
[Name, VHost]),
OldStoreClient = get_global_store_client(OldStore),
NewStoreClient = get_per_vhost_store_client(QueueName, NewStore),
diff --git a/deps/rabbit/src/rabbit_version.erl b/deps/rabbit/src/rabbit_version.erl
index 3f5462c7b4..c3e69eacbe 100644
--- a/deps/rabbit/src/rabbit_version.erl
+++ b/deps/rabbit/src/rabbit_version.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_version).
diff --git a/deps/rabbit/src/rabbit_vhost.erl b/deps/rabbit/src/rabbit_vhost.erl
index c8c5fc961a..29355562bb 100644
--- a/deps/rabbit/src/rabbit_vhost.erl
+++ b/deps/rabbit/src/rabbit_vhost.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_vhost).
@@ -10,12 +10,14 @@
-include_lib("rabbit_common/include/rabbit.hrl").
-include("vhost.hrl").
--export([recover/0, recover/1]).
+-export([recover/0, recover/1, read_config/1]).
-export([add/2, add/4, delete/2, exists/1, with/2, with_user_and_vhost/3, assert/1, update/2,
set_limits/2, vhost_cluster_state/1, is_running_on_all_nodes/1, await_running_on_all_nodes/2,
- list/0, count/0, list_names/0, all/0, parse_tags/1]).
+ list/0, count/0, list_names/0, all/0, all_tagged_with/1]).
+-export([parse_tags/1, update_metadata/2, tag_with/2, untag_from/2, update_tags/2, update_tags/3]).
+-export([lookup/1]).
-export([info/1, info/2, info_all/0, info_all/1, info_all/2, info_all/3]).
--export([dir/1, msg_store_dir_path/1, msg_store_dir_wildcard/0]).
+-export([dir/1, msg_store_dir_path/1, msg_store_dir_wildcard/0, config_file_path/1, ensure_config_file/1]).
-export([delete_storage/1]).
-export([vhost_down/1]).
-export([put_vhost/5]).
@@ -24,6 +26,9 @@
%% API
%%
+-type vhost_tag() :: atom() | string() | binary().
+-export_type([vhost_tag/0]).
+
recover() ->
%% Clear out remnants of old incarnation, in case we restarted
%% faster than other nodes handled DOWN messages from us.
@@ -32,7 +37,10 @@ recover() ->
rabbit_amqqueue:warn_file_limit(),
%% Prepare rabbit_semi_durable_route table
- rabbit_binding:recover(),
+ {Time, _} = timer:tc(fun() ->
+ rabbit_binding:recover()
+ end),
+ rabbit_log:debug("rabbit_binding:recover/0 completed in ~fs", [Time/1000000]),
%% rabbit_vhost_sup_sup will start the actual recovery.
%% So recovery will be run every time a vhost supervisor is restarted.
@@ -43,33 +51,92 @@ recover() ->
recover(VHost) ->
VHostDir = msg_store_dir_path(VHost),
- rabbit_log:info("Making sure data directory '~ts' for vhost '~s' exists~n",
+ rabbit_log:info("Making sure data directory '~ts' for vhost '~s' exists",
[VHostDir, VHost]),
VHostStubFile = filename:join(VHostDir, ".vhost"),
ok = rabbit_file:ensure_dir(VHostStubFile),
ok = file:write_file(VHostStubFile, VHost),
+ ok = ensure_config_file(VHost),
{Recovered, Failed} = rabbit_amqqueue:recover(VHost),
AllQs = Recovered ++ Failed,
QNames = [amqqueue:get_name(Q) || Q <- AllQs],
- ok = rabbit_binding:recover(rabbit_exchange:recover(VHost), QNames),
+ {Time, ok} = timer:tc(fun() ->
+ rabbit_binding:recover(rabbit_exchange:recover(VHost), QNames)
+ end),
+ rabbit_log:debug("rabbit_binding:recover/2 for vhost ~s completed in ~fs", [VHost, Time/1000000]),
+
ok = rabbit_amqqueue:start(Recovered),
%% Start queue mirrors.
ok = rabbit_mirror_queue_misc:on_vhost_up(VHost),
ok.
+ensure_config_file(VHost) ->
+ Path = config_file_path(VHost),
+ case filelib:is_regular(Path) of
+ %% The config file exists. Do nothing.
+ true ->
+ ok;
+ %% The config file does not exist.
+ %% Check if there are queues in this vhost.
+ false ->
+ QueueDirs = rabbit_queue_index:all_queue_directory_names(VHost),
+ SegmentEntryCount = case QueueDirs of
+ %% There are no queues. Write the configured value for
+ %% the segment entry count, or the new RabbitMQ default
+ %% introduced in v3.8.17. The new default results in a much
+ %% smaller memory footprint when many queues are used.
+ [] ->
+ application:get_env(rabbit, queue_index_segment_entry_count,
+ 2048);
+ %% There are queues already. Write the historic RabbitMQ
+ %% default of 16384 for forward compatibility. Historic
+ %% default calculated as trunc(math:pow(2,?REL_SEQ_BITS)).
+ _ ->
+ ?LEGACY_INDEX_SEGMENT_ENTRY_COUNT
+ end,
+ rabbit_log:info("Setting segment_entry_count for vhost '~s' with ~b queues to '~b'",
+ [VHost, length(QueueDirs), SegmentEntryCount]),
+ file:write_file(Path, io_lib:format(
+ "%% This file is auto-generated! Edit at your own risk!~n"
+ "{segment_entry_count, ~b}.",
+ [SegmentEntryCount]))
+ end.
+
+read_config(VHost) ->
+ Config = case file:consult(config_file_path(VHost)) of
+ {ok, Val} -> Val;
+ %% the file does not exist yet, likely due to an upgrade from a pre-3.7
+ %% message store layout, so use the historic default.
+ {error, _} -> #{
+ segment_entry_count => ?LEGACY_INDEX_SEGMENT_ENTRY_COUNT
+ }
+ end,
+ rabbit_data_coercion:to_map(Config).
+
-define(INFO_KEYS, vhost:info_keys()).
-spec parse_tags(binary() | string() | atom()) -> [atom()].
parse_tags(undefined) ->
[];
-parse_tags("") ->
- [];
parse_tags(<<"">>) ->
[];
+parse_tags([]) ->
+ [];
parse_tags(Val) when is_binary(Val) ->
- parse_tags(rabbit_data_coercion:to_list(Val));
+ SVal = rabbit_data_coercion:to_list(Val),
+ [trim_tag(Tag) || Tag <- re:split(SVal, ",", [{return, list}])];
parse_tags(Val) when is_list(Val) ->
- [trim_tag(Tag) || Tag <- re:split(Val, ",", [{return, list}])].
+ case hd(Val) of
+ Bin when is_binary(Bin) ->
+ %% this is a list of binaries
+ [trim_tag(Tag) || Tag <- Val];
+ Atom when is_atom(Atom) ->
+ %% this is a list of atoms
+ [trim_tag(Tag) || Tag <- Val];
+ Int when is_integer(Int) ->
+ %% this is a string/charlist
+ [trim_tag(Tag) || Tag <- re:split(Val, ",", [{return, list}])]
+ end.
-spec add(vhost:name(), rabbit_types:username()) -> rabbit_types:ok_or_error(any()).
@@ -92,7 +159,7 @@ do_add(Name, Description, Tags, ActingUser) ->
undefined ->
rabbit_log:info("Adding vhost '~s' without a description", [Name]);
Value ->
- rabbit_log:info("Adding vhost '~s' (description: '~s')", [Name, Value])
+ rabbit_log:info("Adding vhost '~s' (description: '~s', tags: ~p)", [Name, Value, Tags])
end,
VHost = rabbit_misc:execute_mnesia_transaction(
fun () ->
@@ -139,6 +206,26 @@ do_add(Name, Description, Tags, ActingUser) ->
{error, Msg}
end.
+-spec update(vhost:name(), binary(), [atom()], rabbit_types:username()) -> rabbit_types:ok_or_error(any()).
+update(Name, Description, Tags, ActingUser) ->
+ rabbit_misc:execute_mnesia_transaction(
+ fun () ->
+ case mnesia:wread({rabbit_vhost, Name}) of
+ [] ->
+ {error, {no_such_vhost, Name}};
+ [VHost0] ->
+ VHost = vhost:merge_metadata(VHost0, #{description => Description, tags => Tags}),
+ rabbit_log:debug("Updating a virtual host record ~p", [VHost]),
+ ok = mnesia:write(rabbit_vhost, VHost, write),
+ rabbit_event:notify(vhost_updated, info(VHost)
+ ++ [{user_who_performed_action, ActingUser},
+ {description, Description},
+ {tags, Tags}]),
+ ok
+ end
+ end).
+
+
-spec delete(vhost:name(), rabbit_types:username()) -> rabbit_types:ok_or_error(any()).
delete(VHost, ActingUser) ->
@@ -147,7 +234,7 @@ delete(VHost, ActingUser) ->
%% process, which in turn results in further mnesia actions and
%% eventually the termination of that process. Exchange deletion causes
%% notifications which must be sent outside the TX
- rabbit_log:info("Deleting vhost '~s'~n", [VHost]),
+ rabbit_log:info("Deleting vhost '~s'", [VHost]),
QDelFun = fun (Q) -> rabbit_amqqueue:delete(Q, false, false, ActingUser) end,
[begin
Name = amqqueue:get_name(Q),
@@ -176,17 +263,21 @@ put_vhost(Name, Description, Tags0, Trace, Username) ->
"null" -> <<"">>;
Other -> Other
end,
+ ParsedTags = parse_tags(Tags),
+ rabbit_log:debug("Parsed tags ~p to ~p", [Tags, ParsedTags]),
Result = case exists(Name) of
- true -> ok;
- false -> add(Name, Description, parse_tags(Tags), Username),
- %% wait for up to 45 seconds for the vhost to initialise
- %% on all nodes
- case await_running_on_all_nodes(Name, 45000) of
- ok ->
- maybe_grant_full_permissions(Name, Username);
- {error, timeout} ->
- {error, timeout}
- end
+ true ->
+ update(Name, Description, ParsedTags, Username);
+ false ->
+ add(Name, Description, ParsedTags, Username),
+ %% wait for up to 45 seconds for the vhost to initialise
+ %% on all nodes
+ case await_running_on_all_nodes(Name, 45000) of
+ ok ->
+ maybe_grant_full_permissions(Name, Username);
+ {error, timeout} ->
+ {error, timeout}
+ end
end,
case Trace of
true -> rabbit_trace:start(Name);
@@ -257,7 +348,7 @@ vhost_down(VHost) ->
delete_storage(VHost) ->
VhostDir = msg_store_dir_path(VHost),
- rabbit_log:info("Deleting message store directory for vhost '~s' at '~s'~n", [VHost, VhostDir]),
+ rabbit_log:info("Deleting message store directory for vhost '~s' at '~s'", [VHost, VhostDir]),
%% Message store should be closed when vhost supervisor is closed.
case rabbit_file:recursive_delete([VhostDir]) of
ok -> ok;
@@ -313,49 +404,114 @@ list() -> list_names().
-spec all() -> [vhost:vhost()].
all() -> mnesia:dirty_match_object(rabbit_vhost, vhost:pattern_match_all()).
+-spec all_tagged_with(atom()) -> [vhost:vhost()].
+all_tagged_with(TagName) ->
+ lists:filter(
+ fun(VHost) ->
+ Meta = vhost:get_metadata(VHost),
+ case Meta of
+ #{tags := Tags} ->
+ lists:member(rabbit_data_coercion:to_atom(TagName), Tags);
+ _ -> false
+ end
+ end, all()).
+
-spec count() -> non_neg_integer().
count() ->
length(list()).
--spec with(vhost:name(), rabbit_misc:thunk(A)) -> A.
+-spec lookup(vhost:name()) -> vhost:vhost() | rabbit_types:ok_or_error(any()).
+lookup(VHostName) ->
+ case rabbit_misc:dirty_read({rabbit_vhost, VHostName}) of
+ {error, not_found} -> {error, {no_such_vhost, VHostName}};
+ {ok, Record} -> Record
+ end.
-with(VHost, Thunk) ->
+-spec with(vhost:name(), rabbit_misc:thunk(A)) -> A.
+with(VHostName, Thunk) ->
fun () ->
- case mnesia:read({rabbit_vhost, VHost}) of
- [] ->
- mnesia:abort({no_such_vhost, VHost});
- [_V] ->
- Thunk()
- end
+ case mnesia:read({rabbit_vhost, VHostName}) of
+ [] -> mnesia:abort({no_such_vhost, VHostName});
+ [_V] -> Thunk()
+ end
end.
--spec with_user_and_vhost
- (rabbit_types:username(), vhost:name(), rabbit_misc:thunk(A)) -> A.
-
-with_user_and_vhost(Username, VHost, Thunk) ->
- rabbit_misc:with_user(Username, with(VHost, Thunk)).
+-spec with_user_and_vhost(rabbit_types:username(), vhost:name(), rabbit_misc:thunk(A)) -> A.
+with_user_and_vhost(Username, VHostName, Thunk) ->
+ rabbit_misc:with_user(Username, with(VHostName, Thunk)).
%% Like with/2 but outside an Mnesia tx
-spec assert(vhost:name()) -> 'ok'.
-
-assert(VHost) -> case exists(VHost) of
- true -> ok;
- false -> throw({error, {no_such_vhost, VHost}})
- end.
+assert(VHostName) ->
+ case exists(VHostName) of
+ true -> ok;
+ false -> throw({error, {no_such_vhost, VHostName}})
+ end.
-spec update(vhost:name(), fun((vhost:vhost()) -> vhost:vhost())) -> vhost:vhost().
-
-update(VHost, Fun) ->
- case mnesia:read({rabbit_vhost, VHost}) of
+update(VHostName, Fun) ->
+ case mnesia:read({rabbit_vhost, VHostName}) of
[] ->
- mnesia:abort({no_such_vhost, VHost});
+ mnesia:abort({no_such_vhost, VHostName});
[V] ->
V1 = Fun(V),
ok = mnesia:write(rabbit_vhost, V1, write),
V1
end.
+-spec update_metadata(vhost:name(), fun((map())-> map())) -> vhost:vhost() | rabbit_types:ok_or_error(any()).
+update_metadata(VHostName, Fun) ->
+ update(VHostName, fun(Record) ->
+ Meta = Fun(vhost:get_metadata(Record)),
+ vhost:set_metadata(Record, Meta)
+ end).
+
+-spec update_tags(vhost:name(), [vhost_tag()], rabbit_types:username()) -> vhost:vhost() | rabbit_types:ok_or_error(any()).
+update_tags(VHostName, Tags, ActingUser) ->
+ ConvertedTags = [rabbit_data_coercion:to_atom(I) || I <- Tags],
+ try
+ R = rabbit_misc:execute_mnesia_transaction(fun() ->
+ update_tags(VHostName, ConvertedTags)
+ end),
+ rabbit_log:info("Successfully set tags for virtual host '~s' to ~p", [VHostName, ConvertedTags]),
+ rabbit_event:notify(vhost_tags_set, [{name, VHostName},
+ {tags, ConvertedTags},
+ {user_who_performed_action, ActingUser}]),
+ R
+ catch
+ throw:{error, {no_such_vhost, _}} = Error ->
+ rabbit_log:warning("Failed to set tags for virtual host '~s': the virtual host does not exist", [VHostName]),
+ throw(Error);
+ throw:Error ->
+ rabbit_log:warning("Failed to set tags for virtual host '~s': ~p", [VHostName, Error]),
+ throw(Error);
+ exit:Error ->
+ rabbit_log:warning("Failed to set tags for virtual host '~s': ~p", [VHostName, Error]),
+ exit(Error)
+ end.
+
+-spec update_tags(vhost:name(), [vhost_tag()]) -> vhost:vhost() | rabbit_types:ok_or_error(any()).
+update_tags(VHostName, Tags) ->
+ ConvertedTags = [rabbit_data_coercion:to_atom(I) || I <- Tags],
+ update(VHostName, fun(Record) ->
+ Meta0 = vhost:get_metadata(Record),
+ Meta = maps:update(tags, ConvertedTags, Meta0),
+ vhost:set_metadata(Record, Meta)
+ end).
+
+-spec tag_with(vhost:name(), [atom()]) -> vhost:vhost() | rabbit_types:ok_or_error(any()).
+tag_with(VHostName, Tags) when is_list(Tags) ->
+ update_metadata(VHostName, fun(#{tags := Tags0} = Meta) ->
+ maps:update(tags, lists:usort(Tags0 ++ Tags), Meta)
+ end).
+
+-spec untag_from(vhost:name(), [atom()]) -> vhost:vhost() | rabbit_types:ok_or_error(any()).
+untag_from(VHostName, Tags) when is_list(Tags) ->
+ update_metadata(VHostName, fun(#{tags := Tags0} = Meta) ->
+ maps:update(tags, lists:usort(Tags0 -- Tags), Meta)
+ end).
+
set_limits(VHost, undefined) ->
vhost:set_limits(VHost, []);
set_limits(VHost, Limits) ->
@@ -377,6 +533,10 @@ msg_store_dir_base() ->
Dir = rabbit_mnesia:dir(),
filename:join([Dir, "msg_stores", "vhosts"]).
+config_file_path(VHost) ->
+ VHostDir = msg_store_dir_path(VHost),
+ filename:join(VHostDir, ".config").
+
-spec trim_tag(list() | binary() | atom()) -> atom().
trim_tag(Val) ->
rabbit_data_coercion:to_atom(string:trim(rabbit_data_coercion:to_list(Val))).
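For reference, the reworked parse_tags/1 in this hunk normalises every accepted input shape to a list of atoms; the sample values below are hypothetical but follow directly from the clauses above:

    [qa, dev] = rabbit_vhost:parse_tags(<<"qa, dev">>),
    [qa, dev] = rabbit_vhost:parse_tags("qa, dev"),
    [qa, dev] = rabbit_vhost:parse_tags([<<"qa">>, <<"dev">>]),
    [qa, dev] = rabbit_vhost:parse_tags([qa, dev]).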
diff --git a/deps/rabbit/src/rabbit_vhost_limit.erl b/deps/rabbit/src/rabbit_vhost_limit.erl
index bee01f3054..da8601c444 100644
--- a/deps/rabbit/src/rabbit_vhost_limit.erl
+++ b/deps/rabbit/src/rabbit_vhost_limit.erl
@@ -2,14 +2,14 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_vhost_limit).
-behaviour(rabbit_runtime_parameter).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-export([register/0]).
-export([parse_set/3, set/3, clear/2]).
diff --git a/deps/rabbit/src/rabbit_vhost_msg_store.erl b/deps/rabbit/src/rabbit_vhost_msg_store.erl
index 8667b4d143..32aac71da8 100644
--- a/deps/rabbit/src/rabbit_vhost_msg_store.erl
+++ b/deps/rabbit/src/rabbit_vhost_msg_store.erl
@@ -2,12 +2,12 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_vhost_msg_store).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-export([start/4, stop/2, client_init/5, successfully_recovered_state/2]).
-export([vhost_store_pid/2]).
diff --git a/deps/rabbit/src/rabbit_vhost_process.erl b/deps/rabbit/src/rabbit_vhost_process.erl
index cf70d49010..bf9972e1be 100644
--- a/deps/rabbit/src/rabbit_vhost_process.erl
+++ b/deps/rabbit/src/rabbit_vhost_process.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2017-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2017-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% This module implements a vhost identity process.
@@ -25,7 +25,7 @@
%% use the now recommended try/catch syntax for obtaining the stack trace.
-compile(nowarn_deprecated_function).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-define(TICKTIME_RATIO, 4).
@@ -41,7 +41,7 @@ start_link(VHost) ->
init([VHost]) ->
process_flag(trap_exit, true),
- rabbit_log:debug("Recovering data for VHost ~p~n", [VHost]),
+ rabbit_log:debug("Recovering data for VHost ~p", [VHost]),
try
%% Recover the vhost data and save it to vhost registry.
ok = rabbit_vhost:recover(VHost),
diff --git a/deps/rabbit/src/rabbit_vhost_sup.erl b/deps/rabbit/src/rabbit_vhost_sup.erl
index d82d827ecf..15ece86f82 100644
--- a/deps/rabbit/src/rabbit_vhost_sup.erl
+++ b/deps/rabbit/src/rabbit_vhost_sup.erl
@@ -2,12 +2,12 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2017-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2017-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_vhost_sup).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
%% Each vhost gets an instance of this supervisor that supervises
%% message stores and queues (via rabbit_amqqueue_sup_sup).
diff --git a/deps/rabbit/src/rabbit_vhost_sup_sup.erl b/deps/rabbit/src/rabbit_vhost_sup_sup.erl
index c201237daa..2d087986d1 100644
--- a/deps/rabbit/src/rabbit_vhost_sup_sup.erl
+++ b/deps/rabbit/src/rabbit_vhost_sup_sup.erl
@@ -2,12 +2,12 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_vhost_sup_sup).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-behaviour(supervisor2).
@@ -79,7 +79,7 @@ stop_and_delete_vhost(VHost) ->
false -> ok;
true ->
rabbit_log:info("Stopping vhost supervisor ~p"
- " for vhost '~s'~n",
+ " for vhost '~s'",
[VHostSupPid, VHost]),
case supervisor2:terminate_child(?MODULE, WrapperPid) of
ok ->
diff --git a/deps/rabbit/src/rabbit_vhost_sup_wrapper.erl b/deps/rabbit/src/rabbit_vhost_sup_wrapper.erl
index ed239ade69..3c1368699a 100644
--- a/deps/rabbit/src/rabbit_vhost_sup_wrapper.erl
+++ b/deps/rabbit/src/rabbit_vhost_sup_wrapper.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2017-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2017-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% This module is a wrapper around vhost supervisor to
@@ -10,7 +10,7 @@
-module(rabbit_vhost_sup_wrapper).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-behaviour(supervisor2).
-export([init/1]).
diff --git a/deps/rabbit/src/rabbit_vm.erl b/deps/rabbit/src/rabbit_vm.erl
index b014e090c5..f01c383b63 100644
--- a/deps/rabbit/src/rabbit_vm.erl
+++ b/deps/rabbit/src/rabbit_vm.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_vm).
@@ -231,7 +231,7 @@ conn_sups() ->
ranch_server_sups() ->
try
- ets:match(ranch_server, {{conns_sup, '_'}, '$1'})
+ [Pid || {_, _, Pid} <- ranch_server:get_connections_sups()]
catch
%% Ranch ETS table doesn't exist yet
error:badarg -> []
@@ -325,16 +325,17 @@ ra_type(PDict) ->
info_value()).
-type distinguisher() :: fun (([{term(), term()}]) -> atom()).
-type distinguishers() :: [{info_key(), distinguisher()}].
+
-spec sum_processes([process()], distinguishers(), [info_key()]) ->
{[{process(), [info_item()]}], [info_item()]}.
--spec sum_processes([process()], accumulate(), distinguishers(),
- [info_item()]) ->
- {[{process(), [info_item()]}], [info_item()]}.
sum_processes(Names, Distinguishers, Items) ->
sum_processes(Names, fun (_, X, Y) -> X + Y end, Distinguishers,
[{Item, 0} || Item <- Items]).
+-spec sum_processes([process()], accumulate(), distinguishers(),
+ [info_item()]) ->
+ {[{process(), [info_item()]}], [info_item()]}.
%% summarize the process_info of all processes based on their
%% '$ancestor' hierarchy, recorded in their process dictionary.
%%
diff --git a/deps/rabbit/src/supervised_lifecycle.erl b/deps/rabbit/src/supervised_lifecycle.erl
index 0e1bb9b5c8..4a88820ea4 100644
--- a/deps/rabbit/src/supervised_lifecycle.erl
+++ b/deps/rabbit/src/supervised_lifecycle.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% Invoke callbacks on startup and termination.
diff --git a/deps/rabbit/src/tcp_listener.erl b/deps/rabbit/src/tcp_listener.erl
index 93c24ab397..3997bc4c36 100644
--- a/deps/rabbit/src/tcp_listener.erl
+++ b/deps/rabbit/src/tcp_listener.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(tcp_listener).
@@ -44,7 +44,7 @@
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
--record(state, {on_startup, on_shutdown, label, ip, port}).
+-record(state, {on_shutdown, label, ip, port}).
%%----------------------------------------------------------------------------
@@ -63,14 +63,17 @@ start_link(IPAddress, Port,
%%--------------------------------------------------------------------
-init({IPAddress, Port, {M,F,A} = OnStartup, OnShutdown, Label}) ->
+init({IPAddress, Port, {M, F, A}, OnShutdown, Label}) ->
process_flag(trap_exit, true),
- error_logger:info_msg(
- "started ~s on ~s:~p~n",
- [Label, rabbit_misc:ntoab(IPAddress), Port]),
+ logger:info("started ~s on ~s:~p", [Label, rabbit_misc:ntoab(IPAddress), Port]),
apply(M, F, A ++ [IPAddress, Port]),
- {ok, #state{on_startup = OnStartup, on_shutdown = OnShutdown,
- label = Label, ip=IPAddress, port=Port}}.
+ State0 = #state{
+ on_shutdown = OnShutdown,
+ label = Label,
+ ip = IPAddress,
+ port = Port
+ },
+ {ok, obfuscate_state(State0)}.
handle_call(_Request, _From, State) ->
{noreply, State}.
@@ -81,10 +84,23 @@ handle_cast(_Msg, State) ->
handle_info(_Info, State) ->
{noreply, State}.
-terminate(_Reason, #state{on_shutdown = {M,F,A}, label=Label, ip=IPAddress, port=Port}) ->
- error_logger:info_msg("stopped ~s on ~s:~p~n",
- [Label, rabbit_misc:ntoab(IPAddress), Port]),
- apply(M, F, A ++ [IPAddress, Port]).
+terminate(_Reason, #state{on_shutdown = OnShutdown, label = Label, ip = IPAddress, port = Port}) ->
+ logger:info("stopped ~s on ~s:~p", [Label, rabbit_misc:ntoab(IPAddress), Port]),
+ try
+ OnShutdown(IPAddress, Port)
+ catch _:Error ->
+ logger:error("Failed to stop ~s on ~s:~p: ~p",
+ [Label, rabbit_misc:ntoab(IPAddress), Port, Error])
+ end.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
+
+obfuscate_state(#state{on_shutdown = OnShutdown} = State) ->
+ {M, F, A} = OnShutdown,
+ State#state{
+ %% prevents the shutdown arguments from being logged in case of an exception
+ on_shutdown = fun(IPAddress, Port) ->
+ apply(M, F, A ++ [IPAddress, Port])
+ end
+ }.
\ No newline at end of file
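The obfuscate_state/1 helper added above swaps the {M, F, A} shutdown callback for a closure, so the callback arguments no longer show up in the listener state or in crash reports. A minimal sketch of the effect, using a hypothetical MFA:

    State = obfuscate_state(#state{on_shutdown = {my_mod, on_stop, []}}),
    true = is_function(State#state.on_shutdown, 2).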
diff --git a/deps/rabbit/src/tcp_listener_sup.erl b/deps/rabbit/src/tcp_listener_sup.erl
index 82128bb2af..ccc26c94bc 100644
--- a/deps/rabbit/src/tcp_listener_sup.erl
+++ b/deps/rabbit/src/tcp_listener_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(tcp_listener_sup).
@@ -16,26 +16,27 @@
-behaviour(supervisor).
--export([start_link/10]).
+-export([start_link/11]).
-export([init/1]).
-type mfargs() :: {atom(), atom(), [any()]}.
-spec start_link
(inet:ip_address(), inet:port_number(), module(), [gen_tcp:listen_option()],
- module(), any(), mfargs(), mfargs(), integer(), string()) ->
+ module(), any(), mfargs(), mfargs(), integer(), integer(), string()) ->
rabbit_types:ok_pid_or_error().
start_link(IPAddress, Port, Transport, SocketOpts, ProtoSup, ProtoOpts, OnStartup, OnShutdown,
- ConcurrentAcceptorCount, Label) ->
+ ConcurrentAcceptorCount, ConcurrentConnsSups, Label) ->
supervisor:start_link(
?MODULE, {IPAddress, Port, Transport, SocketOpts, ProtoSup, ProtoOpts, OnStartup, OnShutdown,
- ConcurrentAcceptorCount, Label}).
+ ConcurrentAcceptorCount, ConcurrentConnsSups, Label}).
init({IPAddress, Port, Transport, SocketOpts, ProtoSup, ProtoOpts, OnStartup, OnShutdown,
- ConcurrentAcceptorCount, Label}) ->
+ ConcurrentAcceptorCount, ConcurrentConnsSups, Label}) ->
{ok, AckTimeout} = application:get_env(rabbit, ssl_handshake_timeout),
- MaxConnections = rabbit_misc:get_env(rabbit, connection_max, infinity),
+ MaxConnections = max_conn(rabbit_misc:get_env(rabbit, connection_max, infinity),
+ ConcurrentConnsSups),
RanchListenerOpts = #{
num_acceptors => ConcurrentAcceptorCount,
max_connections => MaxConnections,
@@ -43,7 +44,8 @@ init({IPAddress, Port, Transport, SocketOpts, ProtoSup, ProtoOpts, OnStartup, On
connection_type => supervisor,
socket_opts => [{ip, IPAddress},
{port, Port} |
- SocketOpts]
+ SocketOpts],
+ num_conns_sups => ConcurrentConnsSups
},
Flags = {one_for_all, 10, 10},
OurChildSpecStart = {tcp_listener, start_link, [IPAddress, Port, OnStartup, OnShutdown, Label]},
@@ -52,3 +54,9 @@ init({IPAddress, Port, Transport, SocketOpts, ProtoSup, ProtoOpts, OnStartup, On
Transport, RanchListenerOpts,
ProtoSup, ProtoOpts),
{ok, {Flags, [RanchChildSpec, OurChildSpec]}}.
+
+max_conn(infinity, _) ->
+ infinity;
+max_conn(Max, Sups) ->
+ %% connection_max in Ranch is per connection supervisor
+ Max div Sups.
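A quick sanity check of max_conn/2 above: Ranch applies max_connections per connection supervisor, so the configured connection_max is divided evenly across the ConcurrentConnsSups supervisors, while infinity passes through unchanged (values are illustrative):

    infinity = max_conn(infinity, 4),
    250 = max_conn(1000, 4).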
diff --git a/deps/rabbit/src/term_to_binary_compat.erl b/deps/rabbit/src/term_to_binary_compat.erl
index 327a846d1f..ea2e2196fe 100644
--- a/deps/rabbit/src/term_to_binary_compat.erl
+++ b/deps/rabbit/src/term_to_binary_compat.erl
@@ -2,12 +2,12 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2017-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2017-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(term_to_binary_compat).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-export([term_to_binary_1/1]).
diff --git a/deps/rabbit/src/vhost.erl b/deps/rabbit/src/vhost.erl
index ca704183a0..452e8115f8 100644
--- a/deps/rabbit/src/vhost.erl
+++ b/deps/rabbit/src/vhost.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2018-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2018-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(vhost).
@@ -25,7 +25,10 @@
get_metadata/1,
get_description/1,
get_tags/1,
- set_limits/2
+ set_limits/2,
+ set_metadata/2,
+ merge_metadata/2,
+ is_tagged_with/2
]).
-define(record_version, vhost_v2).
@@ -170,3 +173,29 @@ set_limits(VHost, Value) ->
_ ->
vhost_v1:set_limits(VHost, Value)
end.
+
+-spec set_metadata(vhost(), metadata()) -> vhost().
+set_metadata(VHost, Value) ->
+ case record_version_to_use() of
+ ?record_version ->
+ VHost#vhost{metadata = Value};
+ _ ->
+ %% the field is not available, so this is a no-op
+ VHost
+ end.
+
+-spec merge_metadata(vhost(), metadata()) -> vhost().
+merge_metadata(VHost, Value) ->
+ case record_version_to_use() of
+ ?record_version ->
+ Meta0 = get_metadata(VHost),
+ NewMeta = maps:merge(Meta0, Value),
+ VHost#vhost{metadata = NewMeta};
+ _ ->
+ %% the field is not available, so this is a no-op
+ VHost
+ end.
+
+-spec is_tagged_with(vhost:vhost(), atom()) -> boolean().
+is_tagged_with(VHost, Tag) ->
+ lists:member(Tag, get_tags(VHost)).
diff --git a/deps/rabbit/src/vhost_v1.erl b/deps/rabbit/src/vhost_v1.erl
index 5b53eb148a..10cee4e08e 100644
--- a/deps/rabbit/src/vhost_v1.erl
+++ b/deps/rabbit/src/vhost_v1.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2018-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2018-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(vhost_v1).
diff --git a/deps/rabbit/test/backing_queue_SUITE.erl b/deps/rabbit/test/backing_queue_SUITE.erl
index be6004c8b9..5484d115fb 100644
--- a/deps/rabbit/test/backing_queue_SUITE.erl
+++ b/deps/rabbit/test/backing_queue_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(backing_queue_SUITE).
@@ -28,6 +28,8 @@
variable_queue_fold_msg_on_disk,
variable_queue_dropfetchwhile,
variable_queue_dropwhile_varying_ram_duration,
+ variable_queue_dropwhile_restart,
+ variable_queue_dropwhile_sync_restart,
variable_queue_fetchwhile_varying_ram_duration,
variable_queue_ack_limiting,
variable_queue_purge,
@@ -527,6 +529,7 @@ bq_queue_index(Config) ->
?MODULE, bq_queue_index1, [Config]).
bq_queue_index1(_Config) ->
+ init_queue_index(),
SegmentSize = rabbit_queue_index:next_segment_boundary(0),
TwoSegs = SegmentSize + SegmentSize,
MostOfASegment = trunc(SegmentSize*0.75),
@@ -708,6 +711,7 @@ bq_queue_recover(Config) ->
?MODULE, bq_queue_recover1, [Config]).
bq_queue_recover1(Config) ->
+ init_queue_index(),
Count = 2 * rabbit_queue_index:next_segment_boundary(0),
QName0 = queue_name(Config, <<"bq_queue_recover-q">>),
{new, Q} = rabbit_amqqueue:declare(QName0, true, false, [], none, <<"acting-user">>),
@@ -980,6 +984,87 @@ variable_queue_dropfetchwhile2(VQ0, _QName) ->
VQ5.
+variable_queue_dropwhile_restart(Config) ->
+ passed = rabbit_ct_broker_helpers:rpc(Config, 0,
+ ?MODULE, variable_queue_dropwhile_restart1, [Config]).
+
+variable_queue_dropwhile_restart1(Config) ->
+ with_fresh_variable_queue(
+ fun variable_queue_dropwhile_restart2/2,
+ ?config(variable_queue_type, Config)).
+
+variable_queue_dropwhile_restart2(VQ0, QName) ->
+ Count = 10000,
+
+ %% add messages with sequential expiry
+ VQ1 = variable_queue_publish(
+ true, 1, Count,
+ fun (N, Props) -> Props#message_properties{expiry = N} end,
+ fun erlang:term_to_binary/1, VQ0),
+
+ %% drop the first 5 messages
+ {#message_properties{expiry = 6}, VQ2} =
+ rabbit_variable_queue:dropwhile(
+ fun (#message_properties {expiry = Expiry}) -> Expiry =< 5 end, VQ1),
+
+ _VQ3 = rabbit_variable_queue:terminate(shutdown, VQ2),
+ Terms = variable_queue_read_terms(QName),
+ VQ4 = variable_queue_init(test_amqqueue(QName, true), Terms),
+
+ %% fetch the remaining messages (6..Count)
+ VQ5 = lists:foldl(fun (_, VQN) ->
+ {{_, _, _}, VQM} =
+ rabbit_variable_queue:fetch(false, VQN),
+ VQM
+ end, VQ4, lists:seq(6, Count)),
+
+ %% should be empty now
+ true = rabbit_variable_queue:is_empty(VQ5),
+
+ VQ5.
+
+variable_queue_dropwhile_sync_restart(Config) ->
+ passed = rabbit_ct_broker_helpers:rpc(Config, 0,
+ ?MODULE, variable_queue_dropwhile_sync_restart1, [Config]).
+
+variable_queue_dropwhile_sync_restart1(Config) ->
+ with_fresh_variable_queue(
+ fun variable_queue_dropwhile_sync_restart2/2,
+ ?config(variable_queue_type, Config)).
+
+variable_queue_dropwhile_sync_restart2(VQ0, QName) ->
+ Count = 10000,
+
+ %% add messages with sequential expiry
+ VQ1 = variable_queue_publish(
+ true, 1, Count,
+ fun (N, Props) -> Props#message_properties{expiry = N} end,
+ fun erlang:term_to_binary/1, VQ0),
+
+ %% drop the first 5 messages
+ {#message_properties{expiry = 6}, VQ2} =
+ rabbit_variable_queue:dropwhile(
+ fun (#message_properties {expiry = Expiry}) -> Expiry =< 5 end, VQ1),
+
+ %% Queue index sync.
+ VQ2b = rabbit_variable_queue:handle_pre_hibernate(VQ2),
+
+ _VQ3 = rabbit_variable_queue:terminate(shutdown, VQ2b),
+ Terms = variable_queue_read_terms(QName),
+ VQ4 = variable_queue_init(test_amqqueue(QName, true), Terms),
+
+ %% fetch the remaining messages (6..Count)
+ VQ5 = lists:foldl(fun (_, VQN) ->
+ {{_, _, _}, VQM} =
+ rabbit_variable_queue:fetch(false, VQN),
+ VQM
+ end, VQ4, lists:seq(6, Count)),
+
+ %% should be empty now
+ true = rabbit_variable_queue:is_empty(VQ5),
+
+ VQ5.
+
variable_queue_dropwhile_varying_ram_duration(Config) ->
passed = rabbit_ct_broker_helpers:rpc(Config, 0,
?MODULE, variable_queue_dropwhile_varying_ram_duration1, [Config]).
@@ -1315,6 +1400,13 @@ with_empty_test_queue(Fun) ->
{0, 0, Qi} = init_test_queue(QName),
rabbit_queue_index:delete_and_terminate(Fun(Qi, QName)).
+init_queue_index() ->
+ %% We must set the segment entry count in the process dictionary
+ %% so that tests which call the queue index directly use the correct
+ %% value.
+ ok.
+
restart_app() ->
rabbit:stop(),
rabbit:start().
@@ -1360,9 +1452,19 @@ variable_queue_init(Q, Recover) ->
rabbit_variable_queue:init(
Q, case Recover of
true -> non_clean_shutdown;
- false -> new
+ false -> new;
+ Terms -> Terms
end, fun nop/2, fun nop/2, fun nop/1, fun nop/1).
+variable_queue_read_terms(QName) ->
+ #resource { kind = queue,
+ virtual_host = VHost,
+ name = Name } = QName,
+ <<Num:128>> = erlang:md5(<<"queue", VHost/binary, Name/binary>>),
+ DirName = rabbit_misc:format("~.36B", [Num]),
+ {ok, Terms} = rabbit_recovery_terms:read(VHost, DirName),
+ Terms.
+
publish_and_confirm(Q, Payload, Count) ->
Seqs = lists:seq(1, Count),
QTState0 = rabbit_queue_type:new(Q, rabbit_queue_type:init()),
diff --git a/deps/rabbit/test/channel_interceptor_SUITE.erl b/deps/rabbit/test/channel_interceptor_SUITE.erl
index e0a8050598..f0bfa48a63 100644
--- a/deps/rabbit/test/channel_interceptor_SUITE.erl
+++ b/deps/rabbit/test/channel_interceptor_SUITE.erl
@@ -2,13 +2,14 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(channel_interceptor_SUITE).
-include_lib("common_test/include/ct.hrl").
-include_lib("amqp_client/include/amqp_client.hrl").
+-include_lib("eunit/include/eunit.hrl").
-compile(export_all).
@@ -21,6 +22,7 @@ groups() ->
[
{non_parallel_tests, [], [
register_interceptor,
+ register_interceptor_failing_with_amqp_error,
register_failing_interceptors
]}
].
@@ -94,6 +96,55 @@ register_interceptor1(Config, Interceptor) ->
check_send_receive(Ch1, QName, <<"bar">>, <<"bar">>),
passed.
+register_interceptor_failing_with_amqp_error(Config) ->
+ passed = rabbit_ct_broker_helpers:rpc(Config, 0,
+ ?MODULE, register_interceptor_failing_with_amqp_error1,
+ [Config, dummy_interceptor]).
+
+register_interceptor_failing_with_amqp_error1(Config, Interceptor) ->
+ PredefinedChannels = rabbit_channel:list(),
+
+ Ch1 = rabbit_ct_client_helpers:open_channel(Config, 0),
+
+ [ChannelProc] = rabbit_channel:list() -- PredefinedChannels,
+
+ [{interceptors, []}] = rabbit_channel:info(ChannelProc, [interceptors]),
+
+ ok = rabbit_registry:register(channel_interceptor,
+ <<"dummy interceptor">>,
+ Interceptor),
+ [{interceptors, [{Interceptor, undefined}]}] =
+ rabbit_channel:info(ChannelProc, [interceptors]),
+
+ Q1 = <<"succeeding-q">>,
+ #'queue.declare_ok'{} =
+ amqp_channel:call(Ch1, #'queue.declare'{queue = Q1}),
+
+ Q2 = <<"failing-q">>,
+ try
+ amqp_channel:call(Ch1, #'queue.declare'{queue = Q2})
+ catch
+ _:Reason ->
+ ?assertMatch(
+ {{shutdown, {_, _, <<"PRECONDITION_FAILED - operation not allowed">>}}, _},
+ Reason)
+ end,
+
+ Ch2 = rabbit_ct_client_helpers:open_channel(Config, 0),
+ [ChannelProc1] = rabbit_channel:list() -- PredefinedChannels,
+
+ ok = rabbit_registry:unregister(channel_interceptor,
+ <<"dummy interceptor">>),
+ [{interceptors, []}] = rabbit_channel:info(ChannelProc1, [interceptors]),
+
+ #'queue.declare_ok'{} =
+ amqp_channel:call(Ch2, #'queue.declare'{queue = Q2}),
+
+ #'queue.delete_ok'{} = amqp_channel:call(Ch2, #'queue.delete' {queue = Q1}),
+ #'queue.delete_ok'{} = amqp_channel:call(Ch2, #'queue.delete' {queue = Q2}),
+
+ passed.
+
register_failing_interceptors(Config) ->
passed = rabbit_ct_broker_helpers:rpc(Config, 0,
?MODULE, register_interceptor1, [Config, failing_dummy_interceptor]).
diff --git a/deps/rabbit/test/channel_operation_timeout_SUITE.erl b/deps/rabbit/test/channel_operation_timeout_SUITE.erl
index 15e0188604..fec72132fe 100644
--- a/deps/rabbit/test/channel_operation_timeout_SUITE.erl
+++ b/deps/rabbit/test/channel_operation_timeout_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(channel_operation_timeout_SUITE).
diff --git a/deps/rabbit/test/channel_operation_timeout_test_queue.erl b/deps/rabbit/test/channel_operation_timeout_test_queue.erl
index 3190dad7a8..f647be412e 100644
--- a/deps/rabbit/test/channel_operation_timeout_test_queue.erl
+++ b/deps/rabbit/test/channel_operation_timeout_test_queue.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(channel_operation_timeout_test_queue).
@@ -107,8 +107,8 @@
}).
--include("rabbit.hrl").
--include("rabbit_framing.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit_framing.hrl").
-define(QUEUE, lqueue).
-define(TIMEOUT_TEST_MSG, <<"timeout_test_msg!">>).
diff --git a/deps/rabbit/test/cluster_SUITE.erl b/deps/rabbit/test/cluster_SUITE.erl
index 9df196a8ed..657882d440 100644
--- a/deps/rabbit/test/cluster_SUITE.erl
+++ b/deps/rabbit/test/cluster_SUITE.erl
@@ -2,14 +2,14 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(cluster_SUITE).
-include_lib("common_test/include/ct.hrl").
-include_lib("amqp_client/include/amqp_client.hrl").
--include("include/amqqueue.hrl").
+-include("amqqueue.hrl").
-compile(export_all).
diff --git a/deps/rabbit/test/cluster_rename_SUITE.erl b/deps/rabbit/test/cluster_rename_SUITE.erl
index cdf02c9643..57594ba07b 100644
--- a/deps/rabbit/test/cluster_rename_SUITE.erl
+++ b/deps/rabbit/test/cluster_rename_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(cluster_rename_SUITE).
diff --git a/deps/rabbit/test/clustering_management_SUITE.erl b/deps/rabbit/test/clustering_management_SUITE.erl
index 550a30b511..0204672f11 100644
--- a/deps/rabbit/test/clustering_management_SUITE.erl
+++ b/deps/rabbit/test/clustering_management_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(clustering_management_SUITE).
@@ -64,8 +64,8 @@ groups() ->
suite() ->
[
- %% If a test hangs, no need to wait for 30 minutes.
- {timetrap, {minutes, 15}}
+ %% If a testcase hangs, no need to wait for 30 minutes.
+ {timetrap, {minutes, 5}}
].
%% -------------------------------------------------------------------
@@ -139,7 +139,7 @@ start_with_invalid_schema_in_path(Config) ->
end.
persistent_cluster_id(Config) ->
- case os:getenv("SECONDARY_UMBRELLA") of
+ case rabbit_ct_helpers:is_mixed_versions() of
false ->
[Rabbit, Hare] = cluster_members(Config),
ClusterIDA1 = rpc:call(Rabbit, rabbit_nodes, persistent_cluster_id, []),
diff --git a/deps/rabbit/test/config_schema_SUITE.erl b/deps/rabbit/test/config_schema_SUITE.erl
index c538736897..d5dd7460ad 100644
--- a/deps/rabbit/test/config_schema_SUITE.erl
+++ b/deps/rabbit/test/config_schema_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(config_schema_SUITE).
diff --git a/deps/rabbit/test/config_schema_SUITE_data/rabbit.snippets b/deps/rabbit/test/config_schema_SUITE_data/rabbit.snippets
index c6ac600dcc..e41ae8cd6d 100644
--- a/deps/rabbit/test/config_schema_SUITE_data/rabbit.snippets
+++ b/deps/rabbit/test/config_schema_SUITE_data/rabbit.snippets
@@ -72,6 +72,28 @@ ssl_options.fail_if_no_peer_cert = true",
{num_acceptors,
"num_acceptors.ssl = 1",[{rabbit,[{num_ssl_acceptors,1}]}],[]},
+ {distribution_listener,
+ "distribution.listener.interface = 192.168.0.1
+ distribution.listener.port_range.min = 25679
+ distribution.listener.port_range.max = 25679",
+ [{kernel, [
+ {inet_dist_listen_min, 25679},
+ {inet_dist_listen_max, 25679},
+ {inet_dist_use_interface, {192,168,0,1}}
+ ]}],
+ []},
+
+ {distribution_listener_ipv6,
+ "distribution.listener.interface = ::1
+ distribution.listener.port_range.min = 25679
+ distribution.listener.port_range.max = 25679",
+ [{kernel, [
+ {inet_dist_listen_min, 25679},
+ {inet_dist_listen_max, 25679},
+ {inet_dist_use_interface, {0,0,0,0,0,0,0,1}}
+ ]}],
+ []},
+
{socket_writer_gc_threshold,
"socket_writer.gc_threshold = 999666111", [{rabbit, [{writer_gc_threshold, 999666111}]}],[]},
@@ -139,6 +161,15 @@ cluster_formation.dns.hostname = discovery.eng.example.local",
[]},
{cluster_formation_ram_ignored,
"cluster_formation.node_type = ram",[],[]},
+ {cluster_formation_target_cluster_size_hint,
+ "cluster_formation.target_cluster_size_hint = 3",
+ [{rabbit, [
+ {cluster_formation, [
+ {target_cluster_size_hint, 3}
+ ]}
+ ]}],
+ []},
+
{tcp_listen_options,
"tcp_listen_options.backlog = 128
tcp_listen_options.nodelay = true
@@ -510,6 +541,14 @@ tcp_listen_options.exit_on_close = false",
]}],
[]},
+
+ {ssl_options_bypass_pem_cache,
+ "ssl_options.bypass_pem_cache = true",
+ [{ssl, [
+ {bypass_pem_cache, true}
+ ]}],
+ []},
+
{tcp_listen_options_linger_on,
"tcp_listen_options.linger.on = true
tcp_listen_options.linger.timeout = 100",
@@ -532,23 +571,22 @@ tcp_listen_options.exit_on_close = false",
{cluster_formation_randomized_startup_delay_both_values,
"cluster_formation.randomized_startup_delay_range.min = 10
cluster_formation.randomized_startup_delay_range.max = 30",
- [{rabbit, [{cluster_formation, [
- {randomized_startup_delay_range, {10, 30}}
- ]}]}],
+ [],
[]},
{cluster_formation_randomized_startup_delay_min_only,
"cluster_formation.randomized_startup_delay_range.min = 10",
- [{rabbit, [{cluster_formation, [
- {randomized_startup_delay_range, {10, 60}}
- ]}]}],
+ [],
[]},
{cluster_formation_randomized_startup_delay_max_only,
"cluster_formation.randomized_startup_delay_range.max = 30",
- [{rabbit, [{cluster_formation, [
- {randomized_startup_delay_range, {5, 30}}
- ]}]}],
+ [],
+ []},
+
+ {cluster_formation_internal_lock_retries,
+ "cluster_formation.internal_lock_retries = 10",
+ [{rabbit,[{cluster_formation,[{internal_lock_retries,10}]}]}],
[]},
{cluster_formation_dns,
@@ -612,45 +650,6 @@ credential_validator.regexp = ^abc\\d+",
{proxy_protocol_off,
"proxy_protocol = false",
[{rabbit,[{proxy_protocol,false}]}],[]},
- {log_debug_file,
- "log.file.level = debug",
- [{rabbit,[{log, [{file, [{level, debug}]}]}]}],
- []},
- {log_debug_console,
- "log.console = true
- log.console.level = debug",
- [{rabbit,[{log, [{console, [{enabled, true}, {level, debug}]}]}]}],
- []},
- {log_debug_exchange,
- "log.exchange = true
- log.exchange.level = debug",
- [{rabbit,[{log, [{exchange, [{enabled, true}, {level, debug}]}]}]}],
- []},
- {log_debug_syslog,
- "log.syslog = true
- log.syslog.level = debug",
- [{rabbit,[{log, [{syslog, [{enabled, true}, {level, debug}]}]}]}],
- []},
- {log_file_name,
- "log.file = file_name",
- [{rabbit,[{log, [{file, [{file, "file_name"}]}]}]}],
- []},
- {log_file_disabled,
- "log.file = false",
- [{rabbit,[{log, [{file, [{file, false}]}]}]}],
- []},
- {log_category_level,
- "log.connection.level = debug
- log.channel.level = error",
- [{rabbit,[{log, [{categories, [{connection, [{level, debug}]},
- {channel, [{level, error}]}]}]}]}],
- []},
- {log_category_file,
- "log.connection.file = file_name_connection
- log.channel.file = file_name_channel",
- [{rabbit,[{log, [{categories, [{connection, [{file, "file_name_connection"}]},
- {channel, [{file, "file_name_channel"}]}]}]}]}],
- []},
{default_worker_pool_size,
"default_worker_pool_size = 512",
@@ -694,67 +693,73 @@ credential_validator.regexp = ^abc\\d+",
]}],
[]},
- {log_syslog_settings,
- "log.syslog = true
- log.syslog.identity = rabbitmq
- log.syslog.facility = user
- log.syslog.multiline_mode = true
- log.syslog.ip = 10.10.10.10
- log.syslog.port = 123",
- [
- {rabbit,[{log, [{syslog, [{enabled, true}]}]}]},
- {syslog, [{app_name, "rabbitmq"},
- {facility, user},
- {multiline_mode, true},
- {dest_host, "10.10.10.10"},
- {dest_port, 123}]}
- ],
- []},
- {log_syslog_tcp,
- "log.syslog = true
- log.syslog.transport = tcp
- log.syslog.protocol = rfc5424
- log.syslog.host = syslog.my-network.com",
- [
- {rabbit,[{log, [{syslog, [{enabled, true}]}]}]},
- {syslog, [{protocol, {rfc5424, tcp}},
- {dest_host, "syslog.my-network.com"}]}
- ],
- []},
- {log_syslog_udp_default,
- "log.syslog = true
- log.syslog.protocol = rfc3164",
- [
- {rabbit,[{log, [{syslog, [{enabled, true}]}]}]},
- {syslog, [{protocol, {rfc3164, udp}}]}
- ],
- []},
- {log_syslog_tls,
- "log.syslog = true
- log.syslog.transport = tls
- log.syslog.ssl_options.cacertfile = test/config_schema_SUITE_data/certs/cacert.pem
- log.syslog.ssl_options.certfile = test/config_schema_SUITE_data/certs/cert.pem
- log.syslog.ssl_options.keyfile = test/config_schema_SUITE_data/certs/key.pem
- log.syslog.ssl_options.verify = verify_peer
- log.syslog.ssl_options.fail_if_no_peer_cert = false",
- [{rabbit, [{log, [{syslog, [{enabled, true}]}]}]},
- {syslog, [{protocol, {rfc5424, tls,
- [{verify,verify_peer},
- {fail_if_no_peer_cert,false},
- {cacertfile,"test/config_schema_SUITE_data/certs/cacert.pem"},
- {certfile,"test/config_schema_SUITE_data/certs/cert.pem"},
- {keyfile,"test/config_schema_SUITE_data/certs/key.pem"}]}}]}],
- []},
-
%%
%% Definitions
%%
+ %% classic configuration key, implies a local filesystem path
{definition_files, "load_definitions = test/definition_import_SUITE_data/case1.json",
[{rabbit,
[{load_definitions, "test/definition_import_SUITE_data/case1.json"}]}],
[]},
+ %% modern configuration key, local filesystem source
+ {definition_files, "definitions.import_backend = local_filesystem
+ definitions.local.path = test/definition_import_SUITE_data/case1.json",
+ [{rabbit, [
+ {definitions, [
+ {import_backend, rabbit_definitions_import_local_filesystem},
+ {local_path, "test/definition_import_SUITE_data/case1.json"}
+ ]}
+ ]}],
+ []},
+
+ %% modern configuration key, HTTPS source
+ {definition_files, "definitions.import_backend = https
+ definitions.https.url = https://rabbitmq.eng.megacorp.local/env-1/case1.json
+ definitions.tls.versions.1 = tlsv1.2
+ definitions.tls.log_level = error
+
+ definitions.tls.secure_renegotiate = true
+
+ definitions.tls.ciphers.1 = ECDHE-ECDSA-AES256-GCM-SHA384
+ definitions.tls.ciphers.2 = ECDHE-RSA-AES256-GCM-SHA384
+ definitions.tls.ciphers.3 = ECDH-ECDSA-AES256-GCM-SHA384
+ definitions.tls.ciphers.4 = ECDH-RSA-AES256-GCM-SHA384
+ definitions.tls.ciphers.5 = DHE-RSA-AES256-GCM-SHA384
+ definitions.tls.ciphers.6 = DHE-DSS-AES256-GCM-SHA384
+ definitions.tls.ciphers.7 = ECDHE-ECDSA-AES128-GCM-SHA256
+ definitions.tls.ciphers.8 = ECDHE-RSA-AES128-GCM-SHA256
+ definitions.tls.ciphers.9 = ECDH-ECDSA-AES128-GCM-SHA256
+ definitions.tls.ciphers.10 = ECDH-RSA-AES128-GCM-SHA256
+ definitions.tls.ciphers.11 = DHE-RSA-AES128-GCM-SHA256
+ definitions.tls.ciphers.12 = DHE-DSS-AES128-GCM-SHA256",
+ [{rabbit, [
+ {definitions, [
+ {import_backend, rabbit_definitions_import_https},
+ {url, "https://rabbitmq.eng.megacorp.local/env-1/case1.json"},
+ {ssl_options, [
+ {log_level, error},
+ {secure_renegotiate, true},
+ {versions, ['tlsv1.2']},
+ {ciphers, [
+ "ECDHE-ECDSA-AES256-GCM-SHA384",
+ "ECDHE-RSA-AES256-GCM-SHA384",
+ "ECDH-ECDSA-AES256-GCM-SHA384",
+ "ECDH-RSA-AES256-GCM-SHA384",
+ "DHE-RSA-AES256-GCM-SHA384",
+ "DHE-DSS-AES256-GCM-SHA384",
+ "ECDHE-ECDSA-AES128-GCM-SHA256",
+ "ECDHE-RSA-AES128-GCM-SHA256",
+ "ECDH-ECDSA-AES128-GCM-SHA256",
+ "ECDH-RSA-AES128-GCM-SHA256",
+ "DHE-RSA-AES128-GCM-SHA256",
+ "DHE-DSS-AES128-GCM-SHA256"
+ ]}
+ ]}
+ ]}]}],
+ []},
+
%%
%% Raft
%%
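
The new definitions.* snippets above pair each rabbitmq.conf form with the application environment it is expected to generate. As a rough advanced.config-style illustration (a sketch derived from the expected output of the local_filesystem snippet above, not an addition to the schema):

    %% App env equivalent of:
    %%   definitions.import_backend = local_filesystem
    %%   definitions.local.path     = test/definition_import_SUITE_data/case1.json
    [{rabbit, [
        {definitions, [
            {import_backend, rabbit_definitions_import_local_filesystem},
            {local_path, "test/definition_import_SUITE_data/case1.json"}
        ]}
    ]}].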
diff --git a/deps/rabbit/test/consumer_timeout_SUITE.erl b/deps/rabbit/test/consumer_timeout_SUITE.erl
index 468714328d..1a956ca843 100644
--- a/deps/rabbit/test/consumer_timeout_SUITE.erl
+++ b/deps/rabbit/test/consumer_timeout_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(consumer_timeout_SUITE).
diff --git a/deps/rabbit/test/crashing_queues_SUITE.erl b/deps/rabbit/test/crashing_queues_SUITE.erl
index cf88fb00f0..371914347d 100644
--- a/deps/rabbit/test/crashing_queues_SUITE.erl
+++ b/deps/rabbit/test/crashing_queues_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(crashing_queues_SUITE).
@@ -162,7 +162,8 @@ declare_racer_loop(Parent, Conn, Decl) ->
try
case amqp_connection:open_channel(Conn) of
{ok, Ch} -> amqp_channel:call(Ch, Decl);
- closing -> ok
+ closing -> ok;
+ {error, _} -> ok
end
catch
exit:_ ->
diff --git a/deps/rabbit/test/dead_lettering_SUITE.erl b/deps/rabbit/test/dead_lettering_SUITE.erl
index 4ee917aa21..4eca83c8bc 100644
--- a/deps/rabbit/test/dead_lettering_SUITE.erl
+++ b/deps/rabbit/test/dead_lettering_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% For the full spec see: https://www.rabbitmq.com/dlx.html
%%
diff --git a/deps/rabbit/test/definition_import_SUITE.erl b/deps/rabbit/test/definition_import_SUITE.erl
index ac0c18da99..ba3cb979d1 100644
--- a/deps/rabbit/test/definition_import_SUITE.erl
+++ b/deps/rabbit/test/definition_import_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(definition_import_SUITE).
@@ -15,7 +15,11 @@
all() ->
[
- {group, boot_time_import},
+ %% uses rabbit.load_definitions
+ {group, boot_time_import_using_classic_source},
+ %% uses rabbit.definitions with import_backend set to local_filesystem
+ {group, boot_time_import_using_modern_local_filesystem_source},
+ {group, boot_time_import_using_public_https_source},
{group, roundtrip},
{group, import_on_a_running_node}
].
@@ -39,10 +43,25 @@ groups() ->
import_case10,
import_case11,
import_case12,
- import_case13
+ import_case13,
+ import_case14,
+ import_case15,
+ import_case16,
+ import_case17,
+ import_case18,
+ import_case19
]},
- {boot_time_import, [], [
- import_on_a_booting_node
+
+ {boot_time_import_using_classic_source, [], [
+ import_on_a_booting_node_using_classic_local_source
+ ]},
+
+ {boot_time_import_using_modern_local_filesystem_source, [], [
+ import_on_a_booting_node_using_modern_local_filesystem_source
+ ]},
+
+ {boot_time_import_using_public_https_source, [], [
+ import_on_a_booting_node_using_public_https_source
]},
{roundtrip, [], [
@@ -62,7 +81,7 @@ init_per_suite(Config) ->
end_per_suite(Config) ->
Config.
-init_per_group(boot_time_import = Group, Config) ->
+init_per_group(boot_time_import_using_classic_source = Group, Config) ->
CasePath = filename:join(?config(data_dir, Config), "case5.json"),
Config1 = rabbit_ct_helpers:set_config(Config, [
{rmq_nodename_suffix, Group},
@@ -73,6 +92,53 @@ init_per_group(boot_time_import = Group, Config) ->
{load_definitions, CasePath}
]}),
rabbit_ct_helpers:run_setup_steps(Config2, rabbit_ct_broker_helpers:setup_steps());
+%% same as the classic source semantically but uses a different configuration structure
+init_per_group(boot_time_import_using_modern_local_filesystem_source = Group, Config) ->
+ CasePath = filename:join(?config(data_dir, Config), "case5.json"),
+ Config1 = rabbit_ct_helpers:set_config(Config, [
+ {rmq_nodename_suffix, Group},
+ {rmq_nodes_count, 1}
+ ]),
+ Config2 = rabbit_ct_helpers:merge_app_env(Config1,
+ {rabbit, [
+ {definitions, [
+ {import_backend, rabbit_definitions_import_local_filesystem},
+ {local_path, CasePath}
+ ]}
+ ]}),
+ rabbit_ct_helpers:run_setup_steps(Config2, rabbit_ct_broker_helpers:setup_steps());
+init_per_group(boot_time_import_using_public_https_source = Group, Config) ->
+ Config1 = rabbit_ct_helpers:set_config(Config, [
+ {rmq_nodename_suffix, Group},
+ {rmq_nodes_count, 1}
+ ]),
+ Config2 = rabbit_ct_helpers:merge_app_env(Config1,
+ {rabbit, [
+ {definitions, [
+ {import_backend, rabbit_definitions_import_https},
+ {url, "https://gist.githubusercontent.com/michaelklishin/e73b0114728d9391425d0644304f264a/raw/f15642771f099c60b6fa93f75d46a4246bb47c45/upstream.definitions.json"},
+ {ssl_options, [
+ {log_level, error},
+ {secure_renegotiate, true},
+ {versions, ['tlsv1.2']},
+ {ciphers, [
+ "ECDHE-ECDSA-AES256-GCM-SHA384",
+ "ECDHE-RSA-AES256-GCM-SHA384",
+ "ECDH-ECDSA-AES256-GCM-SHA384",
+ "ECDH-RSA-AES256-GCM-SHA384",
+ "DHE-RSA-AES256-GCM-SHA384",
+ "DHE-DSS-AES256-GCM-SHA384",
+ "ECDHE-ECDSA-AES128-GCM-SHA256",
+ "ECDHE-RSA-AES128-GCM-SHA256",
+ "ECDH-ECDSA-AES128-GCM-SHA256",
+ "ECDH-RSA-AES128-GCM-SHA256",
+ "DHE-RSA-AES128-GCM-SHA256",
+ "DHE-DSS-AES128-GCM-SHA256"
+ ]}
+ ]}
+ ]}
+ ]}),
+ rabbit_ct_helpers:run_setup_steps(Config2, rabbit_ct_broker_helpers:setup_steps());
init_per_group(Group, Config) ->
Config1 = rabbit_ct_helpers:set_config(Config, [
{rmq_nodename_suffix, Group}
@@ -141,18 +207,92 @@ import_case13(Config) ->
Skip
end.
+import_case14(Config) -> import_file_case(Config, "case14").
+%% contains a user with tags as a list
+import_case15(Config) -> import_file_case(Config, "case15").
+%% contains a virtual host with tags
+import_case16(Config) ->
+ case rabbit_ct_helpers:is_mixed_versions() of
+ false ->
+ case rabbit_ct_broker_helpers:enable_feature_flag(Config, virtual_host_metadata) of
+ ok ->
+ import_file_case(Config, "case16"),
+ VHost = <<"tagged">>,
+ VHostIsImported =
+ fun () ->
+ case vhost_lookup(Config, VHost) of
+ {error, {no_such_vhosts, _}} -> false;
+ _ -> true
+ end
+ end,
+ rabbit_ct_helpers:await_condition(VHostIsImported, 20000),
+ VHostRec = vhost_lookup(Config, VHost),
+ ?assertEqual(<<"A case16 description">>, vhost:get_description(VHostRec)),
+ ?assertEqual([multi_dc_replication,ab,cde], vhost:get_tags(VHostRec)),
+
+ ok;
+ Skip ->
+ Skip
+ end;
+ _ ->
+ %% skip the test in mixed version mode
+ {skip, "Should not run in mixed version environments"}
+ end.
+
+import_case17(Config) -> import_invalid_file_case(Config, "failing_case17").
+
+import_case18(Config) ->
+ case rabbit_ct_helpers:is_mixed_versions() of
+ false ->
+ case rabbit_ct_broker_helpers:enable_feature_flag(Config, user_limits) of
+ ok ->
+ import_file_case(Config, "case18"),
+ User = <<"limited_guest">>,
+ UserIsImported =
+ fun () ->
+ case user_lookup(Config, User) of
+ {error, not_found} -> false;
+ _ -> true
+ end
+ end,
+ rabbit_ct_helpers:await_condition(UserIsImported, 20000),
+ {ok, UserRec} = user_lookup(Config, User),
+ ?assertEqual(#{<<"max-connections">> => 2}, internal_user:get_limits(UserRec)),
+ ok;
+ Skip ->
+ Skip
+ end;
+ _ ->
+ %% skip the test in mixed version mode
+ {skip, "Should not run in mixed version environments"}
+ end.
+
+import_case19(Config) -> import_invalid_file_case(Config, "failing_case19").
+
export_import_round_trip_case1(Config) ->
- %% case 6 has runtime parameters that do not depend on any plugins
- import_file_case(Config, "case6"),
- Defs = export(Config),
- import_raw(Config, rabbit_json:encode(Defs)).
+ case rabbit_ct_helpers:is_mixed_versions() of
+ false ->
+ %% case 6 has runtime parameters that do not depend on any plugins
+ import_file_case(Config, "case6"),
+ Defs = export(Config),
+ import_raw(Config, rabbit_json:encode(Defs));
+ _ ->
+ %% skip the test in mixed version mode
+ {skip, "Should not run in mixed version environments"}
+ end.
export_import_round_trip_case2(Config) ->
- import_file_case(Config, "case9", "case9a"),
- Defs = export(Config),
- import_parsed(Config, Defs).
+ case rabbit_ct_helpers:is_mixed_versions() of
+ false ->
+ import_file_case(Config, "case9", "case9a"),
+ Defs = export(Config),
+ import_parsed(Config, Defs);
+ _ ->
+ %% skip the test in mixed version mode
+ {skip, "Should not run in mixed version environments"}
+ end.
-import_on_a_booting_node(Config) ->
+import_on_a_booting_node_using_classic_local_source(Config) ->
%% see case5.json
VHost = <<"vhost2">>,
%% verify that vhost2 eventually starts
@@ -161,6 +301,23 @@ import_on_a_booting_node(Config) ->
{error, timeout} -> ct:fail("virtual host ~p was not imported on boot", [VHost])
end.
+import_on_a_booting_node_using_modern_local_filesystem_source(Config) ->
+ %% see case5.json
+ VHost = <<"vhost2">>,
+ %% verify that vhost2 eventually starts
+ case rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_vhost, await_running_on_all_nodes, [VHost, 3000]) of
+ ok -> ok;
+ {error, timeout} -> ct:fail("virtual host ~p was not imported on boot", [VHost])
+ end.
+
+import_on_a_booting_node_using_public_https_source(Config) ->
+ VHost = <<"bunny_testbed">>,
+ %% verify that virtual host eventually starts
+ case rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_vhost, await_running_on_all_nodes, [VHost, 3000]) of
+ ok -> ok;
+ {error, timeout} -> ct:fail("virtual host ~p was not imported on boot", [VHost])
+ end.
+
%%
%% Implementation
%%
@@ -255,3 +412,9 @@ run_invalid_import_case(Path) ->
queue_lookup(Config, VHost, Name) ->
rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_amqqueue, lookup, [rabbit_misc:r(VHost, queue, Name)]).
+
+vhost_lookup(Config, VHost) ->
+ rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_vhost, lookup, [VHost]).
+
+user_lookup(Config, User) ->
+ rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_auth_backend_internal, lookup_user, [User]).
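
Condensed, the boot-time HTTPS import group added above merges the following application environment before the broker starts (a sketch of the hunk above; the cipher list is elided here for brevity):

    %% Sketch of the env merged by init_per_group/2 for
    %% boot_time_import_using_public_https_source; the full cipher list is
    %% spelled out in the hunk above and omitted here.
    {rabbit, [
        {definitions, [
            {import_backend, rabbit_definitions_import_https},
            {url, "https://gist.githubusercontent.com/michaelklishin/e73b0114728d9391425d0644304f264a/raw/f15642771f099c60b6fa93f75d46a4246bb47c45/upstream.definitions.json"},
            {ssl_options, [{log_level, error},
                           {secure_renegotiate, true},
                           {versions, ['tlsv1.2']}]}
        ]}
    ]}.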
diff --git a/deps/rabbit/test/definition_import_SUITE_data/case14.json b/deps/rabbit/test/definition_import_SUITE_data/case14.json
new file mode 100644
index 0000000000..ef75f8422c
--- /dev/null
+++ b/deps/rabbit/test/definition_import_SUITE_data/case14.json
@@ -0,0 +1,34 @@
+{
+ "vhosts": [
+ {
+ "name": "/"
+ }
+ ],
+ "exchanges": [
+ {
+ "name": "x_demo",
+ "vhost": "/",
+ "type": "fanout",
+ "durable": true,
+ "auto_delete": false,
+ "internal": false
+ }
+ ],
+ "queues": [
+ {
+ "name": "q_demo",
+ "vhost": "/",
+ "durable": true,
+ "auto_delete": false
+ }
+ ],
+ "bindings": [
+ {
+ "source": "x_demo",
+ "vhost": "/",
+ "destination": "q_demo",
+ "destination_type": "queue",
+ "routing_key": "#"
+ }
+ ]
+}
diff --git a/deps/rabbit/test/definition_import_SUITE_data/case15.json b/deps/rabbit/test/definition_import_SUITE_data/case15.json
new file mode 100644
index 0000000000..337541c54d
--- /dev/null
+++ b/deps/rabbit/test/definition_import_SUITE_data/case15.json
@@ -0,0 +1,53 @@
+{
+ "bindings": [],
+ "exchanges": [],
+ "global_parameters": [
+ {
+ "name": "cluster_name",
+ "value": "rabbit@rabbitmq"
+ }
+ ],
+ "parameters": [],
+ "permissions": [
+ {
+ "configure": ".*",
+ "read": ".*",
+ "user": "guest",
+ "vhost": "/",
+ "write": ".*"
+ }
+ ],
+ "policies": [],
+ "queues": [],
+ "rabbit_version": "3.9.0",
+ "rabbitmq_version": "3.9.0",
+ "topic_permissions": [],
+ "users": [
+ {
+ "hashing_algorithm": "rabbit_password_hashing_sha256",
+ "name": "guest",
+ "password_hash": "BYipq3D94qlyiZVOAAYLVdN1v8H0BOrOpM9SH6ma5aB354FA",
+ "tags": "administrator"
+ },
+ {
+ "hashing_algorithm": "rabbit_password_hashing_sha256",
+ "name": "tagged-user",
+ "password_hash": "t/Ah03PwU/ol8vkarb+oEYpylRSBWXFAau3eXz3lrjGxtGEK",
+ "tags": [
+ "monitoring",
+ "policymaker",
+ "impersonator"
+ ]
+ }
+ ],
+ "vhosts": [
+ {
+ "limits": [],
+ "metadata": {
+ "description": "Default virtual host",
+ "tags": []
+ },
+ "name": "/"
+ }
+ ]
+}
diff --git a/deps/rabbit/test/definition_import_SUITE_data/case16.json b/deps/rabbit/test/definition_import_SUITE_data/case16.json
new file mode 100644
index 0000000000..c313b72a33
--- /dev/null
+++ b/deps/rabbit/test/definition_import_SUITE_data/case16.json
@@ -0,0 +1,58 @@
+{
+ "bindings": [],
+ "exchanges": [],
+ "global_parameters": [
+ {
+ "name": "cluster_name",
+ "value": "rabbitmq@localhost"
+ }
+ ],
+ "parameters": [],
+ "permissions": [
+ {
+ "configure": ".*",
+ "read": ".*",
+ "user": "guest",
+ "vhost": "/",
+ "write": ".*"
+ }
+ ],
+ "policies": [],
+ "queues": [],
+ "rabbit_version": "3.9.1",
+ "rabbitmq_version": "3.9.1",
+ "topic_permissions": [],
+ "users": [
+ {
+ "hashing_algorithm": "rabbit_password_hashing_sha256",
+ "limits": {},
+ "name": "guest",
+ "password_hash": "wS4AT3B4Z5RpWlFn1FA30osf2C75D7WA3gem591ACDZ6saO6",
+ "tags": [
+ "administrator"
+ ]
+ }
+ ],
+ "vhosts": [
+ {
+ "limits": [],
+ "metadata": {
+ "description": "Default virtual host",
+ "tags": []
+ },
+ "name": "/"
+ },
+ {
+ "limits": [],
+ "metadata": {
+ "description": "A case16 description",
+ "tags": [
+ "multi_dc_replication",
+ "ab",
+ "cde"
+ ]
+ },
+ "name": "tagged"
+ }
+ ]
+}
diff --git a/deps/rabbit/test/definition_import_SUITE_data/case18.json b/deps/rabbit/test/definition_import_SUITE_data/case18.json
new file mode 100644
index 0000000000..9e0f755beb
--- /dev/null
+++ b/deps/rabbit/test/definition_import_SUITE_data/case18.json
@@ -0,0 +1,46 @@
+{
+ "bindings": [],
+ "exchanges": [],
+ "global_parameters": [
+ {
+ "name": "cluster_name",
+ "value": "rabbitmq@localhost"
+ }
+ ],
+ "parameters": [],
+ "permissions": [
+ {
+ "configure": ".*",
+ "read": ".*",
+ "user": "guest",
+ "vhost": "/",
+ "write": ".*"
+ }
+ ],
+ "policies": [],
+ "queues": [],
+ "rabbit_version": "3.9.1",
+ "rabbitmq_version": "3.9.1",
+ "topic_permissions": [],
+ "users": [
+ {
+ "hashing_algorithm": "rabbit_password_hashing_sha256",
+ "limits": {"max-connections" : 2},
+ "name": "limited_guest",
+ "password_hash": "wS4AT3B4Z5RpWlFn1FA30osf2C75D7WA3gem591ACDZ6saO6",
+ "tags": [
+ "administrator"
+ ]
+ }
+ ],
+ "vhosts": [
+ {
+ "limits": [],
+ "name": "/"
+ },
+ {
+ "limits": [],
+ "name": "tagged"
+ }
+ ]
+}
diff --git a/deps/rabbit/test/definition_import_SUITE_data/case9/case9a.json b/deps/rabbit/test/definition_import_SUITE_data/case9/case9a.json
index 2e7a77962d..07f9bba7bf 100644
--- a/deps/rabbit/test/definition_import_SUITE_data/case9/case9a.json
+++ b/deps/rabbit/test/definition_import_SUITE_data/case9/case9a.json
@@ -1 +1,447 @@
-{"rabbit_version":"3.7.13","users":[{"name":"langohr","password_hash":"7p9PXlsYs92NlHSdNgPoDXmN77NqeGpzCTHpElq/wPS1eAEd","hashing_algorithm":"rabbit_password_hashing_sha256","tags":""},{"name":"bunny_reader","password_hash":"ExmGdjBTmQEPxcW2z+dsOuPvjFbTBiYQgMByzfpE/IIXplYG","hashing_algorithm":"rabbit_password_hashing_sha256","tags":""},{"name":"bunny_gem","password_hash":"8HH7uxmZS3FDldlYmHpFEE5+gWaeQaim8qpWIHkmNxuQK8xO","hashing_algorithm":"rabbit_password_hashing_sha256","tags":""},{"name":"guest","password_hash":"CPCbkNAHXgQ7vmrqwP9e7RWQsE8U2DqN7JA4ggS50c4LwDda","hashing_algorithm":"rabbit_password_hashing_sha256","tags":"administrator"},{"name":"temp-user","password_hash":"CfUQkDeOYDrPkACDCjoF5zySbsXPIoMgNfv7FWfEpVFGegnL","hashing_algorithm":"rabbit_password_hashing_sha256","tags":"management"}],"vhosts":[{"name":"langohr_testbed"},{"name":"bunny_testbed"},{"name":"/"}],"permissions":[{"user":"bunny_reader","vhost":"bunny_testbed","configure":"^---$","write":"^---$","read":".*"},{"user":"bunny_gem","vhost":"bunny_testbed","configure":".*","write":".*","read":".*"},{"user":"guest","vhost":"/","configure":".*","write":".*","read":".*"},{"user":"langohr","vhost":"langohr_testbed","configure":".*","write":".*","read":".*"},{"user":"guest","vhost":"bunny_testbed","configure":".*","write":".*","read":".*"},{"user":"guest","vhost":"langohr_testbed","configure":".*","write":".*","read":".*"},{"user":"langohr","vhost":"/","configure":".*","write":".*","read":".*"},{"user":"temp-user","vhost":"/","configure":".*","write":".*","read":".*"}],"topic_permissions":[],"parameters":[],"global_parameters":[{"name":"cluster_name","value":"rabbit@localhost"}],"policies":[],"queues":[{"name":"bunny.basic_consume0.1364356981103202","vhost":"bunny_testbed","durable":false,"auto_delete":true,"arguments":{}},{"name":"return","vhost":"/","durable":false,"auto_delete":true,"arguments":{}},{"name":"q1","vhost":"/","durable":true,"auto_delete":false,"arguments":{}},{"name":"declareArgs-deliveries-dead-letter","vhost":"/","durable":false,"auto_delete":true,"arguments":{}},{"name":"test.rabbitmq-basic-nack","vhost":"/","durable":false,"auto_delete":true,"arguments":{}},{"name":"langohr.test.recovery.q1","vhost":"/","durable":true,"auto_delete":true,"arguments":{}},{"name":"langohr.tests2.queues.client-named.durable.non-exclusive.non-auto-deleted","vhost":"/","durable":true,"auto_delete":false,"arguments":{}},{"name":"test.tx.rollback","vhost":"/","durable":false,"auto_delete":true,"arguments":{}},{"name":"test-integration-declared-passive-queue","vhost":"/","durable":false,"auto_delete":true,"arguments":{}},{"name":"langohr.tests2.queues.client-named.non-durable.non-exclusive.auto-deleted","vhost":"/","durable":false,"auto_delete":true,"arguments":{}},{"name":"test.recover","vhost":"/","durable":false,"auto_delete":true,"arguments":{}},{"name":"amq.gen-7EZF7WjGIQFDoXexVF-e8w","vhost":"/","durable":false,"auto_delete":true,"arguments":{"x-message-ttl":1500}},{"name":"test.integration.channel.error","vhost":"/","durable":false,"auto_delete":true,"arguments":{}},{"name":"confirm","vhost":"/","durable":false,"auto_delete":true,"arguments":{}},{"name":"test.rabbitmq-message-ttl","vhost":"/","durable":false,"auto_delete":true,"arguments":{"x-message-ttl":100}},{"name":"declareWithTTL","vhost":"/","durable":false,"auto_delete":true,"arguments":{"x-message-ttl":9000000}},{"name":"test.tx.commit","vhost":"/","durable":false,"auto_delete":true,"arguments":{}},{"name":"test.get-ok","vhost":"/","durable":false,"auto_delete":tr
ue,"arguments":{}},{"name":"langohr.tests2.queues.non-auto-deleted1","vhost":"/","durable":false,"auto_delete":true,"arguments":{}}],"exchanges":[{"name":"declareArgs-dead-letter","vhost":"/","type":"fanout","durable":false,"auto_delete":true,"internal":false,"arguments":{}},{"name":"langohr.tests.exchanges.topic5","vhost":"/","type":"topic","durable":false,"auto_delete":false,"internal":false,"arguments":{}},{"name":"langohr.extensions.altexchanges.direct1","vhost":"/","type":"direct","durable":false,"auto_delete":true,"internal":false,"arguments":{"alternate-exchange":"langohr.extensions.altexchanges.fanout1"}},{"name":"langohr.tests.exchanges.fanout1","vhost":"/","type":"fanout","durable":false,"auto_delete":false,"internal":false,"arguments":{}},{"name":"langohr.tests.exchanges.direct3","vhost":"/","type":"direct","durable":false,"auto_delete":true,"internal":false,"arguments":{}},{"name":"langohr.tests.exchanges.topic4","vhost":"/","type":"topic","durable":false,"auto_delete":true,"internal":false,"arguments":{}},{"name":"langohr.tests.exchanges.fanout3","vhost":"/","type":"fanout","durable":false,"auto_delete":true,"internal":false,"arguments":{}},{"name":"langohr.tests.exchanges.direct4","vhost":"/","type":"direct","durable":false,"auto_delete":false,"internal":false,"arguments":{}},{"name":"langohr.tests.exchanges.topic2","vhost":"/","type":"topic","durable":true,"auto_delete":false,"internal":false,"arguments":{}},{"name":"test-integration-declared-passive-exchange","vhost":"/","type":"direct","durable":false,"auto_delete":true,"internal":false,"arguments":{}},{"name":"test-channel-still-exists","vhost":"/","type":"direct","durable":false,"auto_delete":true,"internal":false,"arguments":{}},{"name":"langohr.tests.exchanges.topic1","vhost":"/","type":"topic","durable":false,"auto_delete":false,"internal":false,"arguments":{}},{"name":"langohr.tests.exchanges.fanout2","vhost":"/","type":"fanout","durable":true,"auto_delete":false,"internal":false,"arguments":{}},{"name":"langohr.tests.exchanges.direct1","vhost":"/","type":"direct","durable":false,"auto_delete":false,"internal":false,"arguments":{}},{"name":"langohr.tests.exchanges.direct2","vhost":"/","type":"direct","durable":true,"auto_delete":false,"internal":false,"arguments":{}},{"name":"langohr.tests.exchanges.headers2","vhost":"/","type":"headers","durable":false,"auto_delete":false,"internal":false,"arguments":{}},{"name":"langohr.tests.exchanges.topic3","vhost":"/","type":"topic","durable":false,"auto_delete":true,"internal":false,"arguments":{}},{"name":"langohr.test.recovery.fanout1","vhost":"/","type":"fanout","durable":true,"auto_delete":false,"internal":false,"arguments":{}},{"name":"langohr.tests.exchanges.fanout4","vhost":"/","type":"fanout","durable":false,"auto_delete":false,"internal":false,"arguments":{}}],"bindings":[{"source":"amq.fanout","vhost":"/","destination":"langohr.tests2.queues.client-named.non-durable.non-exclusive.auto-deleted","destination_type":"queue","routing_key":"","arguments":{}},{"source":"declareArgs-dead-letter","vhost":"/","destination":"declareArgs-deliveries-dead-letter","destination_type":"queue","routing_key":"#","arguments":{}}]} \ No newline at end of file
+{
+ "rabbit_version": "3.7.13",
+ "users": [
+ {
+ "name": "langohr",
+ "password_hash": "7p9PXlsYs92NlHSdNgPoDXmN77NqeGpzCTHpElq/wPS1eAEd",
+ "hashing_algorithm": "rabbit_password_hashing_sha256",
+ "tags": ""
+ },
+ {
+ "name": "bunny_reader",
+ "password_hash": "ExmGdjBTmQEPxcW2z+dsOuPvjFbTBiYQgMByzfpE/IIXplYG",
+ "hashing_algorithm": "rabbit_password_hashing_sha256",
+ "tags": ""
+ },
+ {
+ "name": "bunny_gem",
+ "password_hash": "8HH7uxmZS3FDldlYmHpFEE5+gWaeQaim8qpWIHkmNxuQK8xO",
+ "hashing_algorithm": "rabbit_password_hashing_sha256",
+ "tags": ""
+ },
+ {
+ "name": "guest",
+ "password_hash": "CPCbkNAHXgQ7vmrqwP9e7RWQsE8U2DqN7JA4ggS50c4LwDda",
+ "hashing_algorithm": "rabbit_password_hashing_sha256",
+ "tags": "administrator"
+ },
+ {
+ "name": "temp-user",
+ "password_hash": "CfUQkDeOYDrPkACDCjoF5zySbsXPIoMgNfv7FWfEpVFGegnL",
+ "hashing_algorithm": "rabbit_password_hashing_sha256",
+ "tags": "management"
+ }
+ ],
+ "vhosts": [
+ {
+ "name": "langohr_testbed"
+ },
+ {
+ "name": "bunny_testbed"
+ },
+ {
+ "name": "/"
+ }
+ ],
+ "permissions": [
+ {
+ "user": "bunny_reader",
+ "vhost": "bunny_testbed",
+ "configure": "^---$",
+ "write": "^---$",
+ "read": ".*"
+ },
+ {
+ "user": "bunny_gem",
+ "vhost": "bunny_testbed",
+ "configure": ".*",
+ "write": ".*",
+ "read": ".*"
+ },
+ {
+ "user": "guest",
+ "vhost": "/",
+ "configure": ".*",
+ "write": ".*",
+ "read": ".*"
+ },
+ {
+ "user": "langohr",
+ "vhost": "langohr_testbed",
+ "configure": ".*",
+ "write": ".*",
+ "read": ".*"
+ },
+ {
+ "user": "guest",
+ "vhost": "bunny_testbed",
+ "configure": ".*",
+ "write": ".*",
+ "read": ".*"
+ },
+ {
+ "user": "guest",
+ "vhost": "langohr_testbed",
+ "configure": ".*",
+ "write": ".*",
+ "read": ".*"
+ },
+ {
+ "user": "langohr",
+ "vhost": "/",
+ "configure": ".*",
+ "write": ".*",
+ "read": ".*"
+ },
+ {
+ "user": "temp-user",
+ "vhost": "/",
+ "configure": ".*",
+ "write": ".*",
+ "read": ".*"
+ }
+ ],
+ "topic_permissions": [],
+ "parameters": [],
+ "global_parameters": [
+ {
+ "name": "cluster_name",
+ "value": "rabbit@localhost"
+ }
+ ],
+ "policies": [],
+ "queues": [
+ {
+ "name": "bunny.basic_consume0.1364356981103202",
+ "vhost": "bunny_testbed",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "return",
+ "vhost": "/",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "q1",
+ "vhost": "/",
+ "durable": true,
+ "auto_delete": false,
+ "arguments": {}
+ },
+ {
+ "name": "declareArgs-deliveries-dead-letter",
+ "vhost": "/",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "test.rabbitmq-basic-nack",
+ "vhost": "/",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "langohr.test.recovery.q1",
+ "vhost": "/",
+ "durable": true,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "langohr.tests2.queues.client-named.durable.non-exclusive.non-auto-deleted",
+ "vhost": "/",
+ "durable": true,
+ "auto_delete": false,
+ "arguments": {}
+ },
+ {
+ "name": "test.tx.rollback",
+ "vhost": "/",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "test-integration-declared-passive-queue",
+ "vhost": "/",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "langohr.tests2.queues.client-named.non-durable.non-exclusive.auto-deleted",
+ "vhost": "/",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "test.recover",
+ "vhost": "/",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "amq.gen-7EZF7WjGIQFDoXexVF-e8w",
+ "vhost": "/",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {
+ "x-message-ttl": 1500
+ }
+ },
+ {
+ "name": "test.integration.channel.error",
+ "vhost": "/",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "confirm",
+ "vhost": "/",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "test.rabbitmq-message-ttl",
+ "vhost": "/",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {
+ "x-message-ttl": 100
+ }
+ },
+ {
+ "name": "declareWithTTL",
+ "vhost": "/",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {
+ "x-message-ttl": 9000000
+ }
+ },
+ {
+ "name": "test.tx.commit",
+ "vhost": "/",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "test.get-ok",
+ "vhost": "/",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "langohr.tests2.queues.non-auto-deleted1",
+ "vhost": "/",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {}
+ }
+ ],
+ "exchanges": [
+ {
+ "name": "declareArgs-dead-letter",
+ "vhost": "/",
+ "type": "fanout",
+ "durable": false,
+ "auto_delete": true,
+ "internal": false,
+ "arguments": {}
+ },
+ {
+ "name": "langohr.tests.exchanges.topic5",
+ "vhost": "/",
+ "type": "topic",
+ "durable": false,
+ "auto_delete": false,
+ "internal": false,
+ "arguments": {}
+ },
+ {
+ "name": "langohr.extensions.altexchanges.direct1",
+ "vhost": "/",
+ "type": "direct",
+ "durable": false,
+ "auto_delete": true,
+ "internal": false,
+ "arguments": {
+ "alternate-exchange": "langohr.extensions.altexchanges.fanout1"
+ }
+ },
+ {
+ "name": "langohr.tests.exchanges.fanout1",
+ "vhost": "/",
+ "type": "fanout",
+ "durable": false,
+ "auto_delete": false,
+ "internal": false,
+ "arguments": {}
+ },
+ {
+ "name": "langohr.tests.exchanges.direct3",
+ "vhost": "/",
+ "type": "direct",
+ "durable": false,
+ "auto_delete": true,
+ "internal": false,
+ "arguments": {}
+ },
+ {
+ "name": "langohr.tests.exchanges.topic4",
+ "vhost": "/",
+ "type": "topic",
+ "durable": false,
+ "auto_delete": true,
+ "internal": false,
+ "arguments": {}
+ },
+ {
+ "name": "langohr.tests.exchanges.fanout3",
+ "vhost": "/",
+ "type": "fanout",
+ "durable": false,
+ "auto_delete": true,
+ "internal": false,
+ "arguments": {}
+ },
+ {
+ "name": "langohr.tests.exchanges.direct4",
+ "vhost": "/",
+ "type": "direct",
+ "durable": false,
+ "auto_delete": false,
+ "internal": false,
+ "arguments": {}
+ },
+ {
+ "name": "langohr.tests.exchanges.topic2",
+ "vhost": "/",
+ "type": "topic",
+ "durable": true,
+ "auto_delete": false,
+ "internal": false,
+ "arguments": {}
+ },
+ {
+ "name": "test-integration-declared-passive-exchange",
+ "vhost": "/",
+ "type": "direct",
+ "durable": false,
+ "auto_delete": true,
+ "internal": false,
+ "arguments": {}
+ },
+ {
+ "name": "test-channel-still-exists",
+ "vhost": "/",
+ "type": "direct",
+ "durable": false,
+ "auto_delete": true,
+ "internal": false,
+ "arguments": {}
+ },
+ {
+ "name": "langohr.tests.exchanges.topic1",
+ "vhost": "/",
+ "type": "topic",
+ "durable": false,
+ "auto_delete": false,
+ "internal": false,
+ "arguments": {}
+ },
+ {
+ "name": "langohr.tests.exchanges.fanout2",
+ "vhost": "/",
+ "type": "fanout",
+ "durable": true,
+ "auto_delete": false,
+ "internal": false,
+ "arguments": {}
+ },
+ {
+ "name": "langohr.tests.exchanges.direct1",
+ "vhost": "/",
+ "type": "direct",
+ "durable": false,
+ "auto_delete": false,
+ "internal": false,
+ "arguments": {}
+ },
+ {
+ "name": "langohr.tests.exchanges.direct2",
+ "vhost": "/",
+ "type": "direct",
+ "durable": true,
+ "auto_delete": false,
+ "internal": false,
+ "arguments": {}
+ },
+ {
+ "name": "langohr.tests.exchanges.headers2",
+ "vhost": "/",
+ "type": "headers",
+ "durable": false,
+ "auto_delete": false,
+ "internal": false,
+ "arguments": {}
+ },
+ {
+ "name": "langohr.tests.exchanges.topic3",
+ "vhost": "/",
+ "type": "topic",
+ "durable": false,
+ "auto_delete": true,
+ "internal": false,
+ "arguments": {}
+ },
+ {
+ "name": "langohr.test.recovery.fanout1",
+ "vhost": "/",
+ "type": "fanout",
+ "durable": true,
+ "auto_delete": false,
+ "internal": false,
+ "arguments": {}
+ },
+ {
+ "name": "langohr.tests.exchanges.fanout4",
+ "vhost": "/",
+ "type": "fanout",
+ "durable": false,
+ "auto_delete": false,
+ "internal": false,
+ "arguments": {}
+ }
+ ],
+ "bindings": [
+ {
+ "source": "amq.fanout",
+ "vhost": "/",
+ "destination": "langohr.tests2.queues.client-named.non-durable.non-exclusive.auto-deleted",
+ "destination_type": "queue",
+ "routing_key": "",
+ "arguments": {}
+ },
+ {
+ "source": "declareArgs-dead-letter",
+ "vhost": "/",
+ "destination": "declareArgs-deliveries-dead-letter",
+ "destination_type": "queue",
+ "routing_key": "#",
+ "arguments": {}
+ }
+ ]
+} \ No newline at end of file
diff --git a/deps/rabbit/test/definition_import_SUITE_data/case9/case9b.json b/deps/rabbit/test/definition_import_SUITE_data/case9/case9b.json
index 7cadd58b17..9cfc749776 100644
--- a/deps/rabbit/test/definition_import_SUITE_data/case9/case9b.json
+++ b/deps/rabbit/test/definition_import_SUITE_data/case9/case9b.json
@@ -1 +1,586 @@
-{"rabbit_version":"3.7.13","users":[{"name":"langohr","password_hash":"7p9PXlsYs92NlHSdNgPoDXmN77NqeGpzCTHpElq/wPS1eAEd","hashing_algorithm":"rabbit_password_hashing_sha256","tags":""},{"name":"bunny_reader","password_hash":"ExmGdjBTmQEPxcW2z+dsOuPvjFbTBiYQgMByzfpE/IIXplYG","hashing_algorithm":"rabbit_password_hashing_sha256","tags":""},{"name":"bunny_gem","password_hash":"8HH7uxmZS3FDldlYmHpFEE5+gWaeQaim8qpWIHkmNxuQK8xO","hashing_algorithm":"rabbit_password_hashing_sha256","tags":""},{"name":"guest2","password_hash":"E04A7cvvsaDJBezc3Sc2jCnywe9oS4DX18qFe4dwkjIr26gf","hashing_algorithm":"rabbit_password_hashing_sha256","tags":"monitoring"},{"name":"guest","password_hash":"CPCbkNAHXgQ7vmrqwP9e7RWQsE8U2DqN7JA4ggS50c4LwDda","hashing_algorithm":"rabbit_password_hashing_sha256","tags":"administrator"},{"name":"temp-user","password_hash":"CfUQkDeOYDrPkACDCjoF5zySbsXPIoMgNfv7FWfEpVFGegnL","hashing_algorithm":"rabbit_password_hashing_sha256","tags":"management"}],"vhosts":[{"name":"langohr_testbed"},{"name":"bunny_testbed"},{"name":"/"},{"name":"vhost3"}],"permissions":[{"user":"bunny_reader","vhost":"bunny_testbed","configure":"^---$","write":"^---$","read":".*"},{"user":"bunny_gem","vhost":"bunny_testbed","configure":".*","write":".*","read":".*"},{"user":"guest","vhost":"/","configure":".*","write":".*","read":".*"},{"user":"langohr","vhost":"langohr_testbed","configure":".*","write":".*","read":".*"},{"user":"guest","vhost":"bunny_testbed","configure":".*","write":".*","read":".*"},{"user":"guest","vhost":"langohr_testbed","configure":".*","write":".*","read":".*"},{"user":"guest","vhost":"vhost3","configure":".*","write":".*","read":".*"},{"user":"langohr","vhost":"/","configure":".*","write":".*","read":".*"},{"user":"temp-user","vhost":"/","configure":".*","write":".*","read":".*"}],"topic_permissions":[],"parameters":[{"value":{"max-connections":2000},"vhost":"/","component":"vhost-limits","name":"limits"}],"global_parameters":[{"name":"cluster_name","value":"rabbit@localhost"}],"policies":[],"queues":[{"name":"bunny.basic_consume0.7103611911099639","vhost":"bunny_testbed","durable":false,"auto_delete":true,"arguments":{}},{"name":"bunny.basic_consume0.6091120557781405","vhost":"bunny_testbed","durable":false,"auto_delete":true,"arguments":{}},{"name":"bunny.basic_consume0.8661861002262826","vhost":"bunny_testbed","durable":false,"auto_delete":true,"arguments":{}},{"name":"bunny.basic_consume0.3682573609392056","vhost":"bunny_testbed","durable":false,"auto_delete":true,"arguments":{}},{"name":"bunny.basic_consume0.14855593896585362","vhost":"bunny_testbed","durable":false,"auto_delete":true,"arguments":{}},{"name":"bunny.basic_consume0.9534242141484872","vhost":"bunny_testbed","durable":false,"auto_delete":true,"arguments":{}},{"name":"bunny.basic_consume0.9434723539955824","vhost":"bunny_testbed","durable":false,"auto_delete":true,"arguments":{}},{"name":"bunny.basic_consume0.12235844522013617","vhost":"bunny_testbed","durable":false,"auto_delete":true,"arguments":{}},{"name":"bunny.basic_consume0.8370997977912426","vhost":"bunny_testbed","durable":false,"auto_delete":true,"arguments":{}},{"name":"bunny.basic_consume0.4548488370639835","vhost":"bunny_testbed","durable":false,"auto_delete":true,"arguments":{}},{"name":"bunny.basic_consume0.2289868670635532","vhost":"bunny_testbed","durable":false,"auto_delete":true,"arguments":{}},{"name":"bunny.basic_consume0.00797124769641977","vhost":"bunny_testbed","durable":false,"auto_delete":true,"arguments":{}},{"name":"amq.gen-xddEPq9wHSNZKQbPK8pi
3A","vhost":"bunny_testbed","durable":false,"auto_delete":false,"arguments":{}},{"name":"bunny.basic_consume0.5195700828676673","vhost":"bunny_testbed","durable":false,"auto_delete":true,"arguments":{}},{"name":"bunny.basic_consume0.3071859764599716","vhost":"bunny_testbed","durable":false,"auto_delete":true,"arguments":{}},{"name":"return","vhost":"/","durable":false,"auto_delete":true,"arguments":{}},{"name":"q1","vhost":"/","durable":true,"auto_delete":false,"arguments":{}},{"name":"declareArgs-deliveries-dead-letter","vhost":"/","durable":false,"auto_delete":true,"arguments":{}},{"name":"test.rabbitmq-basic-nack","vhost":"/","durable":false,"auto_delete":true,"arguments":{}},{"name":"langohr.test.recovery.q1","vhost":"/","durable":true,"auto_delete":true,"arguments":{}},{"name":"langohr.tests2.queues.client-named.durable.non-exclusive.non-auto-deleted","vhost":"/","durable":true,"auto_delete":false,"arguments":{}},{"name":"test.tx.rollback","vhost":"/","durable":false,"auto_delete":true,"arguments":{}},{"name":"test-integration-declared-passive-queue","vhost":"/","durable":false,"auto_delete":true,"arguments":{}},{"name":"langohr.tests2.queues.client-named.non-durable.non-exclusive.auto-deleted","vhost":"/","durable":false,"auto_delete":true,"arguments":{}},{"name":"test.recover","vhost":"/","durable":false,"auto_delete":true,"arguments":{}},{"name":"amq.gen-7EZF7WjGIQFDoXexVF-e8w","vhost":"/","durable":false,"auto_delete":true,"arguments":{"x-message-ttl":1500}},{"name":"test.integration.channel.error","vhost":"/","durable":false,"auto_delete":true,"arguments":{}},{"name":"confirm","vhost":"/","durable":false,"auto_delete":true,"arguments":{}},{"name":"test.rabbitmq-message-ttl","vhost":"/","durable":false,"auto_delete":true,"arguments":{"x-message-ttl":100}},{"name":"declareWithTTL","vhost":"/","durable":false,"auto_delete":true,"arguments":{"x-message-ttl":9000000}},{"name":"test.tx.commit","vhost":"/","durable":false,"auto_delete":true,"arguments":{}},{"name":"test.get-ok","vhost":"/","durable":false,"auto_delete":true,"arguments":{}},{"name":"langohr.tests2.queues.non-auto-deleted1","vhost":"/","durable":false,"auto_delete":true,"arguments":{}},{"name":"qv3","vhost":"vhost3","durable":true,"auto_delete":false,"arguments":{}}],"exchanges":[{"name":"bunny.tests.exchanges.fanout","vhost":"bunny_testbed","type":"fanout","durable":false,"auto_delete":true,"internal":false,"arguments":{}},{"name":"declareArgs-dead-letter","vhost":"/","type":"fanout","durable":false,"auto_delete":true,"internal":false,"arguments":{}},{"name":"langohr.tests.exchanges.topic5","vhost":"/","type":"topic","durable":false,"auto_delete":false,"internal":false,"arguments":{}},{"name":"langohr.extensions.altexchanges.direct1","vhost":"/","type":"direct","durable":false,"auto_delete":true,"internal":false,"arguments":{"alternate-exchange":"langohr.extensions.altexchanges.fanout1"}},{"name":"langohr.tests.exchanges.fanout1","vhost":"/","type":"fanout","durable":false,"auto_delete":false,"internal":false,"arguments":{}},{"name":"langohr.tests.exchanges.direct3","vhost":"/","type":"direct","durable":false,"auto_delete":true,"internal":false,"arguments":{}},{"name":"langohr.tests.exchanges.topic4","vhost":"/","type":"topic","durable":false,"auto_delete":true,"internal":false,"arguments":{}},{"name":"langohr.test.recovery.fanout2","vhost":"/","type":"fanout","durable":true,"auto_delete":false,"internal":false,"arguments":{}},{"name":"langohr.tests.exchanges.fanout3","vhost":"/","type":"fanout","durable":false,"auto_dele
te":true,"internal":false,"arguments":{}},{"name":"langohr.tests.exchanges.direct4","vhost":"/","type":"direct","durable":false,"auto_delete":false,"internal":false,"arguments":{}},{"name":"langohr.tests.exchanges.topic2","vhost":"/","type":"topic","durable":true,"auto_delete":false,"internal":false,"arguments":{}},{"name":"test-integration-declared-passive-exchange","vhost":"/","type":"direct","durable":false,"auto_delete":true,"internal":false,"arguments":{}},{"name":"test-channel-still-exists","vhost":"/","type":"direct","durable":false,"auto_delete":true,"internal":false,"arguments":{}},{"name":"langohr.tests.exchanges.topic1","vhost":"/","type":"topic","durable":false,"auto_delete":false,"internal":false,"arguments":{}},{"name":"langohr.tests.exchanges.fanout2","vhost":"/","type":"fanout","durable":true,"auto_delete":false,"internal":false,"arguments":{}},{"name":"langohr.tests.exchanges.direct1","vhost":"/","type":"direct","durable":false,"auto_delete":false,"internal":false,"arguments":{}},{"name":"langohr.tests.exchanges.direct2","vhost":"/","type":"direct","durable":true,"auto_delete":false,"internal":false,"arguments":{}},{"name":"langohr.tests.exchanges.headers2","vhost":"/","type":"headers","durable":false,"auto_delete":false,"internal":false,"arguments":{}},{"name":"langohr.tests.exchanges.topic3","vhost":"/","type":"topic","durable":false,"auto_delete":true,"internal":false,"arguments":{}},{"name":"langohr.tests.exchanges.fanout4","vhost":"/","type":"fanout","durable":false,"auto_delete":false,"internal":false,"arguments":{}}],"bindings":[{"source":"amq.fanout","vhost":"/","destination":"langohr.tests2.queues.client-named.non-durable.non-exclusive.auto-deleted","destination_type":"queue","routing_key":"","arguments":{}},{"source":"declareArgs-dead-letter","vhost":"/","destination":"declareArgs-deliveries-dead-letter","destination_type":"queue","routing_key":"#","arguments":{}}]} \ No newline at end of file
+{
+ "rabbit_version": "3.7.13",
+ "users": [
+ {
+ "name": "langohr",
+ "password_hash": "7p9PXlsYs92NlHSdNgPoDXmN77NqeGpzCTHpElq/wPS1eAEd",
+ "hashing_algorithm": "rabbit_password_hashing_sha256",
+ "tags": ""
+ },
+ {
+ "name": "bunny_reader",
+ "password_hash": "ExmGdjBTmQEPxcW2z+dsOuPvjFbTBiYQgMByzfpE/IIXplYG",
+ "hashing_algorithm": "rabbit_password_hashing_sha256",
+ "tags": ""
+ },
+ {
+ "name": "bunny_gem",
+ "password_hash": "8HH7uxmZS3FDldlYmHpFEE5+gWaeQaim8qpWIHkmNxuQK8xO",
+ "hashing_algorithm": "rabbit_password_hashing_sha256",
+ "tags": ""
+ },
+ {
+ "name": "guest2",
+ "password_hash": "E04A7cvvsaDJBezc3Sc2jCnywe9oS4DX18qFe4dwkjIr26gf",
+ "hashing_algorithm": "rabbit_password_hashing_sha256",
+ "tags": "monitoring"
+ },
+ {
+ "name": "guest",
+ "password_hash": "CPCbkNAHXgQ7vmrqwP9e7RWQsE8U2DqN7JA4ggS50c4LwDda",
+ "hashing_algorithm": "rabbit_password_hashing_sha256",
+ "tags": "administrator"
+ },
+ {
+ "name": "temp-user",
+ "password_hash": "CfUQkDeOYDrPkACDCjoF5zySbsXPIoMgNfv7FWfEpVFGegnL",
+ "hashing_algorithm": "rabbit_password_hashing_sha256",
+ "tags": "management"
+ }
+ ],
+ "vhosts": [
+ {
+ "name": "langohr_testbed"
+ },
+ {
+ "name": "bunny_testbed"
+ },
+ {
+ "name": "/"
+ },
+ {
+ "name": "vhost3"
+ }
+ ],
+ "permissions": [
+ {
+ "user": "bunny_reader",
+ "vhost": "bunny_testbed",
+ "configure": "^---$",
+ "write": "^---$",
+ "read": ".*"
+ },
+ {
+ "user": "bunny_gem",
+ "vhost": "bunny_testbed",
+ "configure": ".*",
+ "write": ".*",
+ "read": ".*"
+ },
+ {
+ "user": "guest",
+ "vhost": "/",
+ "configure": ".*",
+ "write": ".*",
+ "read": ".*"
+ },
+ {
+ "user": "langohr",
+ "vhost": "langohr_testbed",
+ "configure": ".*",
+ "write": ".*",
+ "read": ".*"
+ },
+ {
+ "user": "guest",
+ "vhost": "bunny_testbed",
+ "configure": ".*",
+ "write": ".*",
+ "read": ".*"
+ },
+ {
+ "user": "guest",
+ "vhost": "langohr_testbed",
+ "configure": ".*",
+ "write": ".*",
+ "read": ".*"
+ },
+ {
+ "user": "guest",
+ "vhost": "vhost3",
+ "configure": ".*",
+ "write": ".*",
+ "read": ".*"
+ },
+ {
+ "user": "langohr",
+ "vhost": "/",
+ "configure": ".*",
+ "write": ".*",
+ "read": ".*"
+ },
+ {
+ "user": "temp-user",
+ "vhost": "/",
+ "configure": ".*",
+ "write": ".*",
+ "read": ".*"
+ }
+ ],
+ "topic_permissions": [],
+ "parameters": [
+ {
+ "value": {
+ "max-connections": 2000
+ },
+ "vhost": "/",
+ "component": "vhost-limits",
+ "name": "limits"
+ }
+ ],
+ "global_parameters": [
+ {
+ "name": "cluster_name",
+ "value": "rabbit@localhost"
+ }
+ ],
+ "policies": [],
+ "queues": [
+ {
+ "name": "bunny.basic_consume0.7103611911099639",
+ "vhost": "bunny_testbed",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "bunny.basic_consume0.6091120557781405",
+ "vhost": "bunny_testbed",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "bunny.basic_consume0.8661861002262826",
+ "vhost": "bunny_testbed",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "bunny.basic_consume0.3682573609392056",
+ "vhost": "bunny_testbed",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "bunny.basic_consume0.14855593896585362",
+ "vhost": "bunny_testbed",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "bunny.basic_consume0.9534242141484872",
+ "vhost": "bunny_testbed",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "bunny.basic_consume0.9434723539955824",
+ "vhost": "bunny_testbed",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "bunny.basic_consume0.12235844522013617",
+ "vhost": "bunny_testbed",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "bunny.basic_consume0.8370997977912426",
+ "vhost": "bunny_testbed",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "bunny.basic_consume0.4548488370639835",
+ "vhost": "bunny_testbed",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "bunny.basic_consume0.2289868670635532",
+ "vhost": "bunny_testbed",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "bunny.basic_consume0.00797124769641977",
+ "vhost": "bunny_testbed",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "amq.gen-xddEPq9wHSNZKQbPK8pi3A",
+ "vhost": "bunny_testbed",
+ "durable": false,
+ "auto_delete": false,
+ "arguments": {}
+ },
+ {
+ "name": "bunny.basic_consume0.5195700828676673",
+ "vhost": "bunny_testbed",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "bunny.basic_consume0.3071859764599716",
+ "vhost": "bunny_testbed",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "return",
+ "vhost": "/",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "q1",
+ "vhost": "/",
+ "durable": true,
+ "auto_delete": false,
+ "arguments": {}
+ },
+ {
+ "name": "declareArgs-deliveries-dead-letter",
+ "vhost": "/",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "test.rabbitmq-basic-nack",
+ "vhost": "/",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "langohr.test.recovery.q1",
+ "vhost": "/",
+ "durable": true,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "langohr.tests2.queues.client-named.durable.non-exclusive.non-auto-deleted",
+ "vhost": "/",
+ "durable": true,
+ "auto_delete": false,
+ "arguments": {}
+ },
+ {
+ "name": "test.tx.rollback",
+ "vhost": "/",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "test-integration-declared-passive-queue",
+ "vhost": "/",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "langohr.tests2.queues.client-named.non-durable.non-exclusive.auto-deleted",
+ "vhost": "/",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "test.recover",
+ "vhost": "/",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "amq.gen-7EZF7WjGIQFDoXexVF-e8w",
+ "vhost": "/",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {
+ "x-message-ttl": 1500
+ }
+ },
+ {
+ "name": "test.integration.channel.error",
+ "vhost": "/",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "confirm",
+ "vhost": "/",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "test.rabbitmq-message-ttl",
+ "vhost": "/",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {
+ "x-message-ttl": 100
+ }
+ },
+ {
+ "name": "declareWithTTL",
+ "vhost": "/",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {
+ "x-message-ttl": 9000000
+ }
+ },
+ {
+ "name": "test.tx.commit",
+ "vhost": "/",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "test.get-ok",
+ "vhost": "/",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "langohr.tests2.queues.non-auto-deleted1",
+ "vhost": "/",
+ "durable": false,
+ "auto_delete": true,
+ "arguments": {}
+ },
+ {
+ "name": "qv3",
+ "vhost": "vhost3",
+ "durable": true,
+ "auto_delete": false,
+ "arguments": {}
+ }
+ ],
+ "exchanges": [
+ {
+ "name": "bunny.tests.exchanges.fanout",
+ "vhost": "bunny_testbed",
+ "type": "fanout",
+ "durable": false,
+ "auto_delete": true,
+ "internal": false,
+ "arguments": {}
+ },
+ {
+ "name": "declareArgs-dead-letter",
+ "vhost": "/",
+ "type": "fanout",
+ "durable": false,
+ "auto_delete": true,
+ "internal": false,
+ "arguments": {}
+ },
+ {
+ "name": "langohr.tests.exchanges.topic5",
+ "vhost": "/",
+ "type": "topic",
+ "durable": false,
+ "auto_delete": false,
+ "internal": false,
+ "arguments": {}
+ },
+ {
+ "name": "langohr.extensions.altexchanges.direct1",
+ "vhost": "/",
+ "type": "direct",
+ "durable": false,
+ "auto_delete": true,
+ "internal": false,
+ "arguments": {
+ "alternate-exchange": "langohr.extensions.altexchanges.fanout1"
+ }
+ },
+ {
+ "name": "langohr.tests.exchanges.fanout1",
+ "vhost": "/",
+ "type": "fanout",
+ "durable": false,
+ "auto_delete": false,
+ "internal": false,
+ "arguments": {}
+ },
+ {
+ "name": "langohr.tests.exchanges.direct3",
+ "vhost": "/",
+ "type": "direct",
+ "durable": false,
+ "auto_delete": true,
+ "internal": false,
+ "arguments": {}
+ },
+ {
+ "name": "langohr.tests.exchanges.topic4",
+ "vhost": "/",
+ "type": "topic",
+ "durable": false,
+ "auto_delete": true,
+ "internal": false,
+ "arguments": {}
+ },
+ {
+ "name": "langohr.test.recovery.fanout2",
+ "vhost": "/",
+ "type": "fanout",
+ "durable": true,
+ "auto_delete": false,
+ "internal": false,
+ "arguments": {}
+ },
+ {
+ "name": "langohr.tests.exchanges.fanout3",
+ "vhost": "/",
+ "type": "fanout",
+ "durable": false,
+ "auto_delete": true,
+ "internal": false,
+ "arguments": {}
+ },
+ {
+ "name": "langohr.tests.exchanges.direct4",
+ "vhost": "/",
+ "type": "direct",
+ "durable": false,
+ "auto_delete": false,
+ "internal": false,
+ "arguments": {}
+ },
+ {
+ "name": "langohr.tests.exchanges.topic2",
+ "vhost": "/",
+ "type": "topic",
+ "durable": true,
+ "auto_delete": false,
+ "internal": false,
+ "arguments": {}
+ },
+ {
+ "name": "test-integration-declared-passive-exchange",
+ "vhost": "/",
+ "type": "direct",
+ "durable": false,
+ "auto_delete": true,
+ "internal": false,
+ "arguments": {}
+ },
+ {
+ "name": "test-channel-still-exists",
+ "vhost": "/",
+ "type": "direct",
+ "durable": false,
+ "auto_delete": true,
+ "internal": false,
+ "arguments": {}
+ },
+ {
+ "name": "langohr.tests.exchanges.topic1",
+ "vhost": "/",
+ "type": "topic",
+ "durable": false,
+ "auto_delete": false,
+ "internal": false,
+ "arguments": {}
+ },
+ {
+ "name": "langohr.tests.exchanges.fanout2",
+ "vhost": "/",
+ "type": "fanout",
+ "durable": true,
+ "auto_delete": false,
+ "internal": false,
+ "arguments": {}
+ },
+ {
+ "name": "langohr.tests.exchanges.direct1",
+ "vhost": "/",
+ "type": "direct",
+ "durable": false,
+ "auto_delete": false,
+ "internal": false,
+ "arguments": {}
+ },
+ {
+ "name": "langohr.tests.exchanges.direct2",
+ "vhost": "/",
+ "type": "direct",
+ "durable": true,
+ "auto_delete": false,
+ "internal": false,
+ "arguments": {}
+ },
+ {
+ "name": "langohr.tests.exchanges.headers2",
+ "vhost": "/",
+ "type": "headers",
+ "durable": false,
+ "auto_delete": false,
+ "internal": false,
+ "arguments": {}
+ },
+ {
+ "name": "langohr.tests.exchanges.topic3",
+ "vhost": "/",
+ "type": "topic",
+ "durable": false,
+ "auto_delete": true,
+ "internal": false,
+ "arguments": {}
+ },
+ {
+ "name": "langohr.tests.exchanges.fanout4",
+ "vhost": "/",
+ "type": "fanout",
+ "durable": false,
+ "auto_delete": false,
+ "internal": false,
+ "arguments": {}
+ }
+ ],
+ "bindings": [
+ {
+ "source": "amq.fanout",
+ "vhost": "/",
+ "destination": "langohr.tests2.queues.client-named.non-durable.non-exclusive.auto-deleted",
+ "destination_type": "queue",
+ "routing_key": "",
+ "arguments": {}
+ },
+ {
+ "source": "declareArgs-dead-letter",
+ "vhost": "/",
+ "destination": "declareArgs-deliveries-dead-letter",
+ "destination_type": "queue",
+ "routing_key": "#",
+ "arguments": {}
+ }
+ ]
+}
\ No newline at end of file
diff --git a/deps/rabbit/test/definition_import_SUITE_data/failing_case17.json b/deps/rabbit/test/definition_import_SUITE_data/failing_case17.json
new file mode 100644
index 0000000000..4776408833
--- /dev/null
+++ b/deps/rabbit/test/definition_import_SUITE_data/failing_case17.json
@@ -0,0 +1,19 @@
+{
+ "vhosts": [
+ {
+ "name": "\/"
+ }
+ ],
+ "policies": [
+ {
+ "vhost": "\/",
+ "pattern": "^project-nd-ns-",
+ "apply-to": "queues",
+ "definition": {
+ "expires": 120000,
+ "max-length": 10000
+ },
+ "priority": 1
+ }
+ ]
+}
diff --git a/deps/rabbit/test/definition_import_SUITE_data/failing_case19.json b/deps/rabbit/test/definition_import_SUITE_data/failing_case19.json
new file mode 100644
index 0000000000..ab9d355538
--- /dev/null
+++ b/deps/rabbit/test/definition_import_SUITE_data/failing_case19.json
@@ -0,0 +1,46 @@
+{
+ "bindings": [],
+ "exchanges": [],
+ "global_parameters": [
+ {
+ "name": "cluster_name",
+ "value": "rabbitmq@localhost"
+ }
+ ],
+ "parameters": [],
+ "permissions": [
+ {
+ "configure": ".*",
+ "read": ".*",
+ "user": "guest",
+ "vhost": "/",
+ "write": ".*"
+ }
+ ],
+ "policies": [],
+ "queues": [],
+ "rabbit_version": "3.9.1",
+ "rabbitmq_version": "3.9.1",
+ "topic_permissions": [],
+ "users": [
+ {
+ "hashing_algorithm": "rabbit_password_hashing_sha256",
+ "limits": {"max-connections" : "twomincepies"},
+ "name": "limited_guest",
+ "password_hash": "wS4AT3B4Z5RpWlFn1FA30osf2C75D7WA3gem591ACDZ6saO6",
+ "tags": [
+ "administrator"
+ ]
+ }
+ ],
+ "vhosts": [
+ {
+ "limits": [],
+ "name": "/"
+ },
+ {
+ "limits": [],
+ "name": "tagged"
+ }
+ ]
+}
diff --git a/deps/rabbit/test/disconnect_detected_during_alarm_SUITE.erl b/deps/rabbit/test/disconnect_detected_during_alarm_SUITE.erl
index 820e13efa0..d32cf306bd 100644
--- a/deps/rabbit/test/disconnect_detected_during_alarm_SUITE.erl
+++ b/deps/rabbit/test/disconnect_detected_during_alarm_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(disconnect_detected_during_alarm_SUITE).
diff --git a/deps/rabbit/test/dummy_event_receiver.erl b/deps/rabbit/test/dummy_event_receiver.erl
index 3d417b601b..6267d1c9c5 100644
--- a/deps/rabbit/test/dummy_event_receiver.erl
+++ b/deps/rabbit/test/dummy_event_receiver.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(dummy_event_receiver).
@@ -12,7 +12,7 @@
-export([init/1, handle_call/2, handle_event/2, handle_info/2,
terminate/2, code_change/3]).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
start(Pid, Nodes, Types) ->
Oks = [ok || _ <- Nodes],
diff --git a/deps/rabbit/test/dummy_interceptor.erl b/deps/rabbit/test/dummy_interceptor.erl
index 6d510a3073..d173e1474a 100644
--- a/deps/rabbit/test/dummy_interceptor.erl
+++ b/deps/rabbit/test/dummy_interceptor.erl
@@ -19,8 +19,14 @@ intercept(#'basic.publish'{} = Method, Content, _IState) ->
Content2 = Content#content{payload_fragments_rev = []},
{Method, Content2};
+%% Use 'queue.declare' to test #amqp_error{} handling
+intercept(#'queue.declare'{queue = <<"failing-q">>}, _Content, _IState) ->
+ rabbit_misc:amqp_error(
+ 'precondition_failed', "operation not allowed", [],
+ 'queue.declare');
+
intercept(Method, Content, _VHost) ->
{Method, Content}.
applies_to() ->
- ['basic.publish'].
+ ['basic.publish', 'queue.declare'].
diff --git a/deps/rabbit/test/dummy_runtime_parameters.erl b/deps/rabbit/test/dummy_runtime_parameters.erl
index 01d0b74f95..07bc33e602 100644
--- a/deps/rabbit/test/dummy_runtime_parameters.erl
+++ b/deps/rabbit/test/dummy_runtime_parameters.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(dummy_runtime_parameters).
diff --git a/deps/rabbit/test/dummy_supervisor2.erl b/deps/rabbit/test/dummy_supervisor2.erl
index 354b3a0854..210acbef21 100644
--- a/deps/rabbit/test/dummy_supervisor2.erl
+++ b/deps/rabbit/test/dummy_supervisor2.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(dummy_supervisor2).
diff --git a/deps/rabbit/test/dynamic_ha_SUITE.erl b/deps/rabbit/test/dynamic_ha_SUITE.erl
index 85969135b6..2492a2c577 100644
--- a/deps/rabbit/test/dynamic_ha_SUITE.erl
+++ b/deps/rabbit/test/dynamic_ha_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(dynamic_ha_SUITE).
diff --git a/deps/rabbit/test/dynamic_qq_SUITE.erl b/deps/rabbit/test/dynamic_qq_SUITE.erl
index 9a8f2110d6..391bb96d55 100644
--- a/deps/rabbit/test/dynamic_qq_SUITE.erl
+++ b/deps/rabbit/test/dynamic_qq_SUITE.erl
@@ -2,18 +2,18 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(dynamic_qq_SUITE).
-include_lib("common_test/include/ct.hrl").
--include_lib("proper/include/proper.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("amqp_client/include/amqp_client.hrl").
-import(quorum_queue_utils, [wait_for_messages_ready/3,
ra_name/1]).
+-compile(nowarn_export_all).
-compile(export_all).
all() ->
@@ -57,27 +57,33 @@ end_per_group(_, Config) ->
Config.
init_per_testcase(Testcase, Config) ->
- rabbit_ct_helpers:testcase_started(Config, Testcase),
- ClusterSize = ?config(rmq_nodes_count, Config),
- TestNumber = rabbit_ct_helpers:testcase_number(Config, ?MODULE, Testcase),
- Group = proplists:get_value(name, ?config(tc_group_properties, Config)),
- Q = rabbit_data_coercion:to_binary(io_lib:format("~p_~p", [Group, Testcase])),
- Config1 = rabbit_ct_helpers:set_config(Config, [
- {rmq_nodename_suffix, Testcase},
- {tcp_ports_base, {skip_n_nodes, TestNumber * ClusterSize}},
- {queue_name, Q},
- {queue_args, [{<<"x-queue-type">>, longstr, <<"quorum">>}]}
- ]),
- Config2 = rabbit_ct_helpers:run_steps(
- Config1,
- rabbit_ct_broker_helpers:setup_steps() ++
- rabbit_ct_client_helpers:setup_steps()),
- case rabbit_ct_broker_helpers:enable_feature_flag(Config2, quorum_queue) of
- ok ->
- Config2;
- Skip ->
- end_per_testcase(Testcase, Config2),
- Skip
+ case rabbit_ct_helpers:is_mixed_versions() andalso
+ Testcase == quorum_unaffected_after_vhost_failure of
+ true ->
+ {skip, "test case not mixed versions compatible"};
+ false ->
+ rabbit_ct_helpers:testcase_started(Config, Testcase),
+ ClusterSize = ?config(rmq_nodes_count, Config),
+ TestNumber = rabbit_ct_helpers:testcase_number(Config, ?MODULE, Testcase),
+ Group = proplists:get_value(name, ?config(tc_group_properties, Config)),
+ Q = rabbit_data_coercion:to_binary(io_lib:format("~p_~p", [Group, Testcase])),
+ Config1 = rabbit_ct_helpers:set_config(Config, [
+ {rmq_nodename_suffix, Testcase},
+ {tcp_ports_base, {skip_n_nodes, TestNumber * ClusterSize}},
+ {queue_name, Q},
+ {queue_args, [{<<"x-queue-type">>, longstr, <<"quorum">>}]}
+ ]),
+ Config2 = rabbit_ct_helpers:run_steps(
+ Config1,
+ rabbit_ct_broker_helpers:setup_steps() ++
+ rabbit_ct_client_helpers:setup_steps()),
+ case rabbit_ct_broker_helpers:enable_feature_flag(Config2, quorum_queue) of
+ ok ->
+ Config2;
+ Skip ->
+ end_per_testcase(Testcase, Config2),
+ Skip
+ end
end.
end_per_testcase(Testcase, Config) ->
@@ -197,7 +203,7 @@ quorum_unaffected_after_vhost_failure(Config) ->
?assertEqual(Servers, lists:sort(proplists:get_value(online, Info, []))).
recover_follower_after_standalone_restart(Config) ->
- case os:getenv("SECONDARY_UMBRELLA") of
+ case rabbit_ct_helpers:is_mixed_versions() of
false ->
%% Tests that followers can be brought up standalone after forgetting the
%% rest of the cluster. Consensus won't be reached as there is only one node in the
diff --git a/deps/rabbit/test/eager_sync_SUITE.erl b/deps/rabbit/test/eager_sync_SUITE.erl
index a9e2ea2107..9605b14155 100644
--- a/deps/rabbit/test/eager_sync_SUITE.erl
+++ b/deps/rabbit/test/eager_sync_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(eager_sync_SUITE).
@@ -32,6 +32,12 @@ groups() ->
]}
].
+suite() ->
+ [
+ %% If a test hangs, no need to wait for 30 minutes.
+ {timetrap, {minutes, 15}}
+ ].
+
%% -------------------------------------------------------------------
%% Testsuite setup/teardown.
%% -------------------------------------------------------------------
diff --git a/deps/rabbit/test/feature_flags_SUITE.erl b/deps/rabbit/test/feature_flags_SUITE.erl
index 29dfcf068b..c98369f596 100644
--- a/deps/rabbit/test/feature_flags_SUITE.erl
+++ b/deps/rabbit/test/feature_flags_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2018-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2018-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(feature_flags_SUITE).
@@ -61,14 +61,12 @@ groups() ->
{enabling_on_single_node, [],
[
enable_feature_flag_in_a_healthy_situation,
- enable_unsupported_feature_flag_in_a_healthy_situation,
- enable_feature_flag_when_ff_file_is_unwritable
+ enable_unsupported_feature_flag_in_a_healthy_situation
]},
{enabling_in_cluster, [],
[
enable_feature_flag_in_a_healthy_situation,
enable_unsupported_feature_flag_in_a_healthy_situation,
- enable_feature_flag_when_ff_file_is_unwritable,
enable_feature_flag_with_a_network_partition,
mark_feature_flag_as_enabled_with_a_network_partition
]},
@@ -107,29 +105,53 @@ init_per_group(enabling_on_single_node, Config) ->
Config,
[{rmq_nodes_count, 1}]);
init_per_group(enabling_in_cluster, Config) ->
- rabbit_ct_helpers:set_config(
- Config,
- [{rmq_nodes_count, 5}]);
+ case rabbit_ct_helpers:is_mixed_versions() of
+ true ->
+ %% This test relies on functions only exported for test,
+ %% which is not true of mixed version nodes in bazel
+ {skip, "mixed mode not supported"};
+ _ ->
+ rabbit_ct_helpers:set_config(
+ Config,
+ [{rmq_nodes_count, 5}])
+ end;
init_per_group(clustering, Config) ->
- Config1 = rabbit_ct_helpers:set_config(
- Config,
- [{rmq_nodes_count, 2},
- {rmq_nodes_clustered, false},
- {start_rmq_with_plugins_disabled, true}]),
- rabbit_ct_helpers:run_setup_steps(Config1, [
- fun build_my_plugin/1,
- fun work_around_cli_and_rabbit_circular_dep/1
- ]);
+ case rabbit_ct_helpers:is_mixed_versions() of
+ true ->
+ %% This test relies on functions only exported for test,
+ %% which is not true of mixed version nodes in bazel
+ {skip, "mixed mode not supported"};
+ _ ->
+ Config1 = rabbit_ct_helpers:set_config(
+ Config,
+ [{rmq_nodes_count, 2},
+ {rmq_nodes_clustered, false},
+ {start_rmq_with_plugins_disabled, true}]),
+ rabbit_ct_helpers:run_setup_steps(Config1, [
+ fun prepare_my_plugin/1,
+ fun work_around_cli_and_rabbit_circular_dep/1
+ ])
+ end;
init_per_group(activating_plugin, Config) ->
- Config1 = rabbit_ct_helpers:set_config(
- Config,
- [{rmq_nodes_count, 2},
- {rmq_nodes_clustered, true},
- {start_rmq_with_plugins_disabled, true}]),
- rabbit_ct_helpers:run_setup_steps(Config1, [
- fun build_my_plugin/1,
- fun work_around_cli_and_rabbit_circular_dep/1
- ]);
+ case rabbit_ct_helpers:is_mixed_versions() of
+ true ->
+ %% mixed mode testing in bazel uses a production build of
+ %% the broker; however, this group invokes functions via
+ %% rpc that are only available in test builds
+ {skip, "mixed mode not supported"};
+ _ ->
+ Config1 = rabbit_ct_helpers:set_config(
+ Config,
+ [{rmq_nodes_count, 2},
+ {rmq_nodes_clustered, true},
+ {start_rmq_with_plugins_disabled, true}]),
+ rabbit_ct_helpers:run_setup_steps(
+ Config1,
+ [
+ fun prepare_my_plugin/1,
+ fun work_around_cli_and_rabbit_circular_dep/1
+ ])
+ end;
init_per_group(_, Config) ->
Config.
@@ -141,20 +163,7 @@ init_per_testcase(Testcase, Config) ->
TestNumber = rabbit_ct_helpers:testcase_number(Config, ?MODULE, Testcase),
case ?config(tc_group_properties, Config) of
[{name, registry} | _] ->
- application:set_env(lager, colored, true),
- application:set_env(
- lager,
- handlers, [{lager_console_backend, [{level, debug}]}]),
- application:set_env(
- lager,
- extra_sinks,
- [{rabbit_log_lager_event,
- [{handlers, [{lager_console_backend, [{level, debug}]}]}]
- },
- {rabbit_log_feature_flags_lager_event,
- [{handlers, [{lager_console_backend, [{level, debug}]}]}]
- }]),
- lager:start(),
+ logger:set_primary_config(level, debug),
FeatureFlagsFile = filename:join(?config(priv_dir, Config),
rabbit_misc:format(
"feature_flags-~s",
@@ -679,10 +688,10 @@ mark_feature_flag_as_enabled_with_a_network_partition(Config) ->
FeatureName,
true,
RemoteNodes},
- rabbit_ct_broker_helpers:rpc(
- Config, B,
- rabbit_feature_flags, mark_as_enabled_remotely,
- [RemoteNodes, FeatureName, true, 20000])),
+ catch rabbit_ct_broker_helpers:rpc(
+ Config, B,
+ rabbit_feature_flags, mark_as_enabled_remotely,
+ [RemoteNodes, FeatureName, true, 20000])),
RepairFun = fun() ->
%% Wait a few seconds before we repair the network.
@@ -998,6 +1007,17 @@ activating_plugin_with_new_ff_enabled(Config) ->
%% Internal helpers.
%% -------------------------------------------------------------------
+prepare_my_plugin(Config) ->
+ case os:getenv("RABBITMQ_RUN") of
+ false ->
+ build_my_plugin(Config);
+ _ ->
+ MyPluginDir = filename:dirname(filename:dirname(code:where_is_file("my_plugin.app"))),
+ PluginsDir = filename:dirname(MyPluginDir),
+ rabbit_ct_helpers:set_config(Config,
+ [{rmq_plugins_dir, PluginsDir}])
+ end.
+
build_my_plugin(Config) ->
PluginSrcDir = filename:join(?config(data_dir, Config), "my_plugin"),
PluginsDir = filename:join(PluginSrcDir, "plugins"),
diff --git a/deps/rabbit/test/feature_flags_SUITE_data/my_plugin/BUILD.bazel b/deps/rabbit/test/feature_flags_SUITE_data/my_plugin/BUILD.bazel
new file mode 100644
index 0000000000..60e3e86cbc
--- /dev/null
+++ b/deps/rabbit/test/feature_flags_SUITE_data/my_plugin/BUILD.bazel
@@ -0,0 +1,13 @@
+load("@bazel-erlang//:bazel_erlang_lib.bzl", "erlang_lib")
+
+DEPS = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+ "//deps/rabbit:bazel_erlang_lib",
+]
+
+erlang_lib(
+ app_description = "Plugin to test feature flags",
+ app_name = "my_plugin",
+ app_version = "1.0.0",
+ deps = DEPS,
+)
diff --git a/deps/rabbit/test/feature_flags_SUITE_data/my_plugin/Makefile b/deps/rabbit/test/feature_flags_SUITE_data/my_plugin/Makefile
index 8f6681090b..731855acfc 100644
--- a/deps/rabbit/test/feature_flags_SUITE_data/my_plugin/Makefile
+++ b/deps/rabbit/test/feature_flags_SUITE_data/my_plugin/Makefile
@@ -11,5 +11,5 @@ DEPS = rabbit_common rabbit
DEP_EARLY_PLUGINS = rabbit_common/mk/rabbitmq-early-plugin.mk
DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
-include rabbitmq-components.mk
-include erlang.mk
+include ../../../../../rabbitmq-components.mk
+include ../../../../../erlang.mk
diff --git a/deps/rabbit/test/feature_flags_SUITE_data/my_plugin/erlang.mk b/deps/rabbit/test/feature_flags_SUITE_data/my_plugin/erlang.mk
deleted file mode 100644
index f303054bad..0000000000
--- a/deps/rabbit/test/feature_flags_SUITE_data/my_plugin/erlang.mk
+++ /dev/null
@@ -1 +0,0 @@
-include ../../../erlang.mk
diff --git a/deps/rabbit/test/feature_flags_SUITE_data/my_plugin/rabbitmq-components.mk b/deps/rabbit/test/feature_flags_SUITE_data/my_plugin/rabbitmq-components.mk
deleted file mode 100644
index 9f89dba726..0000000000
--- a/deps/rabbit/test/feature_flags_SUITE_data/my_plugin/rabbitmq-components.mk
+++ /dev/null
@@ -1 +0,0 @@
-include ../../../rabbitmq-components.mk
diff --git a/deps/rabbit/test/feature_flags_SUITE_data/my_plugin/src/my_plugin.erl b/deps/rabbit/test/feature_flags_SUITE_data/my_plugin/src/my_plugin.erl
index 687acdb5de..2dd648c080 100644
--- a/deps/rabbit/test/feature_flags_SUITE_data/my_plugin/src/my_plugin.erl
+++ b/deps/rabbit/test/feature_flags_SUITE_data/my_plugin/src/my_plugin.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2019-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2019-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(my_plugin).
diff --git a/deps/rabbit/test/feature_flags_with_unpriveleged_user_SUITE.erl b/deps/rabbit/test/feature_flags_with_unpriveleged_user_SUITE.erl
new file mode 100644
index 0000000000..bf01e2c181
--- /dev/null
+++ b/deps/rabbit/test/feature_flags_with_unpriveleged_user_SUITE.erl
@@ -0,0 +1,85 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2018-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(feature_flags_with_unpriveleged_user_SUITE).
+
+-include_lib("common_test/include/ct.hrl").
+-include_lib("eunit/include/eunit.hrl").
+
+-export([suite/0,
+ all/0,
+ groups/0,
+ init_per_suite/1,
+ end_per_suite/1,
+ init_per_group/2,
+ end_per_group/2,
+ init_per_testcase/2,
+ end_per_testcase/2,
+
+ enable_feature_flag_when_ff_file_is_unwritable/1
+ ]).
+
+suite() ->
+ [{timetrap, {minutes, 5}}].
+
+all() ->
+ [
+ {group, enabling_on_single_node},
+ {group, enabling_in_cluster}
+ ].
+
+groups() ->
+ [
+ {enabling_on_single_node, [],
+ [
+ enable_feature_flag_when_ff_file_is_unwritable
+ ]},
+ {enabling_in_cluster, [],
+ [
+ enable_feature_flag_when_ff_file_is_unwritable
+ ]}
+ ].
+
+%% This suite exists to allow running a portion of the feature_flags_SUITE
+%% under separate conditions in CI
+
+init_per_suite(Config) ->
+ feature_flags_SUITE:init_per_suite(Config).
+
+end_per_suite(Config) ->
+ feature_flags_SUITE:end_per_suite(Config).
+
+
+init_per_group(enabling_in_cluster, Config) ->
+ case rabbit_ct_helpers:is_mixed_versions() of
+ true ->
+ %% This test relies on functions only exported for test,
+ %% which is not true of mixed version nodes in bazel
+ {skip, "mixed mode not supported"};
+ _ ->
+ rabbit_ct_helpers:set_config(
+ Config,
+ [{rmq_nodes_count, 3}])
+ end;
+init_per_group(Group, Config) ->
+ feature_flags_SUITE:init_per_group(Group, Config).
+
+end_per_group(Group, Config) ->
+ feature_flags_SUITE:end_per_group(Group, Config).
+
+init_per_testcase(Testcase, Config) ->
+ feature_flags_SUITE:init_per_testcase(Testcase, Config).
+
+end_per_testcase(Testcase, Config) ->
+ feature_flags_SUITE:end_per_testcase(Testcase, Config).
+
+%% -------------------------------------------------------------------
+%% Testcases.
+%% -------------------------------------------------------------------
+
+enable_feature_flag_when_ff_file_is_unwritable(Config) ->
+ feature_flags_SUITE:enable_feature_flag_when_ff_file_is_unwritable(Config).
diff --git a/deps/rabbit/test/lazy_queue_SUITE.erl b/deps/rabbit/test/lazy_queue_SUITE.erl
index 8748b07aca..810c9e6f0c 100644
--- a/deps/rabbit/test/lazy_queue_SUITE.erl
+++ b/deps/rabbit/test/lazy_queue_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(lazy_queue_SUITE).
diff --git a/deps/rabbit/test/list_consumers_sanity_check_SUITE.erl b/deps/rabbit/test/list_consumers_sanity_check_SUITE.erl
index fbd31fa3e8..5bc9e3ead5 100644
--- a/deps/rabbit/test/list_consumers_sanity_check_SUITE.erl
+++ b/deps/rabbit/test/list_consumers_sanity_check_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(list_consumers_sanity_check_SUITE).
diff --git a/deps/rabbit/test/list_queues_online_and_offline_SUITE.erl b/deps/rabbit/test/list_queues_online_and_offline_SUITE.erl
index d26fdc03e2..03c0791f10 100644
--- a/deps/rabbit/test/list_queues_online_and_offline_SUITE.erl
+++ b/deps/rabbit/test/list_queues_online_and_offline_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(list_queues_online_and_offline_SUITE).
diff --git a/deps/rabbit/test/logging_SUITE.erl b/deps/rabbit/test/logging_SUITE.erl
new file mode 100644
index 0000000000..57bc11d8ca
--- /dev/null
+++ b/deps/rabbit/test/logging_SUITE.erl
@@ -0,0 +1,1332 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(logging_SUITE).
+
+-include_lib("common_test/include/ct.hrl").
+-include_lib("eunit/include/eunit.hrl").
+
+-include_lib("kernel/include/logger.hrl").
+-include_lib("rabbit_common/include/logging.hrl").
+-include_lib("amqp_client/include/amqp_client.hrl").
+
+-export([suite/0,
+ all/0,
+ groups/0,
+ init_per_suite/1,
+ end_per_suite/1,
+ init_per_group/2,
+ end_per_group/2,
+ init_per_testcase/2,
+ end_per_testcase/2,
+
+ logging_with_default_config_works/1,
+ setting_log_levels_in_env_works/1,
+ setting_log_levels_in_config_works/1,
+ setting_log_levels_in_config_with_output_overridden_in_env_works/1,
+ setting_message_format_works/1,
+ setting_level_format_works/1,
+ setting_time_format_works/1,
+ logging_as_single_line_works/1,
+ logging_as_multi_line_works/1,
+ formatting_as_json_configured_in_env_works/1,
+ formatting_as_json_configured_in_config_works/1,
+ formatting_as_json_using_epoch_secs_timestamps_works/1,
+ renaming_json_fields_works/1,
+ removing_specific_json_fields_works/1,
+ removing_non_mentionned_json_fields_works/1,
+ configuring_verbosity_works/1,
+
+ logging_to_stdout_configured_in_env_works/1,
+ logging_to_stdout_configured_in_config_works/1,
+ logging_to_stderr_configured_in_env_works/1,
+ logging_to_stderr_configured_in_config_works/1,
+ formatting_with_colors_works/1,
+ formatting_without_colors_works/1,
+
+ logging_to_exchange_works/1,
+
+ logging_to_syslog_works/1]).
+
+suite() ->
+ [{timetrap, {minutes, 1}}].
+
+all() ->
+ [
+ {group, file_output},
+ {group, console_output},
+ {group, exchange_output},
+ {group, syslog_output}
+ ].
+
+groups() ->
+ [
+ {file_output, [],
+ [logging_with_default_config_works,
+ setting_log_levels_in_env_works,
+ setting_log_levels_in_config_works,
+ setting_log_levels_in_config_with_output_overridden_in_env_works,
+ setting_message_format_works,
+ setting_level_format_works,
+ setting_time_format_works,
+ logging_as_single_line_works,
+ logging_as_multi_line_works,
+ formatting_as_json_configured_in_env_works,
+ formatting_as_json_configured_in_config_works,
+ formatting_as_json_using_epoch_secs_timestamps_works,
+ renaming_json_fields_works,
+ removing_specific_json_fields_works,
+ removing_non_mentionned_json_fields_works,
+ configuring_verbosity_works]},
+
+ {console_output, [],
+ [logging_to_stdout_configured_in_env_works,
+ logging_to_stdout_configured_in_config_works,
+ logging_to_stderr_configured_in_env_works,
+ logging_to_stderr_configured_in_config_works,
+ formatting_with_colors_works,
+ formatting_without_colors_works]},
+
+ {exchange_output, [],
+ [logging_to_exchange_works]},
+
+ {syslog_output, [],
+ [logging_to_syslog_works]}
+ ].
+
+init_per_suite(Config) ->
+ rabbit_ct_helpers:log_environment(),
+ rabbit_ct_helpers:run_setup_steps(Config).
+
+end_per_suite(Config) ->
+ Config.
+
+init_per_group(syslog_output, Config) ->
+ Config1 = start_fake_syslogd(Config),
+ TcpPort = ?config(syslogd_tcp_port, Config1),
+ ok = application:set_env(
+ syslog, logger, [],
+ [{persistent, true}]),
+ ok = application:set_env(
+ syslog, syslog_error_logger, false,
+ [{persistent, true}]),
+ ok = application:set_env(
+ syslog, protocol, {rfc3164, tcp},
+ [{persistent, true}]),
+ ok = application:set_env(
+ syslog, dest_port, TcpPort,
+ [{persistent, true}]),
+ {ok, _} = application:ensure_all_started(syslog),
+ Config1;
+init_per_group(_, Config) ->
+ Config.
+
+end_per_group(syslog_output, Config) ->
+ ok = application:stop(syslog),
+ stop_fake_syslogd(Config);
+end_per_group(_, Config) ->
+ Config.
+
+init_per_testcase(Testcase, Config) ->
+ rabbit_ct_helpers:testcase_started(Config, Testcase),
+ GroupProps = ?config(tc_group_properties, Config),
+ Group = proplists:get_value(name, GroupProps),
+ case Group of
+ %% The exchange output requires RabbitMQ to run. All testcases in this
+ %% group will run in the context of that RabbitMQ node.
+ exchange_output ->
+ ExchProps = case Testcase of
+ logging_to_exchange_works ->
+ [{enabled, true},
+ {level, info}]
+ end,
+ Config1 = rabbit_ct_helpers:set_config(
+ Config,
+ [{rmq_nodes_count, 1},
+ {rmq_nodename_suffix, Testcase}]),
+ Config2 = rabbit_ct_helpers:merge_app_env(
+ Config1,
+ {rabbit, [{log, [{exchange, ExchProps},
+ {file, [{level, info}]}]}]}),
+ rabbit_ct_helpers:run_steps(
+ Config2,
+ rabbit_ct_broker_helpers:setup_steps() ++
+ rabbit_ct_client_helpers:setup_steps());
+
+ %% Other groups and testcases run the tested code directly without a
+ %% RabbitMQ node running.
+ _ ->
+ remove_all_handlers(),
+ application:unset_env(rabbit, log),
+ LogBaseDir = filename:join(
+ ?config(priv_dir, Config),
+ atom_to_list(Testcase)),
+ rabbit_ct_helpers:set_config(
+ Config, {log_base_dir, LogBaseDir})
+ end.
+
+end_per_testcase(Testcase, Config) ->
+ Config1 = case rabbit_ct_helpers:get_config(Config, rmq_nodes_count) of
+ undefined ->
+ application:unset_env(rabbit, log),
+ Config;
+ _ ->
+ rabbit_ct_helpers:run_steps(
+ Config,
+ rabbit_ct_client_helpers:teardown_steps() ++
+ rabbit_ct_broker_helpers:teardown_steps())
+ end,
+ rabbit_ct_helpers:testcase_finished(Config1, Testcase).
+
+remove_all_handlers() ->
+ _ = [logger:remove_handler(Id)
+ || #{id := Id} <- logger:get_handler_config()].
+
+%% -------------------------------------------------------------------
+%% Testcases.
+%% -------------------------------------------------------------------
+
+logging_with_default_config_works(Config) ->
+ Context = default_context(Config),
+ rabbit_prelaunch_logging:clear_config_run_number(),
+ rabbit_prelaunch_logging:setup(Context),
+
+ Handlers = logger:get_handler_config(),
+
+ MainFileHandler = get_handler_by_id(Handlers, rmq_1_file_1),
+ MainFile = main_log_file_in_context(Context),
+ ?assertNotEqual(undefined, MainFileHandler),
+ ?assertMatch(
+ #{level := info,
+ module := rabbit_logger_std_h,
+ filter_default := log,
+ filters := [{progress_reports, {_, stop}},
+ {rmqlog_filter, {_, #{global := info,
+ upgrade := none}}}],
+ formatter := {rabbit_logger_text_fmt, _},
+ config := #{type := file,
+ file := MainFile}},
+ MainFileHandler),
+
+ UpgradeFileHandler = get_handler_by_id(Handlers, rmq_1_file_2),
+ UpgradeFile = upgrade_log_file_in_context(Context),
+ ?assertNotEqual(undefined, UpgradeFileHandler),
+ ?assertMatch(
+ #{level := info,
+ module := rabbit_logger_std_h,
+ filter_default := stop,
+ filters := [{rmqlog_filter, {_, #{upgrade := info}}}],
+ formatter := {rabbit_logger_text_fmt, _},
+ config := #{type := file,
+ file := UpgradeFile}},
+ UpgradeFileHandler),
+
+ ?assert(ping_log(rmq_1_file_1, info)),
+ ?assert(ping_log(rmq_1_file_1, info,
+ #{domain => ?RMQLOG_DOMAIN_GLOBAL})),
+ ?assert(ping_log(rmq_1_file_1, info,
+ #{domain => ['3rd_party']})),
+ ?assertNot(ping_log(rmq_1_file_1, info,
+ #{domain => ?RMQLOG_DOMAIN_UPGRADE})),
+
+ ?assert(ping_log(rmq_1_file_2, info,
+ #{domain => ?RMQLOG_DOMAIN_UPGRADE})),
+ ?assertNot(ping_log(rmq_1_file_2, info,
+ #{domain => ?RMQLOG_DOMAIN_GLOBAL})),
+ ok.
+
+setting_log_levels_in_env_works(Config) ->
+ GlobalLevel = warning,
+ PrelaunchLevel = error,
+ MinLevel = rabbit_prelaunch_logging:get_less_severe_level(
+ GlobalLevel, PrelaunchLevel),
+ #{var_origins := Origins0} = Context0 = default_context(Config),
+ Context = Context0#{log_levels => #{global => GlobalLevel,
+ "prelaunch" => PrelaunchLevel},
+ var_origins => Origins0#{log_levels => environment}},
+ rabbit_prelaunch_logging:clear_config_run_number(),
+ rabbit_prelaunch_logging:setup(Context),
+
+ Handlers = logger:get_handler_config(),
+
+ MainFileHandler = get_handler_by_id(Handlers, rmq_1_file_1),
+ MainFile = main_log_file_in_context(Context),
+ ?assertNotEqual(undefined, MainFileHandler),
+ ?assertMatch(
+ #{level := MinLevel,
+ module := rabbit_logger_std_h,
+ filter_default := log,
+ filters := [{progress_reports, {_, stop}},
+ {rmqlog_filter, {_, #{global := GlobalLevel,
+ prelaunch := PrelaunchLevel,
+ upgrade := none}}}],
+ formatter := {rabbit_logger_text_fmt, _},
+ config := #{type := file,
+ file := MainFile}},
+ MainFileHandler),
+
+ UpgradeFileHandler = get_handler_by_id(Handlers, rmq_1_file_2),
+ UpgradeFile = upgrade_log_file_in_context(Context),
+ ?assertNotEqual(undefined, UpgradeFileHandler),
+ ?assertMatch(
+ #{level := info,
+ module := rabbit_logger_std_h,
+ filter_default := stop,
+ filters := [{rmqlog_filter, {_, #{upgrade := info}}}],
+ formatter := {rabbit_logger_text_fmt, _},
+ config := #{type := file,
+ file := UpgradeFile}},
+ UpgradeFileHandler),
+
+ ?assertNot(ping_log(rmq_1_file_1, info)),
+ ?assertNot(ping_log(rmq_1_file_1, info,
+ #{domain => ?RMQLOG_DOMAIN_GLOBAL})),
+ ?assertNot(ping_log(rmq_1_file_1, info,
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH})),
+ ?assertNot(ping_log(rmq_1_file_1, GlobalLevel,
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH})),
+ ?assertNot(ping_log(rmq_1_file_1, info,
+ #{domain => ['3rd_party']})),
+ ?assertNot(ping_log(rmq_1_file_1, info,
+ #{domain => ?RMQLOG_DOMAIN_UPGRADE})),
+
+ ?assert(ping_log(rmq_1_file_1, GlobalLevel)),
+ ?assert(ping_log(rmq_1_file_1, GlobalLevel,
+ #{domain => ?RMQLOG_DOMAIN_GLOBAL})),
+ ?assert(ping_log(rmq_1_file_1, PrelaunchLevel,
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH})),
+ ?assert(ping_log(rmq_1_file_1, GlobalLevel,
+ #{domain => ['3rd_party']})),
+ ?assertNot(ping_log(rmq_1_file_1, GlobalLevel,
+ #{domain => ?RMQLOG_DOMAIN_UPGRADE})),
+
+ ?assert(ping_log(rmq_1_file_2, GlobalLevel,
+ #{domain => ?RMQLOG_DOMAIN_UPGRADE})),
+ ?assertNot(ping_log(rmq_1_file_2, GlobalLevel,
+ #{domain => ?RMQLOG_DOMAIN_GLOBAL})),
+ ok.
+
+setting_log_levels_in_config_works(Config) ->
+ GlobalLevel = warning,
+ PrelaunchLevel = error,
+ MinLevel = rabbit_prelaunch_logging:get_less_severe_level(
+ GlobalLevel, PrelaunchLevel),
+ Context = default_context(Config),
+ ok = application:set_env(
+ rabbit, log,
+ [{file, [{level, GlobalLevel}]},
+ {categories, [{prelaunch, [{level, PrelaunchLevel}]}]}],
+ [{persistent, true}]),
+ rabbit_prelaunch_logging:clear_config_run_number(),
+ rabbit_prelaunch_logging:setup(Context),
+
+ Handlers = logger:get_handler_config(),
+
+ MainFileHandler = get_handler_by_id(Handlers, rmq_1_file_1),
+ MainFile = main_log_file_in_context(Context),
+ ?assertNotEqual(undefined, MainFileHandler),
+ ?assertMatch(
+ #{level := MinLevel,
+ module := rabbit_logger_std_h,
+ filter_default := log,
+ filters := [{progress_reports, {_, stop}},
+ {rmqlog_filter, {_, #{global := GlobalLevel,
+ prelaunch := PrelaunchLevel,
+ upgrade := none}}}],
+ formatter := {rabbit_logger_text_fmt, _},
+ config := #{type := file,
+ file := MainFile}},
+ MainFileHandler),
+
+ UpgradeFileHandler = get_handler_by_id(Handlers, rmq_1_file_2),
+ UpgradeFile = upgrade_log_file_in_context(Context),
+ ?assertNotEqual(undefined, UpgradeFileHandler),
+ ?assertMatch(
+ #{level := info,
+ module := rabbit_logger_std_h,
+ filter_default := stop,
+ filters := [{rmqlog_filter, {_, #{upgrade := info}}}],
+ formatter := {rabbit_logger_text_fmt, _},
+ config := #{type := file,
+ file := UpgradeFile}},
+ UpgradeFileHandler),
+
+ ?assertNot(ping_log(rmq_1_file_1, info)),
+ ?assertNot(ping_log(rmq_1_file_1, info,
+ #{domain => ?RMQLOG_DOMAIN_GLOBAL})),
+ ?assertNot(ping_log(rmq_1_file_1, info,
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH})),
+ ?assertNot(ping_log(rmq_1_file_1, GlobalLevel,
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH})),
+ ?assertNot(ping_log(rmq_1_file_1, info,
+ #{domain => ['3rd_party']})),
+ ?assertNot(ping_log(rmq_1_file_1, info,
+ #{domain => ?RMQLOG_DOMAIN_UPGRADE})),
+
+ ?assert(ping_log(rmq_1_file_1, GlobalLevel)),
+ ?assert(ping_log(rmq_1_file_1, GlobalLevel,
+ #{domain => ?RMQLOG_DOMAIN_GLOBAL})),
+ ?assert(ping_log(rmq_1_file_1, PrelaunchLevel,
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH})),
+ ?assert(ping_log(rmq_1_file_1, GlobalLevel,
+ #{domain => ['3rd_party']})),
+ ?assertNot(ping_log(rmq_1_file_1, GlobalLevel,
+ #{domain => ?RMQLOG_DOMAIN_UPGRADE})),
+
+ ?assert(ping_log(rmq_1_file_2, GlobalLevel,
+ #{domain => ?RMQLOG_DOMAIN_UPGRADE})),
+ ?assertNot(ping_log(rmq_1_file_2, GlobalLevel,
+ #{domain => ?RMQLOG_DOMAIN_GLOBAL})),
+ ok.
+
+setting_log_levels_in_config_with_output_overridden_in_env_works(Config) ->
+ #{var_origins := Origins0} = Context0 = default_context(Config),
+ Context = Context0#{main_log_file => "-",
+ var_origins => Origins0#{
+ main_log_file => environment}},
+ ok = application:set_env(
+ rabbit, log, [{console, [{level, debug}]}],
+ [{persistent, true}]),
+ rabbit_prelaunch_logging:clear_config_run_number(),
+ rabbit_prelaunch_logging:setup(Context),
+
+ Handlers = logger:get_handler_config(),
+
+ StddevHandler = get_handler_by_id(Handlers, rmq_1_stdout),
+ ?assertNotEqual(undefined, StddevHandler),
+ ?assertMatch(
+ #{level := debug,
+ module := rabbit_logger_std_h,
+ filter_default := log,
+ filters := [{progress_reports, {_, log}},
+ {rmqlog_filter, {_, #{global := debug,
+ upgrade := none}}}],
+ formatter := {rabbit_logger_text_fmt, _},
+ config := #{type := standard_io}},
+ StddevHandler),
+
+ UpgradeFileHandler = get_handler_by_id(Handlers, rmq_1_file_1),
+ UpgradeFile = upgrade_log_file_in_context(Context),
+ ?assertNotEqual(undefined, UpgradeFileHandler),
+ ?assertMatch(
+ #{level := info,
+ module := rabbit_logger_std_h,
+ filter_default := stop,
+ filters := [{rmqlog_filter, {_, #{upgrade := info}}}],
+ formatter := {rabbit_logger_text_fmt, _},
+ config := #{type := file,
+ file := UpgradeFile}},
+ UpgradeFileHandler),
+
+ ?assert(ping_log(rmq_1_stdout, debug, Config)),
+ ?assert(ping_log(rmq_1_stdout, debug,
+ #{domain => ?RMQLOG_DOMAIN_GLOBAL}, Config)),
+ ?assert(ping_log(rmq_1_stdout, debug,
+ #{domain => ['3rd_party']}, Config)),
+ ?assertNot(ping_log(rmq_1_stdout, debug,
+ #{domain => ?RMQLOG_DOMAIN_UPGRADE}, Config)),
+ ok.
+
+setting_message_format_works(Config) ->
+ Context = default_context(Config),
+ Format = ["level=", level, " ",
+ "md_key=", md_key, " ",
+ "unknown_field=", unknown_field, " ",
+ "msg=", msg],
+ {PrefixFormat, LineFormat} =
+ rabbit_prelaunch_early_logging:determine_prefix(Format),
+ ok = application:set_env(
+ rabbit, log,
+ [{file, [{formatter, {rabbit_logger_text_fmt,
+ #{prefix_format => PrefixFormat,
+ line_format => LineFormat}}}]}],
+ [{persistent, true}]),
+ rabbit_prelaunch_logging:clear_config_run_number(),
+ rabbit_prelaunch_logging:setup(Context),
+
+ Metadata = #{md_key => "md_value"},
+ {RandomMsg, Line} = log_and_return_line(Context, Metadata),
+
+ RandomMsgBin = list_to_binary(RandomMsg),
+ ?assertEqual(
+ <<"level=warn ",
+ "md_key=md_value ",
+ "unknown_field=<unknown unknown_field> "
+ "msg=", RandomMsgBin/binary>>,
+ Line).
+
+setting_level_format_works(Config) ->
+ LevelFormats = #{lc => "warning",
+ uc => "WARNING",
+ lc3 => "wrn",
+ uc3 => "WRN",
+ lc4 => "warn",
+ uc4 => "WARN"},
+ maps:fold(
+ fun(LevelFormat, LevelName, Acc) ->
+ remove_all_handlers(),
+ setting_level_format_works(
+ LevelFormat, list_to_binary(LevelName), Config),
+ Acc
+ end, ok, LevelFormats).
+
+setting_level_format_works(LevelFormat, LevelName, Config) ->
+ Context = default_context(Config),
+ Format = [level, " ", msg],
+ {PrefixFormat, LineFormat} =
+ rabbit_prelaunch_early_logging:determine_prefix(Format),
+ ok = application:set_env(
+ rabbit, log,
+ [{file, [{formatter, {rabbit_logger_text_fmt,
+ #{level_format => LevelFormat,
+ prefix_format => PrefixFormat,
+ line_format => LineFormat}}}]}],
+ [{persistent, true}]),
+ rabbit_prelaunch_logging:clear_config_run_number(),
+ rabbit_prelaunch_logging:setup(Context),
+
+ {RandomMsg, Line} = log_and_return_line(Context, #{}),
+
+ RandomMsgBin = list_to_binary(RandomMsg),
+ ?assertEqual(
+ <<LevelName/binary, " ", RandomMsgBin/binary>>,
+ Line).
+
+setting_time_format_works(Config) ->
+ DateTime = "2018-05-01T16:17:58.123456+01:00",
+ Timestamp = calendar:rfc3339_to_system_time(
+ DateTime, [{unit, microsecond}]),
+ TimeFormats =
+ #{{rfc3339, $T, "+01:00"} => DateTime,
+ {rfc3339, $\s, "+01:00"} => "2018-05-01 16:17:58.123456+01:00",
+ {epoch, usecs, binary} => integer_to_list(Timestamp),
+ {epoch, secs, binary} => io_lib:format("~.6.0f", [Timestamp / 1000000]),
+ {universal,
+ "~4..0b-~2..0b-~2..0b "
+ "~2..0b:~2..0b:~2..0b.~3..0b",
+ [year, month, day,
+ hour, minute, second,
+ {second_fractional, 3}]} => "2018-05-01 15:17:58.123"},
+ maps:fold(
+ fun(TimeFormat, TimeValue, Acc) ->
+ remove_all_handlers(),
+ setting_time_format_works(
+ Timestamp, TimeFormat, list_to_binary(TimeValue), Config),
+ Acc
+ end, ok, TimeFormats).
+
+setting_time_format_works(Timestamp, TimeFormat, TimeValue, Config) ->
+ Context = default_context(Config),
+ Format = [time, " ", msg],
+ {PrefixFormat, LineFormat} =
+ rabbit_prelaunch_early_logging:determine_prefix(Format),
+ ok = application:set_env(
+ rabbit, log,
+ [{file, [{formatter, {rabbit_logger_text_fmt,
+ #{time_format => TimeFormat,
+ prefix_format => PrefixFormat,
+ line_format => LineFormat}}}]}],
+ [{persistent, true}]),
+ rabbit_prelaunch_logging:clear_config_run_number(),
+ rabbit_prelaunch_logging:setup(Context),
+
+ Metadata = #{time => Timestamp},
+ {RandomMsg, Line} = log_and_return_line(Context, Metadata),
+
+ RandomMsgBin = list_to_binary(RandomMsg),
+ ?assertEqual(
+ <<TimeValue/binary, " ", RandomMsgBin/binary>>,
+ Line).
+
+logging_as_single_line_works(Config) ->
+ logging_as_single_or_multi_line_works(false, Config).
+
+logging_as_multi_line_works(Config) ->
+ logging_as_single_or_multi_line_works(true, Config).
+
+logging_as_single_or_multi_line_works(AsMultiline, Config) ->
+ Context = default_context(Config),
+ Format = [time, " ", msg],
+ {PrefixFormat, LineFormat} =
+ rabbit_prelaunch_early_logging:determine_prefix(Format),
+ ok = application:set_env(
+ rabbit, log,
+ [{file, [{formatter, {rabbit_logger_text_fmt,
+ #{single_line => not AsMultiline,
+ prefix_format => PrefixFormat,
+ line_format => LineFormat}}}]}],
+ [{persistent, true}]),
+ rabbit_prelaunch_logging:clear_config_run_number(),
+ rabbit_prelaunch_logging:setup(Context),
+
+ RandomMsg1 = get_random_string(
+ 32,
+ "abcdefghijklmnopqrstuvwxyz"
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZ"),
+ RandomMsg2 = get_random_string(
+ 32,
+ "abcdefghijklmnopqrstuvwxyz"
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZ"),
+ ?LOG_WARNING(RandomMsg1 ++ "\n" ++ RandomMsg2, #{}),
+
+ rabbit_logger_std_h:filesync(rmq_1_file_1),
+ MainFile = main_log_file_in_context(Context),
+ {ok, Content} = file:read_file(MainFile),
+ ReOpts = [{capture, none}, multiline],
+ case AsMultiline of
+ true ->
+ match = re:run(Content, RandomMsg1 ++ "$", ReOpts),
+ match = re:run(Content, RandomMsg2 ++ "$", ReOpts);
+ false ->
+ match = re:run(
+ Content,
+ RandomMsg1 ++ ", " ++ RandomMsg2 ++ "$",
+ ReOpts)
+ end.
+
+formatting_as_json_configured_in_env_works(Config) ->
+ #{var_origins := Origins0} = Context0 = default_context(Config),
+ Context = Context0#{log_levels => #{json => true},
+ var_origins => Origins0#{log_levels => environment}},
+ formatting_as_json_works(Config, Context).
+
+formatting_as_json_configured_in_config_works(Config) ->
+ Context = default_context(Config),
+ ok = application:set_env(
+ rabbit, log,
+ [{file, [{formatter, {rabbit_logger_json_fmt, #{}}}]}],
+ [{persistent, true}]),
+ formatting_as_json_works(Config, Context).
+
+formatting_as_json_using_epoch_secs_timestamps_works(Config) ->
+ Context = default_context(Config),
+ ok = application:set_env(
+ rabbit, log,
+ [{file, [{formatter, {rabbit_logger_json_fmt,
+ #{time_format => {epoch, secs, int}}}}]}],
+ [{persistent, true}]),
+ formatting_as_json_works(Config, Context).
+
+formatting_as_json_works(_, Context) ->
+ rabbit_prelaunch_logging:clear_config_run_number(),
+ rabbit_prelaunch_logging:setup(Context),
+
+ Handlers = logger:get_handler_config(),
+
+ MainFileHandler = get_handler_by_id(Handlers, rmq_1_file_1),
+ MainFile = main_log_file_in_context(Context),
+ ?assertNotEqual(undefined, MainFileHandler),
+ ?assertMatch(
+ #{level := info,
+ module := rabbit_logger_std_h,
+ filter_default := log,
+ filters := [{progress_reports, {_, stop}},
+ {rmqlog_filter, {_, #{global := info,
+ upgrade := none}}}],
+ formatter := {rabbit_logger_json_fmt, _},
+ config := #{type := file,
+ file := MainFile}},
+ MainFileHandler),
+
+ ?assertNot(ping_log(rmq_1_file_1, info)),
+
+ Metadata = #{atom => rabbit,
+ integer => 1,
+ float => 1.42,
+ string => "string",
+ list => ["s", a, 3],
+ map => #{key => "value"},
+ function => fun get_random_string/2,
+ pid => self(),
+ port => hd(erlang:ports()),
+ ref => erlang:make_ref()},
+ {RandomMsg, Term} = log_and_return_json_object(
+ Context, Metadata, [return_maps]),
+
+ RandomMsgBin = list_to_binary(RandomMsg),
+ ?assertMatch(#{time := _}, Term),
+ ?assertMatch(#{level := <<"info">>}, Term),
+ ?assertMatch(#{msg := RandomMsgBin}, Term),
+
+ FunBin = list_to_binary(erlang:fun_to_list(maps:get(function, Metadata))),
+ PidBin = list_to_binary(erlang:pid_to_list(maps:get(pid, Metadata))),
+ PortBin = list_to_binary(erlang:port_to_list(maps:get(port, Metadata))),
+ RefBin = list_to_binary(erlang:ref_to_list(maps:get(ref, Metadata))),
+ ?assertMatch(#{atom := <<"rabbit">>}, Term),
+ ?assertMatch(#{integer := 1}, Term),
+ ?assertMatch(#{float := 1.42}, Term),
+ ?assertMatch(#{string := <<"string">>}, Term),
+ ?assertMatch(#{list := [<<"s">>, <<"a">>, 3]}, Term),
+ ?assertMatch(#{map := #{key := <<"value">>}}, Term),
+ ?assertMatch(#{function := FunBin}, Term),
+ ?assertMatch(#{pid := PidBin}, Term),
+ ?assertMatch(#{port := PortBin}, Term),
+ ?assertMatch(#{ref := RefBin}, Term).
+
+renaming_json_fields_works(Config) ->
+ Context = default_context(Config),
+ FieldMap = [{integer, int},
+ {msg, m},
+ {unknown_field, still_unknown_field},
+ {'$REST', false}],
+ ok = application:set_env(
+ rabbit, log,
+ [{file, [{formatter, {rabbit_logger_json_fmt,
+ #{field_map => FieldMap}}}]}],
+ [{persistent, true}]),
+ rabbit_prelaunch_logging:clear_config_run_number(),
+ rabbit_prelaunch_logging:setup(Context),
+
+ Metadata = #{atom => rabbit,
+ integer => 1,
+ string => "string",
+ list => ["s", a, 3]},
+ {RandomMsg, Term} = log_and_return_json_object(Context, Metadata, [return_maps]),
+
+ RandomMsgBin = list_to_binary(RandomMsg),
+ ?assertMatch(
+ #{int := 1,
+ m := RandomMsgBin} = M
+ when map_size(M) == 2,
+ Term).
+
+removing_specific_json_fields_works(Config) ->
+ Context = default_context(Config),
+ FieldMap = [{integer, integer},
+ {msg, msg},
+ {list, false}],
+ ok = application:set_env(
+ rabbit, log,
+ [{file, [{formatter, {rabbit_logger_json_fmt,
+ #{field_map => FieldMap}}}]}],
+ [{persistent, true}]),
+ rabbit_prelaunch_logging:clear_config_run_number(),
+ rabbit_prelaunch_logging:setup(Context),
+
+ Metadata = #{atom => rabbit,
+ integer => 1,
+ string => "string",
+ list => ["s", a, 3]},
+ {RandomMsg, Term} = log_and_return_json_object(Context, Metadata, [return_maps]),
+
+ RandomMsgBin = list_to_binary(RandomMsg),
+ ?assertMatch(
+ #{integer := 1,
+ msg := RandomMsgBin,
+ string := <<"string">>},
+ Term).
+
+removing_non_mentionned_json_fields_works(Config) ->
+ Context = default_context(Config),
+ FieldMap = [{integer, integer},
+ {msg, msg},
+ {'$REST', false}],
+ ok = application:set_env(
+ rabbit, log,
+ [{file, [{formatter, {rabbit_logger_json_fmt,
+ #{field_map => FieldMap}}}]}],
+ [{persistent, true}]),
+ rabbit_prelaunch_logging:clear_config_run_number(),
+ rabbit_prelaunch_logging:setup(Context),
+
+ Metadata = #{atom => rabbit,
+ integer => 1,
+ string => "string",
+ list => ["s", a, 3]},
+ {RandomMsg, Term} = log_and_return_json_object(Context, Metadata, [return_maps]),
+
+ RandomMsgBin = list_to_binary(RandomMsg),
+ ?assertMatch(
+ #{integer := 1,
+ msg := RandomMsgBin} = M
+ when map_size(M) == 2,
+ Term).
+
+configuring_verbosity_works(Config) ->
+ Context = default_context(Config),
+ FieldMap = [{verbosity, v},
+ {msg, msg},
+ {'$REST', false}],
+ VerbMap = #{debug => 2,
+ info => 1,
+ '$REST' => 0},
+ ok = application:set_env(
+ rabbit, log,
+ [{file, [{formatter, {rabbit_logger_json_fmt,
+ #{field_map => FieldMap,
+ verbosity_map => VerbMap}}}]}],
+ [{persistent, true}]),
+ rabbit_prelaunch_logging:clear_config_run_number(),
+ rabbit_prelaunch_logging:setup(Context),
+
+ {RandomMsg, Term} = log_and_return_json_object(Context, #{}, [return_maps]),
+
+ RandomMsgBin = list_to_binary(RandomMsg),
+ ?assertMatch(
+ #{v := 1,
+ msg := RandomMsgBin} = M
+ when map_size(M) == 2,
+ Term).
+
+logging_to_stdout_configured_in_env_works(Config) ->
+ #{var_origins := Origins0} = Context0 = default_context(Config),
+ Context = Context0#{main_log_file => "-",
+ var_origins => Origins0#{
+ main_log_file => environment}},
+ logging_to_stddev_works(standard_io, rmq_1_stdout, Config, Context).
+
+logging_to_stdout_configured_in_config_works(Config) ->
+ Context = default_context(Config),
+ ok = application:set_env(
+ rabbit, log, [{console, [{enabled, true}]}],
+ [{persistent, true}]),
+ logging_to_stddev_works(standard_io, rmq_1_stdout, Config, Context).
+
+logging_to_stderr_configured_in_env_works(Config) ->
+ #{var_origins := Origins0} = Context0 = default_context(Config),
+ Context = Context0#{main_log_file => "-stderr",
+ var_origins => Origins0#{
+ main_log_file => environment}},
+ logging_to_stddev_works(standard_error, rmq_1_stderr, Config, Context).
+
+logging_to_stderr_configured_in_config_works(Config) ->
+ Context = default_context(Config),
+ ok = application:set_env(
+ rabbit, log, [{console, [{enabled, true},
+ {stdio, stderr}]}],
+ [{persistent, true}]),
+ logging_to_stddev_works(standard_error, rmq_1_stderr, Config, Context).
+
+logging_to_stddev_works(Stddev, Id, Config, Context) ->
+ rabbit_prelaunch_logging:clear_config_run_number(),
+ rabbit_prelaunch_logging:setup(Context),
+
+ Handlers = logger:get_handler_config(),
+
+ StddevHandler = get_handler_by_id(Handlers, Id),
+ ?assertNotEqual(undefined, StddevHandler),
+ ?assertMatch(
+ #{level := info,
+ module := rabbit_logger_std_h,
+ filter_default := log,
+ filters := [{progress_reports, {_, stop}},
+ {rmqlog_filter, {_, #{global := info,
+ upgrade := none}}}],
+ formatter := {rabbit_logger_text_fmt, _},
+ config := #{type := Stddev}},
+ StddevHandler),
+
+ UpgradeFileHandler = get_handler_by_id(Handlers, rmq_1_file_1),
+ UpgradeFile = upgrade_log_file_in_context(Context),
+ ?assertNotEqual(undefined, UpgradeFileHandler),
+ ?assertMatch(
+ #{level := info,
+ module := rabbit_logger_std_h,
+ filter_default := stop,
+ filters := [{rmqlog_filter, {_, #{upgrade := info}}}],
+ formatter := {rabbit_logger_text_fmt, _},
+ config := #{type := file,
+ file := UpgradeFile}},
+ UpgradeFileHandler),
+
+ ?assert(ping_log(Id, info, Config)),
+ ?assert(ping_log(Id, info,
+ #{domain => ?RMQLOG_DOMAIN_GLOBAL}, Config)),
+ ?assert(ping_log(Id, info,
+ #{domain => ['3rd_party']}, Config)),
+ ?assertNot(ping_log(Id, info,
+ #{domain => ?RMQLOG_DOMAIN_UPGRADE}, Config)),
+
+ ?assert(ping_log(rmq_1_file_1, info,
+ #{domain => ?RMQLOG_DOMAIN_UPGRADE})),
+ ?assertNot(ping_log(rmq_1_file_1, info,
+ #{domain => ?RMQLOG_DOMAIN_GLOBAL})),
+ ok.
+
+formatting_with_colors_works(Config) ->
+ EscSeqs = make_color_esc_seqs_map(),
+ Context = default_context(Config),
+ ok = application:set_env(
+ rabbit, log, [{console, [{level, debug},
+ {formatter,
+ {rabbit_logger_text_fmt,
+ #{use_colors => true,
+ color_esc_seqs => EscSeqs}}}]}],
+ [{persistent, true}]),
+ formatting_maybe_with_colors_works(Config, Context, EscSeqs).
+
+formatting_without_colors_works(Config) ->
+ EscSeqs = make_color_esc_seqs_map(),
+ Context = default_context(Config),
+ ok = application:set_env(
+ rabbit, log, [{console, [{level, debug},
+ {formatter,
+ {rabbit_logger_text_fmt,
+ #{use_colors => false,
+ color_esc_seqs => EscSeqs}}}]}],
+ [{persistent, true}]),
+ formatting_maybe_with_colors_works(Config, Context, EscSeqs).
+
+make_color_esc_seqs_map() ->
+ lists:foldl(
+ fun(Lvl, Acc) ->
+ EscSeq = "[" ++ atom_to_list(Lvl) ++ " color]",
+ Acc#{Lvl => EscSeq}
+ end, #{}, rabbit_prelaunch_early_logging:levels()).
+
+formatting_maybe_with_colors_works(Config, Context, _EscSeqs) ->
+ rabbit_prelaunch_logging:clear_config_run_number(),
+ rabbit_prelaunch_logging:setup(Context),
+
+ ?assert(ping_log(rmq_1_stdout, debug, Config)),
+ ?assert(ping_log(rmq_1_stdout, info, Config)),
+ ?assert(ping_log(rmq_1_stdout, notice, Config)),
+ ?assert(ping_log(rmq_1_stdout, warning, Config)),
+ ?assert(ping_log(rmq_1_stdout, error, Config)),
+ ?assert(ping_log(rmq_1_stdout, critical, Config)),
+ ?assert(ping_log(rmq_1_stdout, alert, Config)),
+ ?assert(ping_log(rmq_1_stdout, emergency, Config)),
+ ok.
+
+logging_to_exchange_works(Config) ->
+ Context = rabbit_ct_broker_helpers:rpc(
+ Config, 0,
+ rabbit_prelaunch, get_context, []),
+ Handlers = rabbit_ct_broker_helpers:rpc(
+ Config, 0,
+ logger, get_handler_config, []),
+
+ ExchangeHandler = get_handler_by_id(Handlers, rmq_1_exchange),
+ ?assertNotEqual(undefined, ExchangeHandler),
+ ?assertMatch(
+ #{level := info,
+ module := rabbit_logger_exchange_h,
+ filter_default := log,
+ filters := [{progress_reports, {_, stop}},
+ {rmqlog_filter, {_, #{global := info,
+ upgrade := none}}}],
+ formatter := {rabbit_logger_text_fmt, _},
+ config := #{exchange := _}},
+ ExchangeHandler),
+ #{config :=
+ #{exchange := #resource{name = XName} = Exchange}} = ExchangeHandler,
+
+ UpgradeFileHandler = get_handler_by_id(Handlers, rmq_1_file_2),
+ UpgradeFile = upgrade_log_file_in_context(Context),
+ ?assertNotEqual(undefined, UpgradeFileHandler),
+ ?assertMatch(
+ #{level := info,
+ module := rabbit_logger_std_h,
+ filter_default := stop,
+ filters := [{rmqlog_filter, {_, #{upgrade := info}}}],
+ formatter := {rabbit_logger_text_fmt, _},
+ config := #{type := file,
+ file := UpgradeFile}},
+ UpgradeFileHandler),
+
+ %% Wait for the expected exchange to be automatically declared.
+ lists:any(
+ fun(_) ->
+ Ret = rabbit_ct_broker_helpers:rpc(
+ Config, 0,
+ rabbit_exchange, lookup, [Exchange]),
+ case Ret of
+ {ok, _} -> true;
+ _ -> timer:sleep(500),
+ false
+ end
+ end, lists:seq(1, 20)),
+
+ %% Declare a queue to collect all logged messages.
+ {Conn, Chan} = rabbit_ct_client_helpers:open_connection_and_channel(
+ Config),
+ QName = <<"log-messages">>,
+ ?assertMatch(
+ #'queue.declare_ok'{},
+ amqp_channel:call(Chan, #'queue.declare'{queue = QName,
+ durable = false})),
+ ?assertMatch(
+ #'queue.bind_ok'{},
+ amqp_channel:call(Chan, #'queue.bind'{queue = QName,
+ exchange = XName,
+ routing_key = <<"#">>})),
+ Config1 = rabbit_ct_helpers:set_config(
+ Config, {test_channel_and_queue, {Chan, QName}}),
+
+ ?assert(ping_log(rmq_1_exchange, info, Config1)),
+ ?assert(ping_log(rmq_1_exchange, info,
+ #{domain => ?RMQLOG_DOMAIN_GLOBAL}, Config1)),
+ ?assert(ping_log(rmq_1_exchange, info,
+ #{domain => ['3rd_party']}, Config1)),
+ ?assertNot(ping_log(rmq_1_exchange, info,
+ #{domain => ?RMQLOG_DOMAIN_UPGRADE}, Config1)),
+
+ ?assert(ping_log(rmq_1_file_2, info,
+ #{domain => ?RMQLOG_DOMAIN_UPGRADE}, Config)),
+ ?assertNot(ping_log(rmq_1_file_2, info,
+ #{domain => ?RMQLOG_DOMAIN_GLOBAL}, Config)),
+
+ amqp_channel:call(Chan, #'queue.delete'{queue = QName}),
+ rabbit_ct_client_helpers:close_connection_and_channel(Conn, Chan),
+ ok.
+
+logging_to_syslog_works(Config) ->
+ Context = default_context(Config),
+ ok = application:set_env(
+ rabbit, log, [{syslog, [{enabled, true}]}],
+ [{persistent, true}]),
+ rabbit_prelaunch_logging:clear_config_run_number(),
+ rabbit_prelaunch_logging:setup(Context),
+ clear_syslogd_messages(Config),
+
+ Handlers = logger:get_handler_config(),
+
+ SyslogHandler = get_handler_by_id(Handlers, rmq_1_syslog),
+ ?assertNotEqual(undefined, SyslogHandler),
+ ?assertMatch(
+ #{level := info,
+ module := syslog_logger_h,
+ filter_default := log,
+ filters := [{progress_reports, {_, stop}},
+ {rmqlog_filter, {_, #{global := info,
+ upgrade := none}}}],
+ formatter := {rabbit_logger_text_fmt, _},
+ config := #{}},
+ SyslogHandler),
+
+ UpgradeFileHandler = get_handler_by_id(Handlers, rmq_1_file_1),
+ UpgradeFile = upgrade_log_file_in_context(Context),
+ ?assertNotEqual(undefined, UpgradeFileHandler),
+ ?assertMatch(
+ #{level := info,
+ module := rabbit_logger_std_h,
+ filter_default := stop,
+ filters := [{rmqlog_filter, {_, #{upgrade := info}}}],
+ formatter := {rabbit_logger_text_fmt, _},
+ config := #{type := file,
+ file := UpgradeFile}},
+ UpgradeFileHandler),
+
+ ?assert(ping_log(rmq_1_syslog, info, Config)),
+ ?assert(ping_log(rmq_1_syslog, info,
+ #{domain => ?RMQLOG_DOMAIN_GLOBAL}, Config)),
+ ?assert(ping_log(rmq_1_syslog, info,
+ #{domain => ['3rd_party']}, Config)),
+ ?assertNot(ping_log(rmq_1_syslog, info,
+ #{domain => ?RMQLOG_DOMAIN_UPGRADE}, Config)),
+
+ ?assert(ping_log(rmq_1_file_1, info,
+ #{domain => ?RMQLOG_DOMAIN_UPGRADE})),
+ ?assertNot(ping_log(rmq_1_file_1, info,
+ #{domain => ?RMQLOG_DOMAIN_GLOBAL})),
+ ok.
+
+%% -------------------------------------------------------------------
+%% Internal functions.
+%% -------------------------------------------------------------------
+
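+%% A minimal stand-in for the context map otherwise obtained from
+%% rabbit_prelaunch:get_context/0 (see logging_to_exchange_works/1),
+%% limited to the log-related keys exercised by these tests.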
+default_context(Config) ->
+ LogBaseDir = ?config(log_base_dir, Config),
+ MainFile = "rabbit.log",
+ UpgradeFile = "rabbit_upgrade.log",
+ #{log_base_dir => LogBaseDir,
+ main_log_file => MainFile,
+ upgrade_log_file => UpgradeFile,
+ log_levels => undefined,
+ var_origins => #{log_base_dir => default,
+ main_log_file => default,
+ upgrade_log_file => default,
+ log_levels => default}}.
+
+main_log_file_in_context(#{log_base_dir := LogBaseDir,
+ main_log_file := MainLogFile}) ->
+ filename:join(LogBaseDir, MainLogFile).
+
+upgrade_log_file_in_context(#{log_base_dir := LogBaseDir,
+ upgrade_log_file := UpgradeLogFile}) ->
+ filename:join(LogBaseDir, UpgradeLogFile).
+
+get_handler_by_id([#{id := Id} = Handler | _], Id) ->
+ Handler;
+get_handler_by_id([_ | Rest], Id) ->
+ get_handler_by_id(Rest, Id);
+get_handler_by_id([], _) ->
+ undefined.
+
+ping_log(Id, Level) ->
+ ping_log(Id, Level, #{}, []).
+
+ping_log(Id, Level, Metadata) when is_map(Metadata) ->
+ ping_log(Id, Level, Metadata, []);
+ping_log(Id, Level, Config) when is_list(Config) ->
+ ping_log(Id, Level, #{}, Config).
+
+ping_log(Id, Level, Metadata, Config) ->
+ RandomMsg = get_random_string(
+ 32,
+ "abcdefghijklmnopqrstuvwxyz"
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZ"),
+ ct:log("Logging \"~ts\" at level ~ts (~p)", [RandomMsg, Level, Metadata]),
+ case need_rpc(Config) of
+ false -> logger:log(Level, RandomMsg, Metadata);
+ true -> rabbit_ct_broker_helpers:rpc(
+ Config, 0,
+ logger, log, [Level, RandomMsg, Metadata])
+ end,
+ check_log(Id, Level, RandomMsg, Config).
+
+need_rpc(Config) ->
+ rabbit_ct_helpers:get_config(
+ Config, rmq_nodes_count) =/= undefined.
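+%% When the common_test config carries rmq_nodes_count, the suite is
+%% running against a started broker, so logging calls and handler lookups
+%% go over RPC to node 0; otherwise they run in the test process itself.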
+
+check_log(Id, Level, RandomMsg, Config) ->
+ {ok, Handler} = case need_rpc(Config) of
+ false -> logger:get_handler_config(Id);
+ true -> rabbit_ct_broker_helpers:rpc(
+ Config, 0,
+ logger, get_handler_config, [Id])
+ end,
+ check_log1(Handler, Level, RandomMsg, Config).
+
+check_log1(#{id := Id,
+ module := rabbit_logger_std_h,
+ config := #{type := file,
+ file := Filename}},
+ _Level,
+ RandomMsg,
+ Config) ->
+ ok = case need_rpc(Config) of
+ false -> rabbit_logger_std_h:filesync(Id);
+ true -> rabbit_ct_broker_helpers:rpc(
+ Config, 0,
+ rabbit_logger_std_h, filesync, [Id])
+ end,
+ {ok, Content} = file:read_file(Filename),
+ ReOpts = [{capture, none}, multiline],
+ match =:= re:run(Content, RandomMsg ++ "$", ReOpts);
+check_log1(#{module := Mod,
+ config := #{type := Stddev}} = Handler,
+ Level,
+ RandomMsg,
+ Config)
+ when ?IS_STD_H_COMPAT(Mod) andalso ?IS_STDDEV(Stddev) ->
+ Filename = html_report_filename(Config),
+ {ColorStart, ColorEnd} = get_color_config(Handler, Level),
+ ReOpts = [{capture, none}, multiline],
+ lists:any(
+ fun(_) ->
+ {ok, Content} = file:read_file(Filename),
+ Regex =
+ "^" ++ ColorStart ++ ".+" ++ RandomMsg ++ ColorEnd ++ "$",
+ case re:run(Content, Regex, ReOpts) of
+ match -> true;
+ _ -> timer:sleep(500),
+ false
+ end
+ end, lists:seq(1, 10));
+check_log1(#{module := rabbit_logger_exchange_h},
+ _Level,
+ RandomMsg,
+ Config) ->
+ {Chan, QName} = ?config(test_channel_and_queue, Config),
+ ReOpts = [{capture, none}, multiline],
+ lists:any(
+ fun(_) ->
+ Ret = amqp_channel:call(
+ Chan, #'basic.get'{queue = QName, no_ack = false}),
+ case Ret of
+ {#'basic.get_ok'{}, #amqp_msg{payload = Content}} ->
+ case re:run(Content, RandomMsg ++ "$", ReOpts) of
+ match -> true;
+ _ -> timer:sleep(500),
+ false
+ end;
+ #'basic.get_empty'{} ->
+ timer:sleep(500),
+ false;
+ Other ->
+ io:format(standard_error, "OTHER -> ~p~n", [Other]),
+ timer:sleep(500),
+ false
+ end
+ end, lists:seq(1, 10));
+check_log1(#{module := syslog_logger_h},
+ _Level,
+ RandomMsg,
+ Config) ->
+ ReOpts = [{capture, none}, multiline],
+ lists:any(
+ fun(_) ->
+ Buffer = get_syslogd_messages(Config),
+ case re:run(Buffer, RandomMsg ++ "$", ReOpts) of
+ match -> true;
+ _ -> timer:sleep(500),
+ false
+ end
+ end, lists:seq(1, 10)).
+
+get_random_string(Length, AllowedChars) ->
+ lists:foldl(fun(_, Acc) ->
+ [lists:nth(rand:uniform(length(AllowedChars)),
+ AllowedChars)]
+ ++ Acc
+ end, [], lists:seq(1, Length)).
+
+html_report_filename(Config) ->
+ ?config(tc_logfile, Config).
+
+get_color_config(
+ #{formatter := {rabbit_logger_text_fmt,
+ #{use_colors := true,
+ color_esc_seqs := EscSeqs}}}, Level) ->
+ ColorStart = maps:get(Level, EscSeqs),
+ ColorEnd = "\033[0m",
+ {escape_for_re(ColorStart), escape_for_re(ColorEnd)};
+get_color_config(_, _) ->
+ {"", ""}.
+
+escape_for_re(String) ->
+ String1 = string:replace(String, "[", "\\[", all),
+ string:replace(String1, "]", "\\]", all).
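+%% For example, escape_for_re("[info color]") yields (as iodata) the
+%% equivalent of "\\[info color\\]", so the fake escape sequences can be
+%% embedded verbatim in the regular expressions built by check_log1/4.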
+
+log_and_return_line(Context, Metadata) ->
+ RandomMsg = get_random_string(
+ 32,
+ "abcdefghijklmnopqrstuvwxyz"
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZ"),
+ logger:warning(RandomMsg, Metadata),
+
+ rabbit_logger_std_h:filesync(rmq_1_file_1),
+ MainFile = main_log_file_in_context(Context),
+ {ok, Content} = file:read_file(MainFile),
+ ReOpts = [{capture, first, binary}, multiline],
+ {match, [Line]} = re:run(
+ Content,
+ "^.+" ++ RandomMsg ++ ".*$",
+ ReOpts),
+ {RandomMsg, Line}.
+
+log_and_return_json_object(Context, Metadata, DecodeOpts) ->
+ RandomMsg = get_random_string(
+ 32,
+ "abcdefghijklmnopqrstuvwxyz"
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZ"),
+ ?LOG_INFO(RandomMsg, Metadata),
+
+ rabbit_logger_std_h:filesync(rmq_1_file_1),
+ MainFile = main_log_file_in_context(Context),
+ {ok, Content} = file:read_file(MainFile),
+ ReOpts = [{capture, first, binary}, multiline],
+ {match, [Line]} = re:run(
+ Content,
+ "^.+\"" ++ RandomMsg ++ "\".+$",
+ ReOpts),
+ Term = jsx:decode(Line, [{labels, attempt_atom} | DecodeOpts]),
+
+ {RandomMsg, Term}.
+
+%% -------------------------------------------------------------------
+%% Fake syslog server.
+%% -------------------------------------------------------------------
+
+start_fake_syslogd(Config) ->
+ Self = self(),
+ Pid = spawn(fun() -> syslogd_init(Self) end),
+ TcpPort = receive {syslogd_ready, P} -> P end,
+
+ rabbit_ct_helpers:set_config(
+ Config, [{syslogd_pid, Pid},
+ {syslogd_tcp_port, TcpPort}]).
+
+stop_fake_syslogd(Config) ->
+ Pid = ?config(syslogd_pid, Config),
+ Pid ! stop,
+ Config1 = rabbit_ct_helpers:delete_config(Config, syslogd_pid),
+ rabbit_ct_helpers:delete_config(Config1, syslogd_tcp_port).
+
+get_syslogd_messages(Config) ->
+ Pid = ?config(syslogd_pid, Config),
+ Pid ! {get_messages, self()},
+ receive {syslogd_messages, Buffer} -> Buffer end.
+
+clear_syslogd_messages(Config) ->
+ Pid = ?config(syslogd_pid, Config),
+ Pid ! clear_messages.
+
+syslogd_init(Parent) ->
+ {ok, TcpPort, LSock} = open_tcp_listening_sock(22000),
+ ct:pal(
+ "Fake syslogd ready (~p), listening on TCP port ~p",
+ [self(), TcpPort]),
+ Parent ! {syslogd_ready, TcpPort},
+ syslogd_start_loop(LSock).
+
+open_tcp_listening_sock(TcpPort) ->
+ Options = [binary,
+ {active, true}],
+ case gen_tcp:listen(TcpPort, Options) of
+ {ok, LSock} -> {ok, TcpPort, LSock};
+ {error, eaddrinuse} -> open_tcp_listening_sock(TcpPort + 1)
+ end.
+
+syslogd_start_loop(LSock) ->
+ ct:pal("Fake syslogd: accepting new connection", []),
+ {ok, Sock} = gen_tcp:accept(LSock),
+ ct:pal("Fake syslogd: accepted new connection!", []),
+ syslogd_loop(LSock, Sock, [], <<>>).
+
+syslogd_loop(LSock, Sock, Messages, Buffer) ->
+ try
+ receive
+ {tcp, Sock, NewData} ->
+ Buffer1 = <<Buffer/binary, NewData/binary>>,
+ {NewMessages, Buffer2} = parse_messages(Buffer1),
+ syslogd_loop(LSock, Sock, Messages ++ NewMessages, Buffer2);
+ {get_messages, From} ->
+ ct:pal(
+ "Fake syslogd: sending messages to ~p:~n~p",
+ [From, Messages]),
+ From ! {syslogd_messages, Messages},
+ syslogd_loop(LSock, Sock, Messages, Buffer);
+ clear_messages ->
+ ct:pal("Fake syslogd: clearing buffer", []),
+ syslogd_loop(LSock, Sock, [], Buffer);
+ {tcp_closed, Sock} ->
+ ct:pal("Fake syslogd: socket closed, restarting loop", []),
+ syslogd_start_loop(LSock);
+ stop ->
+ ct:pal("Fake syslogd: exiting", []),
+ _ = gen_tcp:close(Sock),
+ _ = gen_tcp:close(LSock);
+ Other ->
+ ct:pal("Fake syslogd: unhandled message: ~p", [Other]),
+ syslogd_loop(LSock, Sock, Messages, Buffer)
+ end
+ catch
+ C:R:S ->
+ ct:pal("~p ~p ~p", [C, R, S]),
+ throw(R)
+ end.
+
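+%% parse_messages/1 expects octet-counted framing (a decimal length, a
+%% space, then that many bytes of message, as in RFC 6587). For instance,
+%%   parse_messages(<<"5 hello3 foo">>)
+%% returns {[<<"hello\n">>, <<"foo\n">>], <<>>}; an incomplete frame stays
+%% in the buffer until more data arrives.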
+parse_messages(Buffer) ->
+ parse_messages(Buffer, []).
+
+parse_messages(Buffer, Messages) ->
+ ReOpts = [{capture, all_but_first, binary}],
+ case re:run(Buffer, "^([0-9]+) (.*)", ReOpts) of
+ {match, [Length0, Buffer1]} ->
+ Length = list_to_integer(binary_to_list(Length0)),
+ case Buffer1 of
+ <<Message:Length/binary, Buffer2/binary>> ->
+ parse_messages(
+ Buffer2, [<<Message/binary, $\n>> | Messages]);
+ _ ->
+ {lists:reverse(Messages), Buffer}
+ end;
+ _ ->
+ {lists:reverse(Messages), Buffer}
+ end.
diff --git a/deps/rabbit/test/maintenance_mode_SUITE.erl b/deps/rabbit/test/maintenance_mode_SUITE.erl
index 3abbf9b064..8b43071bd6 100644
--- a/deps/rabbit/test/maintenance_mode_SUITE.erl
+++ b/deps/rabbit/test/maintenance_mode_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(maintenance_mode_SUITE).
@@ -10,6 +10,7 @@
-include_lib("common_test/include/ct.hrl").
-include_lib("amqp_client/include/amqp_client.hrl").
-include_lib("eunit/include/eunit.hrl").
+-include_lib("rabbitmq_ct_helpers/include/rabbit_assert.hrl").
-compile(export_all).
@@ -24,8 +25,7 @@ groups() ->
{cluster_size_3, [], [
maintenance_mode_status,
listener_suspension_status,
- client_connection_closure,
- classic_mirrored_queue_leadership_transfer
+ client_connection_closure
]},
{quorum_queues, [], [
quorum_queue_leadership_transfer
@@ -43,10 +43,20 @@ init_per_suite(Config) ->
end_per_suite(Config) ->
rabbit_ct_helpers:run_teardown_steps(Config).
+init_per_group(quorum_queues, Config) ->
+ case rabbit_ct_helpers:is_mixed_versions() of
+ true ->
+ %% In a mixed 3.8/3.9 cluster, unless the 3.8 node is the
+ %% one in maintenance mode, a quorum won't be available
+ %% due to mixed ra major versions
+ {skip, "test not supported in mixed version mode"};
+ _ ->
+ rabbit_ct_helpers:set_config(Config,
+ [{rmq_nodes_count, 3}])
+ end;
init_per_group(_Group, Config) ->
- rabbit_ct_helpers:set_config(Config, [
- {rmq_nodes_count, 3}
- ]).
+ rabbit_ct_helpers:set_config(Config,
+ [{rmq_nodes_count, 3}]).
end_per_group(_, Config) ->
Config.
@@ -211,33 +221,6 @@ client_connection_closure(Config) ->
rabbit_ct_broker_helpers:revive_node(Config, A).
-classic_mirrored_queue_leadership_transfer(Config) ->
- [A | _] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
- ct:pal("Picked node ~s for maintenance tests...", [A]),
-
- rabbit_ct_helpers:await_condition(
- fun () -> not rabbit_ct_broker_helpers:is_being_drained_local_read(Config, A) end, 10000),
-
- PolicyPattern = <<"^cq.mirrored">>,
- rabbit_ct_broker_helpers:set_ha_policy(Config, A, PolicyPattern, <<"all">>),
-
- Conn = rabbit_ct_client_helpers:open_connection(Config, A),
- {ok, Ch} = amqp_connection:open_channel(Conn),
- QName = <<"cq.mirrored.1">>,
- amqp_channel:call(Ch, #'queue.declare'{queue = QName, durable = true}),
-
- ?assertEqual(1, length(rabbit_ct_broker_helpers:rpc(Config, A, rabbit_amqqueue, list_local, [<<"/">>]))),
-
- rabbit_ct_broker_helpers:drain_node(Config, A),
- rabbit_ct_helpers:await_condition(
- fun () -> rabbit_ct_broker_helpers:is_being_drained_local_read(Config, A) end, 10000),
-
- ?assertEqual(0, length(rabbit_ct_broker_helpers:rpc(Config, A, rabbit_amqqueue, list_local, [<<"/">>]))),
-
- rabbit_ct_broker_helpers:revive_node(Config, A),
- %% rabbit_ct_broker_helpers:set_ha_policy/4 uses pattern for policy name
- rabbit_ct_broker_helpers:clear_policy(Config, A, PolicyPattern).
-
quorum_queue_leadership_transfer(Config) ->
[A | _] = Nodenames = rabbit_ct_broker_helpers:get_node_configs(
Config, nodename),
@@ -265,15 +248,14 @@ quorum_queue_leadership_transfer(Config) ->
Config, Nodenames),
case AllTheSame of
true ->
- rabbit_ct_helpers:await_condition(
- fun () ->
- LocalLeaders = rabbit_ct_broker_helpers:rpc(
- Config, A,
- rabbit_amqqueue,
- list_local_leaders,
- []),
- length(LocalLeaders) =:= 0
- end, 20000);
+ ?awaitMatch(
+ LocalLeaders when length(LocalLeaders) == 0,
+ rabbit_ct_broker_helpers:rpc(
+ Config, A,
+ rabbit_amqqueue,
+ list_local_leaders,
+ []),
+ 20000);
false ->
ct:pal(
?LOW_IMPORTANCE,
diff --git a/deps/rabbit/test/many_node_ha_SUITE.erl b/deps/rabbit/test/many_node_ha_SUITE.erl
index ece7dc8830..c72c3c5946 100644
--- a/deps/rabbit/test/many_node_ha_SUITE.erl
+++ b/deps/rabbit/test/many_node_ha_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(many_node_ha_SUITE).
diff --git a/deps/rabbit/test/message_size_limit_SUITE.erl b/deps/rabbit/test/message_size_limit_SUITE.erl
index f43a582c85..97a8497010 100644
--- a/deps/rabbit/test/message_size_limit_SUITE.erl
+++ b/deps/rabbit/test/message_size_limit_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(message_size_limit_SUITE).
diff --git a/deps/rabbit/test/metrics_SUITE.erl b/deps/rabbit/test/metrics_SUITE.erl
index e585ccd5a8..446d54efdd 100644
--- a/deps/rabbit/test/metrics_SUITE.erl
+++ b/deps/rabbit/test/metrics_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(metrics_SUITE).
-compile(export_all).
diff --git a/deps/rabbit/test/mirrored_supervisor_SUITE.erl b/deps/rabbit/test/mirrored_supervisor_SUITE.erl
index 7ce88cfdaa..04c93c2e56 100644
--- a/deps/rabbit/test/mirrored_supervisor_SUITE.erl
+++ b/deps/rabbit/test/mirrored_supervisor_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(mirrored_supervisor_SUITE).
diff --git a/deps/rabbit/test/mirrored_supervisor_SUITE_gs.erl b/deps/rabbit/test/mirrored_supervisor_SUITE_gs.erl
index 62245231d7..16743f6a58 100644
--- a/deps/rabbit/test/mirrored_supervisor_SUITE_gs.erl
+++ b/deps/rabbit/test/mirrored_supervisor_SUITE_gs.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(mirrored_supervisor_SUITE_gs).
diff --git a/deps/rabbit/test/msg_store_SUITE.erl b/deps/rabbit/test/msg_store_SUITE.erl
index e349aa4443..6add6dcae0 100644
--- a/deps/rabbit/test/msg_store_SUITE.erl
+++ b/deps/rabbit/test/msg_store_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(msg_store_SUITE).
diff --git a/deps/rabbit/test/peer_discovery_classic_config_SUITE.erl b/deps/rabbit/test/peer_discovery_classic_config_SUITE.erl
index ddb753adf8..7ec621088c 100644
--- a/deps/rabbit/test/peer_discovery_classic_config_SUITE.erl
+++ b/deps/rabbit/test/peer_discovery_classic_config_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(peer_discovery_classic_config_SUITE).
@@ -10,6 +10,7 @@
-include_lib("common_test/include/ct.hrl").
-include_lib("amqp_client/include/amqp_client.hrl").
-include_lib("eunit/include/eunit.hrl").
+-include_lib("rabbitmq_ct_helpers/include/rabbit_assert.hrl").
-import(rabbit_ct_broker_helpers, [
cluster_members_online/2
@@ -36,6 +37,7 @@ suite() ->
{timetrap, {minutes, 5}}
].
+-define(TIMEOUT, 120_000).
%%
%% Setup/teardown.
@@ -48,12 +50,6 @@ init_per_suite(Config) ->
end_per_suite(Config) ->
rabbit_ct_helpers:run_teardown_steps(Config).
-init_per_group(_, Config) ->
- Config.
-
-end_per_group(_, Config) ->
- Config.
-
init_per_testcase(successful_discovery = Testcase, Config) ->
Config1 = rabbit_ct_helpers:testcase_started(Config, Testcase),
@@ -73,7 +69,7 @@ init_per_testcase(successful_discovery = Testcase, Config) ->
{rabbit, [
{cluster_nodes, {NodeNamesWithHostname, disc}},
{cluster_formation, [
- {randomized_startup_delay_range, {1, 10}}
+ {internal_lock_retries, 10}
]}
]}),
rabbit_ct_helpers:run_steps(Config3,
@@ -104,7 +100,7 @@ init_per_testcase(successful_discovery_with_a_subset_of_nodes_coming_online = Te
]},
{cluster_nodes, {NodeNamesWithHostname, disc}},
{cluster_formation, [
- {randomized_startup_delay_range, {1, 10}}
+ {internal_lock_retries, 10}
]}
]}),
rabbit_ct_helpers:run_steps(Config3,
@@ -121,7 +117,7 @@ init_per_testcase(no_nodes_configured = Testcase, Config) ->
{rabbit, [
{cluster_nodes, {[], disc}},
{cluster_formation, [
- {randomized_startup_delay_range, {1, 10}}
+ {internal_lock_retries, 10}
]}
]}),
rabbit_ct_helpers:run_steps(Config3,
@@ -145,35 +141,21 @@ end_per_testcase(Testcase, Config) ->
%% Test cases
%%
successful_discovery(Config) ->
- Condition = fun() ->
- 3 =:= length(cluster_members_online(Config, 0)) andalso
- 3 =:= length(cluster_members_online(Config, 1))
- end,
- await_cluster(Config, Condition, [1, 2]).
+ ?awaitMatch(
+ {M1, M2} when length(M1) =:= 3; length(M2) =:= 3,
+ {cluster_members_online(Config, 0),
+ cluster_members_online(Config, 1)},
+ ?TIMEOUT).
successful_discovery_with_a_subset_of_nodes_coming_online(Config) ->
- Condition = fun() ->
- 2 =:= length(cluster_members_online(Config, 0)) andalso
- 2 =:= length(cluster_members_online(Config, 1))
- end,
- await_cluster(Config, Condition, [1]).
+ ?awaitMatch(
+ {M1, M2} when length(M1) =:= 2; length(M2) =:= 2,
+ {cluster_members_online(Config, 0),
+ cluster_members_online(Config, 1)},
+ ?TIMEOUT).
no_nodes_configured(Config) ->
- Condition = fun() -> length(cluster_members_online(Config, 0)) < 2 end,
- await_cluster(Config, Condition, [1]).
-
-reset_and_restart_node(Config, I) when is_integer(I) andalso I >= 0 ->
- Name = rabbit_ct_broker_helpers:get_node_config(Config, I, nodename),
- rabbit_control_helper:command(stop_app, Name),
- rabbit_ct_broker_helpers:reset_node(Config, Name),
- rabbit_control_helper:command(start_app, Name).
-
-await_cluster(Config, Condition, Nodes) ->
- try
- rabbit_ct_helpers:await_condition(Condition, 30000)
- catch
- exit:{test_case_failed, _} ->
- ct:pal(?LOW_IMPORTANCE, "Possible dead-lock; resetting/restarting these nodes: ~p", [Nodes]),
- [reset_and_restart_node(Config, N) || N <- Nodes],
- rabbit_ct_helpers:await_condition(Condition, 30000)
- end.
+ ?awaitMatch(
+ M when length(M) < 2,
+ cluster_members_online(Config, 0),
+ ?TIMEOUT).
diff --git a/deps/rabbit/test/peer_discovery_dns_SUITE.erl b/deps/rabbit/test/peer_discovery_dns_SUITE.erl
index 5184bc11eb..34faccc7e5 100644
--- a/deps/rabbit/test/peer_discovery_dns_SUITE.erl
+++ b/deps/rabbit/test/peer_discovery_dns_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(peer_discovery_dns_SUITE).
@@ -97,8 +97,12 @@ hostname_discovery_with_short_node_names(_) ->
node_discovery_with_long_node_names(_) ->
Result = rabbit_peer_discovery_dns:discover_nodes(?DISCOVERY_ENDPOINT_RECORD_A, true),
- ?assert(lists:member('ct_rabbit@dns.google', Result)).
+ ?assert(lists:member(list_to_atom(sname() ++ "@dns.google"), Result)).
node_discovery_with_short_node_names(_) ->
Result = rabbit_peer_discovery_dns:discover_nodes(?DISCOVERY_ENDPOINT_RECORD_A, false),
- ?assert(lists:member(ct_rabbit@dns, Result)).
+ ?assert(lists:member(list_to_atom(sname() ++ "@dns"), Result)).
+
+sname() ->
+ [Sname | _] = string:split(atom_to_list(erlang:node()), "@"),
+ Sname.
diff --git a/deps/rabbit/test/per_user_connection_channel_limit_SUITE.erl b/deps/rabbit/test/per_user_connection_channel_limit_SUITE.erl
index 43c860c8bd..5157f1a1d5 100644
--- a/deps/rabbit/test/per_user_connection_channel_limit_SUITE.erl
+++ b/deps/rabbit/test/per_user_connection_channel_limit_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(per_user_connection_channel_limit_SUITE).
@@ -75,11 +75,24 @@ init_per_group(cluster_size_1_network, Config) ->
Config1 = rabbit_ct_helpers:set_config(Config, [{connection_type, network}]),
init_per_multinode_group(cluster_size_1_network, Config1, 1);
init_per_group(cluster_size_2_network, Config) ->
- Config1 = rabbit_ct_helpers:set_config(Config, [{connection_type, network}]),
- init_per_multinode_group(cluster_size_2_network, Config1, 2);
+ case rabbit_ct_helpers:is_mixed_versions() of
+ true ->
+ %% In a mixed 3.8/3.9 cluster, changes to rabbit_core_ff.erl imply that some
+ %% feature flag related migrations cannot occur, and therefore user_limits
+ %% cannot be enabled in a 3.8/3.9 mixed cluster
+ {skip, "cluster_size_2_network is not mixed version compatible"};
+ _ ->
+ Config1 = rabbit_ct_helpers:set_config(Config, [{connection_type, network}]),
+ init_per_multinode_group(cluster_size_2_network, Config1, 2)
+ end;
init_per_group(cluster_size_2_direct, Config) ->
- Config1 = rabbit_ct_helpers:set_config(Config, [{connection_type, direct}]),
- init_per_multinode_group(cluster_size_2_direct, Config1, 2);
+ case rabbit_ct_helpers:is_mixed_versions() of
+ true ->
+            {skip, "cluster_size_2_direct is not mixed version compatible"};
+ _ ->
+ Config1 = rabbit_ct_helpers:set_config(Config, [{connection_type, direct}]),
+ init_per_multinode_group(cluster_size_2_direct, Config1, 2)
+ end;
init_per_group(cluster_rename, Config) ->
init_per_multinode_group(cluster_rename, Config, 2).
@@ -103,9 +116,12 @@ init_per_multinode_group(Group, Config, NodeCount) ->
case EnableFF of
ok ->
Config2;
- Skip ->
+ {skip, _} = Skip ->
end_per_group(Group, Config2),
- Skip
+ Skip;
+ Other ->
+ end_per_group(Group, Config2),
+ {skip, Other}
end
end.
@@ -458,10 +474,10 @@ most_basic_cluster_connection_and_channel_count(Config) ->
?assertEqual(15, count_channels_of_user(Config, Username)),
close_channels(Chans1 ++ Chans2 ++ Chans3),
- ?awaitMatch(0, count_channels_of_user(Config, Username), 60000),
+ ?awaitMatch(0, count_channels_of_user(Config, Username), 60000, 3000),
close_connections([Conn1, Conn2, Conn3]),
- ?awaitMatch(0, count_connections_of_user(Config, Username), 60000).
+ ?awaitMatch(0, count_connections_of_user(Config, Username), 60000, 3000).
cluster_single_user_connection_and_channel_count(Config) ->
Username = proplists:get_value(rmq_username, Config),
@@ -761,7 +777,6 @@ cluster_node_list_on_node(Config) ->
end),
rabbit_ct_broker_helpers:stop_broker(Config, 1),
- await_running_node_refresh(Config, 0),
rabbit_ct_helpers:await_condition(
fun () ->
@@ -821,7 +836,7 @@ single_node_single_user_limit_with(Config, ConnLimit, ChLimit) ->
end),
close_connections([Conn1, Conn2, Conn3, Conn4, Conn5]),
- ?awaitMatch(0, count_connections_of_user(Config, Username), 60000),
+ ?awaitMatch(0, count_connections_of_user(Config, Username), 60000, 3000),
set_user_connection_and_channel_limit(Config, Username, -1, -1).
@@ -868,7 +883,7 @@ single_node_single_user_zero_limit(Config) ->
end),
close_connections([Conn1, Conn2]),
- ?awaitMatch(0, count_connections_of_user(Config, Username), 60000).
+ ?awaitMatch(0, count_connections_of_user(Config, Username), 60000, 3000).
single_node_single_user_clear_limits(Config) ->
Username = proplists:get_value(rmq_username, Config),
@@ -917,7 +932,7 @@ single_node_single_user_clear_limits(Config) ->
end),
close_connections([Conn2, Conn3, Conn4, Conn5, Conn6, Conn7]),
- ?awaitMatch(0, count_connections_of_user(Config, Username), 5000),
+ ?awaitMatch(0, count_connections_of_user(Config, Username), 5000, 1000),
set_user_connection_and_channel_limit(Config, Username, -1, -1).
@@ -1178,10 +1193,10 @@ cluster_single_user_limit(Config) ->
[Chans3, Chans4] = [open_channels(Conn, 5) || Conn <- Conns2],
close_channels(Chans2 ++ Chans3 ++ Chans4),
- ?awaitMatch(0, count_channels_of_user(Config, Username), 60000),
+ ?awaitMatch(0, count_channels_of_user(Config, Username), 60000, 3000),
close_connections([Conn2, Conn3, Conn4]),
- ?awaitMatch(0, count_connections_of_user(Config, Username), 60000),
+ ?awaitMatch(0, count_connections_of_user(Config, Username), 60000, 3000),
set_user_connection_and_channel_limit(Config, Username, -1, -1).
@@ -1225,7 +1240,7 @@ cluster_single_user_limit2(Config) ->
end),
close_connections([Conn2, Conn3, Conn4, Conn5, Conn6]),
- ?awaitMatch(0, count_connections_of_user(Config, Username), 5000),
+ ?awaitMatch(0, count_connections_of_user(Config, Username), 5000, 1000),
set_user_connection_and_channel_limit(Config, Username, -1, -1).
@@ -1477,12 +1492,12 @@ cluster_multiple_users_zero_limit(Config) ->
[Chans1, Chans2, Chans3, Chans4] = [open_channels(Conn, 5) || Conn <- Conns1],
close_channels(Chans1 ++ Chans2 ++ Chans3 ++ Chans4),
- ?awaitMatch(0, count_channels_of_user(Config, Username1), 60000),
- ?awaitMatch(0, count_channels_of_user(Config, Username2), 60000),
+ ?awaitMatch(0, count_channels_of_user(Config, Username1), 60000, 3000),
+ ?awaitMatch(0, count_channels_of_user(Config, Username2), 60000, 3000),
close_connections([Conn1, Conn2, Conn3, Conn4]),
- ?awaitMatch(0, count_connections_of_user(Config, Username1), 60000),
- ?awaitMatch(0, count_connections_of_user(Config, Username2), 60000),
+ ?awaitMatch(0, count_connections_of_user(Config, Username1), 60000, 3000),
+ ?awaitMatch(0, count_connections_of_user(Config, Username2), 60000, 3000),
set_user_connection_and_channel_limit(Config, Username1, -1, -1),
set_user_connection_and_channel_limit(Config, Username2, -1, -1).
@@ -1518,7 +1533,9 @@ open_channels(Conn, N) ->
open_channel(Conn) when is_pid(Conn) ->
try amqp_connection:open_channel(Conn) of
- {ok, Ch} -> Ch
+ {ok, Ch} -> Ch;
+ {error, _} ->
+ {error, not_allowed}
catch
_:_Error -> {error, not_allowed}
end.
@@ -1633,9 +1650,6 @@ clear_all_user_limits(Config, NodeIndex, Username) ->
ok = rabbit_ct_broker_helpers:control_action(
clear_user_limits, Node, [rabbit_data_coercion:to_list(Username), "all"]).
-await_running_node_refresh(_Config, _NodeIndex) ->
- timer:sleep(250).
-
expect_that_client_connection_is_rejected(Config) ->
expect_that_client_connection_is_rejected(Config, 0).
diff --git a/deps/rabbit/test/per_user_connection_channel_limit_partitions_SUITE.erl b/deps/rabbit/test/per_user_connection_channel_limit_partitions_SUITE.erl
index 8af68f0112..53e09bc960 100644
--- a/deps/rabbit/test/per_user_connection_channel_limit_partitions_SUITE.erl
+++ b/deps/rabbit/test/per_user_connection_channel_limit_partitions_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(per_user_connection_channel_limit_partitions_SUITE).
@@ -10,6 +10,7 @@
-include_lib("common_test/include/ct.hrl").
-include_lib("amqp_client/include/amqp_client.hrl").
-include_lib("eunit/include/eunit.hrl").
+-include_lib("rabbitmq_ct_helpers/include/rabbit_assert.hrl").
-compile(export_all).
@@ -50,8 +51,16 @@ end_per_suite(Config) ->
rabbit_ct_helpers:run_teardown_steps(Config).
init_per_group(net_ticktime_1 = Group, Config) ->
- Config1 = rabbit_ct_helpers:set_config(Config, [{net_ticktime, 1}]),
- init_per_multinode_group(Group, Config1, 3).
+ case rabbit_ct_helpers:is_mixed_versions() of
+ true ->
+ %% In a mixed 3.8/3.9 cluster, changes to rabbit_core_ff.erl imply that some
+ %% feature flag related migrations cannot occur, and therefore user_limits
+ %% cannot be enabled in a 3.8/3.9 mixed cluster
+ {skip, "group is not mixed version compatible"};
+ _ ->
+ Config1 = rabbit_ct_helpers:set_config(Config, [{net_ticktime, 1}]),
+ init_per_multinode_group(Group, Config1, 3)
+ end.
init_per_multinode_group(Group, Config, NodeCount) ->
Suffix = rabbit_ct_helpers:testcase_absname(Config, "", "-"),
@@ -67,9 +76,12 @@ init_per_multinode_group(Group, Config, NodeCount) ->
case EnableFF of
ok ->
Config2;
- Skip ->
+ {skip, _} = Skip ->
end_per_group(Group, Config2),
- Skip
+ Skip;
+ Other ->
+ end_per_group(Group, Config2),
+ {skip, Other}
end.
end_per_group(_Group, Config) ->
@@ -105,8 +117,10 @@ cluster_full_partition_with_autoheal(Config) ->
_Chans1 = [_|_] = open_channels(Conn1, 5),
_Chans3 = [_|_] = open_channels(Conn3, 5),
_Chans5 = [_|_] = open_channels(Conn5, 5),
- wait_for_count_connections_in(Config, Username, 6, 60000),
- ?assertEqual(15, count_channels_in(Config, Username)),
+ ?awaitMatch({6, 15},
+ {count_connections_in(Config, Username),
+ count_channels_in(Config, Username)},
+ 60000, 3000),
%% B drops off the network, non-reachable by either A or C
rabbit_ct_broker_helpers:block_traffic_between(A, B),
@@ -115,23 +129,29 @@ cluster_full_partition_with_autoheal(Config) ->
%% A and C are still connected, so 4 connections are tracked
%% All connections to B are dropped
- wait_for_count_connections_in(Config, Username, 4, 60000),
- ?assertEqual(10, count_channels_in(Config, Username)),
+ ?awaitMatch({4, 10},
+ {count_connections_in(Config, Username),
+ count_channels_in(Config, Username)},
+ 60000, 3000),
rabbit_ct_broker_helpers:allow_traffic_between(A, B),
rabbit_ct_broker_helpers:allow_traffic_between(B, C),
timer:sleep(?DELAY),
%% during autoheal B's connections were dropped
- wait_for_count_connections_in(Config, Username, 4, 60000),
- ?assertEqual(10, count_channels_in(Config, Username)),
+ ?awaitMatch({4, 10},
+ {count_connections_in(Config, Username),
+ count_channels_in(Config, Username)},
+ 60000, 3000),
lists:foreach(fun (Conn) ->
(catch rabbit_ct_client_helpers:close_connection(Conn))
end, [Conn1, Conn2, Conn3, Conn4,
Conn5, Conn6]),
- ?assertEqual(0, count_connections_in(Config, Username)),
- ?assertEqual(0, count_channels_in(Config, Username)),
+ ?awaitMatch({0, 0},
+ {count_connections_in(Config, Username),
+ count_channels_in(Config, Username)},
+ 60000, 3000),
passed.
@@ -139,19 +159,6 @@ cluster_full_partition_with_autoheal(Config) ->
%% Helpers
%% -------------------------------------------------------------------
-wait_for_count_connections_in(Config, Username, Expected, Time) when Time =< 0 ->
- ?assertMatch(Connections when length(Connections) == Expected,
- connections_in(Config, Username));
-wait_for_count_connections_in(Config, Username, Expected, Time) ->
- case connections_in(Config, Username) of
- Connections when length(Connections) == Expected ->
- ok;
- _ ->
- Sleep = 3000,
- timer:sleep(Sleep),
- wait_for_count_connections_in(Config, Username, Expected, Time - Sleep)
- end.
-
open_channels(Conn, N) ->
[begin
{ok, Ch} = amqp_connection:open_channel(Conn),
diff --git a/deps/rabbit/test/per_user_connection_channel_tracking_SUITE.erl b/deps/rabbit/test/per_user_connection_channel_tracking_SUITE.erl
index 8b4bd91d09..cc99d06ec3 100644
--- a/deps/rabbit/test/per_user_connection_channel_tracking_SUITE.erl
+++ b/deps/rabbit/test/per_user_connection_channel_tracking_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(per_user_connection_channel_tracking_SUITE).
@@ -63,8 +63,16 @@ init_per_group(cluster_size_1_network, Config) ->
Config1 = rabbit_ct_helpers:set_config(Config, [{connection_type, network}]),
init_per_multinode_group(cluster_size_1_network, Config1, 1);
init_per_group(cluster_size_2_network, Config) ->
- Config1 = rabbit_ct_helpers:set_config(Config, [{connection_type, network}]),
- init_per_multinode_group(cluster_size_2_network, Config1, 2);
+ case rabbit_ct_helpers:is_mixed_versions() of
+ false ->
+ Config1 = rabbit_ct_helpers:set_config(Config, [{connection_type, network}]),
+ init_per_multinode_group(cluster_size_2_network, Config1, 2);
+ _ ->
+ %% In a mixed 3.8/3.9 cluster, changes to rabbit_core_ff.erl imply that some
+ %% feature flag related migrations cannot occur, and therefore user_limits
+ %% cannot be enabled in a 3.8/3.9 mixed cluster
+ {skip, "cluster_size_2_network is not mixed version compatible"}
+ end;
init_per_group(cluster_size_1_direct, Config) ->
Config1 = rabbit_ct_helpers:set_config(Config, [{connection_type, direct}]),
init_per_multinode_group(cluster_size_1_direct, Config1, 1);
@@ -86,9 +94,12 @@ init_per_multinode_group(Group, Config, NodeCount) ->
case EnableFF of
ok ->
Config2;
- Skip ->
+ {skip, _} = Skip ->
+ end_per_group(Group, Config2),
+ Skip;
+ Other ->
end_per_group(Group, Config2),
- Skip
+ {skip, Other}
end.
end_per_group(_Group, Config) ->
diff --git a/deps/rabbit/test/per_user_connection_tracking_SUITE.erl b/deps/rabbit/test/per_user_connection_tracking_SUITE.erl
index 36b0962eac..9a3243d0ac 100644
--- a/deps/rabbit/test/per_user_connection_tracking_SUITE.erl
+++ b/deps/rabbit/test/per_user_connection_tracking_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(per_user_connection_tracking_SUITE).
diff --git a/deps/rabbit/test/per_vhost_connection_limit_SUITE.erl b/deps/rabbit/test/per_vhost_connection_limit_SUITE.erl
index a140b3e829..c6ddafeb60 100644
--- a/deps/rabbit/test/per_vhost_connection_limit_SUITE.erl
+++ b/deps/rabbit/test/per_vhost_connection_limit_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(per_vhost_connection_limit_SUITE).
@@ -10,6 +10,7 @@
-include_lib("common_test/include/ct.hrl").
-include_lib("amqp_client/include/amqp_client.hrl").
-include_lib("eunit/include/eunit.hrl").
+-include_lib("rabbitmq_ct_helpers/include/rabbit_assert.hrl").
-compile(export_all).
@@ -61,6 +62,8 @@ suite() ->
%% see partitions_SUITE
-define(DELAY, 9000).
+-define(AWAIT, 1000).
+-define(INTERVAL, 250).
%% -------------------------------------------------------------------
%% Testsuite setup/teardown.
@@ -148,36 +151,36 @@ most_basic_single_node_connection_count(Config) ->
VHost = <<"/">>,
?assertEqual(0, count_connections_in(Config, VHost)),
[Conn] = open_connections(Config, [0]),
- ?assertEqual(1, count_connections_in(Config, VHost)),
+ ?awaitMatch(1, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL),
close_connections([Conn]),
- ?assertEqual(0, count_connections_in(Config, VHost)).
+ ?awaitMatch(0, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL).
single_node_single_vhost_connection_count(Config) ->
VHost = <<"/">>,
?assertEqual(0, count_connections_in(Config, VHost)),
[Conn1] = open_connections(Config, [0]),
- ?assertEqual(1, count_connections_in(Config, VHost)),
+ ?awaitMatch(1, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL),
close_connections([Conn1]),
- ?assertEqual(0, count_connections_in(Config, VHost)),
+ ?awaitMatch(0, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL),
[Conn2] = open_connections(Config, [0]),
- ?assertEqual(1, count_connections_in(Config, VHost)),
+ ?awaitMatch(1, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL),
[Conn3] = open_connections(Config, [0]),
- ?assertEqual(2, count_connections_in(Config, VHost)),
+ ?awaitMatch(2, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL),
[Conn4] = open_connections(Config, [0]),
- ?assertEqual(3, count_connections_in(Config, VHost)),
+ ?awaitMatch(3, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL),
kill_connections([Conn4]),
- ?assertEqual(2, count_connections_in(Config, VHost)),
+ ?awaitMatch(2, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL),
[Conn5] = open_connections(Config, [0]),
- ?assertEqual(3, count_connections_in(Config, VHost)),
+ ?awaitMatch(3, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL),
close_connections([Conn2, Conn3, Conn5]),
- ?assertEqual(0, count_connections_in(Config, VHost)).
+ ?awaitMatch(0, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL).
single_node_multiple_vhosts_connection_count(Config) ->
VHost1 = <<"vhost1">>,
@@ -190,32 +193,32 @@ single_node_multiple_vhosts_connection_count(Config) ->
?assertEqual(0, count_connections_in(Config, VHost2)),
[Conn1] = open_connections(Config, [{0, VHost1}]),
- ?assertEqual(1, count_connections_in(Config, VHost1)),
+ ?awaitMatch(1, count_connections_in(Config, VHost1), ?AWAIT, ?INTERVAL),
close_connections([Conn1]),
- ?assertEqual(0, count_connections_in(Config, VHost1)),
+ ?awaitMatch(0, count_connections_in(Config, VHost1), ?AWAIT, ?INTERVAL),
[Conn2] = open_connections(Config, [{0, VHost2}]),
- ?assertEqual(1, count_connections_in(Config, VHost2)),
+ ?awaitMatch(1, count_connections_in(Config, VHost2), ?AWAIT, ?INTERVAL),
[Conn3] = open_connections(Config, [{0, VHost1}]),
- ?assertEqual(1, count_connections_in(Config, VHost1)),
- ?assertEqual(1, count_connections_in(Config, VHost2)),
+ ?awaitMatch(1, count_connections_in(Config, VHost1), ?AWAIT, ?INTERVAL),
+ ?awaitMatch(1, count_connections_in(Config, VHost2), ?AWAIT, ?INTERVAL),
[Conn4] = open_connections(Config, [{0, VHost1}]),
- ?assertEqual(2, count_connections_in(Config, VHost1)),
+ ?awaitMatch(2, count_connections_in(Config, VHost1), ?AWAIT, ?INTERVAL),
kill_connections([Conn4]),
- ?assertEqual(1, count_connections_in(Config, VHost1)),
+ ?awaitMatch(1, count_connections_in(Config, VHost1), ?AWAIT, ?INTERVAL),
[Conn5] = open_connections(Config, [{0, VHost2}]),
- ?assertEqual(2, count_connections_in(Config, VHost2)),
+ ?awaitMatch(2, count_connections_in(Config, VHost2), ?AWAIT, ?INTERVAL),
[Conn6] = open_connections(Config, [{0, VHost2}]),
- ?assertEqual(3, count_connections_in(Config, VHost2)),
+ ?awaitMatch(3, count_connections_in(Config, VHost2), ?AWAIT, ?INTERVAL),
close_connections([Conn2, Conn3, Conn5, Conn6]),
- ?assertEqual(0, count_connections_in(Config, VHost1)),
- ?assertEqual(0, count_connections_in(Config, VHost2)),
+ ?awaitMatch(0, count_connections_in(Config, VHost1), ?AWAIT, ?INTERVAL),
+ ?awaitMatch(0, count_connections_in(Config, VHost2), ?AWAIT, ?INTERVAL),
rabbit_ct_broker_helpers:delete_vhost(Config, VHost1),
rabbit_ct_broker_helpers:delete_vhost(Config, VHost2).
@@ -231,27 +234,42 @@ single_node_list_in_vhost(Config) ->
?assertEqual(0, length(connections_in(Config, VHost2))),
[Conn1] = open_connections(Config, [{0, VHost1}]),
- [#tracked_connection{vhost = VHost1}] = connections_in(Config, VHost1),
+ ?awaitMatch([#tracked_connection{vhost = VHost1}],
+ connections_in(Config, VHost1),
+ ?AWAIT, ?INTERVAL),
close_connections([Conn1]),
- ?assertEqual(0, length(connections_in(Config, VHost1))),
+ ?awaitMatch(Connections when length(Connections) == 0,
+ connections_in(Config, VHost1),
+ ?AWAIT, ?INTERVAL),
[Conn2] = open_connections(Config, [{0, VHost2}]),
- [#tracked_connection{vhost = VHost2}] = connections_in(Config, VHost2),
+ ?awaitMatch([#tracked_connection{vhost = VHost2}],
+ connections_in(Config, VHost2),
+ ?AWAIT, ?INTERVAL),
[Conn3] = open_connections(Config, [{0, VHost1}]),
- [#tracked_connection{vhost = VHost1}] = connections_in(Config, VHost1),
+ ?awaitMatch([#tracked_connection{vhost = VHost1}],
+ connections_in(Config, VHost1),
+ ?AWAIT, ?INTERVAL),
[Conn4] = open_connections(Config, [{0, VHost1}]),
+ ?awaitMatch([#tracked_connection{vhost = VHost1},
+ #tracked_connection{vhost = VHost1}],
+ connections_in(Config, VHost1),
+ ?AWAIT, ?INTERVAL),
kill_connections([Conn4]),
- [#tracked_connection{vhost = VHost1}] = connections_in(Config, VHost1),
+ ?awaitMatch([#tracked_connection{vhost = VHost1}],
+ connections_in(Config, VHost1),
+ ?AWAIT, ?INTERVAL),
[Conn5, Conn6] = open_connections(Config, [{0, VHost2}, {0, VHost2}]),
- [<<"vhost1">>, <<"vhost2">>] =
- lists:usort(lists:map(fun (#tracked_connection{vhost = V}) -> V end,
- all_connections(Config))),
+ ?awaitMatch([<<"vhost1">>, <<"vhost2">>],
+ lists:usort(lists:map(fun (#tracked_connection{vhost = V}) -> V end,
+ all_connections(Config))),
+ ?AWAIT, ?INTERVAL),
close_connections([Conn2, Conn3, Conn5, Conn6]),
- ?assertEqual(0, length(all_connections(Config))),
+ ?awaitMatch(0, length(all_connections(Config)), ?AWAIT, ?INTERVAL),
rabbit_ct_broker_helpers:delete_vhost(Config, VHost1),
rabbit_ct_broker_helpers:delete_vhost(Config, VHost2).
@@ -260,43 +278,43 @@ most_basic_cluster_connection_count(Config) ->
VHost = <<"/">>,
?assertEqual(0, count_connections_in(Config, VHost)),
[Conn1] = open_connections(Config, [0]),
- ?assertEqual(1, count_connections_in(Config, VHost)),
+ ?awaitMatch(1, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL),
[Conn2] = open_connections(Config, [1]),
- ?assertEqual(2, count_connections_in(Config, VHost)),
+ ?awaitMatch(2, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL),
[Conn3] = open_connections(Config, [1]),
- ?assertEqual(3, count_connections_in(Config, VHost)),
+ ?awaitMatch(3, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL),
close_connections([Conn1, Conn2, Conn3]),
- ?assertEqual(0, count_connections_in(Config, VHost)).
+ ?awaitMatch(0, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL).
cluster_single_vhost_connection_count(Config) ->
VHost = <<"/">>,
?assertEqual(0, count_connections_in(Config, VHost)),
[Conn1] = open_connections(Config, [0]),
- ?assertEqual(1, count_connections_in(Config, VHost)),
+ ?awaitMatch(1, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL),
close_connections([Conn1]),
- ?assertEqual(0, count_connections_in(Config, VHost)),
+ ?awaitMatch(0, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL),
[Conn2] = open_connections(Config, [1]),
- ?assertEqual(1, count_connections_in(Config, VHost)),
+ ?awaitMatch(1, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL),
[Conn3] = open_connections(Config, [0]),
- ?assertEqual(2, count_connections_in(Config, VHost)),
+ ?awaitMatch(2, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL),
[Conn4] = open_connections(Config, [1]),
- ?assertEqual(3, count_connections_in(Config, VHost)),
+ ?awaitMatch(3, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL),
kill_connections([Conn4]),
- ?assertEqual(2, count_connections_in(Config, VHost)),
+ ?awaitMatch(2, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL),
[Conn5] = open_connections(Config, [1]),
- ?assertEqual(3, count_connections_in(Config, VHost)),
+ ?awaitMatch(3, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL),
close_connections([Conn2, Conn3, Conn5]),
- ?assertEqual(0, count_connections_in(Config, VHost)).
+ ?awaitMatch(0, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL).
cluster_multiple_vhosts_connection_count(Config) ->
VHost1 = <<"vhost1">>,
@@ -309,32 +327,32 @@ cluster_multiple_vhosts_connection_count(Config) ->
?assertEqual(0, count_connections_in(Config, VHost2)),
[Conn1] = open_connections(Config, [{0, VHost1}]),
- ?assertEqual(1, count_connections_in(Config, VHost1)),
+ ?awaitMatch(1, count_connections_in(Config, VHost1), ?AWAIT, ?INTERVAL),
close_connections([Conn1]),
- ?assertEqual(0, count_connections_in(Config, VHost1)),
+ ?awaitMatch(0, count_connections_in(Config, VHost1), ?AWAIT, ?INTERVAL),
[Conn2] = open_connections(Config, [{1, VHost2}]),
- ?assertEqual(1, count_connections_in(Config, VHost2)),
+ ?awaitMatch(1, count_connections_in(Config, VHost2), ?AWAIT, ?INTERVAL),
[Conn3] = open_connections(Config, [{1, VHost1}]),
- ?assertEqual(1, count_connections_in(Config, VHost1)),
- ?assertEqual(1, count_connections_in(Config, VHost2)),
+ ?awaitMatch(1, count_connections_in(Config, VHost1), ?AWAIT, ?INTERVAL),
+ ?awaitMatch(1, count_connections_in(Config, VHost2), ?AWAIT, ?INTERVAL),
[Conn4] = open_connections(Config, [{0, VHost1}]),
- ?assertEqual(2, count_connections_in(Config, VHost1)),
+ ?awaitMatch(2, count_connections_in(Config, VHost1), ?AWAIT, ?INTERVAL),
kill_connections([Conn4]),
- ?assertEqual(1, count_connections_in(Config, VHost1)),
+ ?awaitMatch(1, count_connections_in(Config, VHost1), ?AWAIT, ?INTERVAL),
[Conn5] = open_connections(Config, [{1, VHost2}]),
- ?assertEqual(2, count_connections_in(Config, VHost2)),
+ ?awaitMatch(2, count_connections_in(Config, VHost2), ?AWAIT, ?INTERVAL),
[Conn6] = open_connections(Config, [{0, VHost2}]),
- ?assertEqual(3, count_connections_in(Config, VHost2)),
+ ?awaitMatch(3, count_connections_in(Config, VHost2), ?AWAIT, ?INTERVAL),
close_connections([Conn2, Conn3, Conn5, Conn6]),
- ?assertEqual(0, count_connections_in(Config, VHost1)),
- ?assertEqual(0, count_connections_in(Config, VHost2)),
+ ?awaitMatch(0, count_connections_in(Config, VHost1), ?AWAIT, ?INTERVAL),
+ ?awaitMatch(0, count_connections_in(Config, VHost2), ?AWAIT, ?INTERVAL),
rabbit_ct_broker_helpers:delete_vhost(Config, VHost1),
rabbit_ct_broker_helpers:delete_vhost(Config, VHost2).
@@ -344,27 +362,27 @@ cluster_node_restart_connection_count(Config) ->
?assertEqual(0, count_connections_in(Config, VHost)),
[Conn1] = open_connections(Config, [0]),
- ?assertEqual(1, count_connections_in(Config, VHost)),
+ ?awaitMatch(1, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL),
close_connections([Conn1]),
- ?assertEqual(0, count_connections_in(Config, VHost)),
+ ?awaitMatch(0, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL),
[Conn2] = open_connections(Config, [1]),
- ?assertEqual(1, count_connections_in(Config, VHost)),
+ ?awaitMatch(1, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL),
[Conn3] = open_connections(Config, [0]),
- ?assertEqual(2, count_connections_in(Config, VHost)),
+ ?awaitMatch(2, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL),
[Conn4] = open_connections(Config, [1]),
- ?assertEqual(3, count_connections_in(Config, VHost)),
+ ?awaitMatch(3, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL),
[Conn5] = open_connections(Config, [1]),
- ?assertEqual(4, count_connections_in(Config, VHost)),
+ ?awaitMatch(4, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL),
rabbit_ct_broker_helpers:restart_broker(Config, 1),
- ?assertEqual(1, count_connections_in(Config, VHost)),
+ ?awaitMatch(1, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL),
close_connections([Conn2, Conn3, Conn4, Conn5]),
- ?assertEqual(0, count_connections_in(Config, VHost)).
+ ?awaitMatch(0, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL).
cluster_node_list_on_node(Config) ->
[A, B] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
@@ -373,33 +391,36 @@ cluster_node_list_on_node(Config) ->
?assertEqual(0, length(connections_on_node(Config, 0))),
[Conn1] = open_connections(Config, [0]),
- [#tracked_connection{node = A}] = connections_on_node(Config, 0),
+ ?awaitMatch([#tracked_connection{node = A}],
+ connections_on_node(Config, 0),
+ ?AWAIT, ?INTERVAL),
close_connections([Conn1]),
- ?assertEqual(0, length(connections_on_node(Config, 0))),
+ ?awaitMatch(0, length(connections_on_node(Config, 0)), ?AWAIT, ?INTERVAL),
[_Conn2] = open_connections(Config, [1]),
- [#tracked_connection{node = B}] = connections_on_node(Config, 1),
+ ?awaitMatch([#tracked_connection{node = B}],
+ connections_on_node(Config, 1),
+ ?AWAIT, ?INTERVAL),
[Conn3] = open_connections(Config, [0]),
- ?assertEqual(1, length(connections_on_node(Config, 0))),
+ ?awaitMatch(1, length(connections_on_node(Config, 0)), ?AWAIT, ?INTERVAL),
[Conn4] = open_connections(Config, [1]),
- ?assertEqual(2, length(connections_on_node(Config, 1))),
+ ?awaitMatch(2, length(connections_on_node(Config, 1)), ?AWAIT, ?INTERVAL),
kill_connections([Conn4]),
- ?assertEqual(1, length(connections_on_node(Config, 1))),
+ ?awaitMatch(1, length(connections_on_node(Config, 1)), ?AWAIT, ?INTERVAL),
[Conn5] = open_connections(Config, [0]),
- ?assertEqual(2, length(connections_on_node(Config, 0))),
+ ?awaitMatch(2, length(connections_on_node(Config, 0)), ?AWAIT, ?INTERVAL),
rabbit_ct_broker_helpers:stop_broker(Config, 1),
- await_running_node_refresh(Config, 0),
- ?assertEqual(2, length(all_connections(Config))),
+ ?awaitMatch(2, length(all_connections(Config)), 1000),
?assertEqual(0, length(connections_on_node(Config, 0, B))),
close_connections([Conn3, Conn5]),
- ?assertEqual(0, length(all_connections(Config, 0))),
+ ?awaitMatch(0, length(all_connections(Config, 0)), ?AWAIT, ?INTERVAL),
rabbit_ct_broker_helpers:start_broker(Config, 1).
@@ -422,9 +443,10 @@ single_node_single_vhost_limit_with(Config, WatermarkLimit) ->
set_vhost_connection_limit(Config, VHost, WatermarkLimit),
[Conn4, Conn5] = open_connections(Config, [0, 0]),
+ ?awaitMatch(5, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL),
close_connections([Conn1, Conn2, Conn3, Conn4, Conn5]),
- ?assertEqual(0, count_connections_in(Config, VHost)),
+ ?awaitMatch(0, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL),
set_vhost_connection_limit(Config, VHost, -1).
@@ -441,9 +463,10 @@ single_node_single_vhost_zero_limit(Config) ->
set_vhost_connection_limit(Config, VHost, -1),
[Conn1, Conn2] = open_connections(Config, [0, 0]),
+ ?awaitMatch(2, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL),
close_connections([Conn1, Conn2]),
- ?assertEqual(0, count_connections_in(Config, VHost)).
+ ?awaitMatch(0, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL).
single_node_multiple_vhosts_limit(Config) ->
@@ -456,8 +479,8 @@ single_node_multiple_vhosts_limit(Config) ->
set_vhost_connection_limit(Config, VHost1, 2),
set_vhost_connection_limit(Config, VHost2, 2),
- ?assertEqual(0, count_connections_in(Config, VHost1)),
- ?assertEqual(0, count_connections_in(Config, VHost2)),
+ ?awaitMatch(0, count_connections_in(Config, VHost1), ?AWAIT, ?INTERVAL),
+ ?awaitMatch(0, count_connections_in(Config, VHost2), ?AWAIT, ?INTERVAL),
[Conn1, Conn2, Conn3, Conn4] = open_connections(Config, [
{0, VHost1},
@@ -470,6 +493,9 @@ single_node_multiple_vhosts_limit(Config) ->
expect_that_client_connection_is_rejected(Config, 0, VHost2),
[Conn5] = open_connections(Config, [0]),
+ ?awaitMatch(Conns when length(Conns) == 5,
+ connections_on_node(Config, 0),
+ ?AWAIT, ?INTERVAL),
set_vhost_connection_limit(Config, VHost1, 5),
set_vhost_connection_limit(Config, VHost2, -10),
@@ -480,11 +506,14 @@ single_node_multiple_vhosts_limit(Config) ->
{0, VHost1},
{0, VHost2},
{0, VHost2}]),
+ ?awaitMatch(Conns when length(Conns) == 10,
+ connections_on_node(Config, 0),
+ ?AWAIT, ?INTERVAL),
close_connections([Conn1, Conn2, Conn3, Conn4, Conn5,
Conn6, Conn7, Conn8, Conn9, Conn10]),
- ?assertEqual(0, count_connections_in(Config, VHost1)),
- ?assertEqual(0, count_connections_in(Config, VHost2)),
+ ?awaitMatch(0, count_connections_in(Config, VHost1), ?AWAIT, ?INTERVAL),
+ ?awaitMatch(0, count_connections_in(Config, VHost2), ?AWAIT, ?INTERVAL),
set_vhost_connection_limit(Config, VHost1, -1),
set_vhost_connection_limit(Config, VHost2, -1),
@@ -503,8 +532,8 @@ single_node_multiple_vhosts_zero_limit(Config) ->
set_vhost_connection_limit(Config, VHost1, 0),
set_vhost_connection_limit(Config, VHost2, 0),
- ?assertEqual(0, count_connections_in(Config, VHost1)),
- ?assertEqual(0, count_connections_in(Config, VHost2)),
+ ?awaitMatch(0, count_connections_in(Config, VHost1), ?AWAIT, ?INTERVAL),
+ ?awaitMatch(0, count_connections_in(Config, VHost2), ?AWAIT, ?INTERVAL),
%% with limit = 0 no connections are allowed
expect_that_client_connection_is_rejected(Config, 0, VHost1),
@@ -513,10 +542,11 @@ single_node_multiple_vhosts_zero_limit(Config) ->
set_vhost_connection_limit(Config, VHost1, -1),
[Conn1, Conn2] = open_connections(Config, [{0, VHost1}, {0, VHost1}]),
+ ?awaitMatch(2, count_connections_in(Config, VHost1), ?AWAIT, ?INTERVAL),
close_connections([Conn1, Conn2]),
- ?assertEqual(0, count_connections_in(Config, VHost1)),
- ?assertEqual(0, count_connections_in(Config, VHost2)),
+ ?awaitMatch(0, count_connections_in(Config, VHost1), ?AWAIT, ?INTERVAL),
+ ?awaitMatch(0, count_connections_in(Config, VHost2), ?AWAIT, ?INTERVAL),
set_vhost_connection_limit(Config, VHost1, -1),
set_vhost_connection_limit(Config, VHost2, -1).
@@ -530,6 +560,7 @@ cluster_single_vhost_limit(Config) ->
%% here connections are opened to different nodes
[Conn1, Conn2] = open_connections(Config, [{0, VHost}, {1, VHost}]),
+ ?awaitMatch(2, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL),
%% we've crossed the limit
expect_that_client_connection_is_rejected(Config, 0, VHost),
@@ -538,9 +569,10 @@ cluster_single_vhost_limit(Config) ->
set_vhost_connection_limit(Config, VHost, 5),
[Conn3, Conn4] = open_connections(Config, [{0, VHost}, {0, VHost}]),
+ ?awaitMatch(4, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL),
close_connections([Conn1, Conn2, Conn3, Conn4]),
- ?assertEqual(0, count_connections_in(Config, VHost)),
+ ?awaitMatch(0, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL),
set_vhost_connection_limit(Config, VHost, -1).
@@ -552,6 +584,7 @@ cluster_single_vhost_limit2(Config) ->
%% here a limit is reached on one node first
[Conn1, Conn2] = open_connections(Config, [{0, VHost}, {0, VHost}]),
+ ?awaitMatch(2, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL),
%% we've crossed the limit
expect_that_client_connection_is_rejected(Config, 0, VHost),
@@ -564,9 +597,10 @@ cluster_single_vhost_limit2(Config) ->
{1, VHost},
{1, VHost},
{1, VHost}]),
+ ?awaitMatch(5, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL),
close_connections([Conn1, Conn2, Conn3, Conn4, Conn5]),
- ?assertEqual(0, count_connections_in(Config, VHost)),
+ ?awaitMatch(0, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL),
set_vhost_connection_limit(Config, VHost, -1).
@@ -584,9 +618,10 @@ cluster_single_vhost_zero_limit(Config) ->
set_vhost_connection_limit(Config, VHost, -1),
[Conn1, Conn2, Conn3, Conn4] = open_connections(Config, [0, 1, 0, 1]),
+ ?awaitMatch(4, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL),
close_connections([Conn1, Conn2, Conn3, Conn4]),
- ?assertEqual(0, count_connections_in(Config, VHost)),
+ ?awaitMatch(0, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL),
set_vhost_connection_limit(Config, VHost, -1).
@@ -618,10 +653,12 @@ cluster_multiple_vhosts_zero_limit(Config) ->
{0, VHost2},
{1, VHost1},
{1, VHost2}]),
+ ?awaitMatch(2, count_connections_in(Config, VHost1), ?AWAIT, ?INTERVAL),
+ ?awaitMatch(2, count_connections_in(Config, VHost2), ?AWAIT, ?INTERVAL),
close_connections([Conn1, Conn2, Conn3, Conn4]),
- ?assertEqual(0, count_connections_in(Config, VHost1)),
- ?assertEqual(0, count_connections_in(Config, VHost2)),
+ ?awaitMatch(0, count_connections_in(Config, VHost1), ?AWAIT, ?INTERVAL),
+ ?awaitMatch(0, count_connections_in(Config, VHost2), ?AWAIT, ?INTERVAL),
set_vhost_connection_limit(Config, VHost1, -1),
set_vhost_connection_limit(Config, VHost2, -1).
@@ -637,16 +674,16 @@ vhost_limit_after_node_renamed(Config) ->
[Conn1, Conn2, {error, not_allowed}] = open_connections(Config,
[{0, VHost}, {1, VHost}, {0, VHost}]),
- ?assertEqual(2, count_connections_in(Config, VHost)),
+ ?awaitMatch(2, count_connections_in(Config, VHost), ?AWAIT, ?INTERVAL),
close_connections([Conn1, Conn2]),
Config1 = cluster_rename_SUITE:stop_rename_start(Config, A, [A, 'new-A']),
- ?assertEqual(0, count_connections_in(Config1, VHost)),
+ ?awaitMatch(0, count_connections_in(Config1, VHost), ?AWAIT, ?INTERVAL),
- [Conn3, Conn4, {error, not_allowed}] = open_connections(Config,
+ [Conn3, Conn4, {error, not_allowed}] = open_connections(Config1,
[{0, VHost}, {1, VHost}, {0, VHost}]),
- ?assertEqual(2, count_connections_in(Config1, VHost)),
+ ?awaitMatch(2, count_connections_in(Config1, VHost), ?AWAIT, ?INTERVAL),
close_connections([Conn3, Conn4]),
set_vhost_connection_limit(Config1, VHost, -1),
@@ -669,22 +706,19 @@ open_connections(Config, NodesAndVHosts) ->
(Node) ->
rabbit_ct_client_helpers:OpenConnectionFun(Config, Node)
end, NodesAndVHosts),
- timer:sleep(500),
Conns.
close_connections(Conns) ->
lists:foreach(fun
(Conn) ->
rabbit_ct_client_helpers:close_connection(Conn)
- end, Conns),
- timer:sleep(500).
+ end, Conns).
kill_connections(Conns) ->
lists:foreach(fun
(Conn) ->
(catch exit(Conn, please_terminate))
- end, Conns),
- timer:sleep(500).
+ end, Conns).
count_connections_in(Config, VHost) ->
count_connections_in(Config, VHost, 0).
@@ -736,9 +770,6 @@ set_vhost_connection_limit(Config, NodeIndex, VHost, Count) ->
["{\"max-connections\": " ++ integer_to_list(Count) ++ "}"],
[{"-p", binary_to_list(VHost)}]).
-await_running_node_refresh(_Config, _NodeIndex) ->
- timer:sleep(250).
-
expect_that_client_connection_is_rejected(Config) ->
expect_that_client_connection_is_rejected(Config, 0).
diff --git a/deps/rabbit/test/per_vhost_connection_limit_partitions_SUITE.erl b/deps/rabbit/test/per_vhost_connection_limit_partitions_SUITE.erl
index 2748d95592..4b2f3c491c 100644
--- a/deps/rabbit/test/per_vhost_connection_limit_partitions_SUITE.erl
+++ b/deps/rabbit/test/per_vhost_connection_limit_partitions_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(per_vhost_connection_limit_partitions_SUITE).
@@ -10,6 +10,7 @@
-include_lib("common_test/include/ct.hrl").
-include_lib("amqp_client/include/amqp_client.hrl").
-include_lib("eunit/include/eunit.hrl").
+-include_lib("rabbitmq_ct_helpers/include/rabbit_assert.hrl").
-compile(export_all).
@@ -94,7 +95,9 @@ cluster_full_partition_with_autoheal(Config) ->
Conn4 = open_unmanaged_connection(Config, B),
Conn5 = open_unmanaged_connection(Config, C),
Conn6 = open_unmanaged_connection(Config, C),
- wait_for_count_connections_in(Config, VHost, 6, 60000),
+ ?awaitMatch(Connections when length(Connections) == 6,
+ connections_in(Config, VHost),
+ 60000, 3000),
%% B drops off the network, non-reachable by either A or C
rabbit_ct_broker_helpers:block_traffic_between(A, B),
@@ -102,14 +105,18 @@ cluster_full_partition_with_autoheal(Config) ->
timer:sleep(?DELAY),
%% A and C are still connected, so 4 connections are tracked
- wait_for_count_connections_in(Config, VHost, 4, 60000),
+ ?awaitMatch(Connections when length(Connections) == 4,
+ connections_in(Config, VHost),
+ 60000, 3000),
rabbit_ct_broker_helpers:allow_traffic_between(A, B),
rabbit_ct_broker_helpers:allow_traffic_between(B, C),
timer:sleep(?DELAY),
%% during autoheal B's connections were dropped
- wait_for_count_connections_in(Config, VHost, 4, 60000),
+ ?awaitMatch(Connections when length(Connections) == 4,
+ connections_in(Config, VHost),
+ 60000, 3000),
lists:foreach(fun (Conn) ->
(catch rabbit_ct_client_helpers:close_connection(Conn))
@@ -122,19 +129,6 @@ cluster_full_partition_with_autoheal(Config) ->
%% Helpers
%% -------------------------------------------------------------------
-wait_for_count_connections_in(Config, VHost, Expected, Time) when Time =< 0 ->
- ?assertMatch(Connections when length(Connections) == Expected,
- connections_in(Config, VHost));
-wait_for_count_connections_in(Config, VHost, Expected, Time) ->
- case connections_in(Config, VHost) of
- Connections when length(Connections) == Expected ->
- ok;
- _ ->
- Sleep = 3000,
- timer:sleep(Sleep),
- wait_for_count_connections_in(Config, VHost, Expected, Time - Sleep)
- end.
-
count_connections_in(Config, VHost) ->
count_connections_in(Config, VHost, 0).
count_connections_in(Config, VHost, NodeIndex) ->
diff --git a/deps/rabbit/test/per_vhost_msg_store_SUITE.erl b/deps/rabbit/test/per_vhost_msg_store_SUITE.erl
index 8364d69462..ea5e98996d 100644
--- a/deps/rabbit/test/per_vhost_msg_store_SUITE.erl
+++ b/deps/rabbit/test/per_vhost_msg_store_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(per_vhost_msg_store_SUITE).
diff --git a/deps/rabbit/test/per_vhost_queue_limit_SUITE.erl b/deps/rabbit/test/per_vhost_queue_limit_SUITE.erl
index 28a9f98537..da13f40e19 100644
--- a/deps/rabbit/test/per_vhost_queue_limit_SUITE.erl
+++ b/deps/rabbit/test/per_vhost_queue_limit_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(per_vhost_queue_limit_SUITE).
diff --git a/deps/rabbit/test/policy_SUITE.erl b/deps/rabbit/test/policy_SUITE.erl
index ce68332d77..8b05d73de5 100644
--- a/deps/rabbit/test/policy_SUITE.erl
+++ b/deps/rabbit/test/policy_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(policy_SUITE).
diff --git a/deps/rabbit/test/priority_queue_SUITE.erl b/deps/rabbit/test/priority_queue_SUITE.erl
index a0c1732ffd..dba04a967d 100644
--- a/deps/rabbit/test/priority_queue_SUITE.erl
+++ b/deps/rabbit/test/priority_queue_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(priority_queue_SUITE).
@@ -15,40 +15,29 @@
all() ->
[
- {group, cluster_size_2},
- {group, cluster_size_3}
+ {group, single_node}
].
groups() ->
[
- {cluster_size_2, [], [
- ackfold,
- drop,
- {overflow_reject_publish, [], [reject]},
- {overflow_reject_publish_dlx, [], [reject]},
- dropwhile_fetchwhile,
- info_head_message_timestamp,
- matching,
- mirror_queue_sync,
- mirror_queue_sync_priority_above_max,
- mirror_queue_sync_priority_above_max_pending_ack,
- mirror_queue_sync_order,
- purge,
- requeue,
- resume,
- simple_order,
- straight_through,
- invoke,
- gen_server2_stats,
- negative_max_priorities,
- max_priorities_above_hard_limit
- ]},
- {cluster_size_3, [], [
- mirror_queue_auto_ack,
- mirror_fast_reset_policy,
- mirror_reset_policy,
- mirror_stop_pending_followers
- ]}
+ {single_node, [], [
+ ackfold,
+ drop,
+ {overflow_reject_publish, [], [reject]},
+ {overflow_reject_publish_dlx, [], [reject]},
+ dropwhile_fetchwhile,
+ info_head_message_timestamp,
+ matching,
+ purge,
+ requeue,
+ resume,
+ simple_order,
+ straight_through,
+ invoke,
+ gen_server2_stats,
+ negative_max_priorities,
+ max_priorities_above_hard_limit
+ ]}
].
%% -------------------------------------------------------------------
@@ -62,21 +51,12 @@ init_per_suite(Config) ->
end_per_suite(Config) ->
rabbit_ct_helpers:run_teardown_steps(Config).
-init_per_group(cluster_size_2, Config) ->
+init_per_group(single_node, Config) ->
Suffix = rabbit_ct_helpers:testcase_absname(Config, "", "-"),
Config1 = rabbit_ct_helpers:set_config(Config, [
- {rmq_nodes_count, 2},
- {rmq_nodename_suffix, Suffix}
- ]),
- rabbit_ct_helpers:run_steps(Config1,
- rabbit_ct_broker_helpers:setup_steps() ++
- rabbit_ct_client_helpers:setup_steps());
-init_per_group(cluster_size_3, Config) ->
- Suffix = rabbit_ct_helpers:testcase_absname(Config, "", "-"),
- Config1 = rabbit_ct_helpers:set_config(Config, [
- {rmq_nodes_count, 3},
- {rmq_nodename_suffix, Suffix}
- ]),
+ {rmq_nodes_count, 1},
+ {rmq_nodename_suffix, Suffix}
+ ]),
rabbit_ct_helpers:run_steps(Config1,
rabbit_ct_broker_helpers:setup_steps() ++
rabbit_ct_client_helpers:setup_steps());
@@ -430,199 +410,6 @@ ram_duration(_Config) ->
PQ:delete_and_terminate(a_whim, BQS5),
passed.
-mirror_queue_sync(Config) ->
- {Conn, Ch} = rabbit_ct_client_helpers:open_connection_and_channel(Config, 0),
- Q = <<"mirror_queue_sync-queue">>,
- declare(Ch, Q, 3),
- publish(Ch, Q, [1, 2, 3]),
- ok = rabbit_ct_broker_helpers:set_ha_policy(Config, 0,
- <<"^mirror_queue_sync-queue$">>, <<"all">>),
- publish(Ch, Q, [1, 2, 3, 1, 2, 3]),
- %% master now has 9, mirror 6.
- get_partial(Ch, Q, manual_ack, [3, 3, 3, 2, 2, 2]),
- %% So some but not all are unacked at the mirror
- Nodename0 = rabbit_ct_broker_helpers:get_node_config(Config, 0, nodename),
- rabbit_ct_broker_helpers:control_action(sync_queue, Nodename0,
- [binary_to_list(Q)], [{"-p", "/"}]),
- wait_for_sync(Config, Nodename0, rabbit_misc:r(<<"/">>, queue, Q)),
- rabbit_ct_client_helpers:close_connection(Conn),
- passed.
-
-mirror_queue_sync_priority_above_max(Config) ->
- A = rabbit_ct_broker_helpers:get_node_config(Config, 0, nodename),
- %% Tests synchronisation of mirrors when priority is higher than max priority.
- %% This causes an infinity loop (and test timeout) before rabbitmq-server-795
- {Conn, Ch} = rabbit_ct_client_helpers:open_connection_and_channel(Config, A),
- Q = <<"mirror_queue_sync_priority_above_max-queue">>,
- declare(Ch, Q, 3),
- publish(Ch, Q, [5, 5, 5]),
- ok = rabbit_ct_broker_helpers:set_ha_policy(Config, A,
- <<".*">>, <<"all">>),
- rabbit_ct_broker_helpers:control_action(sync_queue, A,
- [binary_to_list(Q)], [{"-p", "/"}]),
- wait_for_sync(Config, A, rabbit_misc:r(<<"/">>, queue, Q)),
- delete(Ch, Q),
- rabbit_ct_client_helpers:close_connection(Conn),
- passed.
-
-mirror_queue_sync_priority_above_max_pending_ack(Config) ->
- [A, B] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
- %% Tests synchronisation of mirrors when priority is higher than max priority
- %% and there are pending acks.
- %% This causes an infinity loop (and test timeout) before rabbitmq-server-795
- {Conn, Ch} = rabbit_ct_client_helpers:open_connection_and_channel(Config, A),
- Q = <<"mirror_queue_sync_priority_above_max_pending_ack-queue">>,
- declare(Ch, Q, 3),
- publish(Ch, Q, [5, 5, 5]),
- %% Consume but 'forget' to acknowledge
- get_without_ack(Ch, Q),
- get_without_ack(Ch, Q),
- ok = rabbit_ct_broker_helpers:set_ha_policy(Config, A,
- <<".*">>, <<"all">>),
- rabbit_ct_broker_helpers:control_action(sync_queue, A,
- [binary_to_list(Q)], [{"-p", "/"}]),
- wait_for_sync(Config, A, rabbit_misc:r(<<"/">>, queue, Q)),
- synced_msgs(Config, A, rabbit_misc:r(<<"/">>, queue, Q), 3),
- synced_msgs(Config, B, rabbit_misc:r(<<"/">>, queue, Q), 3),
- delete(Ch, Q),
- rabbit_ct_client_helpers:close_connection(Conn),
- passed.
-
-mirror_queue_auto_ack(Config) ->
- A = rabbit_ct_broker_helpers:get_node_config(Config, 0, nodename),
- %% Check correct use of AckRequired in the notifications to the mirrors.
- %% If mirrors are notified with AckRequired == true when it is false,
- %% the mirrors will crash with the depth notification as they will not
- %% match the master delta.
- %% Bug rabbitmq-server 687
- {Conn, Ch} = rabbit_ct_client_helpers:open_connection_and_channel(Config, A),
- Q = <<"mirror_queue_auto_ack-queue">>,
- declare(Ch, Q, 3),
- publish(Ch, Q, [1, 2, 3]),
- ok = rabbit_ct_broker_helpers:set_ha_policy(Config, A,
- <<".*">>, <<"all">>),
- get_partial(Ch, Q, no_ack, [3, 2, 1]),
-
- %% Retrieve mirrors
- SPids = slave_pids(Config, A, rabbit_misc:r(<<"/">>, queue, Q)),
- [{SNode1, _SPid1}, {SNode2, SPid2}] = nodes_and_pids(SPids),
-
- %% Restart one of the mirrors so `request_depth` is triggered
- rabbit_ct_broker_helpers:restart_node(Config, SNode1),
-
- %% The alive mirror must have the same pid after its neighbour is restarted
- timer:sleep(3000), %% ugly but we can't know when the `depth` instruction arrives
- Slaves = nodes_and_pids(slave_pids(Config, A, rabbit_misc:r(<<"/">>, queue, Q))),
- SPid2 = proplists:get_value(SNode2, Slaves),
-
- delete(Ch, Q),
- rabbit_ct_client_helpers:close_channel(Ch),
- rabbit_ct_client_helpers:close_connection(Conn),
- passed.
-
-mirror_queue_sync_order(Config) ->
- A = rabbit_ct_broker_helpers:get_node_config(Config, 0, nodename),
- B = rabbit_ct_broker_helpers:get_node_config(Config, 1, nodename),
- {Conn, Ch} = rabbit_ct_client_helpers:open_connection_and_channel(Config, A),
- {Conn2, Ch2} = rabbit_ct_client_helpers:open_connection_and_channel(Config, B),
- Q = <<"mirror_queue_sync_order-queue">>,
- declare(Ch, Q, 3),
- publish_payload(Ch, Q, [{1, <<"msg1">>}, {2, <<"msg2">>},
- {2, <<"msg3">>}, {2, <<"msg4">>},
- {3, <<"msg5">>}]),
- rabbit_ct_client_helpers:close_channel(Ch),
-
- %% Add and sync mirror
- ok = rabbit_ct_broker_helpers:set_ha_policy(
- Config, A, <<"^mirror_queue_sync_order-queue$">>, <<"all">>),
- rabbit_ct_broker_helpers:control_action(sync_queue, A,
- [binary_to_list(Q)], [{"-p", "/"}]),
- wait_for_sync(Config, A, rabbit_misc:r(<<"/">>, queue, Q)),
-
- %% Stop the master
- rabbit_ct_broker_helpers:stop_node(Config, A),
-
- get_payload(Ch2, Q, do_ack, [<<"msg5">>, <<"msg2">>, <<"msg3">>,
- <<"msg4">>, <<"msg1">>]),
-
- delete(Ch2, Q),
- rabbit_ct_broker_helpers:start_node(Config, A),
- rabbit_ct_client_helpers:close_connection(Conn),
- rabbit_ct_client_helpers:close_connection(Conn2),
- passed.
-
-mirror_reset_policy(Config) ->
- %% Gives time to the master to go through all stages.
- %% Might eventually trigger some race conditions from #802,
- %% although for that I would expect a longer run and higher
- %% number of messages in the system.
- mirror_reset_policy(Config, 5000).
-
-mirror_fast_reset_policy(Config) ->
- %% This test seems to trigger the bug tested in invoke/1, but it
- %% cannot guarantee it will always happen. Thus, both tests
- %% should stay in the test suite.
- mirror_reset_policy(Config, 5).
-
-
-mirror_reset_policy(Config, Wait) ->
- A = rabbit_ct_broker_helpers:get_node_config(Config, 0, nodename),
- {Conn, Ch} = rabbit_ct_client_helpers:open_connection_and_channel(Config, A),
- Q = <<"mirror_reset_policy-queue">>,
- declare(Ch, Q, 5),
- Pid = queue_pid(Config, A, rabbit_misc:r(<<"/">>, queue, Q)),
- publish_many(Ch, Q, 20000),
- [begin
- rabbit_ct_broker_helpers:set_ha_policy(
- Config, A, <<"^mirror_reset_policy-queue$">>, <<"all">>,
- [{<<"ha-sync-mode">>, <<"automatic">>}]),
- timer:sleep(Wait),
- rabbit_ct_broker_helpers:clear_policy(
- Config, A, <<"^mirror_reset_policy-queue$">>),
- timer:sleep(Wait)
- end || _ <- lists:seq(1, 10)],
- timer:sleep(1000),
- ok = rabbit_ct_broker_helpers:set_ha_policy(
- Config, A, <<"^mirror_reset_policy-queue$">>, <<"all">>,
- [{<<"ha-sync-mode">>, <<"automatic">>}]),
- wait_for_sync(Config, A, rabbit_misc:r(<<"/">>, queue, Q), 2),
- %% Verify master has not crashed
- Pid = queue_pid(Config, A, rabbit_misc:r(<<"/">>, queue, Q)),
- delete(Ch, Q),
-
- rabbit_ct_client_helpers:close_connection(Conn),
- passed.
-
-mirror_stop_pending_followers(Config) ->
- A = rabbit_ct_broker_helpers:get_node_config(Config, 0, nodename),
- B = rabbit_ct_broker_helpers:get_node_config(Config, 1, nodename),
- C = rabbit_ct_broker_helpers:get_node_config(Config, 2, nodename),
-
- [ok = rabbit_ct_broker_helpers:rpc(
- Config, Nodename, application, set_env, [rabbit, slave_wait_timeout, 0]) || Nodename <- [A, B, C]],
-
- {Conn, Ch} = rabbit_ct_client_helpers:open_connection_and_channel(Config, A),
- Q = <<"mirror_stop_pending_followers-queue">>,
- declare(Ch, Q, 5),
- publish_many(Ch, Q, 20000),
-
- [begin
- rabbit_ct_broker_helpers:set_ha_policy(
- Config, A, <<"^mirror_stop_pending_followers-queue$">>, <<"all">>,
- [{<<"ha-sync-mode">>, <<"automatic">>}]),
- wait_for_sync(Config, A, rabbit_misc:r(<<"/">>, queue, Q), 2),
- rabbit_ct_broker_helpers:clear_policy(
- Config, A, <<"^mirror_stop_pending_followers-queue$">>)
- end || _ <- lists:seq(1, 15)],
-
- delete(Ch, Q),
-
- [ok = rabbit_ct_broker_helpers:rpc(
- Config, Nodename, application, set_env, [rabbit, slave_wait_timeout, 15000]) || Nodename <- [A, B, C]],
-
- rabbit_ct_client_helpers:close_connection(Conn),
- passed.
-
%%----------------------------------------------------------------------------
declare(Ch, Q, Args) when is_list(Args) ->
@@ -723,43 +510,6 @@ priority2bin(Int) -> list_to_binary(integer_to_list(Int)).
%%----------------------------------------------------------------------------
-wait_for_sync(Config, Nodename, Q) ->
- wait_for_sync(Config, Nodename, Q, 1).
-
-wait_for_sync(Config, Nodename, Q, Nodes) ->
- wait_for_sync(Config, Nodename, Q, Nodes, 600).
-
-wait_for_sync(_, _, _, _, 0) ->
- throw(sync_timeout);
-wait_for_sync(Config, Nodename, Q, Nodes, N) ->
- case synced(Config, Nodename, Q, Nodes) of
- true -> ok;
- false -> timer:sleep(100),
- wait_for_sync(Config, Nodename, Q, Nodes, N-1)
- end.
-
-synced(Config, Nodename, Q, Nodes) ->
- Info = rabbit_ct_broker_helpers:rpc(Config, Nodename,
- rabbit_amqqueue, info_all, [<<"/">>, [name, synchronised_slave_pids]]),
- [SSPids] = [Pids || [{name, Q1}, {synchronised_slave_pids, Pids}] <- Info,
- Q =:= Q1],
- length(SSPids) =:= Nodes.
-
-synced_msgs(Config, Nodename, Q, Expected) ->
- Info = rabbit_ct_broker_helpers:rpc(Config, Nodename,
- rabbit_amqqueue, info_all, [<<"/">>, [name, messages]]),
- [M] = [M || [{name, Q1}, {messages, M}] <- Info, Q =:= Q1],
- M =:= Expected.
-
-nodes_and_pids(SPids) ->
- lists:zip([node(S) || S <- SPids], SPids).
-
-slave_pids(Config, Nodename, Q) ->
- Info = rabbit_ct_broker_helpers:rpc(Config, Nodename,
- rabbit_amqqueue, info_all, [<<"/">>, [name, slave_pids]]),
- [SPids] = [SPids || [{name, Q1}, {slave_pids, SPids}] <- Info,
- Q =:= Q1],
- SPids.
queue_pid(Config, Nodename, Q) ->
Info = rabbit_ct_broker_helpers:rpc(
diff --git a/deps/rabbit/test/priority_queue_recovery_SUITE.erl b/deps/rabbit/test/priority_queue_recovery_SUITE.erl
index 9679fb0449..732efbe9bd 100644
--- a/deps/rabbit/test/priority_queue_recovery_SUITE.erl
+++ b/deps/rabbit/test/priority_queue_recovery_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(priority_queue_recovery_SUITE).
diff --git a/deps/rabbit/test/product_info_SUITE.erl b/deps/rabbit/test/product_info_SUITE.erl
index 207f9222d1..7899d62572 100644
--- a/deps/rabbit/test/product_info_SUITE.erl
+++ b/deps/rabbit/test/product_info_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(product_info_SUITE).
diff --git a/deps/rabbit/test/proxy_protocol_SUITE.erl b/deps/rabbit/test/proxy_protocol_SUITE.erl
index 92c29b6063..c90e3efca8 100644
--- a/deps/rabbit/test/proxy_protocol_SUITE.erl
+++ b/deps/rabbit/test/proxy_protocol_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2017-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2017-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(proxy_protocol_SUITE).
@@ -64,7 +64,7 @@ proxy_protocol(Config) ->
{ok, _Packet} = gen_tcp:recv(Socket, 0, ?TIMEOUT),
ConnectionName = rabbit_ct_broker_helpers:rpc(Config, 0,
?MODULE, connection_name, []),
- match = re:run(ConnectionName, <<"^192.168.1.1:80 ">>, [{capture, none}]),
+ match = re:run(ConnectionName, <<"^192.168.1.1:80 -> 192.168.1.2:81$">>, [{capture, none}]),
gen_tcp:close(Socket),
ok.
@@ -79,7 +79,7 @@ proxy_protocol_tls(Config) ->
{ok, _Packet} = ssl:recv(SslSocket, 0, ?TIMEOUT),
ConnectionName = rabbit_ct_broker_helpers:rpc(Config, 0,
?MODULE, connection_name, []),
- match = re:run(ConnectionName, <<"^192.168.1.1:80 ">>, [{capture, none}]),
+ match = re:run(ConnectionName, <<"^192.168.1.1:80 -> 192.168.1.2:81$">>, [{capture, none}]),
gen_tcp:close(Socket),
ok.
diff --git a/deps/rabbit/test/publisher_confirms_parallel_SUITE.erl b/deps/rabbit/test/publisher_confirms_parallel_SUITE.erl
index f79fcae3ce..6d2e515da3 100644
--- a/deps/rabbit/test/publisher_confirms_parallel_SUITE.erl
+++ b/deps/rabbit/test/publisher_confirms_parallel_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(publisher_confirms_parallel_SUITE).
@@ -29,6 +29,7 @@ groups() ->
confirm_nowait,
confirm_ack,
confirm_acks,
+ confirm_after_mandatory_bug,
confirm_mandatory_unroutable,
confirm_unroutable_message],
[
@@ -187,6 +188,17 @@ confirm_acks(Config) ->
publish(Ch, QName, [<<"msg1">>, <<"msg2">>, <<"msg3">>, <<"msg4">>]),
receive_many(lists:seq(1, 4)).
+confirm_after_mandatory_bug(Config) ->
+ {_Conn, Ch} = rabbit_ct_client_helpers:open_connection_and_channel(Config, 0),
+ QName = ?config(queue_name, Config),
+ declare_queue(Ch, Config, QName),
+ ok = amqp_channel:call(Ch, #'basic.publish'{routing_key = QName,
+ mandatory = true}, #amqp_msg{payload = <<"msg1">>}),
+ #'confirm.select_ok'{} = amqp_channel:call(Ch, #'confirm.select'{}),
+ publish(Ch, QName, [<<"msg2">>]),
+ true = amqp_channel:wait_for_confirms(Ch, 1),
+ ok.
+
%% For unroutable messages, the broker will issue a confirm once the exchange verifies a message
%% won't route to any queue (returns an empty list of queues).
%% If the message is also published as mandatory, the basic.return is sent to the client before
diff --git a/deps/rabbit/test/queue_length_limits_SUITE.erl b/deps/rabbit/test/queue_length_limits_SUITE.erl
index b86f502869..fe0436d6c5 100644
--- a/deps/rabbit/test/queue_length_limits_SUITE.erl
+++ b/deps/rabbit/test/queue_length_limits_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(queue_length_limits_SUITE).
diff --git a/deps/rabbit/test/queue_master_location_SUITE.erl b/deps/rabbit/test/queue_master_location_SUITE.erl
index fab3eac3f0..a4801b2e13 100644
--- a/deps/rabbit/test/queue_master_location_SUITE.erl
+++ b/deps/rabbit/test/queue_master_location_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(queue_master_location_SUITE).
@@ -67,9 +67,17 @@ groups() ->
%% Test suite setup/teardown
%% -------------------------------------------------------------------
+merge_app_env(Config) ->
+ rabbit_ct_helpers:merge_app_env(Config,
+ {rabbit, [
+ {collect_statistics, fine},
+ {collect_statistics_interval, 500}
+ ]}).
init_per_suite(Config) ->
rabbit_ct_helpers:log_environment(),
- rabbit_ct_helpers:run_setup_steps(Config).
+ rabbit_ct_helpers:run_setup_steps(Config,
+ [ fun merge_app_env/1 ] ++
+ rabbit_ct_broker_helpers:setup_steps()).
end_per_suite(Config) ->
rabbit_ct_helpers:run_teardown_steps(Config).
diff --git a/deps/rabbit/test/queue_parallel_SUITE.erl b/deps/rabbit/test/queue_parallel_SUITE.erl
index 6f813512f4..f8eb9191aa 100644
--- a/deps/rabbit/test/queue_parallel_SUITE.erl
+++ b/deps/rabbit/test/queue_parallel_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%%
-module(queue_parallel_SUITE).
@@ -50,17 +50,21 @@ groups() ->
delete_immediately_by_resource
],
[
- {parallel_tests, [],
- [
+ {parallel_tests, [], [
{classic_queue, [parallel], AllTests ++ [delete_immediately_by_pid_succeeds,
trigger_message_store_compaction]},
{mirrored_queue, [parallel], AllTests ++ [delete_immediately_by_pid_succeeds,
trigger_message_store_compaction]},
- {quorum_queue, [parallel], AllTests ++ [delete_immediately_by_pid_fails]},
+ {quorum_queue, [parallel], AllTests ++ [
+ delete_immediately_by_pid_fails,
+ extra_bcc_option
+ ]},
{quorum_queue_in_memory_limit, [parallel], AllTests ++ [delete_immediately_by_pid_fails]},
{quorum_queue_in_memory_bytes, [parallel], AllTests ++ [delete_immediately_by_pid_fails]},
{stream_queue, [parallel], [publish,
- subscribe]}
+ subscribe,
+ extra_bcc_option]}
+
]}
].
@@ -667,9 +671,30 @@ delete_immediately_by_resource(Config) ->
rabbit_ct_client_helpers:close_channel(Ch),
ok.
+extra_bcc_option(Config) ->
+ {_, Ch} = rabbit_ct_client_helpers:open_connection_and_channel(Config, 0),
+ QName = <<"a_queue_with_extra_bcc">>,
+ delete_queue(Ch, QName),
+ declare_queue(Ch, Config, QName),
+
+ ExtraBCC = <<"extra.bcc">>,
+ delete_queue(Ch, ExtraBCC),
+ declare_bcc_queue(Ch, ExtraBCC),
+ set_queue_options(Config, QName, #{
+ extra_bcc => ExtraBCC
+ }),
+
+ publish(Ch, QName, [<<"msg1">>, <<"msg2">>, <<"msg3">>]),
+ wait_for_messages(Config, [[QName, <<"3">>, <<"3">>, <<"0">>]]),
+ wait_for_messages(Config, [[ExtraBCC, <<"3">>, <<"3">>, <<"0">>]]),
+
+ delete_queue(Ch, QName),
+ delete_queue(Ch, ExtraBCC).
+
%%%%%%%%%%%%%%%%%%%%%%%%
%% Test helpers
%%%%%%%%%%%%%%%%%%%%%%%%
+
declare_queue(Ch, Config, QName) ->
Args = ?config(queue_args, Config),
Durable = ?config(queue_durable, Config),
@@ -677,6 +702,13 @@ declare_queue(Ch, Config, QName) ->
arguments = Args,
durable = Durable}).
+declare_bcc_queue(Ch, QName) ->
+ #'queue.declare_ok'{} = amqp_channel:call(Ch, #'queue.declare'{queue = QName,
+ durable = true}).
+
+delete_queue(Ch, QName) ->
+ #'queue.delete_ok'{} = amqp_channel:call(Ch, #'queue.delete'{queue = QName}).
+
publish(Ch, QName, Payloads) ->
[amqp_channel:call(Ch, #'basic.publish'{routing_key = QName}, #amqp_msg{payload = Payload})
|| Payload <- Payloads].
@@ -723,3 +755,14 @@ flush(T) ->
after T ->
ok
end.
+
+set_queue_options(Config, QName, Options) ->
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, set_queue_options1, [QName, Options]).
+
+set_queue_options1(QName, Options) ->
+ rabbit_misc:execute_mnesia_transaction(fun() ->
+ rabbit_amqqueue:update(rabbit_misc:r(<<"/">>, queue, QName),
+ fun(Q) ->
+ amqqueue:set_options(Q, Options)
+ end)
+ end).
diff --git a/deps/rabbit/test/queue_type_SUITE.erl b/deps/rabbit/test/queue_type_SUITE.erl
index aed5ad4ccb..8e4c2a39fa 100644
--- a/deps/rabbit/test/queue_type_SUITE.erl
+++ b/deps/rabbit/test/queue_type_SUITE.erl
@@ -122,22 +122,14 @@ smoke(Config) ->
?config(queue_type, Config)}])),
#'confirm.select_ok'{} = amqp_channel:call(Ch, #'confirm.select'{}),
amqp_channel:register_confirm_handler(Ch, self()),
- publish(Ch, QName, <<"msg1">>),
- ct:pal("waiting for confirms from ~s", [QName]),
- ok = receive
- #'basic.ack'{} -> ok;
- #'basic.nack'{} -> fail
- after 2500 ->
- flush(),
- exit(confirm_timeout)
- end,
+ publish_and_confirm(Ch, QName, <<"msg1">>),
DTag = basic_get(Ch, QName),
basic_ack(Ch, DTag),
basic_get_empty(Ch, QName),
%% consume
- publish(Ch, QName, <<"msg2">>),
+ publish_and_confirm(Ch, QName, <<"msg2">>),
ConsumerTag1 = <<"ctag1">>,
ok = subscribe(Ch, QName, ConsumerTag1),
%% receive and ack
@@ -158,7 +150,7 @@ smoke(Config) ->
%% consume and nack
ConsumerTag2 = <<"ctag2">>,
ok = subscribe(Ch, QName, ConsumerTag2),
- publish(Ch, QName, <<"msg3">>),
+ publish_and_confirm(Ch, QName, <<"msg3">>),
receive
{#'basic.deliver'{delivery_tag = T,
redelivered = false},
@@ -170,6 +162,37 @@ smoke(Config) ->
end,
%% get and ack
basic_ack(Ch, basic_get(Ch, QName)),
+ %% global counters
+ publish_and_confirm(Ch, <<"non-existent_queue">>, <<"msg4">>),
+ ConsumerTag3 = <<"ctag3">>,
+ ok = subscribe(Ch, QName, ConsumerTag3),
+ ProtocolCounters = maps:get([{protocol, amqp091}], get_global_counters(Config)),
+ ?assertEqual(#{
+ messages_confirmed_total => 4,
+ messages_received_confirm_total => 4,
+ messages_received_total => 4,
+ messages_routed_total => 3,
+ messages_unroutable_dropped_total => 1,
+ messages_unroutable_returned_total => 0,
+ consumers => 1,
+ publishers => 1
+ }, ProtocolCounters),
+ QueueType = list_to_atom(
+ "rabbit_" ++
+ binary_to_list(?config(queue_type, Config)) ++
+ "_queue"),
+ ProtocolQueueTypeCounters = maps:get([{protocol, amqp091}, {queue_type, QueueType}],
+ get_global_counters(Config)),
+ ?assertEqual(#{
+ messages_acknowledged_total => 3,
+ messages_delivered_consume_auto_ack_total => 0,
+ messages_delivered_consume_manual_ack_total => 0,
+ messages_delivered_get_auto_ack_total => 0,
+ messages_delivered_get_manual_ack_total => 0,
+ messages_delivered_total => 4,
+ messages_get_empty_total => 2,
+ messages_redelivered_total => 1
+ }, ProtocolQueueTypeCounters),
ok.
ack_after_queue_delete(Config) ->
@@ -181,17 +204,7 @@ ack_after_queue_delete(Config) ->
?config(queue_type, Config)}])),
#'confirm.select_ok'{} = amqp_channel:call(Ch, #'confirm.select'{}),
amqp_channel:register_confirm_handler(Ch, self()),
- publish(Ch, QName, <<"msg1">>),
- ct:pal("waiting for confirms from ~s", [QName]),
- ok = receive
- #'basic.ack'{} -> ok;
- #'basic.nack'{} ->
- ct:fail("confirm nack - expected ack")
- after 2500 ->
- flush(),
- exit(confirm_timeout)
- end,
-
+ publish_and_confirm(Ch, QName, <<"msg1">>),
DTag = basic_get(Ch, QName),
ChRef = erlang:monitor(process, Ch),
@@ -229,6 +242,17 @@ publish(Ch, Queue, Msg) ->
#amqp_msg{props = #'P_basic'{delivery_mode = 2},
payload = Msg}).
+publish_and_confirm(Ch, Queue, Msg) ->
+ publish(Ch, Queue, Msg),
+ ct:pal("waiting for ~s message confirmation from ~s", [Msg, Queue]),
+ ok = receive
+ #'basic.ack'{} -> ok;
+ #'basic.nack'{} -> fail
+ after 2500 ->
+ flush(),
+ exit(confirm_timeout)
+ end.
+
basic_get(Ch, Queue) ->
{GetOk, _} = Reply = amqp_channel:call(Ch, #'basic.get'{queue = Queue,
no_ack = false}),
@@ -273,3 +297,6 @@ flush() ->
after 0 ->
ok
end.
+
+get_global_counters(Config) ->
+ rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_global_counters, overview, []).
diff --git a/deps/rabbit/test/quorum_queue_SUITE.erl b/deps/rabbit/test/quorum_queue_SUITE.erl
index 36a6d41a61..ab271a169b 100644
--- a/deps/rabbit/test/quorum_queue_SUITE.erl
+++ b/deps/rabbit/test/quorum_queue_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2018-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2018-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(quorum_queue_SUITE).
@@ -10,17 +10,20 @@
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("amqp_client/include/amqp_client.hrl").
+-include_lib("rabbitmq_ct_helpers/include/rabbit_assert.hrl").
-import(quorum_queue_utils, [wait_for_messages_ready/3,
wait_for_messages_pending_ack/3,
wait_for_messages_total/3,
wait_for_messages/2,
dirty_query/3,
- ra_name/1,
- is_mixed_versions/0]).
+ ra_name/1]).
+-compile([nowarn_export_all, export_all]).
-compile(export_all).
+-define(DEFAULT_AWAIT, 10000).
+
suite() ->
[{timetrap, 5 * 60000}].
@@ -33,14 +36,13 @@ all() ->
groups() ->
[
- {single_node, [], all_tests()},
- {single_node, [], memory_tests()},
- {single_node, [], [node_removal_is_quorum_critical]},
+ {single_node, [], all_tests()
+ ++ memory_tests()
+ ++ [node_removal_is_quorum_critical]},
{unclustered, [], [
- {cluster_size_2, [], [add_member]}
+ {uncluster_size_2, [], [add_member]}
]},
{clustered, [], [
- {cluster_size_2, [], [cleanup_data_dir]},
{cluster_size_2, [], [add_member_not_running,
add_member_classic,
add_member_already_a_member,
@@ -50,9 +52,9 @@ groups() ->
delete_member_queue_not_found,
delete_member,
delete_member_not_a_member,
- node_removal_is_quorum_critical]
- ++ all_tests()},
- {cluster_size_2, [], memory_tests()},
+ node_removal_is_quorum_critical,
+ cleanup_data_dir]
+ ++ memory_tests()},
{cluster_size_3, [], [
declare_during_node_down,
simple_confirm_availability_on_leader_change,
@@ -71,7 +73,8 @@ groups() ->
file_handle_reservations,
file_handle_reservations_above_limit,
node_removal_is_not_quorum_critical
- ]},
+ ]
+ ++ all_tests()},
{cluster_size_5, [], [start_queue,
start_queue_concurrent,
quorum_cluster_size_3,
@@ -92,6 +95,7 @@ all_tests() ->
declare_invalid_properties,
declare_server_named,
start_queue,
+ long_name,
stop_queue,
restart_queue,
restart_all_types,
@@ -133,7 +137,8 @@ all_tests() ->
delete_if_unused,
queue_ttl,
peek,
- consumer_priorities
+ consumer_priorities,
+ cancel_consumer_gh_3729
].
memory_tests() ->
@@ -141,6 +146,7 @@ memory_tests() ->
memory_alarm_rolls_wal
].
+-define(SUPNAME, ra_server_sup_sup).
%% -------------------------------------------------------------------
%% Testsuite setup/teardown.
%% -------------------------------------------------------------------
@@ -148,12 +154,10 @@ memory_tests() ->
init_per_suite(Config0) ->
rabbit_ct_helpers:log_environment(),
Config1 = rabbit_ct_helpers:merge_app_env(
- Config0, {rabbit, [{quorum_tick_interval, 1000}]}),
- Config = rabbit_ct_helpers:merge_app_env(
- Config1, {aten, [{poll_interval, 1000}]}),
- rabbit_ct_helpers:run_setup_steps(
- Config,
- [fun rabbit_ct_broker_helpers:configure_dist_proxy/1]).
+ Config0, {rabbit, [{quorum_tick_interval, 1000}]}),
+ rabbit_ct_helpers:merge_app_env(
+ Config1, {aten, [{poll_interval, 1000}]}),
+ rabbit_ct_helpers:run_setup_steps(Config1, []).
end_per_suite(Config) ->
rabbit_ct_helpers:run_teardown_steps(Config).
@@ -162,21 +166,25 @@ init_per_group(clustered, Config) ->
rabbit_ct_helpers:set_config(Config, [{rmq_nodes_clustered, true}]);
init_per_group(unclustered, Config) ->
rabbit_ct_helpers:set_config(Config, [{rmq_nodes_clustered, false}]);
-init_per_group(clustered_with_partitions, Config) ->
- case is_mixed_versions() of
+init_per_group(clustered_with_partitions, Config0) ->
+ case rabbit_ct_helpers:is_mixed_versions() of
true ->
{skip, "clustered_with_partitions is too unreliable in mixed mode"};
false ->
+ Config = rabbit_ct_helpers:run_setup_steps(
+ Config0,
+ [fun rabbit_ct_broker_helpers:configure_dist_proxy/1]),
rabbit_ct_helpers:set_config(Config, [{net_ticktime, 10}])
end;
init_per_group(Group, Config) ->
ClusterSize = case Group of
single_node -> 1;
+ uncluster_size_2 -> 2;
cluster_size_2 -> 2;
cluster_size_3 -> 3;
cluster_size_5 -> 5
end,
- IsMixed = not (false == os:getenv("SECONDARY_UMBRELLA")),
+ IsMixed = rabbit_ct_helpers:is_mixed_versions(),
case ClusterSize of
2 when IsMixed ->
{skip, "cluster size 2 isn't mixed versions compatible"};
@@ -203,13 +211,8 @@ init_per_group(Group, Config) ->
%% HACK: the larger cluster sizes benefit for a bit
%% more time after clustering before running the
%% tests.
- case Group of
- cluster_size_5 ->
- timer:sleep(5000),
- Config2;
- _ ->
- Config2
- end;
+ timer:sleep(ClusterSize * 1000),
+ Config2;
Skip ->
end_per_group(Group, Config2),
Skip
@@ -258,14 +261,48 @@ init_per_testcase(Testcase, Config) when Testcase == reconnect_consumer_and_publ
end
end;
init_per_testcase(Testcase, Config) ->
- Config1 = rabbit_ct_helpers:testcase_started(Config, Testcase),
- rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_queues, []),
- Q = rabbit_data_coercion:to_binary(Testcase),
- Config2 = rabbit_ct_helpers:set_config(Config1,
- [{queue_name, Q},
- {alt_queue_name, <<Q/binary, "_alt">>}
- ]),
- rabbit_ct_helpers:run_steps(Config2, rabbit_ct_client_helpers:setup_steps()).
+ ClusterSize = ?config(rmq_nodes_count, Config),
+ IsMixed = rabbit_ct_helpers:is_mixed_versions(),
+ case Testcase of
+ node_removal_is_not_quorum_critical when IsMixed ->
+ {skip, "node_removal_is_not_quorum_critical isn't mixed versions compatible"};
+ simple_confirm_availability_on_leader_change when IsMixed ->
+ {skip, "simple_confirm_availability_on_leader_change isn't mixed versions compatible"};
+ confirm_availability_on_leader_change when IsMixed ->
+ {skip, "confirm_availability_on_leader_change isn't mixed versions compatible"};
+ recover_from_single_failure when IsMixed ->
+ %% In a 3.8/3.9 cluster this will pass only if the failure occurs on the 3.8 node
+ {skip, "recover_from_single_failure isn't mixed versions compatible"};
+ shrink_all when IsMixed ->
+ %% In a 3.8/3.9 cluster only the first shrink will work as expected
+ {skip, "skrink_all isn't mixed versions compatible"};
+ delete_immediately_by_resource when IsMixed andalso ClusterSize == 3 ->
+ {skip, "delete_immediately_by_resource isn't mixed versions compatible"};
+ queue_ttl when IsMixed andalso ClusterSize == 3 ->
+ {skip, "queue_ttl isn't mixed versions compatible"};
+ start_queue when IsMixed andalso ClusterSize == 5 ->
+ {skip, "start_queue isn't mixed versions compatible"};
+ start_queue_concurrent when IsMixed andalso ClusterSize == 5 ->
+ {skip, "start_queue_concurrent isn't mixed versions compatible"};
+ _ ->
+ Config1 = rabbit_ct_helpers:testcase_started(Config, Testcase),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_queues, []),
+ Q = rabbit_data_coercion:to_binary(Testcase),
+ Config2 = rabbit_ct_helpers:set_config(Config1,
+ [{queue_name, Q},
+ {alt_queue_name, <<Q/binary, "_alt">>}
+ ]),
+ EnableFF = rabbit_ct_broker_helpers:enable_feature_flag(
+ Config2, quorum_queue),
+ case EnableFF of
+ ok ->
+ Config2;
+ Skip ->
+ end_per_testcase(Testcase, Config2),
+ Skip
+ end,
+ rabbit_ct_helpers:run_steps(Config2, rabbit_ct_client_helpers:setup_steps())
+ end.
merge_app_env(Config) ->
rabbit_ct_helpers:merge_app_env(
@@ -281,7 +318,7 @@ end_per_testcase(Testcase, Config) when Testcase == reconnect_consumer_and_publi
rabbit_ct_broker_helpers:teardown_steps()),
rabbit_ct_helpers:testcase_finished(Config1, Testcase);
end_per_testcase(Testcase, Config) ->
- catch delete_queues(),
+ % catch delete_queues(),
Config1 = rabbit_ct_helpers:run_steps(
Config,
rabbit_ct_client_helpers:teardown_steps()),
@@ -350,19 +387,20 @@ start_queue(Config) ->
Ch = rabbit_ct_client_helpers:open_channel(Config, Server),
LQ = ?config(queue_name, Config),
- %% The stream coordinator is also a ra process, we need to ensure the quorum tests
- %% are not affected by any other ra cluster that could be added in the future
- Children = length(rpc:call(Server, supervisor, which_children, [ra_server_sup_sup])),
+ Children = length(rpc:call(Server, supervisor, which_children, [?SUPNAME])),
?assertEqual({'queue.declare_ok', LQ, 0, 0},
declare(Ch, LQ, [{<<"x-queue-type">>, longstr, <<"quorum">>}])),
%% Check that the application and one ra node are up
- ?assertMatch({ra, _, _}, lists:keyfind(ra, 1,
- rpc:call(Server, application, which_applications, []))),
+ ?awaitMatch({ra, _, _},
+ lists:keyfind(ra, 1,
+ rpc:call(Server, application, which_applications, [])),
+ ?DEFAULT_AWAIT),
Expected = Children + 1,
- ?assertMatch(Expected,
- length(rpc:call(Server, supervisor, which_children, [ra_server_sup_sup]))),
+ ?awaitMatch(Expected,
+ length(rpc:call(Server, supervisor, which_children, [?SUPNAME])),
+ ?DEFAULT_AWAIT),
%% Test declare an existing queue
?assertEqual({'queue.declare_ok', LQ, 0, 0},
@@ -379,7 +417,28 @@ start_queue(Config) ->
?assertMatch({ra, _, _}, lists:keyfind(ra, 1,
rpc:call(Server, application, which_applications, []))),
?assertMatch(Expected,
- length(rpc:call(Server, supervisor, which_children, [ra_server_sup_sup]))).
+ length(rpc:call(Server, supervisor, which_children, [?SUPNAME]))),
+
+ ok.
+
+
+long_name(Config) ->
+ Node = rabbit_ct_broker_helpers:get_node_config(Config, 0, nodename),
+    %% 64+ chars
+ VHost = <<"long_name_vhost____________________________________">>,
+ QName = atom_to_binary(?FUNCTION_NAME, utf8),
+ User = ?config(rmq_username, Config),
+ ok = rabbit_ct_broker_helpers:add_vhost(Config, Node, VHost, User),
+ ok = rabbit_ct_broker_helpers:set_full_permissions(Config, User, VHost),
+ Conn = rabbit_ct_client_helpers:open_unmanaged_connection(Config, Node,
+ VHost),
+ {ok, Ch} = amqp_connection:open_channel(Conn),
+ %% long name
+ LongName = binary:copy(QName, 240 div byte_size(QName)),
+ ?assertEqual({'queue.declare_ok', LongName, 0, 0},
+ declare(Ch, LongName,
+ [{<<"x-queue-type">>, longstr, <<"quorum">>}])),
+ ok.
start_queue_concurrent(Config) ->
Servers = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
@@ -387,20 +446,22 @@ start_queue_concurrent(Config) ->
Self = self(),
[begin
_ = spawn_link(fun () ->
- {_Conn, Ch} = rabbit_ct_client_helpers:open_connection_and_channel(Config, Server),
+ {Conn, Ch} = rabbit_ct_client_helpers:open_connection_and_channel(Config, Server),
%% Test declare an existing queue
?assertEqual({'queue.declare_ok', LQ, 0, 0},
declare(Ch, LQ,
[{<<"x-queue-type">>,
longstr,
<<"quorum">>}])),
+ timer:sleep(500),
+ rabbit_ct_client_helpers:close_connection_and_channel(Conn, Ch),
Self ! {done, Server}
end)
end || Server <- Servers],
[begin
receive {done, Server} -> ok
- after 5000 -> exit({await_done_timeout, Server})
+ after 10000 -> exit({await_done_timeout, Server})
end
end || Server <- Servers],
@@ -408,7 +469,7 @@ start_queue_concurrent(Config) ->
ok.
quorum_cluster_size_3(Config) ->
- case is_mixed_versions() of
+ case rabbit_ct_helpers:is_mixed_versions() of
true ->
{skip, "quorum_cluster_size_3 tests isn't mixed version reliable"};
false ->
@@ -427,19 +488,22 @@ quorum_cluster_size_x(Config, Max, Expected) ->
?assertEqual({'queue.declare_ok', QQ, 0, 0},
declare(Ch, QQ, [{<<"x-queue-type">>, longstr, <<"quorum">>},
{<<"x-quorum-initial-group-size">>, long, Max}])),
- {ok, Members, _} = ra:members({RaName, Server}),
- ?assertEqual(Expected, length(Members)),
- Info = rpc:call(Server, rabbit_quorum_queue, infos,
- [rabbit_misc:r(<<"/">>, queue, QQ)]),
- MembersQ = proplists:get_value(members, Info),
- ?assertEqual(Expected, length(MembersQ)).
+ ?awaitMatch({ok, Members, _} when length(Members) == Expected,
+ ra:members({RaName, Server}),
+ ?DEFAULT_AWAIT),
+ ?awaitMatch(MembersQ when length(MembersQ) == Expected,
+ begin
+ Info = rpc:call(Server, rabbit_quorum_queue, infos,
+ [rabbit_misc:r(<<"/">>, queue, QQ)]),
+ proplists:get_value(members, Info)
+ end, ?DEFAULT_AWAIT).
stop_queue(Config) ->
Server = rabbit_ct_broker_helpers:get_node_config(Config, 0, nodename),
%% The stream coordinator is also a ra process, we need to ensure the quorum tests
%% are not affected by any other ra cluster that could be added in the future
- Children = length(rpc:call(Server, supervisor, which_children, [ra_server_sup_sup])),
+ Children = length(rpc:call(Server, supervisor, which_children, [?SUPNAME])),
Ch = rabbit_ct_client_helpers:open_channel(Config, Server),
LQ = ?config(queue_name, Config),
@@ -447,27 +511,32 @@ stop_queue(Config) ->
declare(Ch, LQ, [{<<"x-queue-type">>, longstr, <<"quorum">>}])),
%% Check that the application and one ra node are up
- ?assertMatch({ra, _, _}, lists:keyfind(ra, 1,
- rpc:call(Server, application, which_applications, []))),
+ ?awaitMatch({ra, _, _},
+ lists:keyfind(ra, 1,
+ rpc:call(Server, application, which_applications, [])),
+ ?DEFAULT_AWAIT),
Expected = Children + 1,
- ?assertMatch(Expected,
- length(rpc:call(Server, supervisor, which_children, [ra_server_sup_sup]))),
+ ?awaitMatch(Expected,
+ length(rpc:call(Server, supervisor, which_children, [?SUPNAME])),
+ ?DEFAULT_AWAIT),
%% Delete the quorum queue
?assertMatch(#'queue.delete_ok'{}, amqp_channel:call(Ch, #'queue.delete'{queue = LQ})),
%% Check that the application and process are down
- wait_until(fun() ->
- Children == length(rpc:call(Server, supervisor, which_children, [ra_server_sup_sup]))
- end),
- ?assertMatch({ra, _, _}, lists:keyfind(ra, 1,
- rpc:call(Server, application, which_applications, []))).
+ ?awaitMatch(Children,
+ length(rpc:call(Server, supervisor, which_children, [?SUPNAME])),
+ 30000),
+ ?awaitMatch({ra, _, _},
+ lists:keyfind(ra, 1,
+ rpc:call(Server, application, which_applications, [])),
+ ?DEFAULT_AWAIT).
restart_queue(Config) ->
Server = rabbit_ct_broker_helpers:get_node_config(Config, 0, nodename),
%% The stream coordinator is also a ra process, we need to ensure the quorum tests
%% are not affected by any other ra cluster that could be added in the future
- Children = length(rpc:call(Server, supervisor, which_children, [ra_server_sup_sup])),
+ Children = length(rpc:call(Server, supervisor, which_children, [?SUPNAME])),
Ch = rabbit_ct_client_helpers:open_channel(Config, Server),
LQ = ?config(queue_name, Config),
@@ -482,7 +551,7 @@ restart_queue(Config) ->
rpc:call(Server, application, which_applications, []))),
Expected = Children + 1,
?assertMatch(Expected,
- length(rpc:call(Server, supervisor, which_children, [ra_server_sup_sup]))),
+ length(rpc:call(Server, supervisor, which_children, [?SUPNAME]))),
Ch2 = rabbit_ct_client_helpers:open_channel(Config, Server),
delete_queues(Ch2, [LQ]).
@@ -504,16 +573,23 @@ idempotent_recover(Config) ->
%% kill the vhost process to trigger recover
rpc:call(Server, erlang, exit, [Pid, kill]),
- timer:sleep(1000),
+
%% validate quorum queue is still functional
- RaName = ra_name(LQ),
- {ok, _, _} = ra:members({RaName, Server}),
+ ?awaitMatch({ok, _, _},
+ begin
+ RaName = ra_name(LQ),
+ ra:members({RaName, Server})
+ end, ?DEFAULT_AWAIT),
%% validate vhosts are running - or rather validate that at least one
%% vhost per cluster is running
- [begin
- #{cluster_state := ServerStatuses} = maps:from_list(I),
- ?assertMatch(#{Server := running}, maps:from_list(ServerStatuses))
- end || I <- rpc:call(Server, rabbit_vhost,info_all, [])],
+ ?awaitMatch(true,
+ begin
+                    Is = rpc:call(Server, rabbit_vhost, info_all, []),
+ lists:all(fun (I) ->
+ #{cluster_state := ServerStatuses} = maps:from_list(I),
+ maps:get(Server, maps:from_list(ServerStatuses)) =:= running
+ end, Is)
+ end, ?DEFAULT_AWAIT),
ok.
vhost_with_quorum_queue_is_deleted(Config) ->
@@ -530,11 +606,11 @@ vhost_with_quorum_queue_is_deleted(Config) ->
?assertEqual({'queue.declare_ok', QName, 0, 0},
declare(Ch, QName, [{<<"x-queue-type">>, longstr, <<"quorum">>}])),
- UId = rpc:call(Node, ra_directory, where_is, [RaName]),
+ UId = rpc:call(Node, ra_directory, where_is, [quorum_queues, RaName]),
?assert(UId =/= undefined),
ok = rabbit_ct_broker_helpers:delete_vhost(Config, VHost),
%% validate quorum queues got deleted
- undefined = rpc:call(Node, ra_directory, where_is, [RaName]),
+ undefined = rpc:call(Node, ra_directory, where_is, [quorum_queues, RaName]),
ok.
restart_all_types(Config) ->
@@ -544,7 +620,7 @@ restart_all_types(Config) ->
%% The stream coordinator is also a ra process, we need to ensure the quorum tests
%% are not affected by any other ra cluster that could be added in the future
- Children = rpc:call(Server, supervisor, which_children, [ra_server_sup_sup]),
+ Children = rpc:call(Server, supervisor, which_children, [?SUPNAME]),
Ch = rabbit_ct_client_helpers:open_channel(Config, Server),
QQ1 = <<"restart_all_types-qq1">>,
@@ -567,18 +643,14 @@ restart_all_types(Config) ->
%% Check that the application and two ra nodes are up. Queues are restored
%% after the broker is marked as "ready", that's why we need to wait for
%% the condition.
- ?assertMatch({ra, _, _}, lists:keyfind(ra, 1,
- rpc:call(Server, application, which_applications, []))),
+ ?awaitMatch({ra, _, _},
+ lists:keyfind(ra, 1,
+ rpc:call(Server, application, which_applications, [])),
+ ?DEFAULT_AWAIT),
Expected = length(Children) + 2,
- ok = rabbit_ct_helpers:await_condition(
- fun() ->
- Expected =:= length(
- rpc:call(
- Server,
- supervisor,
- which_children,
- [ra_server_sup_sup]))
- end, 60000),
+ ?awaitMatch(Expected,
+ length(rpc:call(Server, supervisor, which_children, [?SUPNAME])),
+ 60000),
%% Check the classic queues restarted correctly
Ch2 = rabbit_ct_client_helpers:open_channel(Config, Server),
{#'basic.get_ok'{}, #amqp_msg{}} =
@@ -598,7 +670,7 @@ stop_start_rabbit_app(Config) ->
%% The stream coordinator is also a ra process, we need to ensure the quorum tests
%% are not affected by any other ra cluster that could be added in the future
- Children = length(rpc:call(Server, supervisor, which_children, [ra_server_sup_sup])),
+ Children = length(rpc:call(Server, supervisor, which_children, [?SUPNAME])),
Ch = rabbit_ct_client_helpers:open_channel(Config, Server),
QQ1 = <<"stop_start_rabbit_app-qq">>,
@@ -615,19 +687,32 @@ stop_start_rabbit_app(Config) ->
?assertEqual({'queue.declare_ok', CQ2, 0, 0}, declare(Ch, CQ2, [])),
rabbit_ct_client_helpers:publish(Ch, CQ2, 1),
- rabbit_control_helper:command(stop_app, Server),
+ ?assertEqual(ok, rabbit_control_helper:command(stop_app, Server)),
%% Check the ra application has stopped (thus its supervisor and queues)
- ?assertMatch(false, lists:keyfind(ra, 1,
- rpc:call(Server, application, which_applications, []))),
+ rabbit_ct_helpers:await_condition(
+ fun() ->
+ Apps = rpc:call(Server, application, which_applications, []),
+ %% we expect the app to NOT be running
+ case lists:keyfind(ra, 1, Apps) of
+ false -> true;
+ {ra, _, _} -> false
+ end
+ end),
- rabbit_control_helper:command(start_app, Server),
+ ?assertEqual(ok, rabbit_control_helper:command(start_app, Server)),
%% Check that the application and two ra nodes are up
- ?assertMatch({ra, _, _}, lists:keyfind(ra, 1,
- rpc:call(Server, application, which_applications, []))),
+ rabbit_ct_helpers:await_condition(
+ fun() ->
+ Apps = rpc:call(Server, application, which_applications, []),
+ case lists:keyfind(ra, 1, Apps) of
+ false -> false;
+ {ra, _, _} -> true
+ end
+ end),
Expected = Children + 2,
?assertMatch(Expected,
- length(rpc:call(Server, supervisor, which_children, [ra_server_sup_sup]))),
+ length(rpc:call(Server, supervisor, which_children, [?SUPNAME]))),
%% Check the classic queues restarted correctly
Ch2 = rabbit_ct_client_helpers:open_channel(Config, Server),
{#'basic.get_ok'{}, #amqp_msg{}} =
@@ -637,6 +722,9 @@ stop_start_rabbit_app(Config) ->
delete_queues(Ch2, [QQ1, QQ2, CQ1, CQ2]).
publish_confirm(Ch, QName) ->
+ publish_confirm(Ch, QName, 2500).
+
+publish_confirm(Ch, QName, Timeout) ->
publish(Ch, QName),
amqp_channel:register_confirm_handler(Ch, self()),
ct:pal("waiting for confirms from ~s", [QName]),
@@ -647,7 +735,7 @@ publish_confirm(Ch, QName) ->
#'basic.nack'{} ->
ct:pal("NOT CONFIRMED! ~s", [QName]),
fail
- after 2500 ->
+ after Timeout ->
exit(confirm_timeout)
end.
@@ -704,18 +792,21 @@ shrink_all(Config) ->
declare(Ch, QQ, [{<<"x-queue-type">>, longstr, <<"quorum">>}])),
?assertEqual({'queue.declare_ok', AQ, 0, 0},
declare(Ch, AQ, [{<<"x-queue-type">>, longstr, <<"quorum">>}])),
- timer:sleep(500),
- Result = rpc:call(Server0, rabbit_quorum_queue, shrink_all, [Server2]),
- ?assertMatch([{_, {ok, 2}}, {_, {ok, 2}}], Result),
- Result1 = rpc:call(Server0, rabbit_quorum_queue, shrink_all, [Server1]),
- ?assertMatch([{_, {ok, 1}}, {_, {ok, 1}}], Result1),
- Result2 = rpc:call(Server0, rabbit_quorum_queue, shrink_all, [Server0]),
- ?assertMatch([{_, {error, 1, last_node}},
- {_, {error, 1, last_node}}], Result2),
+
+ ?awaitMatch([{_, {ok, 2}}, {_, {ok, 2}}],
+ rpc:call(Server0, rabbit_quorum_queue, shrink_all, [Server2]),
+ ?DEFAULT_AWAIT),
+ ?awaitMatch([{_, {ok, 1}}, {_, {ok, 1}}],
+ rpc:call(Server0, rabbit_quorum_queue, shrink_all, [Server1]),
+ ?DEFAULT_AWAIT),
+ ?awaitMatch([{_, {error, 1, last_node}},
+ {_, {error, 1, last_node}}],
+ rpc:call(Server0, rabbit_quorum_queue, shrink_all, [Server0]),
+ ?DEFAULT_AWAIT),
ok.
rebalance(Config) ->
- case is_mixed_versions() of
+ case rabbit_ct_helpers:is_mixed_versions() of
true ->
{skip, "rebalance tests isn't mixed version compatible"};
false ->
@@ -738,10 +829,13 @@ rebalance0(Config) ->
declare(Ch, Q1, [{<<"x-queue-type">>, longstr, <<"quorum">>}])),
?assertEqual({'queue.declare_ok', Q2, 0, 0},
declare(Ch, Q2, [{<<"x-queue-type">>, longstr, <<"quorum">>}])),
- timer:sleep(1000),
- {ok, _, {_, Leader1}} = ra:members({ra_name(Q1), Server0}),
- {ok, _, {_, Leader2}} = ra:members({ra_name(Q2), Server0}),
+ {ok, _, {_, Leader1}} = ?awaitMatch({ok, _, {_, _}},
+ ra:members({ra_name(Q1), Server0}),
+ ?DEFAULT_AWAIT),
+ {ok, _, {_, Leader2}} = ?awaitMatch({ok, _, {_, _}},
+ ra:members({ra_name(Q2), Server0}),
+ ?DEFAULT_AWAIT),
rabbit_ct_client_helpers:publish(Ch, Q1, 3),
rabbit_ct_client_helpers:publish(Ch, Q2, 2),
@@ -751,20 +845,22 @@ rebalance0(Config) ->
declare(Ch, Q4, [{<<"x-queue-type">>, longstr, <<"quorum">>}])),
?assertEqual({'queue.declare_ok', Q5, 0, 0},
declare(Ch, Q5, [{<<"x-queue-type">>, longstr, <<"quorum">>}])),
- timer:sleep(500),
- {ok, Summary} = rpc:call(Server0, rabbit_amqqueue, rebalance, [quorum, ".*", ".*"]),
%% Q1 and Q2 should not have moved leader, as these are the queues with more
%% log entries and we allow up to two queues per node (3 nodes, 5 queues)
- ?assertMatch({ok, _, {_, Leader1}}, ra:members({ra_name(Q1), Server0})),
- ?assertMatch({ok, _, {_, Leader2}}, ra:members({ra_name(Q2), Server0})),
+ ?awaitMatch({ok, _, {_, Leader1}}, ra:members({ra_name(Q1), Server0}), ?DEFAULT_AWAIT),
+ ?awaitMatch({ok, _, {_, Leader2}}, ra:members({ra_name(Q2), Server0}), ?DEFAULT_AWAIT),
%% Check that we have at most 2 queues per node
- ?assert(lists:all(fun(NodeData) ->
- lists:all(fun({_, V}) when is_integer(V) -> V =< 2;
- (_) -> true end,
- NodeData)
- end, Summary)),
+ ?awaitMatch(true,
+ begin
+ {ok, Summary} = rpc:call(Server0, rabbit_amqqueue, rebalance, [quorum, ".*", ".*"]),
+ lists:all(fun(NodeData) ->
+ lists:all(fun({_, V}) when is_integer(V) -> V =< 2;
+ (_) -> true end,
+ NodeData)
+ end, Summary)
+ end, ?DEFAULT_AWAIT),
ok.
subscribe_should_fail_when_global_qos_true(Config) ->
@@ -947,7 +1043,7 @@ cleanup_queue_state_on_channel_after_publish(Config) ->
%% The stream coordinator is also a ra process, so we need to ensure the quorum tests
%% are not affected by any other ra cluster that could be added in the future
- Children = length(rpc:call(Server, supervisor, which_children, [ra_server_sup_sup])),
+ Children = length(rpc:call(Server, supervisor, which_children, [?SUPNAME])),
Ch1 = rabbit_ct_client_helpers:open_channel(Config, Server),
Ch2 = rabbit_ct_client_helpers:open_channel(Config, Server),
@@ -970,7 +1066,7 @@ cleanup_queue_state_on_channel_after_publish(Config) ->
amqp_channel:call(Ch1, #'queue.delete'{queue = QQ})),
wait_until(fun() ->
Children == length(rpc:call(Server, supervisor, which_children,
- [ra_server_sup_sup]))
+ [?SUPNAME]))
end),
%% Check that all queue states have been cleaned
wait_for_cleanup(Server, NCh2, 0),
@@ -983,7 +1079,7 @@ cleanup_queue_state_on_channel_after_subscribe(Config) ->
%% The stream coordinator is also a ra process, so we need to ensure the quorum tests
%% are not affected by any other ra cluster that could be added in the future
- Children = length(rpc:call(Server, supervisor, which_children, [ra_server_sup_sup])),
+ Children = length(rpc:call(Server, supervisor, which_children, [?SUPNAME])),
Ch1 = rabbit_ct_client_helpers:open_channel(Config, Server),
Ch2 = rabbit_ct_client_helpers:open_channel(Config, Server),
@@ -1011,7 +1107,7 @@ cleanup_queue_state_on_channel_after_subscribe(Config) ->
wait_for_cleanup(Server, NCh2, 1),
?assertMatch(#'queue.delete_ok'{}, amqp_channel:call(Ch1, #'queue.delete'{queue = QQ})),
wait_until(fun() ->
- Children == length(rpc:call(Server, supervisor, which_children, [ra_server_sup_sup]))
+ Children == length(rpc:call(Server, supervisor, which_children, [?SUPNAME]))
end),
%% Check that all queue states have been cleaned
wait_for_cleanup(Server, NCh1, 0),
@@ -1145,7 +1241,7 @@ leadership_takeover(Config) ->
wait_for_messages_pending_ack(Servers, RaName, 0).
metrics_cleanup_on_leadership_takeover(Config) ->
- case is_mixed_versions() of
+ case rabbit_ct_helpers:is_mixed_versions() of
true ->
{skip, "metrics_cleanup_on_leadership_takeover tests isn't mixed version compatible"};
false ->
@@ -1229,7 +1325,7 @@ metrics_cleanup_on_leader_crash(Config) ->
delete_declare(Config) ->
- case is_mixed_versions() of
+ case rabbit_ct_helpers:is_mixed_versions() of
true ->
{skip, "delete_declare isn't mixed version reliable"};
false ->
@@ -1257,7 +1353,7 @@ delete_declare0(Config) ->
%% the actual data deletions happen after the call has returned as a quorum
%% queue leader waits for all nodes to confirm they replicated the poison
%% pill before terminating itself.
- case is_mixed_versions() of
+ case rabbit_ct_helpers:is_mixed_versions() of
true ->
%% when in mixed versions the QQ may not be able to apply the poison
%% pill for all nodes so need to wait longer for forced delete to
@@ -1349,19 +1445,23 @@ confirm_availability_on_leader_change(Config) ->
declare(DCh, QQ, [{<<"x-queue-type">>, longstr, <<"quorum">>}])),
erlang:process_flag(trap_exit, true),
- Pid = spawn_link(fun () ->
- %% open a channel to another node
- Ch = rabbit_ct_client_helpers:open_channel(Config, Node1),
- #'confirm.select_ok'{} = amqp_channel:call(Ch, #'confirm.select'{}),
- ConfirmLoop = fun Loop() ->
- ok = publish_confirm(Ch, QQ),
- receive {done, P} ->
- P ! done,
- ok
- after 0 -> Loop() end
- end,
- ConfirmLoop()
- end),
+ Publisher = spawn_link(
+ fun () ->
+ %% open a channel to another node
+ Ch = rabbit_ct_client_helpers:open_channel(Config, Node1),
+ #'confirm.select_ok'{} = amqp_channel:call(Ch, #'confirm.select'{}),
+ ConfirmLoop = fun Loop() ->
+ ok = publish_confirm(Ch, QQ, 15000),
+ receive
+ {done, P} ->
+ P ! publisher_done,
+ ok
+ after 0 ->
+ Loop()
+ end
+ end,
+ ConfirmLoop()
+ end),
timer:sleep(500),
%% stop the node hosting the leader
@@ -1369,14 +1469,17 @@ confirm_availability_on_leader_change(Config) ->
%% this should not fail as the channel should detect the new leader and
%% resend to it
timer:sleep(500),
- Pid ! {done, self()},
+ Publisher ! {done, self()},
receive
- done -> ok;
- {'EXIT', Pid, Err} ->
+ publisher_done ->
+ ok;
+ {'EXIT', Publisher, Err} ->
+ ok = rabbit_ct_broker_helpers:start_node(Config, Node2),
exit(Err)
- after 5500 ->
+ after 30000 ->
+ ok = rabbit_ct_broker_helpers:start_node(Config, Node2),
flush(100),
- exit(bah)
+ exit(nothing_received_from_publisher_process)
end,
ok = rabbit_ct_broker_helpers:start_node(Config, Node2),
ok.
@@ -1548,7 +1651,7 @@ node_removal_is_not_quorum_critical(Config) ->
file_handle_reservations(Config) ->
- case is_mixed_versions() of
+ case rabbit_ct_helpers:is_mixed_versions() of
true ->
{skip, "file_handle_reservations tests isn't mixed version compatible"};
false ->
@@ -1614,10 +1717,10 @@ cleanup_data_dir(Config) ->
declare(Ch, QQ, [{<<"x-queue-type">>, longstr, <<"quorum">>}])),
timer:sleep(100),
- UId1 = proplists:get_value(ra_name(QQ), rpc:call(Server1, ra_directory, list_registered, [])),
- UId2 = proplists:get_value(ra_name(QQ), rpc:call(Server2, ra_directory, list_registered, [])),
- DataDir1 = rpc:call(Server1, ra_env, server_data_dir, [UId1]),
- DataDir2 = rpc:call(Server2, ra_env, server_data_dir, [UId2]),
+ UId1 = proplists:get_value(ra_name(QQ), rpc:call(Server1, ra_directory, list_registered, [quorum_queues])),
+ UId2 = proplists:get_value(ra_name(QQ), rpc:call(Server2, ra_directory, list_registered, [quorum_queues])),
+ DataDir1 = rpc:call(Server1, ra_env, server_data_dir, [quorum_queues, UId1]),
+ DataDir2 = rpc:call(Server2, ra_env, server_data_dir, [quorum_queues, UId2]),
?assert(filelib:is_dir(DataDir1)),
?assert(filelib:is_dir(DataDir2)),
@@ -1769,7 +1872,7 @@ delete_immediately_by_resource(Config) ->
%% The stream coordinator is also a ra process, so we need to ensure the quorum tests
%% are not affected by any other ra cluster that could be added in the future
- Children = length(rpc:call(Server, supervisor, which_children, [ra_server_sup_sup])),
+ Children = length(rpc:call(Server, supervisor, which_children, [?SUPNAME])),
QQ = ?config(queue_name, Config),
?assertEqual({'queue.declare_ok', QQ, 0, 0},
@@ -1778,11 +1881,12 @@ delete_immediately_by_resource(Config) ->
?assertEqual({ok, "ok\n"}, rabbit_ct_broker_helpers:rabbitmqctl(Config, 0, Cmd2)),
%% Check that the queue's ra server process is gone but the ra application is still up
- wait_until(fun() ->
- Children == length(rpc:call(Server, supervisor, which_children, [ra_server_sup_sup]))
- end),
- ?assertMatch({ra, _, _}, lists:keyfind(ra, 1,
- rpc:call(Server, application, which_applications, []))).
+ ?awaitMatch(Children,
+ length(rpc:call(Server, supervisor, which_children, [?SUPNAME])),
+ 60000),
+ ?awaitMatch({ra, _, _}, lists:keyfind(ra, 1,
+ rpc:call(Server, application, which_applications, [])),
+ ?DEFAULT_AWAIT).
subscribe_redelivery_count(Config) ->
[Server | _] = Servers = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
@@ -1878,7 +1982,7 @@ subscribe_redelivery_limit(Config) ->
receive
{#'basic.deliver'{redelivered = true}, #amqp_msg{}} ->
throw(unexpected_redelivery)
- after 2000 ->
+ after 5000 ->
ok
end.
@@ -1924,7 +2028,7 @@ subscribe_redelivery_policy(Config) ->
receive
{#'basic.deliver'{redelivered = true}, #amqp_msg{}} ->
throw(unexpected_redelivery)
- after 2000 ->
+ after 5000 ->
ok
end,
ok = rabbit_ct_broker_helpers:clear_policy(Config, 0, <<"delivery-limit">>).
@@ -2082,7 +2186,7 @@ message_bytes_metrics(Config) ->
memory_alarm_rolls_wal(Config) ->
[Server | _] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
- WalDataDir = rpc:call(Server, ra_env, wal_data_dir, []),
+ #{wal_data_dir := WalDataDir} = ra_system:fetch(quorum_queues, Server),
[Wal0] = filelib:wildcard(WalDataDir ++ "/*.wal"),
rabbit_ct_broker_helpers:set_alarm(Config, Server, memory),
rabbit_ct_helpers:await_condition(
@@ -2100,9 +2204,10 @@ memory_alarm_rolls_wal(Config) ->
timer:sleep(1000),
[Wal2] = filelib:wildcard(WalDataDir ++ "/*.wal"),
?assert(Wal1 == Wal2),
- ok = rpc:call(Server, rabbit_alarm, clear_alarm,
- [{{resource_limit, memory, Server}, []}]),
- timer:sleep(1000),
+ lists:foreach(fun (Node) ->
+ ok = rabbit_ct_broker_helpers:clear_alarm(Config, Node, memory)
+ end, rabbit_ct_broker_helpers:get_node_configs(Config, nodename)),
+ ?awaitMatch([], rabbit_ct_broker_helpers:get_alarms(Config, Server), ?DEFAULT_AWAIT),
ok.
queue_length_limit_drop_head(Config) ->
@@ -2243,6 +2348,8 @@ queue_length_in_memory_limit(Config) ->
Msg2 = <<"msg11">>,
Msg3 = <<"msg111">>,
Msg4 = <<"msg1111">>,
+ Msg5 = <<"msg1111">>,
+
publish(Ch, QQ, Msg1),
publish(Ch, QQ, Msg2),
@@ -2261,7 +2368,12 @@ queue_length_in_memory_limit(Config) ->
wait_for_messages(Config, [[QQ, <<"3">>, <<"3">>, <<"0">>]]),
?assertEqual([{2, byte_size(Msg2) + byte_size(Msg4)}],
- dirty_query([Server], RaName, fun rabbit_fifo:query_in_memory_usage/1)).
+ dirty_query([Server], RaName, fun rabbit_fifo:query_in_memory_usage/1)),
+ publish(Ch, QQ, Msg5),
+ wait_for_messages(Config, [[QQ, <<"4">>, <<"4">>, <<"0">>]]),
+ ExpectedMsgs = [Msg2, Msg3, Msg4, Msg5],
+ validate_queue(Ch, QQ, ExpectedMsgs),
+ ok.
queue_length_in_memory_limit_returns(Config) ->
[Server | _] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
@@ -2655,6 +2767,53 @@ consumer_priorities(Config) ->
ok.
+cancel_consumer_gh_3729(Config) ->
+ %% Test the scenario where a message is published to a quorum queue
+ %% but the consumer has been cancelled
+ %% https://github.com/rabbitmq/rabbitmq-server/pull/3746
+ QQ = ?config(queue_name, Config),
+
+ Server = rabbit_ct_broker_helpers:get_node_config(Config, 0, nodename),
+ Ch = rabbit_ct_client_helpers:open_channel(Config, Server),
+
+ ExpectedDeclareRslt0 = #'queue.declare_ok'{queue = QQ, message_count = 0, consumer_count = 0},
+ DeclareRslt0 = declare(Ch, QQ, [{<<"x-queue-type">>, longstr, <<"quorum">>}]),
+ ?assertMatch(ExpectedDeclareRslt0, DeclareRslt0),
+
+ ok = publish(Ch, QQ),
+
+ ok = subscribe(Ch, QQ, false),
+
+ receive
+ {#'basic.deliver'{delivery_tag = DeliveryTag}, _} ->
+ R = #'basic.reject'{delivery_tag = DeliveryTag, requeue = true},
+ ok = amqp_channel:cast(Ch, R)
+ after 5000 ->
+ flush(100),
+ ct:fail("basic.deliver timeout")
+ end,
+
+ ok = cancel(Ch),
+
+ receive
+ #'basic.cancel_ok'{consumer_tag = <<"ctag">>} -> ok
+ after 5000 ->
+ flush(100),
+ ct:fail("basic.cancel_ok timeout")
+ end,
+
+ D = #'queue.declare'{queue = QQ, passive = true, arguments = [{<<"x-queue-type">>, longstr, <<"quorum">>}]},
+
+ F = fun() ->
+ #'queue.declare_ok'{queue = QQ,
+ message_count = MC,
+ consumer_count = CC} = amqp_channel:call(Ch, D),
+ MC =:= 1 andalso CC =:= 0
+ end,
+ wait_until(F),
+
+ ok = rabbit_ct_client_helpers:close_channel(Ch).
+
%%----------------------------------------------------------------------------
declare(Ch, Q) ->
@@ -2713,6 +2872,10 @@ qos(Ch, Prefetch, Global) ->
amqp_channel:call(Ch, #'basic.qos'{global = Global,
prefetch_count = Prefetch})).
+cancel(Ch) ->
+ ?assertMatch(#'basic.cancel_ok'{consumer_tag = <<"ctag">>},
+ amqp_channel:call(Ch, #'basic.cancel'{consumer_tag = <<"ctag">>})).
+
receive_basic_deliver(Redelivered) ->
receive
{#'basic.deliver'{redelivered = R}, _} when R == Redelivered ->
@@ -2790,3 +2953,21 @@ queue_names(Records) ->
#resource{name = Name} = amqqueue:get_name(Q),
Name
end || Q <- Records].
+
+
+validate_queue(Ch, Queue, ExpectedMsgs) ->
+ qos(Ch, length(ExpectedMsgs), false),
+ subscribe(Ch, Queue, false),
+ [begin
+ receive
+ {#'basic.deliver'{delivery_tag = DeliveryTag1,
+ redelivered = false},
+ #amqp_msg{payload = M}} ->
+ amqp_channel:cast(Ch, #'basic.ack'{delivery_tag = DeliveryTag1,
+ multiple = false})
+ after 5000 ->
+ flush(10),
+ exit({validate_queue_timeout, M})
+ end
+ end || M <- ExpectedMsgs],
+ ok.
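
The recurring change across this suite is to replace fixed timer:sleep/1 pauses with polling assertions: rabbit_ct_helpers:await_condition/1 and the ?awaitMatch / ?DEFAULT_AWAIT macros retry until the expected state is observed or a timeout elapses. A minimal sketch of that retry idea, assuming a hypothetical poll_until/3 helper rather than the real rabbit_ct_helpers implementation:

    poll_until(_Fun, _Interval, Remaining) when Remaining =< 0 ->
        exit(await_condition_timeout);
    poll_until(Fun, Interval, Remaining) ->
        case Fun() of
            true -> ok;
            _Other ->
                timer:sleep(Interval),
                poll_until(Fun, Interval, Remaining - Interval)
        end.

    %% e.g. wait until the ra application is no longer listed on Server:
    %% poll_until(fun() ->
    %%                Apps = rpc:call(Server, application, which_applications, []),
    %%                false =:= lists:keyfind(ra, 1, Apps)
    %%            end, 100, 30000).
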
diff --git a/deps/rabbit/test/quorum_queue_utils.erl b/deps/rabbit/test/quorum_queue_utils.erl
index 224abeeeeb..7970f4067c 100644
--- a/deps/rabbit/test/quorum_queue_utils.erl
+++ b/deps/rabbit/test/quorum_queue_utils.erl
@@ -7,11 +7,13 @@
wait_for_messages_pending_ack/3,
wait_for_messages_total/3,
wait_for_messages/2,
+ wait_for_messages/3,
+ wait_for_min_messages/3,
+ wait_for_max_messages/3,
dirty_query/3,
ra_name/1,
fifo_machines_use_same_version/1,
- fifo_machines_use_same_version/2,
- is_mixed_versions/0
+ fifo_machines_use_same_version/2
]).
wait_for_messages_ready(Servers, QName, Ready) ->
@@ -34,7 +36,7 @@ wait_for_messages(Servers, QName, Number, Fun, N) ->
ct:pal("Got messages ~p ~p", [QName, Msgs]),
%% hack to allow the check to succeed in mixed versions clusters if at
%% least one node matches the criteria rather than all nodes
- F = case is_mixed_versions() of
+ F = case rabbit_ct_helpers:is_mixed_versions() of
true ->
any;
false ->
@@ -75,6 +77,58 @@ wait_for_messages(Config, Stats, N) ->
wait_for_messages(Config, Stats, N - 1)
end.
+wait_for_min_messages(Config, Queue, Msgs) ->
+ wait_for_min_messages(Config, Queue, Msgs, 60).
+
+wait_for_min_messages(Config, Queue, Msgs, 0) ->
+ [[_, Got]] = filter_queues([[Queue, Msgs]],
+ rabbit_ct_broker_helpers:rabbitmqctl_list(
+ Config, 0, ["list_queues", "name", "messages"])),
+ ct:pal("Got ~p messages on queue ~p", [Got, Queue]),
+ ?assert(binary_to_integer(Got) >= Msgs);
+wait_for_min_messages(Config, Queue, Msgs, N) ->
+ case filter_queues([[Queue, Msgs]],
+ rabbit_ct_broker_helpers:rabbitmqctl_list(
+ Config, 0, ["list_queues", "name", "messages"])) of
+ [[_, Msgs0]] ->
+ case (binary_to_integer(Msgs0) >= Msgs) of
+ true ->
+ ok;
+ false ->
+ timer:sleep(500),
+ wait_for_min_messages(Config, Queue, Msgs, N - 1)
+ end;
+ _ ->
+ timer:sleep(500),
+ wait_for_min_messages(Config, Queue, Msgs, N - 1)
+ end.
+
+wait_for_max_messages(Config, Queue, Msgs) ->
+ wait_for_max_messages(Config, Queue, Msgs, 60).
+
+wait_for_max_messages(Config, Queue, Msgs, 0) ->
+ [[_, Got]] = filter_queues([[Queue, Msgs]],
+ rabbit_ct_broker_helpers:rabbitmqctl_list(
+ Config, 0, ["list_queues", "name", "messages"])),
+ ct:pal("Got ~p messages on queue ~p", [Got, Queue]),
+ ?assert(binary_to_integer(Got) =< Msgs);
+wait_for_max_messages(Config, Queue, Msgs, N) ->
+ case filter_queues([[Queue, Msgs]],
+ rabbit_ct_broker_helpers:rabbitmqctl_list(
+ Config, 0, ["list_queues", "name", "messages"])) of
+ [[_, Msgs0]] ->
+ case (binary_to_integer(Msgs0) =< Msgs) of
+ true ->
+ ok;
+ false ->
+ timer:sleep(500),
+ wait_for_max_messages(Config, Queue, Msgs, N - 1)
+ end;
+ _ ->
+ timer:sleep(500),
+ wait_for_max_messages(Config, Queue, Msgs, N - 1)
+ end.
+
dirty_query(Servers, QName, Fun) ->
lists:map(
fun(N) ->
@@ -90,9 +144,9 @@ ra_name(Q) ->
binary_to_atom(<<"%2F_", Q/binary>>, utf8).
filter_queues(Expected, Got) ->
- Keys = [K || [K, _, _, _] <- Expected],
- lists:filter(fun([K, _, _, _]) ->
- lists:member(K, Keys)
+ Keys = [hd(E) || E <- Expected],
+ lists:filter(fun(G) ->
+ lists:member(hd(G), Keys)
end, Got).
fifo_machines_use_same_version(Config) ->
@@ -107,6 +161,3 @@ fifo_machines_use_same_version(Config, Nodenames)
rabbit_fifo, version, []))
|| Nodename <- Nodenames],
lists:all(fun(V) -> V =:= MachineAVersion end, OtherMachinesVersions).
-
-is_mixed_versions() ->
- not (false == os:getenv("SECONDARY_UMBRELLA")).
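
The new wait_for_min_messages/3 and wait_for_max_messages/3 helpers above share one shape: poll the output of "rabbitmqctl list_queues name messages" up to 60 times, 500 ms apart, and only turn the check into a hard assertion on the final attempt. A hypothetical call site (the queue name and thresholds are illustrative, not taken from any test in this patch):

    %% wait until queue QQ has accumulated at least 3 messages ...
    quorum_queue_utils:wait_for_min_messages(Config, QQ, 3),
    %% ... and later, until it has drained down to at most 1
    quorum_queue_utils:wait_for_max_messages(Config, QQ, 1),
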
diff --git a/deps/rabbit/test/rabbit_auth_backend_context_propagation_mock.erl b/deps/rabbit/test/rabbit_auth_backend_context_propagation_mock.erl
index e721f5e0dd..9a05bf09cd 100644
--- a/deps/rabbit/test/rabbit_auth_backend_context_propagation_mock.erl
+++ b/deps/rabbit/test/rabbit_auth_backend_context_propagation_mock.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2019-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2019-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% A mock authn/authz that records information during calls. For testing purposes only.
diff --git a/deps/rabbit/test/rabbit_core_metrics_gc_SUITE.erl b/deps/rabbit/test/rabbit_core_metrics_gc_SUITE.erl
index cae5502a0a..93312f597d 100644
--- a/deps/rabbit/test/rabbit_core_metrics_gc_SUITE.erl
+++ b/deps/rabbit/test/rabbit_core_metrics_gc_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_core_metrics_gc_SUITE).
diff --git a/deps/rabbit/test/rabbit_dummy_protocol_connection_info.erl b/deps/rabbit/test/rabbit_dummy_protocol_connection_info.erl
index 92c01d2b0e..67bf3df9bb 100644
--- a/deps/rabbit/test/rabbit_dummy_protocol_connection_info.erl
+++ b/deps/rabbit/test/rabbit_dummy_protocol_connection_info.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2017-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2017-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% Dummy module to test rabbit_direct:extract_extra_auth_props
diff --git a/deps/rabbit/test/rabbit_fifo_SUITE.erl b/deps/rabbit/test/rabbit_fifo_SUITE.erl
index 7b90d91bfa..cf853bca98 100644
--- a/deps/rabbit/test/rabbit_fifo_SUITE.erl
+++ b/deps/rabbit/test/rabbit_fifo_SUITE.erl
@@ -11,7 +11,7 @@
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("rabbit_common/include/rabbit.hrl").
--include("src/rabbit_fifo.hrl").
+-include_lib("rabbit/src/rabbit_fifo.hrl").
%%%===================================================================
%%% Common Test callbacks
@@ -177,7 +177,8 @@ enq_enq_deq_test(_) ->
{State2, _} = enq(2, 2, second, State1),
% get returns a reply value
NumReady = 1,
- {_State3, {dequeue, {0, {_, first}}, NumReady}, [{monitor, _, _}]} =
+ {_State3, {dequeue, {0, {_, first}}, NumReady},
+ [{mod_call, rabbit_quorum_queue, spawn_notify_decorators, _}, {monitor, _, _}]} =
apply(meta(3), rabbit_fifo:make_checkout(Cid, {dequeue, unsettled}, #{}),
State2),
ok.
@@ -187,7 +188,8 @@ enq_enq_deq_deq_settle_test(_) ->
{State1, _} = enq(1, 1, first, test_init(test)),
{State2, _} = enq(2, 2, second, State1),
% get returns a reply value
- {State3, {dequeue, {0, {_, first}}, 1}, [{monitor, _, _}]} =
+ {State3, {dequeue, {0, {_, first}}, 1},
+ [{mod_call, rabbit_quorum_queue, spawn_notify_decorators, _}, {monitor, _, _}]} =
apply(meta(3), rabbit_fifo:make_checkout(Cid, {dequeue, unsettled}, #{}),
State2),
{_State4, {dequeue, empty}} =
@@ -207,7 +209,7 @@ enq_enq_checkout_get_settled_test(_) ->
checkout_get_empty_test(_) ->
Cid = {?FUNCTION_NAME, self()},
State = test_init(test),
- {_State2, {dequeue, empty}} =
+ {_State2, {dequeue, empty}, _} =
apply(meta(1), rabbit_fifo:make_checkout(Cid, {dequeue, unsettled}, #{}), State),
ok.
@@ -235,7 +237,8 @@ release_cursor_test(_) ->
checkout_enq_settle_test(_) ->
Cid = {?FUNCTION_NAME, self()},
- {State1, [{monitor, _, _} | _]} = check(Cid, 1, test_init(test)),
+ {State1, [{mod_call, rabbit_quorum_queue, spawn_notify_decorators, _},
+ {monitor, _, _} | _]} = check(Cid, 1, test_init(test)),
{State2, Effects0} = enq(2, 1, first, State1),
?ASSERT_EFF({send_msg, _,
{delivery, ?FUNCTION_NAME,
@@ -250,7 +253,8 @@ checkout_enq_settle_test(_) ->
out_of_order_enqueue_test(_) ->
Cid = {?FUNCTION_NAME, self()},
- {State1, [{monitor, _, _} | _]} = check_n(Cid, 5, 5, test_init(test)),
+ {State1, [{mod_call, rabbit_quorum_queue, spawn_notify_decorators, _},
+ {monitor, _, _} | _]} = check_n(Cid, 5, 5, test_init(test)),
{State2, Effects2} = enq(2, 1, first, State1),
?ASSERT_EFF({send_msg, _, {delivery, _, [{_, {_, first}}]}, _}, Effects2),
% assert monitor was set up
@@ -280,7 +284,8 @@ out_of_order_first_enqueue_test(_) ->
duplicate_enqueue_test(_) ->
Cid = {<<"duplicate_enqueue_test">>, self()},
- {State1, [{monitor, _, _} | _]} = check_n(Cid, 5, 5, test_init(test)),
+ {State1, [{mod_call, rabbit_quorum_queue, spawn_notify_decorators, _},
+ {monitor, _, _} | _]} = check_n(Cid, 5, 5, test_init(test)),
{State2, Effects2} = enq(2, 1, first, State1),
?ASSERT_EFF({send_msg, _, {delivery, _, [{_, {_, first}}]}, _}, Effects2),
{_State3, Effects3} = enq(3, 1, first, State2),
@@ -331,7 +336,8 @@ return_non_existent_test(_) ->
return_checked_out_test(_) ->
Cid = {<<"cid">>, self()},
{State0, [_, _]} = enq(1, 1, first, test_init(test)),
- {State1, [_Monitor,
+ {State1, [{mod_call, rabbit_quorum_queue, spawn_notify_decorators, _},
+ _Monitor,
{send_msg, _, {delivery, _, [{MsgId, _}]}, _},
{aux, active} | _ ]} = check_auto(Cid, 2, State0),
% returning immediately checks out the same message again
@@ -348,7 +354,8 @@ return_checked_out_limit_test(_) ->
release_cursor_interval => 0,
delivery_limit => 1}),
{State0, [_, _]} = enq(1, 1, first, Init),
- {State1, [_Monitor,
+ {State1, [{mod_call, rabbit_quorum_queue, spawn_notify_decorators, _},
+ _Monitor,
{send_msg, _, {delivery, _, [{MsgId, _}]}, _},
{aux, active} | _ ]} = check_auto(Cid, 2, State0),
% returning immediately checks out the same message again
@@ -366,7 +373,8 @@ return_auto_checked_out_test(_) ->
{State0, [_]} = enq(2, 2, second, State00),
% it is first active then inactive as the consumer took one message but
% cannot take any more
- {State1, [_Monitor,
+ {State1, [{mod_call, rabbit_quorum_queue, spawn_notify_decorators, _},
+ _Monitor,
{send_msg, _, {delivery, _, [{MsgId, _}]}, _},
{aux, active},
{aux, inactive}
@@ -378,6 +386,18 @@ return_auto_checked_out_test(_) ->
Effects),
ok.
+cancelled_checkout_empty_queue_test(_) ->
+ Cid = {<<"cid">>, self()},
+ {State1, _} = check_auto(Cid, 2, test_init(test)),
+ % cancelled checkout should clear out service_queue also, else we'd get a
+ % build up of these
+ {State2, _, Effects} = apply(meta(3), rabbit_fifo:make_checkout(Cid, cancel, #{}), State1),
+ ?assertEqual(0, map_size(State2#rabbit_fifo.consumers)),
+ ?assertEqual(0, priority_queue:len(State2#rabbit_fifo.service_queue)),
+ ct:pal("Effs: ~p", [Effects]),
+ ?ASSERT_EFF({release_cursor, _, _}, Effects),
+ ok.
+
cancelled_checkout_out_test(_) ->
Cid = {<<"cid">>, self()},
{State00, [_, _]} = enq(1, 1, first, test_init(test)),
@@ -387,6 +407,7 @@ cancelled_checkout_out_test(_) ->
{State2, _, _} = apply(meta(3), rabbit_fifo:make_checkout(Cid, cancel, #{}), State1),
?assertEqual(1, lqueue:len(State2#rabbit_fifo.messages)),
?assertEqual(0, lqueue:len(State2#rabbit_fifo.returns)),
+ ?assertEqual(0, priority_queue:len(State2#rabbit_fifo.service_queue)),
{State3, {dequeue, empty}} =
apply(meta(3), rabbit_fifo:make_checkout(Cid, {dequeue, settled}, #{}), State2),
@@ -401,7 +422,7 @@ cancelled_checkout_out_test(_) ->
down_with_noproc_consumer_returns_unsettled_test(_) ->
Cid = {<<"down_consumer_returns_unsettled_test">>, self()},
{State0, [_, _]} = enq(1, 1, second, test_init(test)),
- {State1, [{monitor, process, Pid} | _]} = check(Cid, 2, State0),
+ {State1, [_, {monitor, process, Pid} | _]} = check(Cid, 2, State0),
{State2, _, _} = apply(meta(3), {down, Pid, noproc}, State1),
{_State, Effects} = check(Cid, 4, State2),
?ASSERT_EFF({monitor, process, _}, Effects),
@@ -495,6 +516,27 @@ discarded_message_with_dead_letter_handler_emits_log_effect_test(_) ->
?ASSERT_EFF({log, _RaftIdxs, _}, Effects2),
ok.
+mixed_send_msg_and_log_effects_are_correctly_ordered_test(_) ->
+ Cid = {cid(?FUNCTION_NAME), self()},
+ State00 = init(#{name => test,
+ queue_resource => rabbit_misc:r(<<"/">>, queue, <<"test">>),
+ max_in_memory_length => 1,
+ dead_letter_handler =>
+ {somemod, somefun, [somearg]}}),
+ %% enqueue two messages
+ {State0, _} = enq(1, 1, first, State00),
+ {State1, _} = enq(2, 2, snd, State0),
+
+ {_State2, Effects1} = check_n(Cid, 3, 10, State1),
+ ct:pal("Effects ~w", [Effects1]),
+ %% in this case we expect no immediate send_msg effect, as any in-memory
+ %% messages should be woven into the send_msg emitted later by the log
+ %% effect. hence this is all we can assert on here:
+ %% messages need to reach the consuming channel in the correct order,
+ %% or the channel may think a message has been lost in transit
+ ?ASSERT_NO_EFF({send_msg, _, _, _}, Effects1),
+ ok.
+
tick_test(_) ->
Cid = {<<"c">>, self()},
Cid2 = {<<"c2">>, self()},
@@ -600,7 +642,8 @@ purge_test(_) ->
{State2, {purge, 1}, _} = apply(meta(2), rabbit_fifo:make_purge(), State1),
{State3, _} = enq(3, 2, second, State2),
% get returns a reply value
- {_State4, {dequeue, {0, {_, second}}, _}, [{monitor, _, _}]} =
+ {_State4, {dequeue, {0, {_, second}}, _},
+ [{mod_call, rabbit_quorum_queue, spawn_notify_decorators, _}, {monitor, _, _}]} =
apply(meta(4), rabbit_fifo:make_checkout(Cid, {dequeue, unsettled}, #{}), State3),
ok.
@@ -1036,7 +1079,8 @@ single_active_consumer_state_enter_eol_include_waiting_consumers_test(_) ->
Effects = rabbit_fifo:state_enter(eol, State1),
%% 1 effect for each consumer process (channel process),
%% 1 effect for file handle reservation
- ?assertEqual(4, length(Effects)).
+ %% 1 effect for eol to handle rabbit_fifo_usage entries
+ ?assertEqual(5, length(Effects)).
query_consumers_test(_) ->
State0 = init(#{name => ?FUNCTION_NAME,
@@ -1137,12 +1181,12 @@ active_flag_updated_when_consumer_suspected_unsuspected_test(_) ->
{State2, _, Effects2} = apply(#{index => 3,
system_time => 1500}, {down, Pid1, noconnection}, State1),
- % 1 effect to update the metrics of each consumer (they belong to the same node), 1 more effect to monitor the node
- ?assertEqual(4 + 1, length(Effects2)),
+ % 1 effect to update the metrics of each consumer (they belong to the same node), 1 more effect to monitor the node, 1 more decorators effect
+ ?assertEqual(4 + 1 + 1, length(Effects2)),
{_, _, Effects3} = apply(#{index => 4}, {nodeup, node(self())}, State2),
- % for each consumer: 1 effect to update the metrics, 1 effect to monitor the consumer PID
- ?assertEqual(4 + 4, length(Effects3)).
+ % for each consumer: 1 effect to update the metrics, 1 effect to monitor the consumer PID, 1 more decorators effect
+ ?assertEqual(4 + 4 + 1, length(Effects3)).
active_flag_not_updated_when_consumer_suspected_unsuspected_and_single_active_consumer_is_on_test(_) ->
State0 = init(#{name => ?FUNCTION_NAME,
@@ -1171,11 +1215,11 @@ active_flag_not_updated_when_consumer_suspected_unsuspected_and_single_active_co
{State2, _, Effects2} = apply(meta(2), {down, Pid1, noconnection}, State1),
% one monitor and one consumer status update (deactivated)
- ?assertEqual(3, length(Effects2)),
+ ?assertEqual(4, length(Effects2)),
{_, _, Effects3} = apply(meta(3), {nodeup, node(self())}, State2),
% for each consumer: 1 effect to monitor the consumer PID
- ?assertEqual(5, length(Effects3)).
+ ?assertEqual(6, length(Effects3)).
single_active_cancelled_with_unacked_test(_) ->
State0 = init(#{name => ?FUNCTION_NAME,
@@ -1518,6 +1562,32 @@ machine_version_test(_) ->
?assert(priority_queue:is_queue(S)),
ok.
+machine_version_waiting_consumer_test(_) ->
+ V0 = rabbit_fifo_v0,
+ S0 = V0:init(#{name => ?FUNCTION_NAME,
+ queue_resource => rabbit_misc:r(<<"/">>, queue, <<"test">>)}),
+ Idx = 1,
+ {#rabbit_fifo{}, ok, []} = apply(meta(Idx), {machine_version, 0, 1}, S0),
+
+ Cid = {atom_to_binary(?FUNCTION_NAME, utf8), self()},
+ Entries = [
+ {1, rabbit_fifo_v0:make_enqueue(self(), 1, banana)},
+ {2, rabbit_fifo_v0:make_enqueue(self(), 2, apple)},
+ {3, rabbit_fifo_v0:make_checkout(Cid, {auto, 5, unsettled}, #{})}
+ ],
+ {S1, _Effects} = rabbit_fifo_v0_SUITE:run_log(S0, Entries),
+ Self = self(),
+ {#rabbit_fifo{enqueuers = #{Self := #enqueuer{}},
+ consumers = #{Cid := #consumer{priority = 0}},
+ service_queue = S,
+ messages = Msgs}, ok, []} = apply(meta(Idx),
+ {machine_version, 0, 1}, S1),
+ %% validate message conversion to lqueue
+ ?assertEqual(0, lqueue:len(Msgs)),
+ ?assert(priority_queue:is_queue(S)),
+ ?assertEqual(1, priority_queue:len(S)),
+ ok.
+
queue_ttl_test(_) ->
QName = rabbit_misc:r(<<"/">>, queue, <<"test">>),
Conf = #{name => ?FUNCTION_NAME,
@@ -1556,7 +1626,7 @@ queue_ttl_test(_) ->
= rabbit_fifo:tick(Now + 2500, S2D),
%% dequeue should set last applied
- {S1Deq, {dequeue, empty}} =
+ {S1Deq, {dequeue, empty}, _} =
apply(meta(2, Now),
rabbit_fifo:make_checkout(Cid, {dequeue, unsettled}, #{}),
S0),
@@ -1657,6 +1727,17 @@ checkout_priority_test(_) ->
?ASSERT_EFF({send_msg, P, {delivery, _, _}, _}, P == Pid, E5),
ok.
+empty_dequeue_should_emit_release_cursor_test(_) ->
+ State0 = test_init(?FUNCTION_NAME),
+ Cid = <<"basic.get1">>,
+ {_State, {dequeue, empty}, Effects} =
+ apply(meta(2, 1234),
+ rabbit_fifo:make_checkout(Cid, {dequeue, unsettled}, #{}),
+ State0),
+
+ ?ASSERT_EFF({release_cursor, _, _}, Effects),
+ ok.
+
%% Utility
init(Conf) -> rabbit_fifo:init(Conf).
@@ -1665,3 +1746,6 @@ apply(Meta, Entry, State) -> rabbit_fifo:apply(Meta, Entry, State).
init_aux(Conf) -> rabbit_fifo:init_aux(Conf).
handle_aux(S, T, C, A, L, M) -> rabbit_fifo:handle_aux(S, T, C, A, L, M).
make_checkout(C, S, M) -> rabbit_fifo:make_checkout(C, S, M).
+
+cid(A) when is_atom(A) ->
+ atom_to_binary(A, utf8).
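
Most of the assertion updates in rabbit_fifo_SUITE account for a new effect that checkout and dequeue operations now emit ahead of the usual monitor effect: {mod_call, rabbit_quorum_queue, spawn_notify_decorators, _}. Where a test only cares about the remaining effects, a helper along these lines could strip it before matching (this helper is illustrative and not part of the suite):

    without_decorator_effects(Effects) ->
        lists:filter(fun ({mod_call, rabbit_quorum_queue, spawn_notify_decorators, _}) -> false;
                         (_) -> true
                     end, Effects).

    %% e.g. [{monitor, _, _} | _] = without_decorator_effects(Effects)
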
diff --git a/deps/rabbit/test/rabbit_fifo_int_SUITE.erl b/deps/rabbit/test/rabbit_fifo_int_SUITE.erl
index 37f5436dbf..ec40d599f5 100644
--- a/deps/rabbit/test/rabbit_fifo_int_SUITE.erl
+++ b/deps/rabbit/test/rabbit_fifo_int_SUITE.erl
@@ -9,6 +9,7 @@
-include_lib("rabbit_common/include/rabbit.hrl").
-define(RA_EVENT_TIMEOUT, 5000).
+-define(RA_SYSTEM, quorum_queues).
all() ->
[
@@ -48,6 +49,8 @@ init_per_group(_, Config) ->
ok = application:set_env(ra, data_dir, PrivDir),
application:ensure_all_started(ra),
application:ensure_all_started(lg),
+ SysCfg = ra_system:default_config(),
+ ra_system:start(SysCfg#{name => ?RA_SYSTEM}),
Config.
end_per_group(_, Config) ->
@@ -61,7 +64,7 @@ init_per_testcase(TestCase, Config) ->
meck:expect(rabbit_quorum_queue, file_handle_other_reservation, fun () -> ok end),
meck:expect(rabbit_quorum_queue, cancel_consumer_handler,
fun (_, _) -> ok end),
- ra_server_sup_sup:remove_all(),
+ ra_server_sup_sup:remove_all(?RA_SYSTEM),
ServerName2 = list_to_atom(atom_to_list(TestCase) ++ "2"),
ServerName3 = list_to_atom(atom_to_list(TestCase) ++ "3"),
ClusterName = rabbit_misc:r("/", queue, atom_to_binary(TestCase, utf8)),
@@ -89,7 +92,7 @@ basics(Config) ->
{ok, FState1} = rabbit_fifo_client:checkout(CustomerTag, 1, simple_prefetch,
#{}, FState0),
- ra_log_wal:force_roll_over(ra_log_wal),
+ rabbit_quorum_queue:wal_force_roll_over(node()),
% create segment the segment will trigger a snapshot
timer:sleep(1000),
@@ -112,8 +115,8 @@ basics(Config) ->
% process settle applied notification
FState5b = process_ra_event(FState5, ?RA_EVENT_TIMEOUT),
- _ = ra:stop_server(ServerId),
- _ = ra:restart_server(ServerId),
+ _ = rabbit_quorum_queue:stop_server(ServerId),
+ _ = rabbit_quorum_queue:restart_server(ServerId),
%% wait for leader change to notice server is up again
receive
@@ -137,7 +140,7 @@ basics(Config) ->
after 2000 ->
exit(await_msg_timeout)
end,
- ra:stop_server(ServerId),
+ rabbit_quorum_queue:stop_server(ServerId),
ok.
return(Config) ->
@@ -152,7 +155,7 @@ return(Config) ->
{ok, _, {_, _, MsgId, _, _}, F} = rabbit_fifo_client:dequeue(<<"tag">>, unsettled, F2),
_F2 = rabbit_fifo_client:return(<<"tag">>, [MsgId], F),
- ra:stop_server(ServerId),
+ rabbit_quorum_queue:stop_server(ServerId),
ok.
rabbit_fifo_returns_correlation(Config) ->
@@ -172,7 +175,7 @@ rabbit_fifo_returns_correlation(Config) ->
after 2000 ->
exit(await_msg_timeout)
end,
- ra:stop_server(ServerId),
+ rabbit_quorum_queue:stop_server(ServerId),
ok.
duplicate_delivery(Config) ->
@@ -207,7 +210,7 @@ duplicate_delivery(Config) ->
end
end,
Fun(F2),
- ra:stop_server(ServerId),
+ rabbit_quorum_queue:stop_server(ServerId),
ok.
usage(Config) ->
@@ -223,7 +226,7 @@ usage(Config) ->
ServerId ! tick_timeout,
timer:sleep(50),
Use = rabbit_fifo:usage(element(1, ServerId)),
- ra:stop_server(ServerId),
+ rabbit_quorum_queue:stop_server(ServerId),
?assert(Use > 0.0),
ok.
@@ -245,7 +248,7 @@ resends_lost_command(Config) ->
{ok, _, {_, _, _, _, msg1}, F5} = rabbit_fifo_client:dequeue(<<"tag">>, settled, F4),
{ok, _, {_, _, _, _, msg2}, F6} = rabbit_fifo_client:dequeue(<<"tag">>, settled, F5),
{ok, _, {_, _, _, _, msg3}, _F7} = rabbit_fifo_client:dequeue(<<"tag">>, settled, F6),
- ra:stop_server(ServerId),
+ rabbit_quorum_queue:stop_server(ServerId),
ok.
two_quick_enqueues(Config) ->
@@ -257,7 +260,7 @@ two_quick_enqueues(Config) ->
F1 = element(2, rabbit_fifo_client:enqueue(msg1, F0)),
{ok, F2} = rabbit_fifo_client:enqueue(msg2, F1),
_ = process_ra_events(receive_ra_events(2, 0), F2),
- ra:stop_server(ServerId),
+ rabbit_quorum_queue:stop_server(ServerId),
ok.
detects_lost_delivery(Config) ->
@@ -281,7 +284,7 @@ detects_lost_delivery(Config) ->
% assert three deliveries were received
{[_, _, _], _, _} = process_ra_events(receive_ra_events(2, 2), F3),
- ra:stop_server(ServerId),
+ rabbit_quorum_queue:stop_server(ServerId),
ok.
returns_after_down(Config) ->
@@ -306,7 +309,7 @@ returns_after_down(Config) ->
timer:sleep(1000),
% message should be available for dequeue
{ok, _, {_, _, _, _, msg1}, _} = rabbit_fifo_client:dequeue(<<"tag">>, settled, F2),
- ra:stop_server(ServerId),
+ rabbit_quorum_queue:stop_server(ServerId),
ok.
resends_after_lost_applied(Config) ->
@@ -331,7 +334,7 @@ resends_after_lost_applied(Config) ->
{ok, _, {_, _, _, _, msg1}, F6} = rabbit_fifo_client:dequeue(<<"tag">>, settled, F5),
{ok, _, {_, _, _, _, msg2}, F7} = rabbit_fifo_client:dequeue(<<"tag">>, settled, F6),
{ok, _, {_, _, _, _, msg3}, _F8} = rabbit_fifo_client:dequeue(<<"tag">>, settled, F7),
- ra:stop_server(ServerId),
+ rabbit_quorum_queue:stop_server(ServerId),
ok.
handles_reject_notification(Config) ->
@@ -355,8 +358,8 @@ handles_reject_notification(Config) ->
% the applied notification
_F2 = process_ra_events(receive_ra_events(1, 0), F1),
- ra:stop_server(ServerId1),
- ra:stop_server(ServerId2),
+ rabbit_quorum_queue:stop_server(ServerId1),
+ rabbit_quorum_queue:stop_server(ServerId2),
ok.
discard(Config) ->
@@ -373,7 +376,7 @@ discard(Config) ->
#{queue_resource => discard,
dead_letter_handler =>
{?MODULE, dead_letter_handler, [self()]}}}},
- _ = ra:start_server(Conf),
+ _ = rabbit_quorum_queue:start_server(Conf),
ok = ra:trigger_election(ServerId),
_ = ra:members(ServerId),
@@ -391,7 +394,7 @@ discard(Config) ->
flush(),
exit(dead_letter_timeout)
end,
- ra:stop_server(ServerId),
+ rabbit_quorum_queue:stop_server(ServerId),
ok.
cancel_checkout(Config) ->
@@ -455,7 +458,7 @@ untracked_enqueue(Config) ->
timer:sleep(100),
F0 = rabbit_fifo_client:init(ClusterName, [ServerId]),
{ok, _, {_, _, _, _, msg1}, _F5} = rabbit_fifo_client:dequeue(<<"tag">>, settled, F0),
- ra:stop_server(ServerId),
+ rabbit_quorum_queue:stop_server(ServerId),
ok.
@@ -470,7 +473,7 @@ flow(Config) ->
{slow, F4} = rabbit_fifo_client:enqueue(m4, F3),
{_, _, F5} = process_ra_events(receive_ra_events(4, 0), F4),
{ok, _} = rabbit_fifo_client:enqueue(m5, F5),
- ra:stop_server(ServerId),
+ rabbit_quorum_queue:stop_server(ServerId),
ok.
test_queries(Config) ->
@@ -504,7 +507,7 @@ test_queries(Config) ->
fun rabbit_fifo:query_processes/1),
?assertEqual(2, length(Processes)),
P ! stop,
- ra:stop_server(ServerId),
+ rabbit_quorum_queue:stop_server(ServerId),
ok.
dead_letter_handler(Pid, Msgs) ->
@@ -527,7 +530,7 @@ dequeue(Config) ->
{_, _, F4} = process_ra_events(receive_ra_events(1, 0), F4_),
{ok, _, {_, _, MsgId, _, msg2}, F5} = rabbit_fifo_client:dequeue(Tag, unsettled, F4),
{_F6, _A} = rabbit_fifo_client:settle(Tag, [MsgId], F5),
- ra:stop_server(ServerId),
+ rabbit_quorum_queue:stop_server(ServerId),
ok.
conf(ClusterName, UId, ServerId, _, Peers) ->
@@ -641,7 +644,8 @@ validate_process_down(Name, Num) ->
end.
start_cluster(ClusterName, ServerIds, RaFifoConfig) ->
- {ok, Started, _} = ra:start_cluster(ClusterName#resource.name,
+ {ok, Started, _} = ra:start_cluster(?RA_SYSTEM,
+ ClusterName#resource.name,
{module, rabbit_fifo, RaFifoConfig},
ServerIds),
?assertEqual(length(Started), length(ServerIds)),
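
The thread running through these rabbit_fifo_int_SUITE changes is that Ra servers no longer live in the default Ra system: the suite starts a system named quorum_queues in init_per_group and then addresses it explicitly (ra:start_cluster/4, ra_server_sup_sup:remove_all/1), while starting, stopping and restarting individual servers goes through the rabbit_quorum_queue wrappers. A condensed sketch of that setup, using only calls shown in this diff; ClusterName, RaFifoConfig and ServerIds stand in for values the suite builds elsewhere:

    SysCfg = ra_system:default_config(),
    _ = ra_system:start(SysCfg#{name => quorum_queues}),
    {ok, Started, _} = ra:start_cluster(quorum_queues,
                                        ClusterName,
                                        {module, rabbit_fifo, RaFifoConfig},
                                        ServerIds),
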
diff --git a/deps/rabbit/test/rabbit_fifo_prop_SUITE.erl b/deps/rabbit/test/rabbit_fifo_prop_SUITE.erl
index 859db2178f..85881f68cd 100644
--- a/deps/rabbit/test/rabbit_fifo_prop_SUITE.erl
+++ b/deps/rabbit/test/rabbit_fifo_prop_SUITE.erl
@@ -504,7 +504,7 @@ snapshots(_Config) ->
collect({log_size, length(O)},
snapshots_prop(Config, O)))
end)
- end, [], 2500).
+ end, [], 1000).
single_active(_Config) ->
Size = 2000,
@@ -1046,7 +1046,7 @@ handle_op({input_event, requeue}, #t{effects = Effs} = T) ->
handle_op({input_event, Settlement}, #t{effects = Effs,
down = Down} = T) ->
case queue:out(Effs) of
- {{value, {settle, MsgIds, CId}}, Q} ->
+ {{value, {settle, CId, MsgIds}}, Q} ->
Cmd = case Settlement of
settle -> rabbit_fifo:make_settle(CId, MsgIds);
return -> rabbit_fifo:make_return(CId, MsgIds);
@@ -1097,7 +1097,7 @@ do_apply(Cmd, #t{effects = Effs,
end.
enq_effs([], Q) -> Q;
-enq_effs([{send_msg, P, {delivery, CTag, Msgs}, ra_event} | Rem], Q) ->
+enq_effs([{send_msg, P, {delivery, CTag, Msgs}, _Opts} | Rem], Q) ->
MsgIds = [I || {I, _} <- Msgs],
%% always make settle commands by default
%% they can be changed depending on the input event later
diff --git a/deps/rabbit/test/rabbit_foo_protocol_connection_info.erl b/deps/rabbit/test/rabbit_foo_protocol_connection_info.erl
index 937558aba8..12e5f0e8ac 100644
--- a/deps/rabbit/test/rabbit_foo_protocol_connection_info.erl
+++ b/deps/rabbit/test/rabbit_foo_protocol_connection_info.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2019-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2019-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_foo_protocol_connection_info).
diff --git a/deps/rabbit/test/rabbit_ha_test_consumer.erl b/deps/rabbit/test/rabbit_ha_test_consumer.erl
index 2467e40028..b50ef3e323 100644
--- a/deps/rabbit/test/rabbit_ha_test_consumer.erl
+++ b/deps/rabbit/test/rabbit_ha_test_consumer.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_ha_test_consumer).
@@ -51,15 +51,9 @@ run(TestPid, Channel, Queue, CancelOnFailover, LowestSeen, MsgsToConsume) ->
%% counter.
if
MsgNum + 1 == LowestSeen ->
- error_logger:info_msg("recording ~w left ~w",
- [MsgNum, MsgsToConsume]),
run(TestPid, Channel, Queue,
CancelOnFailover, MsgNum, MsgsToConsume - 1);
MsgNum >= LowestSeen ->
- error_logger:info_msg(
- "consumer ~p on ~p ignoring redelivered msg ~p"
- "lowest seen ~w~n",
- [self(), Channel, MsgNum, LowestSeen]),
true = Redelivered, %% ASSERTION
run(TestPid, Channel, Queue,
CancelOnFailover, LowestSeen, MsgsToConsume);
diff --git a/deps/rabbit/test/rabbit_ha_test_producer.erl b/deps/rabbit/test/rabbit_ha_test_producer.erl
index ed6969debe..2b53f5d0f1 100644
--- a/deps/rabbit/test/rabbit_ha_test_producer.erl
+++ b/deps/rabbit/test/rabbit_ha_test_producer.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_ha_test_producer).
diff --git a/deps/rabbit/test/rabbit_msg_record_SUITE.erl b/deps/rabbit/test/rabbit_msg_record_SUITE.erl
index a82ba7481d..7a8447ed02 100644
--- a/deps/rabbit/test/rabbit_msg_record_SUITE.erl
+++ b/deps/rabbit/test/rabbit_msg_record_SUITE.erl
@@ -5,8 +5,8 @@
-export([
]).
--include("rabbit.hrl").
--include("rabbit_framing.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit_framing.hrl").
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("amqp10_common/include/amqp10_framing.hrl").
@@ -24,6 +24,7 @@ all() ->
all_tests() ->
[
ampq091_roundtrip,
+ unsupported_091_header_is_dropped,
message_id_ulong,
message_id_uuid,
message_id_binary,
@@ -90,6 +91,22 @@ ampq091_roundtrip(_Config) ->
test_amqp091_roundtrip(#'P_basic'{}, Payload),
ok.
+unsupported_091_header_is_dropped(_Config) ->
+ Props = #'P_basic'{
+ headers = [
+ {<<"x-received-from">>, array, []}
+ ]
+ },
+ MsgRecord0 = rabbit_msg_record:from_amqp091(Props, <<"payload">>),
+ MsgRecord = rabbit_msg_record:init(
+ iolist_to_binary(rabbit_msg_record:to_iodata(MsgRecord0))),
+ % meck:unload(),
+ {PropsOut, <<"payload">>} = rabbit_msg_record:to_amqp091(MsgRecord),
+
+ ?assertMatch(#'P_basic'{headers = undefined}, PropsOut),
+
+ ok.
+
message_id_ulong(_Config) ->
Num = 9876789,
ULong = erlang:integer_to_binary(Num),
diff --git a/deps/rabbit/test/rabbit_stream_coordinator_SUITE.erl b/deps/rabbit/test/rabbit_stream_coordinator_SUITE.erl
new file mode 100644
index 0000000000..316d6c262b
--- /dev/null
+++ b/deps/rabbit/test/rabbit_stream_coordinator_SUITE.erl
@@ -0,0 +1,1197 @@
+-module(rabbit_stream_coordinator_SUITE).
+
+-compile(export_all).
+
+-export([
+ ]).
+
+-include_lib("common_test/include/ct.hrl").
+-include_lib("eunit/include/eunit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
+-include_lib("rabbit/src/rabbit_stream_coordinator.hrl").
+
+%%%===================================================================
+%%% Common Test callbacks
+%%%===================================================================
+
+all() ->
+ [
+ {group, tests}
+ ].
+
+
+all_tests() ->
+ [
+ new_stream,
+ leader_down,
+ leader_down_scenario_1,
+ replica_down,
+ add_replica,
+ delete_stream,
+ delete_replica_leader,
+ delete_replica,
+ delete_two_replicas,
+ delete_replica_2,
+ leader_start_failed
+ ].
+
+groups() ->
+ [
+ {tests, [], all_tests()}
+ ].
+
+init_per_suite(Config) ->
+ Config.
+
+end_per_suite(_Config) ->
+ ok.
+
+init_per_group(_Group, Config) ->
+ Config.
+
+end_per_group(_Group, _Config) ->
+ ok.
+
+init_per_testcase(_TestCase, Config) ->
+ Config.
+
+end_per_testcase(_TestCase, _Config) ->
+ ok.
+
+%%%===================================================================
+%%% Test cases
+%%%===================================================================
+
+update_stream(M, C, S) ->
+ rabbit_stream_coordinator:update_stream(M, C, S).
+
+evaluate_stream(M, S, A) ->
+ rabbit_stream_coordinator:evaluate_stream(M, S, A).
+
+new_stream(_) ->
+ [N1, N2, N3] = Nodes = [r@n1, r@n2, r@n3],
+ StreamId = atom_to_list(?FUNCTION_NAME),
+ Name = list_to_binary(StreamId),
+ TypeState = #{name => StreamId,
+ nodes => Nodes},
+ Q = new_q(Name, TypeState),
+ From = {self(), make_ref()},
+ Meta = #{system_time => ?LINE,
+ from => From},
+ S0 = update_stream(Meta, {new_stream, StreamId,
+ #{leader_node => N1,
+ queue => Q}}, undefined),
+ E = 1,
+ %% ready means a new leader has been chosen
+ %% and the epoch incremented
+ ?assertMatch(#stream{nodes = Nodes,
+ members = #{N1 := #member{role = {writer, E},
+ current = undefined,
+ state = {ready, E}},
+ N2 := #member{role = {replica, E},
+ current = undefined,
+ state = {ready, E}},
+ N3 := #member{role = {replica, E},
+ current = undefined,
+ state = {ready, E}}}},
+ S0),
+
+ %% we expect the next action to be starting the writer
+ Idx1 = ?LINE,
+ Meta1 = meta(Idx1),
+ {S1, Actions} = evaluate_stream(Meta1, S0, []),
+ ?assertMatch([{aux, {start_writer, StreamId,
+ #{node := N1, epoch := E, index := _},
+ #{epoch := E,
+ leader_node := N1,
+ replica_nodes := [N2, N3]}}}],
+ Actions),
+ ?assertMatch(#stream{nodes = Nodes,
+ members = #{N1 := #member{role = {writer, E},
+ current = {starting, Idx1},
+ state = {ready, E}}}},
+
+ S1),
+
+ E1LeaderPid = fake_pid(N1),
+ Idx2 = ?LINE,
+ Meta2 = meta(Idx2),
+ S2 = update_stream(Meta2, {member_started, StreamId,
+ #{epoch => E,
+ index => Idx1,
+ pid => E1LeaderPid}}, S1),
+ ?assertMatch(#stream{nodes = Nodes,
+ epoch = E,
+ members = #{N1 :=
+ #member{role = {writer, E},
+ current = undefined,
+ state = {running, E, E1LeaderPid}}}},
+ S2),
+ Idx3 = ?LINE,
+ {S3, Actions2} = evaluate_stream(meta(Idx3), S2, []),
+ ?assertMatch([{aux, {start_replica, StreamId, #{node := N2},
+ #{epoch := E,
+ leader_pid := E1LeaderPid,
+ leader_node := N1}}},
+ {aux, {start_replica, StreamId, #{node := N3},
+ #{epoch := E,
+ leader_pid := E1LeaderPid,
+ leader_node := N1}}},
+ {aux, {update_mnesia, _, _, _}},
+ %% we reply to the caller once the leader has started
+ {reply, From, {wrap_reply, {ok, E1LeaderPid}}}
+ ], lists:sort(Actions2)),
+
+ ?assertMatch(#stream{nodes = Nodes,
+ members = #{N1 := #member{role = {writer, E},
+ current = undefined,
+ state = {running, E, E1LeaderPid}},
+ N2 := #member{role = {replica, E},
+ current = {starting, Idx3},
+ state = {ready, E}},
+ N3 := #member{role = {replica, E},
+ current = {starting, Idx3},
+ state = {ready, E}}}},
+ S3),
+ R1Pid = fake_pid(N2),
+ S4 = update_stream(Meta, {member_started, StreamId,
+ #{epoch => E, index => Idx3, pid => R1Pid}}, S3),
+ {S5, []} = evaluate_stream(meta(?LINE), S4, []),
+ R2Pid = fake_pid(N3),
+ S6 = update_stream(Meta, {member_started, StreamId,
+ #{epoch => E, index => Idx3, pid => R2Pid}}, S5),
+ {S7, []} = evaluate_stream(meta(?LINE), S6, []),
+ %% actions should have start_replica requests
+ ?assertMatch(#stream{nodes = Nodes,
+ members = #{N1 := #member{role = {writer, E},
+ current = undefined,
+ state = {running, E, E1LeaderPid}},
+ N2 := #member{role = {replica, E},
+ current = undefined,
+ state = {running, E, R1Pid}},
+ N3 := #member{role = {replica, E},
+ current = undefined,
+ state = {running, E, R2Pid}}}},
+ S7),
+
+ ok.
+
+leader_down(_) ->
+ E = 1,
+ StreamId = atom_to_list(?FUNCTION_NAME),
+ LeaderPid = fake_pid(n1),
+ [Replica1, Replica2] = ReplicaPids = [fake_pid(n2), fake_pid(n3)],
+ N1 = node(LeaderPid),
+ N2 = node(Replica1),
+ N3 = node(Replica2),
+
+ S0 = started_stream(StreamId, LeaderPid, ReplicaPids),
+ S1 = update_stream(meta(?LINE), {down, LeaderPid, boom}, S0),
+ ?assertMatch(#stream{members = #{N1 := #member{role = {writer, E},
+ current = undefined,
+ target = stopped,
+ state = {down, E}},
+ N2 := #member{role = {replica, E},
+ target = stopped,
+ current = undefined,
+ state = {running, E, Replica1}},
+ N3 := #member{role = {replica, E},
+ target = stopped,
+ current = undefined,
+ state = {running, E, Replica2}}}},
+ S1),
+ Idx2 = ?LINE,
+ {S2, Actions} = evaluate_stream(meta(Idx2), S1, []),
+ %% expect all members to be stopping now
+ %% 'down' events will also arrive for the replicas, as they typically exit
+ %% when the leader does; this is ok
+ ?assertMatch(
+ [{aux, {stop, StreamId,
+ #{node := N1, epoch := E, index := Idx2},
+ #{epoch := E}}},
+ {aux, {stop, StreamId,
+ #{node := N2, epoch := E, index := Idx2},
+ #{epoch := E}}},
+ {aux, {stop, StreamId,
+ #{node := N3, epoch := E, index := Idx2},
+ #{epoch := E}}}], lists:sort(Actions)),
+ ?assertMatch(#stream{members = #{N1 := #member{role = {writer, E},
+ current = {stopping, Idx2},
+ state = {down, E}},
+ N2 := #member{role = {replica, E},
+ current = {stopping, Idx2},
+ state = {running, E, Replica1}},
+ N3 := #member{role = {replica, E},
+ current = {stopping, Idx2},
+ state = {running, E, Replica2}}}},
+ S2),
+
+ %% idempotency check
+ {S2, []} = evaluate_stream(meta(?LINE), S2, []),
+ N2Tail = {E, 101},
+ S3 = update_stream(meta(?LINE), {member_stopped, StreamId,
+ #{node => N2,
+ index => Idx2,
+ epoch => E,
+ tail => N2Tail}}, S2),
+ ?assertMatch(#stream{members = #{N2 := #member{role = {replica, E},
+ current = undefined,
+ state = {stopped, E, N2Tail}}}},
+ S3),
+ {S3, []} = evaluate_stream(meta(?LINE), S3, []),
+ N3Tail = {E, 102},
+ #{index := Idx4} = Meta4 = meta(?LINE),
+ S4 = update_stream(Meta4, {member_stopped, StreamId,
+ #{node => N3,
+ index => Idx2,
+ epoch => E,
+ tail => N3Tail}}, S3),
+ E2 = E + 1,
+ ?assertMatch(#stream{members = #{N1 := #member{role = {replica, E2},
+ current = {stopping, Idx2},
+ state = {down, E}},
+ N2 := #member{role = {replica, E2},
+ current = undefined,
+ state = {ready, E2}},
+ %% N3 has the higher offset so should
+ %% be selected as writer of E2
+ N3 := #member{role = {writer, E2},
+ current = undefined,
+ state = {ready, E2}}}},
+ S4),
+ {S5, Actions4} = evaluate_stream(Meta4, S4, []),
+ %% new leader has been selected so should be started
+ ?assertMatch([{aux, {start_writer, StreamId, #{node := N3},
+ #{leader_node := N3}}}],
+ lists:sort(Actions4)),
+ ?assertMatch(#stream{epoch = E2}, S5),
+
+ E2LeaderPid = fake_pid(n3),
+ #{index := Idx6} = Meta6 = meta(?LINE),
+ S6 = update_stream(Meta6, {member_started, StreamId,
+ #{epoch => E2,
+ index => Idx4,
+ pid => E2LeaderPid}}, S5),
+ ?assertMatch(#stream{members = #{N1 := #member{role = {replica, E2},
+ current = {stopping, Idx2},
+ state = {down, E}},
+ N2 := #member{role = {replica, E2},
+ current = undefined,
+ state = {ready, E2}},
+ %% N3 has the higher offset so should
+ %% be selected as writer of E2
+ N3 := #member{role = {writer, E2},
+ current = undefined,
+ state = {running, E2, E2LeaderPid}}}},
+ S6),
+ {S7, Actions6} = evaluate_stream(Meta6, S6, []),
+ ?assertMatch([
+ {aux, {start_replica, StreamId,
+ #{node := N2},
+ #{leader_pid := E2LeaderPid}}},
+ {aux, {update_mnesia, _, _, _}}
+ ],
+ lists:sort(Actions6)),
+ ?assertMatch(#stream{members = #{N1 := #member{role = {replica, E2},
+ current = {stopping, _},
+ state = {down, E}},
+ N2 := #member{role = {replica, E2},
+ current = {starting, Idx6},
+ state = {ready, E2}},
+ N3 := #member{role = {writer, E2},
+ current = undefined,
+ state = {running, E2, E2LeaderPid}}}},
+ S7),
+ E2ReplicaN2Pid = fake_pid(n2),
+ S8 = update_stream(meta(?LINE), {member_started, StreamId,
+ #{epoch => E2,
+ index => Idx6,
+ pid => E2ReplicaN2Pid}}, S7),
+ ?assertMatch(#stream{members = #{N2 := #member{role = {replica, E2},
+ current = undefined,
+ state = {running, E2, E2ReplicaN2Pid}}}},
+ S8),
+ %% nothing to do
+ {S8, []} = evaluate_stream(meta(?LINE), S8, []),
+
+ #{index := Idx9} = Meta9 = meta(?LINE),
+ S9 = update_stream(Meta9, {action_failed, StreamId,
+ #{action => stopping,
+ index => Idx2,
+ node => N1,
+ epoch => E}}, S8),
+ ?assertMatch(#stream{members = #{N1 := #member{role = {replica, E2},
+ current = undefined,
+ state = {down, E}}}},
+ S9),
+
+ {S10, Actions9} = evaluate_stream(Meta9, S9, []),
+ %% retries action
+ ?assertMatch([{aux, {stop, StreamId, #{node := N1, epoch := E2}, _}}],
+ lists:sort(Actions9)),
+ ?assertMatch(#stream{members = #{N1 := #member{role = {replica, E2},
+ current = {stopping, Idx9},
+ state = {down, E}}}},
+ S10),
+
+ %% now finally succeed in stopping the old writer
+ N1Tail = {1, 107},
+ S11 = update_stream(meta(?LINE),
+ {member_stopped, StreamId, #{node => N1,
+ index => Idx9,
+ epoch => E2,
+ tail => N1Tail}}, S10),
+ %% skip straight to ready as cluster is already operative
+ ?assertMatch(#stream{members = #{N1 := #member{role = {replica, E2},
+ current = undefined,
+ state = {ready, E2}}}},
+ S11),
+
+ {S12, Actions11} = evaluate_stream(meta(?LINE), S11, []),
+ ?assertMatch([{aux, {start_replica, StreamId, #{node := N1},
+ #{leader_pid := E2LeaderPid}}}],
+ lists:sort(Actions11)),
+ ?assertMatch(#stream{members = #{N1 := #member{role = {replica, E2},
+ current = {starting, _},
+ state = {ready, E2}}}},
+ S12),
+ ok.
+
+replica_down(_) ->
+ E = 1,
+ StreamId = atom_to_list(?FUNCTION_NAME),
+ LeaderPid = fake_pid(n1),
+ [Replica1, Replica2] = ReplicaPids = [fake_pid(n2), fake_pid(n3)],
+ N1 = node(LeaderPid),
+ N2 = node(Replica1),
+ N3 = node(Replica2),
+
+ S0 = started_stream(StreamId, LeaderPid, ReplicaPids),
+ S1 = update_stream(meta(?LINE), {down, Replica1, boom}, S0),
+ ?assertMatch(#stream{members = #{N1 := #member{role = {writer, E},
+ current = undefined,
+ state = {running, E, LeaderPid}},
+ N2 := #member{role = {replica, E},
+ current = undefined,
+ state = {down, E}},
+ N3 := #member{role = {replica, E},
+ current = undefined,
+ state = {running, E, Replica2}}}},
+ S1),
+ {S2, Actions} = evaluate_stream(meta(?LINE), S1, []),
+ ?assertMatch([
+ {aux, {start_replica, StreamId, #{node := N2},
+ #{leader_pid := LeaderPid}}}
+ ],
+ lists:sort(Actions)),
+ ?assertMatch(#stream{members = #{N2 := #member{role = {replica, E},
+ current = {starting, _},
+ state = {down, E}}
+ }},
+ S2),
+ ok.
+
+leader_start_failed(_) ->
+
+ %% after a leader is selected we need to handle the case where the leader
+ %% start fails
+ %% this can happen if a node hosting the leader disconnects then connects
+ %% then disconnects again (rabbit seems to do this sometimes).
+ E = 1,
+ StreamId = atom_to_list(?FUNCTION_NAME),
+ LeaderPid = fake_pid(n1),
+ [Replica1, Replica2] = ReplicaPids = [fake_pid(n2), fake_pid(n3)],
+ N1 = node(LeaderPid),
+ N2 = node(Replica1),
+ N3 = node(Replica2),
+
+ S0 = started_stream(StreamId, LeaderPid, ReplicaPids),
+ Idx2 = ?LINE,
+ S1 = update_stream(meta(Idx2), {down, LeaderPid, boom}, S0),
+ {S2, _Actions} = evaluate_stream(meta(Idx2), S1, []),
+ %% leader was down but a temporary reconnection allowed the stop to complete
+ S3 = update_stream(meta(?LINE),
+ {member_stopped, StreamId, #{node => N1,
+ index => Idx2,
+ epoch => E,
+ tail => {1, 2}}}, S2),
+
+ {S3, []} = evaluate_stream(meta(?LINE), S3, []),
+ Meta4 = meta(?LINE),
+ S4 = update_stream(Meta4,
+ {member_stopped, StreamId, #{node => N2,
+ index => Idx2,
+ epoch => E,
+ tail => {1, 1}}}, S3),
+ E2 = E+1,
+ {S5, Actions4} = evaluate_stream(Meta4, S4, []),
+ ?assertMatch([{aux, {start_writer, StreamId, _,
+ #{epoch := E2,
+ leader_node := N1}}}],
+ lists:sort(Actions4)),
+ #{index := Idx4} = Meta4,
+ S6 = update_stream(meta(?LINE),
+ {action_failed, StreamId, #{node => N1,
+ index => Idx4,
+ action => starting,
+ epoch => E2}}, S5),
+ ?assertMatch(#stream{members = #{N1 := #member{role = {writer, E2},
+ current = undefined,
+ target = stopped,
+ state = {ready, E2}},
+ N2 := #member{role = {replica, E2},
+ target = stopped,
+ current = undefined,
+ state = {ready, E2}},
+ N3 := #member{role = {replica, E2},
+ target = stopped,
+ current = {stopping, _},
+ state = {running, E, _}}}},
+ S6),
+ % E3 = E2+1,
+ Idx7 = ?LINE,
+ {S7, Actions6} = evaluate_stream(meta(Idx7), S6, []),
+ ?assertMatch([{aux, {stop, StreamId, #{node := N1, epoch := E2}, _}},
+ {aux, {stop, StreamId, #{node := N2, epoch := E2}, _}}
+ ], lists:sort(Actions6)),
+ %% late stop from prior epoch - need to run stop again to make sure
+ Meta8 = meta(?LINE),
+ S8 = update_stream(Meta8,
+ {member_stopped, StreamId, #{node => N3,
+ index => Idx2,
+ epoch => E,
+ tail => {1, 1}}}, S7),
+ ?assertMatch(#stream{members = #{N1 := #member{role = {writer, E2},
+ current = {stopping, _},
+ target = stopped,
+ state = {ready, E2}},
+ N2 := #member{role = {replica, E2},
+ target = stopped,
+ current = {stopping, _},
+ state = {ready, E2}},
+ N3 := #member{role = {replica, E2},
+ target = stopped,
+ current = undefined,
+ state = {stopped, E, _}}}},
+ S8),
+ {_S9, Actions8} = evaluate_stream(Meta8, S8, []),
+ ?assertMatch([{aux, {stop, StreamId, #{node := N3, epoch := E2}, _}}
+ ], lists:sort(Actions8)),
+
+
+ ok.
+
+leader_down_scenario_1(_) ->
+ %% leader ended up in a stopped state in epoch 2, but one replica was
+ %% in {ready, 2} and the other was down in epoch 1
+
+ E = 1,
+ StreamId = atom_to_list(?FUNCTION_NAME),
+ LeaderPid = fake_pid(n1),
+ [Replica1, Replica2] = ReplicaPids = [fake_pid(n2), fake_pid(n3)],
+ N1 = node(LeaderPid),
+ N2 = node(Replica1),
+ N3 = node(Replica2),
+
+ S0 = started_stream(StreamId, LeaderPid, ReplicaPids),
+ Idx1 = ?LINE,
+ S1 = update_stream(meta(Idx1), {down, LeaderPid, boom}, S0),
+ ?assertMatch(#stream{members = #{N1 := #member{role = {writer, E},
+ current = undefined,
+ state = {down, E}},
+ N2 := #member{role = {replica, E},
+ current = undefined,
+ state = {running, E, Replica1}},
+ N3 := #member{role = {replica, E},
+ current = undefined,
+ state = {running, E, Replica2}}}},
+ S1),
+ {S2, Actions} = evaluate_stream(meta(Idx1), S1, []),
+ %% expect all members to be stopping now
+    %% replicas will receive down events as well, since they typically exit
+    %% when the leader does; this is ok
+ ?assertMatch([{aux, {stop, StreamId, #{node := N1, epoch := E2}, _}},
+ {aux, {stop, StreamId, #{node := N2, epoch := E2}, _}},
+ {aux, {stop, StreamId, #{node := N3, epoch := E2}, _}}],
+ lists:sort(Actions)),
+ ?assertMatch(#stream{members = #{N1 := #member{role = {writer, E},
+ current = {stopping, Idx1},
+ state = {down, E}},
+ N2 := #member{role = {replica, E},
+ current = {stopping, Idx1},
+ state = {running, E, Replica1}},
+ N3 := #member{role = {replica, E},
+ current = {stopping, Idx1},
+ state = {running, E, Replica2}}}},
+ S2),
+
+ %% idempotency check
+ {S2, []} = evaluate_stream(meta(?LINE), S2, []),
+ N2Tail = {E, 101},
+ S3 = update_stream(meta(?LINE), {member_stopped, StreamId, #{node => N2,
+ index => Idx1,
+ epoch => E,
+ tail => N2Tail}}, S2),
+ ?assertMatch(#stream{members = #{N2 := #member{role = {replica, E},
+ current = undefined,
+ state = {stopped, E, N2Tail}}}},
+ S3),
+ {S3, []} = evaluate_stream(meta(?LINE), S3, []),
+ N3Tail = {E, 102},
+ Meta4 = meta(?LINE),
+ S4 = update_stream(Meta4, {member_stopped, StreamId, #{node => N3,
+ index => Idx1,
+ epoch => E,
+ tail => N3Tail}}, S3),
+ E2 = E + 1,
+ ?assertMatch(#stream{members = #{N1 := #member{role = {replica, E2},
+ current = {stopping, _},
+ state = {down, E}},
+ N2 := #member{role = {replica, E2},
+ current = undefined,
+ state = {ready, E2}},
+ %% N3 has the higher offset so should
+ %% be selected as writer of E2
+ N3 := #member{role = {writer, E2},
+ current = undefined,
+ state = {ready, E2}}}},
+ S4),
+ {S5, Actions4} = evaluate_stream(Meta4, S4, []),
+ %% new leader has been selected so should be started
+ ?assertMatch([{aux, {start_writer, StreamId, _Args, #{leader_node := N3}}}],
+ lists:sort(Actions4)),
+ ?assertMatch(#stream{epoch = E2}, S5),
+
+ E2LeaderPid = fake_pid(n3),
+ Meta6 = meta(?LINE),
+ S6 = update_stream(Meta6, {member_started, StreamId,
+ Meta4#{epoch => E2, pid => E2LeaderPid}}, S5),
+ ?assertMatch(#stream{members = #{N1 := #member{role = {replica, E2},
+ current = {stopping, _},
+ state = {down, E}},
+ N2 := #member{role = {replica, E2},
+ current = undefined,
+ state = {ready, E2}},
+ %% N3 has the higher offset so should
+ %% be selected as writer of E2
+ N3 := #member{role = {writer, E2},
+ current = undefined,
+ state = {running, E2, E2LeaderPid}}}},
+ S6),
+ {S6b, Actions6} = evaluate_stream(Meta6, S6, []),
+ ?assertMatch([
+ {aux, {start_replica, StreamId, #{node := N2}, _}},
+ {aux, {update_mnesia, _, _, _}}
+ ],
+ lists:sort(Actions6)),
+
+ #{index := Idx7} = Meta7 = meta(?LINE),
+ S7 = update_stream(Meta7, {down, E2LeaderPid, boom}, S6b),
+ {S8, Actions7} = evaluate_stream(Meta7, S7, []),
+ ?assertMatch([{aux, {stop, StreamId, #{node := N3, epoch := E2}, _}}],
+ lists:sort(Actions7)),
+ ?assertMatch(#stream{members = #{N1 := #member{role = {replica, E2},
+ current = {stopping, _},
+ state = {down, E}},
+ N2 := #member{role = {replica, E2},
+ current = {starting, _},
+ state = {ready, E2}},
+ N3 := #member{role = {writer, E2},
+ current = {stopping, Idx7},
+ state = {down, E2}}}},
+ S8),
+ %% writer is stopped before the ready replica has been started
+ S9 = update_stream(meta(?LINE), {member_stopped, StreamId, #{node => N3,
+ index => Idx7,
+ epoch => E2,
+ tail => N3Tail}},
+ S8),
+ ?assertMatch(#stream{members = #{N3 := #member{role = {writer, E2},
+ current = undefined,
+ state = {stopped, E2, N3Tail}}}},
+ S9),
+ {S10, []} = evaluate_stream(meta(?LINE), S9, []),
+ #{index := Idx12} = Meta12 = meta(?LINE),
+ S11 = update_stream(Meta12, {action_failed, StreamId,
+ Meta6#{action => starting,
+ node => N2,
+ epoch => E2}},
+ S10),
+ ?assertMatch(#stream{members = #{N2 := #member{role = {replica, E2},
+ current = undefined,
+ state = {ready, E2}}}},
+ S11),
+ {S12, Actions11} = evaluate_stream(Meta12, S11, []),
+ ?assertMatch([{aux, {stop, StreamId, #{node := N2, epoch := E2}, _}}],
+ lists:sort(Actions11)),
+ ?assertMatch(#stream{members = #{N2 := #member{role = {replica, E2},
+ current = {stopping, Idx12},
+ state = {ready, E2}}}},
+ S12),
+ S13 = update_stream(meta(?LINE), {member_stopped, StreamId, #{node => N2,
+ index => Idx12,
+ epoch => E2,
+ tail => N2Tail}},
+ S12),
+ E3 = E2 + 1,
+ ?assertMatch(#stream{members = #{
+ N1 := #member{role = {replica, E3},
+ current = {stopping, Idx1},
+ state = {down, E}},
+ N2 := #member{role = {replica, E3},
+ current = undefined,
+ state = {ready, E3}},
+ N3 := #member{role = {writer, E3},
+ current = undefined,
+ state = {ready, E3}}
+ }},
+ S13),
+ ok.
+
+delete_stream(_) ->
+    %% delete a stream: every member should be deleted and the stream record
+    %% removed once the last member is gone
+
+ % E = 1,
+ StreamId = atom_to_list(?FUNCTION_NAME),
+ LeaderPid = fake_pid(n1),
+ [Replica1, Replica2] = ReplicaPids = [fake_pid(n2), fake_pid(n3)],
+ N1 = node(LeaderPid),
+ N2 = node(Replica1),
+ N3 = node(Replica2),
+
+ S0 = started_stream(StreamId, LeaderPid, ReplicaPids),
+ From = {self(), make_ref()},
+ Meta1 = (meta(?LINE))#{from => From},
+ S1 = update_stream(Meta1, {delete_stream, StreamId, #{}}, S0),
+ ?assertMatch(#stream{target = deleted,
+ members = #{N3 := #member{target = deleted,
+ current = undefined,
+ state = _},
+ N2 := #member{target = deleted,
+ current = undefined,
+ state = _},
+ N1 := #member{target = deleted,
+ current = undefined,
+ state = _}
+ }},
+ S1),
+ {S2, Actions1} = evaluate_stream(meta(?LINE), S1, []),
+    %% expect a delete_member action for each member
+ ?assertMatch([{aux, {delete_member, StreamId, #{node := N1}, _}},
+ {aux, {delete_member, StreamId, #{node := N2}, _}},
+ {aux, {delete_member, StreamId, #{node := N3}, _}}
+ % {reply, From, {wrap_reply, {ok, 0}}}
+ ],
+ lists:sort(Actions1)),
+ ?assertMatch(#stream{target = deleted,
+ members = #{N3 := #member{target = deleted,
+ current = {deleting, _},
+ state = _},
+ N2 := #member{target = deleted,
+ current = {deleting, _},
+ state = _},
+ N1 := #member{target = deleted,
+ current = {deleting, _},
+ state = _}
+ }},
+ S2),
+ S3 = update_stream(meta(?LINE), {member_deleted, StreamId, #{node => N1}},
+ S2),
+ ?assertMatch(#stream{target = deleted,
+ members = #{N2 := _, N3 := _} = M3}
+ when not is_map_key(N1, M3), S3),
+ {S4, []} = evaluate_stream(meta(?LINE), S3, []),
+ ?assertMatch(#stream{target = deleted,
+ members = #{N2 := _, N3 := _} = M3}
+ when not is_map_key(N1, M3), S4),
+ S5 = update_stream(meta(?LINE), {member_deleted, StreamId, #{node => N2}},
+ S4),
+ ?assertMatch(#stream{target = deleted,
+ members = #{N3 := _} = M5}
+ when not is_map_key(N2, M5), S5),
+ {S6, []} = evaluate_stream(meta(?LINE), S5, []),
+ S7 = update_stream(meta(?LINE), {member_deleted, StreamId, #{node => N3}},
+ S6),
+ ?assertEqual(undefined, S7),
+ %% idempotency test
+ _ = update_stream(Meta1, {delete_stream, StreamId, #{}}, S7),
+ ok.
+
+add_replica(_) ->
+ E = 1,
+ StreamId = atom_to_list(?FUNCTION_NAME),
+ LeaderPid = fake_pid(n1),
+ [Replica1, Replica2] = [fake_pid(n2), fake_pid(n3)],
+ N1 = node(LeaderPid),
+ N2 = node(Replica1),
+ %% this is to be added
+ N3 = node(Replica2),
+
+ S0 = started_stream(StreamId, LeaderPid, [Replica1]),
+ From = {self(), make_ref()},
+ Meta1 = (meta(?LINE))#{from => From},
+ S1 = update_stream(Meta1, {add_replica, StreamId, #{node => N3}}, S0),
+ ?assertMatch(#stream{target = running,
+ nodes = [N1, N2, N3],
+ members = #{N1 := #member{target = stopped,
+ current = undefined,
+ state = {running, _, _}},
+ N2 := #member{target = stopped,
+ current = undefined,
+ state = {running, _, _}},
+ N3 := #member{target = stopped,
+ current = undefined,
+ state = {down, 0}}
+ }},
+ S1),
+ {S2, Actions1} = evaluate_stream(Meta1, S1, []),
+ ?assertMatch([{aux, {stop, StreamId, #{node := N1, epoch := E}, _}},
+ {aux, {stop, StreamId, #{node := N2, epoch := E}, _}},
+ {aux, {stop, StreamId, #{node := N3, epoch := E}, _}}],
+ lists:sort(Actions1)),
+ Idx1 = maps:get(index, Meta1),
+ ?assertMatch(#stream{target = running,
+ nodes = [N1, N2, N3],
+ members = #{N1 := #member{target = stopped,
+ current = {stopping, Idx1},
+ state = {running, _, _}},
+ N2 := #member{target = stopped,
+ current = {stopping, Idx1},
+ state = {running, _, _}},
+ N3 := #member{target = stopped,
+ current = {stopping, Idx1},
+ state = {down, 0}}
+ }},
+ S2),
+ N1Tail = {E, 101},
+ S3 = update_stream(meta(?LINE), {member_stopped, StreamId, #{node => N1,
+ index => Idx1,
+ epoch => E,
+ tail => N1Tail}},
+ S2),
+ ?assertMatch(#stream{target = running,
+ nodes = [N1, N2, N3],
+ members = #{N1 := #member{target = running,
+ current = undefined,
+ state = {stopped, _, _}},
+ N2 := #member{target = stopped,
+ current = {stopping, Idx1},
+ state = {running, _, _}},
+ N3 := #member{target = stopped,
+ current = {stopping, Idx1},
+ state = {down, 0}}
+ }}, S3),
+ {S3, []} = evaluate_stream(meta(?LINE), S3, []),
+ N2Tail = {E, 100},
+ S4 = update_stream(meta(?LINE), {member_stopped, StreamId, #{node => N2,
+ index => Idx1,
+ epoch => E,
+ tail => N2Tail}},
+ S3),
+ E2 = E + 1,
+ ?assertMatch(#stream{target = running,
+ nodes = [N1, N2, N3],
+ members = #{N1 := #member{target = running,
+ current = undefined,
+ state = {ready, E2}},
+ N2 := #member{target = running,
+ current = undefined,
+ state = {ready, E2}},
+ N3 := #member{target = stopped,
+ current = {stopping, Idx1},
+ state = {down, 0}}
+ }}, S4),
+ Idx3 = ?LINE,
+ {S3, []} = evaluate_stream(meta(Idx3), S3, []),
+ {S5, Actions4} = evaluate_stream(meta(Idx3), S4, []),
+ ?assertMatch([{aux, {start_writer, StreamId, #{index := Idx3},
+ #{leader_node := N1}}}],
+ lists:sort(Actions4)),
+ ?assertMatch(#stream{epoch = E2}, S5),
+ S6 = update_stream(meta(?LINE), {member_stopped, StreamId, #{node => N3,
+ index => Idx1,
+ epoch => E,
+ tail => empty}},
+ S5),
+ ?assertMatch(#stream{target = running,
+ nodes = [N1, N2, N3],
+ members = #{N1 := #member{target = running,
+ current = {starting, Idx3},
+ role = {writer, _},
+ state = {ready, E2}},
+ N2 := #member{target = running,
+ current = undefined,
+ state = {ready, E2}},
+ N3 := #member{target = running,
+ current = undefined,
+ state = {ready, E2}}
+ }}, S6),
+ ok.
+
+delete_replica(_) ->
+    %% TODO: replica and leader need to be tested
+ E = 1,
+ StreamId = atom_to_list(?FUNCTION_NAME),
+ LeaderPid = fake_pid(n1),
+ [Replica1, Replica2] = [fake_pid(n2), fake_pid(n3)],
+ N1 = node(LeaderPid),
+ N2 = node(Replica1),
+    %% this is the member to be deleted
+ N3 = node(Replica2),
+
+ S0 = started_stream(StreamId, LeaderPid, [Replica1, Replica2]),
+ From = {self(), make_ref()},
+ Idx1 = ?LINE,
+ Meta1 = (meta(Idx1))#{from => From},
+ S1 = update_stream(Meta1, {delete_replica, StreamId, #{node => N3}}, S0),
+ ?assertMatch(#stream{target = running,
+ nodes = [N1, N2],
+ members = #{N1 := #member{target = stopped,
+ current = undefined,
+ state = {running, _, _}},
+ N2 := #member{target = stopped,
+ current = undefined,
+ state = {running, _, _}},
+ N3 := #member{target = deleted,
+ current = undefined,
+ state = {running, _, _}}
+ }},
+ S1),
+ {S2, Actions1} = evaluate_stream(Meta1, S1, []),
+ ?assertMatch([{aux, {delete_member, StreamId, #{node := N3}, _}},
+ {aux, {stop, StreamId, #{node := N1, epoch := E}, _}},
+ {aux, {stop, StreamId, #{node := N2, epoch := E}, _}}],
+ lists:sort(Actions1)),
+ S3 = update_stream(meta(?LINE), {member_deleted, StreamId, #{node => N3}},
+ S2),
+ ?assertMatch(#stream{target = running,
+ nodes = [N1, N2],
+ members = #{N1 := #member{target = stopped,
+ current = {stopping, _},
+ state = {running, _, _}},
+ N2 := #member{target = stopped,
+ current = {stopping, _},
+ state = {running, _, _}}
+ } = Members}
+ when not is_map_key(N3, Members), S3),
+ {S3, []} = evaluate_stream(meta(?LINE), S3, []),
+ S4 = update_stream(meta(?LINE),
+ {member_stopped, StreamId, #{node => N1,
+ index => Idx1,
+ epoch => E,
+ tail => {E, 100}}},
+ S3),
+ {S4, []} = evaluate_stream(meta(?LINE), S4, []),
+ S5 = update_stream(meta(?LINE),
+ {member_stopped, StreamId, #{node => N2,
+ index => Idx1,
+ epoch => E,
+ tail => {E, 101}}},
+ S4),
+ {S6, Actions5} = evaluate_stream(meta(?LINE), S5, []),
+ E2 = E + 1,
+ ?assertMatch(#stream{target = running,
+ nodes = [N1, N2],
+ members = #{N1 := #member{target = running,
+ current = undefined,
+ state = {ready, E2}},
+ N2 := #member{target = running,
+ role = {writer, E2},
+ current = {starting, _},
+ state = {ready, E2}}
+ }}, S6),
+ ?assertMatch([{aux, {start_writer, StreamId, _Args, #{nodes := [N1, N2]}}}
+ ], lists:sort(Actions5)),
+ {S4, []} = evaluate_stream(meta(?LINE), S4, []),
+ ok.
+
+delete_two_replicas(_) ->
+    %% There was a race condition in the rabbit_stream_queue_SUITE testcases delete_replica
+    %% and delete_last_replica: a replica could sometimes restart after deletion as it
+    %% transitioned back to the running state. See `rabbit_stream_coordinator.erl`
+    %% line 1039, the processing of the `member_stopped` command. The new function
+    %% `update_target` ensures this transition never happens. This test reproduces the
+    %% trace that leads to that error (a hedged sketch of that guard follows this testcase).
+ E = 1,
+ StreamId = atom_to_list(?FUNCTION_NAME),
+ LeaderPid = fake_pid(n1),
+ [Replica1, Replica2] = [fake_pid(n2), fake_pid(n3)],
+ N1 = node(LeaderPid),
+ N2 = node(Replica1),
+    %% this member is deleted first
+ N3 = node(Replica2),
+
+ S0 = started_stream(StreamId, LeaderPid, [Replica1, Replica2]),
+ From = {self(), make_ref()},
+ Idx1 = ?LINE,
+ Meta1 = (meta(Idx1))#{from => From},
+ S1 = update_stream(Meta1, {delete_replica, StreamId, #{node => N3}}, S0),
+ ?assertMatch(#stream{target = running,
+ nodes = [N1, N2],
+ members = #{N1 := #member{target = stopped,
+ current = undefined,
+ state = {running, _, _}},
+ N2 := #member{target = stopped,
+ current = undefined,
+ state = {running, _, _}},
+ N3 := #member{target = deleted,
+ current = undefined,
+ state = {running, _, _}}
+ }},
+ S1),
+ {S2, Actions1} = evaluate_stream(Meta1, S1, []),
+ ?assertMatch([{aux, {delete_member, StreamId, #{node := N3}, _}},
+ {aux, {stop, StreamId, #{node := N1, epoch := E}, _}},
+ {aux, {stop, StreamId, #{node := N2, epoch := E}, _}}],
+ lists:sort(Actions1)),
+
+ Idx2 = ?LINE,
+ Meta2 = (meta(Idx2))#{from => From},
+ S3 = update_stream(Meta2, {delete_replica, StreamId, #{node => N2}}, S2),
+ ?assertMatch(#stream{target = running,
+ nodes = [N1],
+ members = #{N1 := #member{target = stopped,
+ current = {stopping, _},
+ state = {running, _, _}},
+ N2 := #member{target = deleted,
+ current = {stopping, _},
+ state = {running, _, _}},
+ N3 := #member{target = deleted,
+ current = {deleting, _},
+ state = {running, _, _}}
+ }},
+ S3),
+ {S4, []} = evaluate_stream(Meta2, S3, []),
+
+
+ Idx3 = ?LINE,
+ S5 = update_stream(meta(Idx3),
+ {member_stopped, StreamId, #{node => N2,
+ index => Idx1,
+ epoch => E,
+ tail => {E, 101}}},
+ S4),
+ %% A deleted member can never transition to another target.
+ ?assertMatch(#stream{members = #{N2 := #member{target = deleted,
+ current = undefined,
+ state = {stopped, _, _}}}},
+ S5),
+ ok.
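+
+%% Not the real coordinator code - a minimal, hypothetical sketch of the
+%% `update_target` guard referred to in the comment above, assuming it only
+%% needs to refuse to overwrite a member whose target is already `deleted`:
+%%
+%%   update_target(#member{target = deleted} = Member, _Target) ->
+%%       Member;
+%%   update_target(Member, Target) ->
+%%       Member#member{target = Target}.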
+
+delete_replica_2(_) ->
+ %% replica is deleted before it has been fully started
+ E = 1,
+ StreamId = atom_to_list(?FUNCTION_NAME),
+ LeaderPid = fake_pid(n1),
+ [Replica1, Replica2] = [fake_pid(n2), fake_pid(n3)],
+ N1 = node(LeaderPid),
+ N2 = node(Replica1),
+    %% this is the member to be deleted
+ N3 = node(Replica2),
+ %% set replicas back to starting state
+ #stream{id = StreamId,
+ members = Members00} = S00 = started_stream(StreamId, LeaderPid,
+ [Replica1, Replica2]),
+ Members = maps:map(fun (_, #member{role = {replica, _}} = M) ->
+ M#member{state = {ready, 1},
+ current = {starting, 1}};
+ (_, M) ->
+ M
+ end, Members00),
+ S0 = S00#stream{members = Members},
+ From = {self(), make_ref()},
+ Idx1 = ?LINE,
+ Meta1 = (meta(Idx1))#{from => From},
+ %% DELETE REPLICA
+ S1 = update_stream(Meta1, {delete_replica, StreamId, #{node => N3}}, S0),
+ ?assertMatch(#stream{target = running,
+ nodes = [N1, N2],
+ members = #{N1 := #member{target = stopped,
+ current = undefined,
+ state = {running, _, _}},
+ N2 := #member{target = stopped,
+ current = {starting, _},
+ state = {ready, _}},
+ N3 := #member{target = deleted,
+ current = {starting, _},
+ state = {ready, _}}
+ }},
+ S1),
+ Idx2 = ?LINE,
+ {S2, Actions1} = evaluate_stream(meta(Idx2), S1, []),
+ ?assertMatch([
+ % {aux, {delete_member, StreamId, #{node := N3}, _}},
+ {aux, {stop, StreamId, #{node := N1, epoch := E}, _}}],
+ lists:sort(Actions1)),
+ %% LEADER DOWN
+ Meta3 = #{index := _Idx3} = meta(?LINE),
+ S3 = update_stream(Meta3, {down, LeaderPid, normal}, S2),
+ ?assertMatch(#stream{target = running,
+ members = #{N1 := #member{target = stopped,
+ current = {stopping, _},
+ state = {down, _}},
+ N2 := #member{target = stopped,
+ current = {starting, _},
+ state = {ready, _}},
+ N3 := #member{target = deleted,
+ current = {starting, _},
+ state = {ready, _}}
+ }},
+ S3),
+ {S4, Actions4} = evaluate_stream(meta(?LINE), S3, []),
+ ?assertMatch([], Actions4),
+ %% LEADER STOPPED
+ Idx4 = ?LINE,
+ S5 = update_stream(meta(Idx4),
+ {member_stopped, StreamId, #{node => N1,
+ index => Idx2,
+ epoch => E,
+ tail => {E, 100}}},
+ S4),
+ ?assertMatch(#stream{members = #{N1 := #member{target = running,
+ current = undefined,
+ state = {stopped, _, _}}}},
+ S5),
+ {S6, Actions6} = evaluate_stream(meta(?LINE), S5, []),
+ ?assertMatch([], Actions6),
+ %% DELETED REPLICA START FAIL
+ Meta7 = meta(?LINE),
+ S7 = update_stream(Meta7, {action_failed, StreamId,
+ #{action => starting,
+ index => 1,
+ node => N3,
+ epoch => E}}, S6),
+ {S8, Actions8} = evaluate_stream(Meta7, S7, []),
+ ?assertMatch([{aux, {delete_member, _, #{node := N3}, _}}], Actions8),
+ %% OTHER REPLICA START FAIL
+ Meta9 = meta(?LINE),
+ S9 = update_stream(Meta9, {action_failed, StreamId,
+ #{action => starting,
+ index => 1,
+ node => N2,
+ epoch => E}}, S8),
+ {_S10, Actions10} = evaluate_stream(Meta9, S9, []),
+ ?assertMatch([{aux, {stop, _, _, _}} ], Actions10),
+ ok.
+
+delete_replica_leader(_) ->
+    %% TODO: replica and leader need to be tested
+ E = 1,
+ StreamId = atom_to_list(?FUNCTION_NAME),
+ LeaderPid = fake_pid(n1),
+ [Replica1, _Replica2] = [fake_pid(n2), fake_pid(n3)],
+ N1 = node(LeaderPid),
+ N2 = node(Replica1),
+    %% N3 is unused here; the leader (N1) is the member to be deleted
+ % N3 = node(Replica2),
+
+ S0 = started_stream(StreamId, LeaderPid, [Replica1]),
+ From = {self(), make_ref()},
+ Meta1 = (meta(?LINE))#{from => From},
+ S1 = update_stream(Meta1, {delete_replica, StreamId, #{node => N1}}, S0),
+ ?assertMatch(#stream{target = running,
+ nodes = [N2],
+ members = #{N1 := #member{target = deleted,
+ current = undefined,
+ state = {running, _, _}},
+ N2 := #member{target = stopped,
+ current = undefined,
+ state = {running, _, _}}
+ }},
+ S1),
+ Idx2 = ?LINE,
+ {S2, Actions1} = evaluate_stream(meta(Idx2), S1, []),
+ ?assertMatch([{aux, {delete_member, StreamId, #{node := N1}, _}},
+ {aux, {stop, StreamId, #{node := N2, epoch := E}, _}}],
+ lists:sort(Actions1)),
+ S3 = S2,
+ Idx4 = ?LINE,
+ S4 = update_stream(meta(Idx4),
+ {member_stopped, StreamId, #{node => N2,
+ index => Idx2,
+ epoch => E,
+ tail => {E, 100}}},
+ S3),
+ E2 = E+1,
+ ?assertMatch(#stream{target = running,
+ nodes = [N2],
+ members = #{N1 := #member{target = deleted,
+ current = {deleting, Idx2},
+ state = {running, _, _}},
+ N2 := #member{target = running,
+ role = {writer, E2},
+ current = undefined,
+ state = {ready, E2}}
+ }},
+ S4),
+ ok.
+
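+%% Minimal command metadata as passed to update_stream/3 and evaluate_stream/3;
+%% the tests use ?LINE as a convenient unique, monotonically increasing index.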
+meta(N) when is_integer(N) ->
+ #{index => N,
+ system_time => N + 1000}.
+
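+%% Build a #stream{} in epoch 1 with a running writer on the leader's node and
+%% a running replica on each replica node, i.e. the steady state most of these
+%% tests start from.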
+started_stream(StreamId, LeaderPid, ReplicaPids) ->
+ E = 1,
+ Nodes = [node(LeaderPid) | [node(P) || P <- ReplicaPids]],
+ Conf = #{name => StreamId,
+ nodes => Nodes},
+
+ VHost = <<"/">>,
+ QName = #resource{kind = queue,
+ name = list_to_binary(StreamId),
+ virtual_host = VHost},
+ Members0 = #{node(LeaderPid) => #member{role = {writer, E},
+ node = node(LeaderPid),
+ state = {running, E, LeaderPid},
+ current = undefined}},
+ Members = lists:foldl(fun (R, Acc) ->
+ N = node(R),
+ Acc#{N => #member{role = {replica, E},
+ node = N,
+ state = {running, E, R},
+ current = undefined}}
+ end, Members0, ReplicaPids),
+
+
+ #stream{id = StreamId,
+ epoch = 1,
+ nodes = Nodes,
+ queue_ref = QName,
+ conf = Conf,
+ mnesia = {updated, 1},
+ members = Members}.
+
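+%% Build a stream-typed amqqueue record in the default vhost with the given
+%% type state.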
+new_q(Name, TypeState) ->
+ VHost = <<"/">>,
+ QName = #resource{kind = queue,
+ name = Name,
+ virtual_host = VHost},
+ amqqueue:set_type_state(
+ amqqueue:new_with_version(amqqueue_v2,
+ QName,
+ none,
+ true,
+ false,
+ none,
+ [],
+ VHost,
+ #{},
+ rabbit_stream_queue), TypeState).
+
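+%% Return a pid that reports an arbitrary node from node/1. It rewrites the
+%% node name inside the external term format of a locally spawned pid, so the
+%% coordinator state machine can be exercised with members on n1/n2/n3 without
+%% starting real nodes, e.g. node(fake_pid(n2)) =:= n2.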
+fake_pid(Node) ->
+ NodeBin = atom_to_binary(Node),
+ ThisNodeSize = size(term_to_binary(node())) + 1,
+ Pid = spawn(fun () -> ok end),
+ %% drop the local node data from a local pid
+ <<Pre:ThisNodeSize/binary, LocalPidData/binary>> = term_to_binary(Pid),
+ S = size(NodeBin),
+ %% get the encoding type of the pid
+ <<_:8, Type:8/unsigned, _/binary>> = Pre,
+ %% replace it with the incoming node binary
+ Final = <<131, Type, 100, S:16/unsigned, NodeBin/binary, LocalPidData/binary>>,
+ binary_to_term(Final).
+
+%% Utility
diff --git a/deps/rabbit/test/rabbit_stream_queue_SUITE.erl b/deps/rabbit/test/rabbit_stream_queue_SUITE.erl
index 401e470eea..12aadff011 100644
--- a/deps/rabbit/test/rabbit_stream_queue_SUITE.erl
+++ b/deps/rabbit/test/rabbit_stream_queue_SUITE.erl
@@ -1,16 +1,8 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at https://www.mozilla.org/MPL/
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% Copyright (c) 2012-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_stream_queue_SUITE).
@@ -19,45 +11,65 @@
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("amqp_client/include/amqp_client.hrl").
+-include_lib("rabbitmq_ct_helpers/include/rabbit_assert.hrl").
+-compile(nowarn_export_all).
-compile(export_all).
suite() ->
- [{timetrap, 5 * 60000}].
+ [{timetrap, 15 * 60000}].
all() ->
[
{group, single_node},
+ {group, single_node_parallel},
{group, cluster_size_2},
+ {group, cluster_size_2_parallel},
{group, cluster_size_3},
+ {group, cluster_size_3_1},
+ {group, cluster_size_3_2},
+ {group, cluster_size_3_parallel_1},
+ {group, cluster_size_3_parallel_2},
{group, unclustered_size_3_1},
{group, unclustered_size_3_2},
- {group, unclustered_size_3_3},
- {group, cluster_size_3_1}
+ {group, unclustered_size_3_3}
].
groups() ->
[
- {single_node, [], [restart_single_node] ++ all_tests()},
- {cluster_size_2, [], all_tests()},
- {cluster_size_3, [], all_tests() ++
- [delete_replica,
+ {single_node, [], [restart_single_node, recover]},
+ {single_node_parallel, [parallel], all_tests()},
+ {cluster_size_2, [], [recover]},
+ {cluster_size_2_parallel, [parallel], all_tests()},
+ {cluster_size_3, [],
+ [restart_coordinator_without_queues,
delete_down_replica,
- delete_classic_replica,
- delete_quorum_replica,
- consume_from_replica,
+ replica_recovery,
leader_failover,
- initial_cluster_size_one,
- initial_cluster_size_two,
- initial_cluster_size_one_policy,
- leader_locator_client_local,
- leader_locator_random,
- leader_locator_least_leaders,
- leader_locator_policy]},
+ leader_failover_dedupe,
+ add_replicas,
+ publish_coordinator_unavailable,
+ leader_locator_policy,
+ queue_size_on_declare]},
+ {cluster_size_3_1, [], [shrink_coordinator_cluster]},
+ {cluster_size_3_2, [], [recover,
+ declare_with_node_down]},
+ {cluster_size_3_parallel_1, [parallel], [delete_replica,
+ delete_last_replica,
+ delete_classic_replica,
+ delete_quorum_replica,
+ consume_from_replica,
+ initial_cluster_size_one,
+ initial_cluster_size_two,
+ initial_cluster_size_one_policy,
+ leader_locator_client_local,
+ declare_delete_same_stream,
+ leader_locator_random,
+ leader_locator_least_leaders]},
+ {cluster_size_3_parallel_2, [parallel], all_tests()},
{unclustered_size_3_1, [], [add_replica]},
{unclustered_size_3_2, [], [consume_without_local_replica]},
- {unclustered_size_3_3, [], [grow_coordinator_cluster]},
- {cluster_size_3_1, [], [shrink_coordinator_cluster]}
+ {unclustered_size_3_3, [], [grow_coordinator_cluster]}
].
all_tests() ->
@@ -70,10 +82,11 @@ all_tests() ->
delete_queue,
publish,
publish_confirm,
- recover,
consume_without_qos,
consume,
consume_offset,
+ consume_timestamp_offset,
+ consume_timestamp_last_offset,
basic_get,
consume_with_autoack,
consume_and_nack,
@@ -82,6 +95,7 @@ all_tests() ->
consume_from_last,
consume_from_next,
consume_from_default,
+ consume_from_relative_time_offset,
consume_credit,
consume_credit_out_of_order_ack,
consume_credit_multiple_ack,
@@ -90,8 +104,11 @@ all_tests() ->
max_age,
invalid_policy,
max_age_policy,
- max_segment_size_policy,
- purge
+ max_segment_size_bytes_policy,
+ purge,
+ update_retention_policy,
+ queue_info,
+ tracking_status
].
%% -------------------------------------------------------------------
@@ -108,12 +125,27 @@ init_per_suite(Config0) ->
end_per_suite(Config) ->
rabbit_ct_helpers:run_teardown_steps(Config).
+init_per_group(cluster_size_3_parallel = Group, Config) ->
+ case rabbit_ct_helpers:is_mixed_versions() of
+ true ->
+ {skip, "not mixed versions compatible"};
+ _ ->
+ init_per_group1(Group, Config)
+ end;
init_per_group(Group, Config) ->
+ init_per_group1(Group, Config).
+
+init_per_group1(Group, Config) ->
ClusterSize = case Group of
single_node -> 1;
+ single_node_parallel -> 1;
cluster_size_2 -> 2;
+ cluster_size_2_parallel -> 2;
cluster_size_3 -> 3;
+ cluster_size_3_parallel_1 -> 3;
+ cluster_size_3_parallel_2 -> 3;
cluster_size_3_1 -> 3;
+ cluster_size_3_2 -> 3;
unclustered_size_3_1 -> 3;
unclustered_size_3_2 -> 3;
unclustered_size_3_3 -> 3
@@ -145,9 +177,12 @@ init_per_group(Group, Config) ->
Config2, 0, application, set_env,
[rabbit, channel_tick_interval, 100]),
Config2;
- Skip ->
+ {skip, _} = Skip ->
end_per_group(Group, Config2),
- Skip
+ Skip;
+ Other ->
+ end_per_group(Group, Config2),
+ {skip, Other}
end
end.
@@ -166,7 +201,6 @@ merge_app_env(Config) ->
{rabbit, [{core_metrics_gc_interval, 100}]}).
end_per_testcase(Testcase, Config) ->
- rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_queues, []),
Config1 = rabbit_ct_helpers:run_steps(
Config,
rabbit_ct_client_helpers:teardown_steps()),
@@ -184,7 +218,8 @@ declare_args(Config) ->
?assertEqual({'queue.declare_ok', Q, 0, 0},
declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>},
{<<"x-max-length">>, long, 2000}])),
- assert_queue_type(Server, Q, rabbit_stream_queue).
+ assert_queue_type(Server, Q, rabbit_stream_queue),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
declare_max_age(Config) ->
Server = rabbit_ct_broker_helpers:get_node_config(Config, 0, nodename),
@@ -201,7 +236,8 @@ declare_max_age(Config) ->
?assertEqual({'queue.declare_ok', Q, 0, 0},
declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>},
{<<"x-max-age">>, longstr, <<"1Y">>}])),
- assert_queue_type(Server, Q, rabbit_stream_queue).
+ assert_queue_type(Server, Q, rabbit_stream_queue),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
declare_invalid_properties(Config) ->
Server = rabbit_ct_broker_helpers:get_node_config(Config, 0, nodename),
@@ -249,13 +285,24 @@ declare_queue(Config) ->
%% Test declare an existing queue
?assertEqual({'queue.declare_ok', Q, 0, 0},
- declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
+ declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
+
+ ?assertMatch([_], find_queue_info(Config, [])),
+
+ %% Test declare an existing queue with different arguments
+ ?assertExit(_, declare(Ch, Q, [])),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
- ?assertMatch([_], rpc:call(Server, supervisor, which_children,
- [osiris_server_sup])),
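+%% Look up the info proplist for this testcase's queue on the given node,
+%% restricted to the requested Keys (plus name), via rabbit_amqqueue:info_all/2.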
+find_queue_info(Config, Keys) ->
+ find_queue_info(Config, 0, Keys).
- %% Test declare an existing queue with different arguments
- ?assertExit(_, declare(Ch, Q, [])).
+find_queue_info(Config, Node, Keys) ->
+ Name = ?config(queue_name, Config),
+ QName = rabbit_misc:r(<<"/">>, queue, Name),
+ Infos = rabbit_ct_broker_helpers:rpc(Config, Node, rabbit_amqqueue, info_all,
+ [<<"/">>, [name] ++ Keys]),
+ [Info] = [Props || Props <- Infos, lists:member({name, QName}, Props)],
+ Info.
delete_queue(Config) ->
[Server | _] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
@@ -267,6 +314,71 @@ delete_queue(Config) ->
?assertMatch(#'queue.delete_ok'{},
amqp_channel:call(Ch, #'queue.delete'{queue = Q})).
+add_replicas(Config) ->
+ [Server0, Server1, Server2] =
+ rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
+ Ch = rabbit_ct_client_helpers:open_channel(Config, Server0),
+ Q = ?config(queue_name, Config),
+ ?assertEqual({'queue.declare_ok', Q, 0, 0},
+ declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>},
+ {<<"x-initial-cluster-size">>, long, 1}])),
+
+    %% TODO: add lots of data so that the replica is still out of sync when
+    %% the second request comes in
+ NumMsgs = 1000,
+ Data = crypto:strong_rand_bytes(1000),
+ #'confirm.select_ok'{} = amqp_channel:call(Ch, #'confirm.select'{}),
+ amqp_channel:register_confirm_handler(Ch, self()),
+ [publish(Ch, Q, Data) || _ <- lists:seq(1, NumMsgs)],
+ %% should be sufficient for the next message to fall in the next
+ %% chunk
+ timer:sleep(100),
+ publish(Ch, Q, <<"last">>),
+ amqp_channel:wait_for_confirms(Ch, 30),
+ timer:sleep(1000),
+ ?assertEqual(ok,
+ rpc:call(Server0, rabbit_stream_queue, add_replica,
+ [<<"/">>, Q, Server1])),
+
+ timer:sleep(1000),
+
+ %% it is almost impossible to reliably catch this situation.
+    %% increasing the number of messages published and the data size could help
+ % ?assertMatch({error, {disallowed, out_of_sync_replica}} ,
+ ?assertMatch(ok ,
+ rpc:call(Server0, rabbit_stream_queue, add_replica,
+ [<<"/">>, Q, Server2])),
+ timer:sleep(1000),
+ %% validate we can read the last entry
+ qos(Ch, 10, false),
+ amqp_channel:subscribe(
+ Ch, #'basic.consume'{queue = Q,
+ no_ack = false,
+ consumer_tag = <<"ctag">>,
+ arguments = [{<<"x-stream-offset">>, longstr, <<"last">>}]},
+ self()),
+ receive
+ #'basic.consume_ok'{consumer_tag = <<"ctag">>} ->
+ ok
+ end,
+ receive
+ {#'basic.deliver'{delivery_tag = DeliveryTag},
+ #amqp_msg{payload = <<"last">>}} ->
+ ok = amqp_channel:cast(Ch, #'basic.ack'{delivery_tag = DeliveryTag,
+ multiple = false})
+ after 60000 ->
+ flush(),
+ ?assertMatch(#'queue.delete_ok'{},
+ amqp_channel:call(Ch, #'queue.delete'{queue = Q})),
+ exit(deliver_timeout)
+ end,
+ % ?assertMatch({error, {disallowed, out_of_sync_replica}} ,
+ % rpc:call(Server0, rabbit_stream_queue, add_replica,
+ % [<<"/">>, Q, Server2])),
+ ?assertMatch(#'queue.delete_ok'{},
+ amqp_channel:call(Ch, #'queue.delete'{queue = Q})),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
+
add_replica(Config) ->
[Server0, Server1, Server2] =
rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
@@ -312,7 +424,7 @@ add_replica(Config) ->
[<<"/">>, Q, Server1])),
%% replicas must be recorded on the state, and if we publish messages then they must
%% be stored on disk
- check_leader_and_replicas(Config, Q, Server0, [Server1]),
+ check_leader_and_replicas(Config, [Server0, Server1]),
%% And if we try again? Idempotent
?assertEqual(ok, rpc:call(Server0, rabbit_stream_queue, add_replica,
[<<"/">>, Q, Server1])),
@@ -322,7 +434,8 @@ add_replica(Config) ->
rabbit_control_helper:command(start_app, Server2),
?assertEqual(ok, rpc:call(Server0, rabbit_stream_queue, add_replica,
[<<"/">>, Q, Server2])),
- check_leader_and_replicas(Config, Q, Server0, [Server1, Server2]).
+ check_leader_and_replicas(Config, [Server0, Server1, Server2]),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
delete_replica(Config) ->
[Server0, Server1, Server2] =
@@ -331,7 +444,7 @@ delete_replica(Config) ->
Q = ?config(queue_name, Config),
?assertEqual({'queue.declare_ok', Q, 0, 0},
declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
- check_leader_and_replicas(Config, Q, Server0, [Server1, Server2]),
+ check_leader_and_replicas(Config, [Server0, Server1, Server2]),
%% Not a member of the cluster, what would happen?
?assertEqual({error, node_not_running},
rpc:call(Server0, rabbit_stream_queue, delete_replica,
@@ -340,14 +453,39 @@ delete_replica(Config) ->
rpc:call(Server0, rabbit_stream_queue, delete_replica,
[<<"/">>, Q, Server1])),
%% check it's gone
- check_leader_and_replicas(Config, Q, Server0, [Server2]),
+ check_leader_and_replicas(Config, [Server0, Server2]),
%% And if we try again? Idempotent
?assertEqual(ok, rpc:call(Server0, rabbit_stream_queue, delete_replica,
[<<"/">>, Q, Server1])),
%% Delete the last replica
?assertEqual(ok, rpc:call(Server0, rabbit_stream_queue, delete_replica,
[<<"/">>, Q, Server2])),
- check_leader_and_replicas(Config, Q, Server0, []).
+ check_leader_and_replicas(Config, [Server0]),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
+
+delete_last_replica(Config) ->
+ [Server0, Server1, Server2] =
+ rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
+ Ch = rabbit_ct_client_helpers:open_channel(Config, Server0),
+ Q = ?config(queue_name, Config),
+ ?assertEqual({'queue.declare_ok', Q, 0, 0},
+ declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
+ check_leader_and_replicas(Config, [Server0, Server1, Server2]),
+ ?assertEqual(ok,
+ rpc:call(Server0, rabbit_stream_queue, delete_replica,
+ [<<"/">>, Q, Server1])),
+ ?assertEqual(ok,
+ rpc:call(Server0, rabbit_stream_queue, delete_replica,
+ [<<"/">>, Q, Server2])),
+ %% check they're gone
+ check_leader_and_replicas(Config, [Server0], members),
+ %% delete the last one
+ ?assertEqual({error, last_stream_member},
+ rpc:call(Server0, rabbit_stream_queue, delete_replica,
+ [<<"/">>, Q, Server0])),
+ %% It's still here
+ check_leader_and_replicas(Config, [Server0]),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
grow_coordinator_cluster(Config) ->
[Server0, Server1, _Server2] =
@@ -371,7 +509,8 @@ grow_coordinator_cluster(Config) ->
_ ->
false
end
- end, 60000).
+ end, 60000),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
shrink_coordinator_cluster(Config) ->
[Server0, Server1, Server2] =
@@ -394,7 +533,8 @@ shrink_coordinator_cluster(Config) ->
_ ->
false
end
- end, 60000).
+ end, 60000),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
delete_classic_replica(Config) ->
[Server0, Server1, _Server2] =
@@ -409,7 +549,8 @@ delete_classic_replica(Config) ->
[<<"/">>, Q, 'zen@rabbit'])),
?assertEqual({error, classic_queue_not_supported},
rpc:call(Server0, rabbit_stream_queue, delete_replica,
- [<<"/">>, Q, Server1])).
+ [<<"/">>, Q, Server1])),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
delete_quorum_replica(Config) ->
[Server0, Server1, _Server2] =
@@ -424,7 +565,8 @@ delete_quorum_replica(Config) ->
[<<"/">>, Q, 'zen@rabbit'])),
?assertEqual({error, quorum_queue_not_supported},
rpc:call(Server0, rabbit_stream_queue, delete_replica,
- [<<"/">>, Q, Server1])).
+ [<<"/">>, Q, Server1])),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
delete_down_replica(Config) ->
[Server0, Server1, Server2] =
@@ -433,17 +575,56 @@ delete_down_replica(Config) ->
Q = ?config(queue_name, Config),
?assertEqual({'queue.declare_ok', Q, 0, 0},
declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
- check_leader_and_replicas(Config, Q, Server0, [Server1, Server2]),
+ check_leader_and_replicas(Config, [Server0, Server1, Server2]),
ok = rabbit_ct_broker_helpers:stop_node(Config, Server1),
?assertEqual({error, node_not_running},
rpc:call(Server0, rabbit_stream_queue, delete_replica,
[<<"/">>, Q, Server1])),
%% check it isn't gone
- check_leader_and_replicas(Config, Q, Server0, [Server1, Server2]),
+ check_leader_and_replicas(Config, [Server0, Server1, Server2], members),
ok = rabbit_ct_broker_helpers:start_node(Config, Server1),
- ?assertEqual(ok,
- rpc:call(Server0, rabbit_stream_queue, delete_replica,
- [<<"/">>, Q, Server1])).
+ rabbit_ct_helpers:await_condition(
+ fun() ->
+ ok == rpc:call(Server0, rabbit_stream_queue, delete_replica,
+ [<<"/">>, Q, Server1])
+ end),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
+
+publish_coordinator_unavailable(Config) ->
+ [Server0, Server1, Server2] =
+ rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
+ Ch = rabbit_ct_client_helpers:open_channel(Config, Server0),
+ Q = ?config(queue_name, Config),
+ ?assertEqual({'queue.declare_ok', Q, 0, 0},
+ declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
+ check_leader_and_replicas(Config, [Server0, Server1, Server2]),
+ ok = rabbit_ct_broker_helpers:stop_node(Config, Server1),
+ ok = rabbit_ct_broker_helpers:stop_node(Config, Server2),
+ rabbit_ct_helpers:await_condition(
+ fun () ->
+ N = rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_mnesia, cluster_nodes, [running]),
+ length(N) == 1
+ end),
+ #'confirm.select_ok'{} = amqp_channel:call(Ch, #'confirm.select'{}),
+ amqp_channel:register_confirm_handler(Ch, self()),
+ publish(Ch, Q),
+ ?assertExit({{shutdown, {connection_closing, {server_initiated_close, 506, _}}}, _},
+ amqp_channel:wait_for_confirms(Ch, 60)),
+ ok = rabbit_ct_broker_helpers:start_node(Config, Server1),
+ ok = rabbit_ct_broker_helpers:start_node(Config, Server2),
+ rabbit_ct_helpers:await_condition(
+ fun () ->
+ Info = find_queue_info(Config, 0, [online]),
+ length(proplists:get_value(online, Info)) == 3
+ end),
+ Ch1 = rabbit_ct_client_helpers:open_channel(Config, Server0),
+ publish(Ch1, Q),
+
+ #'confirm.select_ok'{} = amqp_channel:call(Ch1, #'confirm.select'{}),
+ amqp_channel:register_confirm_handler(Ch1, self()),
+ publish(Ch1, Q),
+ amqp_channel:wait_for_confirms(Ch1, 30),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
publish(Config) ->
[Server | _] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
@@ -454,7 +635,8 @@ publish(Config) ->
declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
publish(Ch, Q),
- quorum_queue_utils:wait_for_messages(Config, [[Q, <<"1">>, <<"1">>, <<"0">>]]).
+ quorum_queue_utils:wait_for_messages(Config, [[Q, <<"1">>, <<"1">>, <<"0">>]]),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
publish_confirm(Config) ->
[Server | _] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
@@ -468,7 +650,8 @@ publish_confirm(Config) ->
amqp_channel:register_confirm_handler(Ch, self()),
publish(Ch, Q),
amqp_channel:wait_for_confirms(Ch, 5),
- quorum_queue_utils:wait_for_messages(Config, [[Q, <<"1">>, <<"1">>, <<"0">>]]).
+ quorum_queue_utils:wait_for_messages(Config, [[Q, <<"1">>, <<"1">>, <<"0">>]]),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
restart_single_node(Config) ->
[Server] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
@@ -486,10 +669,26 @@ restart_single_node(Config) ->
quorum_queue_utils:wait_for_messages(Config, [[Q, <<"1">>, <<"1">>, <<"0">>]]),
Ch1 = rabbit_ct_client_helpers:open_channel(Config, Server),
publish(Ch1, Q),
- quorum_queue_utils:wait_for_messages(Config, [[Q, <<"2">>, <<"2">>, <<"0">>]]).
+ quorum_queue_utils:wait_for_messages(Config, [[Q, <<"2">>, <<"2">>, <<"0">>]]),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
+
+%% the failing case for this test relies on a particular random condition,
+%% so please never dismiss a failure here as a flake
+declare_with_node_down(Config) ->
+ [Server1, Server2, Server3] = Servers = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
+ Ch = rabbit_ct_client_helpers:open_channel(Config, Server1),
+ rabbit_ct_broker_helpers:stop_node(Config, Server2),
+ Q = ?config(queue_name, Config),
+ ?assertEqual({'queue.declare_ok', Q, 0, 0},
+ declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
+
+ check_leader_and_replicas(Config, [Server1, Server3]),
+ rabbit_ct_broker_helpers:start_node(Config, Server2),
+ check_leader_and_replicas(Config, Servers),
+ ok.
recover(Config) ->
- [Server | _] = Servers = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
+ [Server | _] = Servers0 = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
Ch = rabbit_ct_client_helpers:open_channel(Config, Server),
Q = ?config(queue_name, Config),
@@ -498,13 +697,54 @@ recover(Config) ->
publish(Ch, Q),
quorum_queue_utils:wait_for_messages(Config, [[Q, <<"1">>, <<"1">>, <<"0">>]]),
+ Perm0 = permute(Servers0),
+ Servers = lists:nth(rand:uniform(length(Perm0)), Perm0),
+    %% This is a slow test, so select a single random permutation and trust that
+    %% over enough CI rounds any failure will eventually show up
+
+ ct:pal("recover: running stop start for permutation ~w", [Servers]),
[rabbit_ct_broker_helpers:stop_node(Config, S) || S <- Servers],
[rabbit_ct_broker_helpers:start_node(Config, S) || S <- lists:reverse(Servers)],
+ ct:pal("recover: running stop waiting for messages ~w", [Servers]),
+ check_leader_and_replicas(Config, Servers0),
+ quorum_queue_utils:wait_for_messages(Config, [[Q, <<"1">>, <<"1">>, <<"0">>]], 60),
+
+ %% Another single random permutation
+ Perm1 = permute(Servers0),
+ Servers1 = lists:nth(rand:uniform(length(Perm1)), Perm1),
+
+ ct:pal("recover: running app stop start for permuation ~w", [Servers1]),
+ [rabbit_control_helper:command(stop_app, S) || S <- Servers1],
+ [rabbit_control_helper:command(start_app, S) || S <- lists:reverse(Servers1)],
+ ct:pal("recover: running app stop waiting for messages ~w", [Servers1]),
+ check_leader_and_replicas(Config, Servers0),
+ quorum_queue_utils:wait_for_messages(Config, [[Q, <<"1">>, <<"1">>, <<"0">>]], 60),
- quorum_queue_utils:wait_for_messages(Config, [[Q, <<"1">>, <<"1">>, <<"0">>]]),
Ch1 = rabbit_ct_client_helpers:open_channel(Config, Server),
publish(Ch1, Q),
- quorum_queue_utils:wait_for_messages(Config, [[Q, <<"2">>, <<"2">>, <<"0">>]]).
+ quorum_queue_utils:wait_for_messages(Config, [[Q, <<"2">>, <<"2">>, <<"0">>]]),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
+
+restart_coordinator_without_queues(Config) ->
+    %% The coordinator used to fail to restart if no stream queues were present anymore,
+    %% as recover would not be called on all nodes - only the local one was restarted, so
+    %% the election would not succeed. Fixed now, but this test checks for that failure.
+ [Server | _] = Servers0 = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
+
+ Ch = rabbit_ct_client_helpers:open_channel(Config, Server),
+ Q = ?config(queue_name, Config),
+ ?assertEqual({'queue.declare_ok', Q, 0, 0},
+ declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
+ publish_confirm(Ch, Q, [<<"msg">>]),
+ ?assertMatch(#'queue.delete_ok'{}, amqp_channel:call(Ch, #'queue.delete'{queue = Q})),
+
+ [rabbit_ct_broker_helpers:stop_node(Config, S) || S <- Servers0],
+ [rabbit_ct_broker_helpers:start_node(Config, S) || S <- lists:reverse(Servers0)],
+
+ Ch1 = rabbit_ct_client_helpers:open_channel(Config, Server),
+ ?assertEqual({'queue.declare_ok', Q, 0, 0},
+ declare(Ch1, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
consume_without_qos(Config) ->
[Server | _] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
@@ -513,10 +753,11 @@ consume_without_qos(Config) ->
Q = ?config(queue_name, Config),
?assertEqual({'queue.declare_ok', Q, 0, 0},
declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
-
+
?assertExit({{shutdown, {server_initiated_close, 406, _}}, _},
amqp_channel:subscribe(Ch, #'basic.consume'{queue = Q, consumer_tag = <<"ctag">>},
- self())).
+ self())),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
consume_without_local_replica(Config) ->
[Server0, Server1 | _] =
@@ -535,7 +776,8 @@ consume_without_local_replica(Config) ->
qos(Ch1, 10, false),
?assertExit({{shutdown, {server_initiated_close, 406, _}}, _},
amqp_channel:subscribe(Ch1, #'basic.consume'{queue = Q, consumer_tag = <<"ctag">>},
- self())).
+ self())),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
consume(Config) ->
[Server | _] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
@@ -545,10 +787,7 @@ consume(Config) ->
?assertEqual({'queue.declare_ok', Q, 0, 0},
declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
- #'confirm.select_ok'{} = amqp_channel:call(Ch, #'confirm.select'{}),
- amqp_channel:register_confirm_handler(Ch, self()),
- publish(Ch, Q),
- amqp_channel:wait_for_confirms(Ch, 5),
+ publish_confirm(Ch, Q, [<<"msg">>]),
Ch1 = rabbit_ct_client_helpers:open_channel(Config, Server),
qos(Ch1, 10, false),
@@ -562,7 +801,8 @@ consume(Config) ->
ok
after 5000 ->
exit(timeout)
- end.
+ end,
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
consume_offset(Config) ->
[Server | _] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
@@ -572,11 +812,8 @@ consume_offset(Config) ->
?assertEqual({'queue.declare_ok', Q, 0, 0},
declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
- #'confirm.select_ok'{} = amqp_channel:call(Ch, #'confirm.select'{}),
- amqp_channel:register_confirm_handler(Ch, self()),
Payload = << <<"1">> || _ <- lists:seq(1, 500) >>,
- [publish(Ch, Q, Payload) || _ <- lists:seq(1, 1000)],
- amqp_channel:wait_for_confirms(Ch, 5),
+ publish_confirm(Ch, Q, [Payload || _ <- lists:seq(1, 1000)]),
run_proper(
fun () ->
@@ -597,7 +834,88 @@ consume_offset(Config) ->
amqp_channel:call(Ch1, #'basic.cancel'{consumer_tag = <<"ctag">>}),
true
end)
- end, [], 25).
+      end, [], 5), %% Run it only 5 times. This test times out quite often, not in the
+%% receive clause but in ct itself. Consuming so many messages so many times can take too
+%% long in some CPU configurations. Let's trust that many rounds of CI will find any real failure.
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
+
+consume_timestamp_offset(Config) ->
+ [Server | _] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
+
+ Ch = rabbit_ct_client_helpers:open_channel(Config, Server),
+ Q = ?config(queue_name, Config),
+ ?assertEqual({'queue.declare_ok', Q, 0, 0},
+ declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
+
+ publish_confirm(Ch, Q, [<<"111">> || _ <- lists:seq(1, 100)]),
+
+ Ch1 = rabbit_ct_client_helpers:open_channel(Config, Server),
+ qos(Ch1, 10, false),
+
+ Offset = erlang:system_time(second) - 60,
+ amqp_channel:subscribe(
+ Ch1,
+ #'basic.consume'{queue = Q,
+ no_ack = false,
+ consumer_tag = <<"ctag">>,
+ arguments = [{<<"x-stream-offset">>, timestamp, Offset}]},
+ self()),
+ receive
+ #'basic.consume_ok'{consumer_tag = <<"ctag">>} ->
+ ok
+ after 5000 ->
+ exit(consume_ok_timeout)
+ end,
+
+ %% It has subscribed to a very old timestamp, so we will receive the whole stream
+ receive_batch(Ch1, 0, 99),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
+
+consume_timestamp_last_offset(Config) ->
+ [Server | _] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
+
+ Ch = rabbit_ct_client_helpers:open_channel(Config, Server),
+ Q = ?config(queue_name, Config),
+ ?assertEqual({'queue.declare_ok', Q, 0, 0},
+ declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
+
+ publish_confirm(Ch, Q, [<<"111">> || _ <- lists:seq(1, 100)]),
+
+ Ch1 = rabbit_ct_client_helpers:open_channel(Config, Server),
+ qos(Ch1, 10, false),
+
+ %% Subscribe from now/future
+ Offset = erlang:system_time(second) + 60,
+ amqp_channel:subscribe(
+ Ch1,
+ #'basic.consume'{queue = Q,
+ no_ack = false,
+ consumer_tag = <<"ctag">>,
+ arguments = [{<<"x-stream-offset">>, timestamp, Offset}]},
+ self()),
+ receive
+ #'basic.consume_ok'{consumer_tag = <<"ctag">>} ->
+ ok
+ after 5000 ->
+ exit(missing_consume_ok)
+ end,
+
+ receive
+ {_,
+ #amqp_msg{props = #'P_basic'{headers = [{<<"x-stream-offset">>, long, S}]}}}
+ when S < 100 ->
+ exit({unexpected_offset, S})
+ after 1000 ->
+ ok
+ end,
+
+ %% Publish a few more
+ [publish(Ch, Q, <<"msg2">>) || _ <- lists:seq(1, 100)],
+ amqp_channel:wait_for_confirms(Ch, 5),
+
+ %% Yeah! we got them
+ receive_batch(Ch1, 100, 199),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
basic_get(Config) ->
[Server | _] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
@@ -606,9 +924,10 @@ basic_get(Config) ->
Q = ?config(queue_name, Config),
?assertEqual({'queue.declare_ok', Q, 0, 0},
declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
-
+
?assertExit({{shutdown, {connection_closing, {server_initiated_close, 540, _}}}, _},
- amqp_channel:call(Ch, #'basic.get'{queue = Q})).
+ amqp_channel:call(Ch, #'basic.get'{queue = Q})),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
consume_with_autoack(Config) ->
[Server | _] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
@@ -623,7 +942,8 @@ consume_with_autoack(Config) ->
?assertExit(
{{shutdown, {connection_closing, {server_initiated_close, 540, _}}}, _},
- subscribe(Ch1, Q, true, 0)).
+ subscribe(Ch1, Q, true, 0)),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
consume_and_nack(Config) ->
[Server | _] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
@@ -633,10 +953,7 @@ consume_and_nack(Config) ->
?assertEqual({'queue.declare_ok', Q, 0, 0},
declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
- #'confirm.select_ok'{} = amqp_channel:call(Ch, #'confirm.select'{}),
- amqp_channel:register_confirm_handler(Ch, self()),
- publish(Ch, Q),
- amqp_channel:wait_for_confirms(Ch, 5),
+ publish_confirm(Ch, Q, [<<"msg">>]),
Ch1 = rabbit_ct_client_helpers:open_channel(Config, Server),
qos(Ch1, 10, false),
@@ -653,7 +970,8 @@ consume_and_nack(Config) ->
declare(Ch1, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}]))
after 10000 ->
exit(timeout)
- end.
+ end,
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
basic_cancel(Config) ->
[Server | _] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
@@ -663,26 +981,35 @@ basic_cancel(Config) ->
?assertEqual({'queue.declare_ok', Q, 0, 0},
declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
- #'confirm.select_ok'{} = amqp_channel:call(Ch, #'confirm.select'{}),
- amqp_channel:register_confirm_handler(Ch, self()),
- publish(Ch, Q),
- amqp_channel:wait_for_confirms(Ch, 5),
+ publish_confirm(Ch, Q, [<<"msg">>]),
Ch1 = rabbit_ct_client_helpers:open_channel(Config, Server),
qos(Ch1, 10, false),
- subscribe(Ch1, Q, false, 0),
+ CTag = <<"basic_cancel">>,
+ subscribe(Ch1, Q, false, 0, CTag),
rabbit_ct_helpers:await_condition(
fun() ->
- 1 == length(rabbit_ct_broker_helpers:rpc(Config, Server, ets, tab2list,
- [consumer_created]))
+ 1 == length(filter_consumers(Config, Server, CTag))
end, 30000),
receive
{#'basic.deliver'{}, _} ->
- amqp_channel:call(Ch1, #'basic.cancel'{consumer_tag = <<"ctag">>}),
- ?assertMatch([], rabbit_ct_broker_helpers:rpc(Config, Server, ets, tab2list, [consumer_created]))
+ amqp_channel:call(Ch1, #'basic.cancel'{consumer_tag = CTag}),
+ ?assertMatch([], filter_consumers(Config, Server, CTag))
after 10000 ->
exit(timeout)
- end.
+ end,
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
+
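+%% Return the keys of consumer_created ETS entries on the given server whose
+%% consumer tag matches CTag.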
+filter_consumers(Config, Server, CTag) ->
+ CInfo = rabbit_ct_broker_helpers:rpc(Config, Server, ets, tab2list, [consumer_created]),
+ lists:foldl(fun(Tuple, Acc) ->
+ Key = element(1, Tuple),
+ case Key of
+ {_, _, CTag} ->
+ [Key | Acc];
+ _ -> Acc
+ end
+ end, [], CInfo).
consume_and_reject(Config) ->
[Server | _] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
@@ -692,10 +1019,7 @@ consume_and_reject(Config) ->
?assertEqual({'queue.declare_ok', Q, 0, 0},
declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
- #'confirm.select_ok'{} = amqp_channel:call(Ch, #'confirm.select'{}),
- amqp_channel:register_confirm_handler(Ch, self()),
- publish(Ch, Q),
- amqp_channel:wait_for_confirms(Ch, 5),
+ publish_confirm(Ch, Q, [<<"msg">>]),
Ch1 = rabbit_ct_client_helpers:open_channel(Config, Server),
qos(Ch1, 10, false),
@@ -711,7 +1035,8 @@ consume_and_reject(Config) ->
declare(Ch1, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}]))
after 10000 ->
exit(timeout)
- end.
+ end,
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
consume_and_ack(Config) ->
[Server | _] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
@@ -721,10 +1046,7 @@ consume_and_ack(Config) ->
?assertEqual({'queue.declare_ok', Q, 0, 0},
declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
- #'confirm.select_ok'{} = amqp_channel:call(Ch, #'confirm.select'{}),
- amqp_channel:register_confirm_handler(Ch, self()),
- publish(Ch, Q),
- amqp_channel:wait_for_confirms(Ch, 5),
+ publish_confirm(Ch, Q, [<<"msg">>]),
Ch1 = rabbit_ct_client_helpers:open_channel(Config, Server),
qos(Ch1, 10, false),
@@ -736,12 +1058,33 @@ consume_and_ack(Config) ->
%% It will succeed as ack is now a credit operation. We should be
%% able to redeclare a queue (gen_server call op) as the channel
%% should still be open and declare is an idempotent operation
- ?assertEqual({'queue.declare_ok', Q, 0, 0},
+ %%
+ ?assertMatch({'queue.declare_ok', Q, _MsgCount, 0},
declare(Ch1, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
quorum_queue_utils:wait_for_messages(Config, [[Q, <<"1">>, <<"1">>, <<"0">>]])
after 5000 ->
exit(timeout)
- end.
+ end,
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
+
+tracking_status(Config) ->
+ [Server | _] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
+
+ Ch = rabbit_ct_client_helpers:open_channel(Config, Server),
+ Q = ?config(queue_name, Config),
+ ?assertEqual({'queue.declare_ok', Q, 0, 0},
+ declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
+
+ Vhost = ?config(rmq_vhost, Config),
+ ?assertEqual([], rabbit_ct_broker_helpers:rpc(Config, Server, rabbit_stream_queue, ?FUNCTION_NAME, [Vhost, Q])),
+ publish_confirm(Ch, Q, [<<"msg">>]),
+ ?assertMatch([[
+ {type, sequence},
+ {reference, _WriterID},
+ {value, {_Offset = 0, _Seq = 1}}
+ ]],
+ rabbit_ct_broker_helpers:rpc(Config, Server, rabbit_stream_queue, ?FUNCTION_NAME, [Vhost, Q])),
+ rabbit_ct_broker_helpers:rpc(Config, Server, ?MODULE, delete_testcase_queue, [Q]).
consume_from_last(Config) ->
[Server | _] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
@@ -752,22 +1095,22 @@ consume_from_last(Config) ->
?assertEqual({'queue.declare_ok', Q, 0, 0},
declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
- #'confirm.select_ok'{} = amqp_channel:call(Ch, #'confirm.select'{}),
- amqp_channel:register_confirm_handler(Ch, self()),
- [publish(Ch, Q, <<"msg1">>) || _ <- lists:seq(1, 100)],
- amqp_channel:wait_for_confirms(Ch, 5),
+ publish_confirm(Ch, Q, [<<"msg1">> || _ <- lists:seq(1, 100)]),
Ch1 = rabbit_ct_client_helpers:open_channel(Config, Server),
qos(Ch1, 10, false),
- [Info] = rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_amqqueue,
- info_all, [<<"/">>, [committed_offset]]),
+ rabbit_ct_helpers:await_condition(
+ fun () ->
+ Info = find_queue_info(Config, [committed_offset]),
+ %% We'll receive data from the last committed offset, let's check that it is
+ %% not the first offset
+ proplists:get_value(committed_offset, Info) > 0
+ end),
+
+ CommittedOffset = proplists:get_value(committed_offset,
+ find_queue_info(Config, [committed_offset])),
- %% We'll receive data from the last committed offset, let's check that is not the
- %% first offset
- CommittedOffset = proplists:get_value(committed_offset, Info),
- ?assert(CommittedOffset > 0),
-
%% If the offset is not provided, we're subscribing to the tail of the stream
amqp_channel:subscribe(
Ch1, #'basic.consume'{queue = Q,
@@ -780,15 +1123,18 @@ consume_from_last(Config) ->
ok
end,
- %% And receive the messages from the last committed offset to the end of the stream
- receive_batch(Ch1, CommittedOffset, 99),
+ %% Check that the first received offset is greater than or equal to the committed
+ %% offset. It could have moved on since we read it, which occasionally makes this
+ %% check flaky, usually when the detected CommittedOffset is 1
+ receive_batch_min_offset(Ch1, CommittedOffset, 99),
%% Publish a few more
[publish(Ch, Q, <<"msg2">>) || _ <- lists:seq(1, 100)],
amqp_channel:wait_for_confirms(Ch, 5),
%% Yeah! we got them
- receive_batch(Ch1, 100, 199).
+ receive_batch(Ch1, 100, 199),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
consume_from_next(Config) ->
consume_from_next(Config, [{<<"x-stream-offset">>, longstr, <<"next">>}]).
@@ -805,21 +1151,19 @@ consume_from_next(Config, Args) ->
?assertEqual({'queue.declare_ok', Q, 0, 0},
declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
- #'confirm.select_ok'{} = amqp_channel:call(Ch, #'confirm.select'{}),
- amqp_channel:register_confirm_handler(Ch, self()),
- [publish(Ch, Q, <<"msg1">>) || _ <- lists:seq(1, 100)],
- amqp_channel:wait_for_confirms(Ch, 5),
+ publish_confirm(Ch, Q, [<<"msg1">> || _ <- lists:seq(1, 100)]),
Ch1 = rabbit_ct_client_helpers:open_channel(Config, Server),
qos(Ch1, 10, false),
- [Info] = rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_amqqueue,
- info_all, [<<"/">>, [committed_offset]]),
- %% We'll receive data from the last committed offset, let's check that is not the
- %% first offset
- CommittedOffset = proplists:get_value(committed_offset, Info),
- ?assert(CommittedOffset > 0),
+ rabbit_ct_helpers:await_condition(
+ fun () ->
+ Info = find_queue_info(Config, [committed_offset]),
+ %% We'll receive data from the last committed offset, let's check that it is
+ %% not the first offset
+ proplists:get_value(committed_offset, Info) > 0
+ end),
%% If the offset is not provided, we're subscribing to the tail of the stream
amqp_channel:subscribe(
@@ -831,6 +1175,8 @@ consume_from_next(Config, Args) ->
receive
#'basic.consume_ok'{consumer_tag = <<"ctag">>} ->
ok
+ after 10000 ->
+ exit(consume_ok_failed)
end,
%% Publish a few more
@@ -838,7 +1184,35 @@ consume_from_next(Config, Args) ->
amqp_channel:wait_for_confirms(Ch, 5),
%% Yeah! we got them
- receive_batch(Ch1, 100, 199).
+ receive_batch(Ch1, 100, 199),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
+
+consume_from_relative_time_offset(Config) ->
+ [Server | _] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
+
+ Ch = rabbit_ct_client_helpers:open_channel(Config, Server),
+ Q = ?config(queue_name, Config),
+
+ ?assertEqual({'queue.declare_ok', Q, 0, 0},
+ declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
+
+ publish_confirm(Ch, Q, [<<"msg1">> || _ <- lists:seq(1, 100)]),
+
+ Ch1 = rabbit_ct_client_helpers:open_channel(Config, Server),
+ qos(Ch1, 10, false),
+ amqp_channel:subscribe(
+ Ch1, #'basic.consume'{queue = Q,
+ no_ack = false,
+ consumer_tag = <<"ctag">>,
+ arguments = [{<<"x-stream-offset">>, longstr, <<"100s">>}]},
+ self()),
+ receive
+ #'basic.consume_ok'{consumer_tag = <<"ctag">>} ->
+ ok
+ end,
+
+ receive_batch(Ch1, 0, 99),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
consume_from_replica(Config) ->
[Server1, Server2 | _] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
@@ -849,16 +1223,20 @@ consume_from_replica(Config) ->
?assertEqual({'queue.declare_ok', Q, 0, 0},
declare(Ch1, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
- #'confirm.select_ok'{} = amqp_channel:call(Ch1, #'confirm.select'{}),
- amqp_channel:register_confirm_handler(Ch1, self()),
- [publish(Ch1, Q, <<"msg1">>) || _ <- lists:seq(1, 100)],
- amqp_channel:wait_for_confirms(Ch1, 5),
+ publish_confirm(Ch1, Q, [<<"msg1">> || _ <- lists:seq(1, 100)]),
+
+ rabbit_ct_helpers:await_condition(
+ fun () ->
+ Info = find_queue_info(Config, 1, [online]),
+ length(proplists:get_value(online, Info)) == 3
+ end),
Ch2 = rabbit_ct_client_helpers:open_channel(Config, Server2),
qos(Ch2, 10, false),
-
+
subscribe(Ch2, Q, false, 0),
- receive_batch(Ch2, 0, 99).
+ receive_batch(Ch2, 0, 99),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
consume_credit(Config) ->
%% Because osiris provides one chunk on every read and we don't want to buffer
@@ -874,12 +1252,9 @@ consume_credit(Config) ->
?assertEqual({'queue.declare_ok', Q, 0, 0},
declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
- #'confirm.select_ok'{} = amqp_channel:call(Ch, #'confirm.select'{}),
- amqp_channel:register_confirm_handler(Ch, self()),
%% Let's publish a big batch, to ensure we have more than a chunk available
NumMsgs = 100,
- [publish(Ch, Q, <<"msg1">>) || _ <- lists:seq(1, NumMsgs)],
- amqp_channel:wait_for_confirms(Ch, 5),
+ publish_confirm(Ch, Q, [<<"msg1">> || _ <- lists:seq(1, NumMsgs)]),
Ch1 = rabbit_ct_client_helpers:open_channel(Config, Server),
@@ -919,7 +1294,8 @@ consume_credit(Config) ->
ok
after 5000 ->
exit(timeout)
- end.
+ end,
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
consume_credit_out_of_order_ack(Config) ->
%% Like consume_credit but acknowledging the messages out of order.
@@ -934,12 +1310,9 @@ consume_credit_out_of_order_ack(Config) ->
?assertEqual({'queue.declare_ok', Q, 0, 0},
declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
- #'confirm.select_ok'{} = amqp_channel:call(Ch, #'confirm.select'{}),
- amqp_channel:register_confirm_handler(Ch, self()),
- %% Let's publish a big batch, to ensure we have more than a chunk available
NumMsgs = 100,
- [publish(Ch, Q, <<"msg1">>) || _ <- lists:seq(1, NumMsgs)],
- amqp_channel:wait_for_confirms(Ch, 5),
+ %% Let's publish a big batch, to ensure we have more than a chunk available
+ publish_confirm(Ch, Q, [<<"msg1">> || _ <- lists:seq(1, NumMsgs)]),
Ch1 = rabbit_ct_client_helpers:open_channel(Config, Server),
@@ -980,7 +1353,8 @@ consume_credit_out_of_order_ack(Config) ->
ok
after 5000 ->
exit(timeout)
- end.
+ end,
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
consume_credit_multiple_ack(Config) ->
%% Like consume_credit but acknowledging the messages out of order.
@@ -995,12 +1369,9 @@ consume_credit_multiple_ack(Config) ->
?assertEqual({'queue.declare_ok', Q, 0, 0},
declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
- #'confirm.select_ok'{} = amqp_channel:call(Ch, #'confirm.select'{}),
- amqp_channel:register_confirm_handler(Ch, self()),
%% Let's publish a big batch, to ensure we have more than a chunk available
NumMsgs = 100,
- [publish(Ch, Q, <<"msg1">>) || _ <- lists:seq(1, NumMsgs)],
- amqp_channel:wait_for_confirms(Ch, 5),
+ publish_confirm(Ch, Q, [<<"msg1">> || _ <- lists:seq(1, NumMsgs)]),
Ch1 = rabbit_ct_client_helpers:open_channel(Config, Server),
@@ -1022,7 +1393,8 @@ consume_credit_multiple_ack(Config) ->
ok
after 5000 ->
exit(timeout)
- end.
+ end,
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
max_length_bytes(Config) ->
[Server | _] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
@@ -1031,15 +1403,13 @@ max_length_bytes(Config) ->
Q = ?config(queue_name, Config),
?assertEqual({'queue.declare_ok', Q, 0, 0},
declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>},
- {<<"x-max-length-bytes">>, long, 500},
- {<<"x-max-segment-size">>, long, 250}])),
+ {<<"x-max-length-bytes">>, long, 10000},
+ {<<"x-stream-max-segment-size-bytes">>, long, 1000}])),
- Payload = << <<"1">> || _ <- lists:seq(1, 500) >>,
+ Payload = << <<"1">> || _ <- lists:seq(1, 100) >>,
- #'confirm.select_ok'{} = amqp_channel:call(Ch, #'confirm.select'{}),
- amqp_channel:register_confirm_handler(Ch, self()),
- [publish(Ch, Q, Payload) || _ <- lists:seq(1, 100)],
- amqp_channel:wait_for_confirms(Ch, 5),
+ publish_confirm(Ch, Q, [Payload || _ <- lists:seq(1, 500)]), %% 100 bytes/msg * 500 = 50000 bytes
+ ensure_retention_applied(Config, Server),
%% We don't yet have reliable metrics, as the committed offset doesn't work
%% as a counter once we start applying retention policies.
@@ -1048,7 +1418,10 @@ max_length_bytes(Config) ->
qos(Ch1, 100, false),
subscribe(Ch1, Q, false, 0),
- ?assert(length(receive_batch()) < 100).
+ %% There should be ~100 messages left in ~10 segments, but let's just check that
+ %% retention cleared a large chunk of them
+ ?assert(length(receive_batch()) < 200),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
max_age(Config) ->
[Server | _] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
@@ -1058,14 +1431,11 @@ max_age(Config) ->
?assertEqual({'queue.declare_ok', Q, 0, 0},
declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>},
{<<"x-max-age">>, longstr, <<"10s">>},
- {<<"x-max-segment-size">>, long, 250}])),
+ {<<"x-stream-max-segment-size-bytes">>, long, 250}])),
Payload = << <<"1">> || _ <- lists:seq(1, 500) >>,
- #'confirm.select_ok'{} = amqp_channel:call(Ch, #'confirm.select'{}),
- amqp_channel:register_confirm_handler(Ch, self()),
- [publish(Ch, Q, Payload) || _ <- lists:seq(1, 100)],
- amqp_channel:wait_for_confirms(Ch, 5),
+ publish_confirm(Ch, Q, [Payload || _ <- lists:seq(1, 100)]),
timer:sleep(10000),
@@ -1078,7 +1448,51 @@ max_age(Config) ->
Ch1 = rabbit_ct_client_helpers:open_channel(Config, Server),
qos(Ch1, 200, false),
subscribe(Ch1, Q, false, 0),
- ?assertEqual(100, length(receive_batch())).
+ ?assertEqual(100, length(receive_batch())),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
+
+replica_recovery(Config) ->
+ Nodes = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
+ [Server1 | _] = lists:reverse(Nodes),
+
+ Ch1 = rabbit_ct_client_helpers:open_channel(Config, Server1),
+ Q = ?config(queue_name, Config),
+ ?assertEqual({'queue.declare_ok', Q, 0, 0},
+ declare(Ch1, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
+ publish_confirm(Ch1, Q, [<<"msg1">> || _ <- lists:seq(1, 100)]),
+ amqp_channel:close(Ch1),
+
+ CheckReplicaRecovered =
+ fun(DownNode) ->
+ rabbit_ct_helpers:await_condition(
+ fun () ->
+ timer:sleep(1000),
+ ct:pal("Wait for replica to recover..."),
+ try
+ {Conn, Ch2} = rabbit_ct_client_helpers:open_connection_and_channel(Config, DownNode),
+ qos(Ch2, 10, false),
+ subscribe(Ch2, Q, false, 0),
+ receive_batch(Ch2, 0, 99),
+ amqp_connection:close(Conn),
+ true
+ catch _:_ ->
+ false
+ end
+ end, 30000)
+ end,
+
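+    %% For the first node of each node permutation: restart the rabbit
+    %% application, then (in the second pass below) stop and start the whole
+    %% node, verifying each time that its replica can serve the full stream.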
+ [begin
+ rabbit_control_helper:command(stop_app, DownNode),
+ rabbit_control_helper:command(start_app, DownNode),
+ CheckReplicaRecovered(DownNode)
+ end || [DownNode | _] <- permute(Nodes)],
+
+ [begin
+ ok = rabbit_ct_broker_helpers:stop_node(Config, DownNode),
+ ok = rabbit_ct_broker_helpers:start_node(Config, DownNode),
+ CheckReplicaRecovered(DownNode)
+ end || [DownNode | _] <- permute(Nodes)],
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
leader_failover(Config) ->
[Server1, Server2, Server3] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
@@ -1089,26 +1503,95 @@ leader_failover(Config) ->
?assertEqual({'queue.declare_ok', Q, 0, 0},
declare(Ch1, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
- #'confirm.select_ok'{} = amqp_channel:call(Ch1, #'confirm.select'{}),
- amqp_channel:register_confirm_handler(Ch1, self()),
- [publish(Ch1, Q, <<"msg">>) || _ <- lists:seq(1, 100)],
- amqp_channel:wait_for_confirms(Ch1, 5),
-
- check_leader_and_replicas(Config, Q, Server1, [Server2, Server3]),
+ check_leader_and_replicas(Config, [Server1, Server2, Server3]),
+ publish_confirm(Ch1, Q, [<<"msg">> || _ <- lists:seq(1, 100)]),
ok = rabbit_ct_broker_helpers:stop_node(Config, Server1),
timer:sleep(30000),
- [Info] = lists:filter(
- fun(Props) ->
- QName = rabbit_misc:r(<<"/">>, queue, Q),
- lists:member({name, QName}, Props)
- end,
- rabbit_ct_broker_helpers:rpc(Config, 1, rabbit_amqqueue,
- info_all, [<<"/">>, [name, leader, members]])),
- NewLeader = proplists:get_value(leader, Info),
- ?assert(NewLeader =/= Server1),
- ok = rabbit_ct_broker_helpers:start_node(Config, Server1).
+ rabbit_ct_helpers:await_condition(
+ fun () ->
+ Info = find_queue_info(Config, 1, [leader, members]),
+
+ NewLeader = proplists:get_value(leader, Info),
+ NewLeader =/= Server1
+ end),
+ ok = rabbit_ct_broker_helpers:start_node(Config, Server1),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
+
+leader_failover_dedupe(Config) ->
+ %% tests that in-flight messages are automatically handled in the case where
+ %% a leader change happens during publishing
+ PermNodes = permute(
+ rabbit_ct_broker_helpers:get_node_configs(Config, nodename)),
+ %% pick a random node order for this test
+ %% really we should run all permutations
+ Nodes = lists:nth(rand:uniform(length(PermNodes)), PermNodes),
+ ct:pal("~s running with nodes ~w", [?FUNCTION_NAME, Nodes]),
+ [_Server1, DownNode, PubNode] = Nodes,
+ Ch1 = rabbit_ct_client_helpers:open_channel(Config, DownNode),
+ Q = ?config(queue_name, Config),
+
+ ?assertEqual({'queue.declare_ok', Q, 0, 0},
+ declare(Ch1, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
+
+ check_leader_and_replicas(Config, Nodes),
+
+ Ch2 = rabbit_ct_client_helpers:open_channel(Config, PubNode),
+ #'confirm.select_ok'{} = amqp_channel:call(Ch2, #'confirm.select'{}),
+
+ Self = self(),
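+    %% Publisher loop: publishes batches of 100 sequentially numbered messages
+    %% in confirm mode until it receives 'stop', then reports the highest
+    %% confirmed number back to the test process.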
+ F = fun F(N) ->
+ receive
+ go ->
+ [publish(Ch2, Q, integer_to_binary(N + I))
+ || I <- lists:seq(1, 100)],
+ true = amqp_channel:wait_for_confirms(Ch2, 25),
+ F(N + 100);
+ stop ->
+ Self ! {last_msg, N},
+ ct:pal("stop"),
+ ok
+ after 2 ->
+ self() ! go,
+ F(N)
+ end
+ end,
+ Pid = spawn(fun () ->
+ amqp_channel:register_confirm_handler(Ch2, self()),
+ F(0)
+ end),
+ erlang:monitor(process, Pid),
+ Pid ! go,
+ timer:sleep(10),
+ ok = rabbit_ct_broker_helpers:stop_node(Config, DownNode),
+ %% this should cause a new leader to be elected and the channel on node 2
+ %% to have to resend any pending messages to ensure none is lost
+ ct:pal("preinfo", []),
+ rabbit_ct_helpers:await_condition(
+ fun() ->
+ Info = find_queue_info(Config, PubNode, [leader, members]),
+ ct:pal("info ~p", [Info]),
+ NewLeader = proplists:get_value(leader, Info),
+ NewLeader =/= DownNode
+ end),
+ flush(),
+ ?assert(erlang:is_process_alive(Pid)),
+ ct:pal("stopping"),
+ Pid ! stop,
+ ok = rabbit_ct_broker_helpers:start_node(Config, DownNode),
+
+ N = receive
+ {last_msg, X} -> X
+ after 2000 ->
+ exit(last_msg_timeout)
+ end,
+ %% validate that no duplicates were written even though an internal
+ %% resend might have taken place
+ qos(Ch2, 100, false),
+ subscribe(Ch2, Q, false, 0),
+ validate_dedupe(Ch2, 1, N),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
initial_cluster_size_one(Config) ->
[Server1 | _] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
@@ -1119,10 +1602,11 @@ initial_cluster_size_one(Config) ->
?assertEqual({'queue.declare_ok', Q, 0, 0},
declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>},
{<<"x-initial-cluster-size">>, long, 1}])),
- check_leader_and_replicas(Config, Q, Server1, []),
+ check_leader_and_replicas(Config, [Server1]),
?assertMatch(#'queue.delete_ok'{},
- amqp_channel:call(Ch, #'queue.delete'{queue = Q})).
+ amqp_channel:call(Ch, #'queue.delete'{queue = Q})),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
initial_cluster_size_two(Config) ->
[Server1 | _] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
@@ -1134,17 +1618,14 @@ initial_cluster_size_two(Config) ->
declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>},
{<<"x-initial-cluster-size">>, long, 2}])),
- [Info] = lists:filter(
- fun(Props) ->
- lists:member({name, rabbit_misc:r(<<"/">>, queue, Q)}, Props)
- end,
- rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_amqqueue,
- info_all, [<<"/">>, [name, leader, members]])),
+ Info = find_queue_info(Config, [leader, members]),
+
?assertEqual(Server1, proplists:get_value(leader, Info)),
- ?assertEqual(1, length(proplists:get_value(members, Info))),
+ ?assertEqual(2, length(proplists:get_value(members, Info))),
?assertMatch(#'queue.delete_ok'{},
- amqp_channel:call(Ch, #'queue.delete'{queue = Q})).
+ amqp_channel:call(Ch, #'queue.delete'{queue = Q})),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
initial_cluster_size_one_policy(Config) ->
[Server1 | _] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
@@ -1159,12 +1640,28 @@ initial_cluster_size_one_policy(Config) ->
?assertEqual({'queue.declare_ok', Q, 0, 0},
declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>},
{<<"x-initial-cluster-size">>, long, 1}])),
- check_leader_and_replicas(Config, Q, Server1, []),
+ check_leader_and_replicas(Config, [Server1]),
?assertMatch(#'queue.delete_ok'{},
amqp_channel:call(Ch, #'queue.delete'{queue = Q})),
- ok = rabbit_ct_broker_helpers:clear_policy(Config, 0, <<"cluster-size">>).
+ ok = rabbit_ct_broker_helpers:clear_policy(Config, 0, <<"cluster-size">>),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
+
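+%% Repeatedly declares and deletes the same stream from every node (20 rounds
+%% across all servers), exercising back-to-back declare/delete cycles.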
+declare_delete_same_stream(Config) ->
+ Servers = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
+ Q = ?config(queue_name, Config),
+
+ [begin
+ Ch = rabbit_ct_client_helpers:open_channel(Config, S),
+ ?assertEqual({'queue.declare_ok', Q, 0, 0},
+ declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
+ ?assertMatch(#'queue.delete_ok'{},
+ amqp_channel:call(Ch, #'queue.delete'{queue = Q})),
+ rabbit_ct_client_helpers:close_channel(Ch)
+ end || _ <- lists:seq(1, 20), S <- Servers],
+
+ ok.
leader_locator_client_local(Config) ->
[Server1, Server2, Server3] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
@@ -1176,12 +1673,8 @@ leader_locator_client_local(Config) ->
declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>},
{<<"x-queue-leader-locator">>, longstr, <<"client-local">>}])),
- [Info] = lists:filter(
- fun(Props) ->
- lists:member({name, rabbit_misc:r(<<"/">>, queue, Q)}, Props)
- end,
- rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_amqqueue,
- info_all, [<<"/">>, [name, leader]])),
+ Info = find_queue_info(Config, [leader]),
+
?assertEqual(Server1, proplists:get_value(leader, Info)),
?assertMatch(#'queue.delete_ok'{},
@@ -1193,12 +1686,7 @@ leader_locator_client_local(Config) ->
declare(Ch2, Q, [{<<"x-queue-type">>, longstr, <<"stream">>},
{<<"x-queue-leader-locator">>, longstr, <<"client-local">>}])),
- [Info2] = lists:filter(
- fun(Props) ->
- lists:member({name, rabbit_misc:r(<<"/">>, queue, Q)}, Props)
- end,
- rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_amqqueue,
- info_all, [<<"/">>, [name, leader]])),
+ Info2 = find_queue_info(Config, [leader]),
?assertEqual(Server2, proplists:get_value(leader, Info2)),
?assertMatch(#'queue.delete_ok'{},
@@ -1210,16 +1698,13 @@ leader_locator_client_local(Config) ->
declare(Ch3, Q, [{<<"x-queue-type">>, longstr, <<"stream">>},
{<<"x-queue-leader-locator">>, longstr, <<"client-local">>}])),
- [Info3] = lists:filter(
- fun(Props) ->
- lists:member({name, rabbit_misc:r(<<"/">>, queue, Q)}, Props)
- end,
- rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_amqqueue,
- info_all, [<<"/">>, [name, leader]])),
+
+ Info3 = find_queue_info(Config, [leader]),
?assertEqual(Server3, proplists:get_value(leader, Info3)),
?assertMatch(#'queue.delete_ok'{},
- amqp_channel:call(Ch3, #'queue.delete'{queue = Q})).
+ amqp_channel:call(Ch3, #'queue.delete'{queue = Q})),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
leader_locator_random(Config) ->
[Server1 | _] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
@@ -1231,12 +1716,7 @@ leader_locator_random(Config) ->
declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>},
{<<"x-queue-leader-locator">>, longstr, <<"random">>}])),
- [Info] = lists:filter(
- fun(Props) ->
- lists:member({name, rabbit_misc:r(<<"/">>, queue, Q)}, Props)
- end,
- rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_amqqueue,
- info_all, [<<"/">>, [name, leader]])),
+ Info = find_queue_info(Config, [leader]),
Leader = proplists:get_value(leader, Info),
?assertMatch(#'queue.delete_ok'{},
@@ -1251,16 +1731,12 @@ leader_locator_random(Config) ->
declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>},
{<<"x-queue-leader-locator">>, longstr, <<"random">>}])),
- [Info2] = lists:filter(
- fun(Props) ->
- lists:member({name, rabbit_misc:r(<<"/">>, queue, Q)}, Props)
- end,
- rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_amqqueue,
- info_all, [<<"/">>, [name, leader]])),
+ Info2 = find_queue_info(Config, [leader]),
Leader2 = proplists:get_value(leader, Info2),
Leader =/= Leader2
- end, 10).
+ end, 10),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
leader_locator_least_leaders(Config) ->
[Server1, Server2, Server3] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
@@ -1281,15 +1757,11 @@ leader_locator_least_leaders(Config) ->
declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>},
{<<"x-queue-leader-locator">>, longstr, <<"least-leaders">>}])),
- [Info] = lists:filter(
- fun(Props) ->
- lists:member({name, rabbit_misc:r(<<"/">>, queue, Q)}, Props)
- end,
- rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_amqqueue,
- info_all, [<<"/">>, [name, leader]])),
+ Info = find_queue_info(Config, [leader]),
Leader = proplists:get_value(leader, Info),
- ?assert(lists:member(Leader, [Server2, Server3])).
+ ?assert(lists:member(Leader, [Server2, Server3])),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
leader_locator_policy(Config) ->
[Server | _] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
@@ -1304,10 +1776,7 @@ leader_locator_policy(Config) ->
?assertEqual({'queue.declare_ok', Q, 0, 0},
declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
- [Info] = rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_amqqueue,
- info_all, [<<"/">>, [policy, operator_policy,
- effective_policy_definition,
- name, leader]]),
+ Info = find_queue_info(Config, [policy, operator_policy, effective_policy_definition, leader]),
?assertEqual(<<"leader-locator">>, proplists:get_value(policy, Info)),
?assertEqual('', proplists:get_value(operator_policy, Info)),
@@ -1324,17 +1793,41 @@ leader_locator_policy(Config) ->
?assertEqual({'queue.declare_ok', Q, 0, 0},
declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
- [Info2] = lists:filter(
- fun(Props) ->
- lists:member({name, rabbit_misc:r(<<"/">>, queue, Q)}, Props)
- end,
- rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_amqqueue,
- info_all, [<<"/">>, [name, leader]])),
+ Info2 = find_queue_info(Config, [leader]),
Leader2 = proplists:get_value(leader, Info2),
Leader =/= Leader2
end, 10),
- ok = rabbit_ct_broker_helpers:clear_policy(Config, 0, <<"leader-locator">>).
+ ok = rabbit_ct_broker_helpers:clear_policy(Config, 0, <<"leader-locator">>),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
+
+queue_size_on_declare(Config) ->
+ [Server1, Server2, Server3] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
+
+ Ch1 = rabbit_ct_client_helpers:open_channel(Config, Server1),
+ Q = ?config(queue_name, Config),
+ ?assertEqual({'queue.declare_ok', Q, 0, 0},
+ declare(Ch1, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
+ publish_confirm(Ch1, Q, [<<"msg1">> || _ <- lists:seq(1, 100)]),
+
+ %% Metrics update is not synchronous, wait until metrics are updated on the leader node.
+ %% Afterwards, all replicas will get the right size as they have to query the writer node
+ ?awaitMatch({'queue.declare_ok', Q, 100, 0},
+ declare(Ch1, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}]),
+ 60000),
+ amqp_channel:close(Ch1),
+
+ Ch2 = rabbit_ct_client_helpers:open_channel(Config, Server2),
+ ?assertEqual({'queue.declare_ok', Q, 100, 0},
+ declare(Ch2, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
+ amqp_channel:close(Ch2),
+
+ Ch3 = rabbit_ct_client_helpers:open_channel(Config, Server3),
+ ?assertEqual({'queue.declare_ok', Q, 100, 0},
+ declare(Ch3, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
+ amqp_channel:close(Ch3),
+
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
repeat_until(_, 0) ->
ct:fail("Condition did not materialize in the expected amount of attempts");
@@ -1358,15 +1851,14 @@ invalid_policy(Config) ->
Config, 0, <<"ttl">>, <<"invalid_policy.*">>, <<"queues">>,
[{<<"message-ttl">>, 5}]),
- [Info] = rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_amqqueue,
- info_all, [<<"/">>, [policy, operator_policy,
- effective_policy_definition]]),
+ Info = find_queue_info(Config, [policy, operator_policy, effective_policy_definition]),
?assertEqual('', proplists:get_value(policy, Info)),
?assertEqual('', proplists:get_value(operator_policy, Info)),
?assertEqual([], proplists:get_value(effective_policy_definition, Info)),
ok = rabbit_ct_broker_helpers:clear_policy(Config, 0, <<"ha">>),
- ok = rabbit_ct_broker_helpers:clear_policy(Config, 0, <<"ttl">>).
+ ok = rabbit_ct_broker_helpers:clear_policy(Config, 0, <<"ttl">>),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
max_age_policy(Config) ->
[Server | _] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
@@ -1375,21 +1867,74 @@ max_age_policy(Config) ->
Q = ?config(queue_name, Config),
?assertEqual({'queue.declare_ok', Q, 0, 0},
declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
+
ok = rabbit_ct_broker_helpers:set_policy(
Config, 0, <<"age">>, <<"max_age_policy.*">>, <<"queues">>,
[{<<"max-age">>, <<"1Y">>}]),
- [Info] = rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_amqqueue,
- info_all, [<<"/">>, [policy, operator_policy,
- effective_policy_definition]]),
+ Info = find_queue_info(Config, [policy, operator_policy, effective_policy_definition]),
?assertEqual(<<"age">>, proplists:get_value(policy, Info)),
?assertEqual('', proplists:get_value(operator_policy, Info)),
?assertEqual([{<<"max-age">>, <<"1Y">>}],
proplists:get_value(effective_policy_definition, Info)),
- ok = rabbit_ct_broker_helpers:clear_policy(Config, 0, <<"age">>).
-max_segment_size_policy(Config) ->
+ ok = rabbit_ct_broker_helpers:clear_policy(Config, 0, <<"age">>),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
+
+update_retention_policy(Config) ->
+ [Server | _] = Servers = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
+
+ Ch = rabbit_ct_client_helpers:open_channel(Config, Server),
+ Q = ?config(queue_name, Config),
+ ?assertEqual({'queue.declare_ok', Q, 0, 0},
+ declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>},
+ {<<"x-stream-max-segment-size-bytes">>, long, 200}
+ ])),
+ check_leader_and_replicas(Config, Servers),
+
+ Msgs = [<<"msg">> || _ <- lists:seq(1, 10000)], %% 3 bytes * 10000 = 30000 bytes
+ publish_confirm(Ch, Q, Msgs),
+
+ {ok, Q0} = rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_amqqueue, lookup,
+ [rabbit_misc:r(<<"/">>, queue, Q)]),
+ %% Don't use time based retention, it's really hard to get those tests right
+ ok = rabbit_ct_broker_helpers:set_policy(
+ Config, 0, <<"retention">>, <<"update_retention_policy.*">>, <<"queues">>,
+ [{<<"max-length-bytes">>, 10000}]),
+ ensure_retention_applied(Config, Server),
+
+ %% Retention policy should clear approximately 2/3 of the messages, but just to be safe
+ %% let's simply check that it removed half of them
+ quorum_queue_utils:wait_for_max_messages(Config, Q, 5000),
+
+ {ok, Q1} = rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_amqqueue, lookup,
+ [rabbit_misc:r(<<"/">>, queue, Q)]),
+
+ %% If there are changes only in the retention policy, processes should not be restarted
+ ?assertEqual(amqqueue:get_pid(Q0), amqqueue:get_pid(Q1)),
+
+ ok = rabbit_ct_broker_helpers:clear_policy(Config, 0, <<"retention">>),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
+
+queue_info(Config) ->
+ [Server | _] = Servers = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
+
+ Ch = rabbit_ct_client_helpers:open_channel(Config, Server),
+ Q = ?config(queue_name, Config),
+ ?assertEqual({'queue.declare_ok', Q, 0, 0},
+ declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
+
+ rabbit_ct_helpers:await_condition(
+ fun() ->
+ Info = find_queue_info(Config, [leader, online, members]),
+ lists:member(proplists:get_value(leader, Info), Servers) andalso
+ (lists:sort(Servers) == lists:sort(proplists:get_value(members, Info))) andalso
+ (lists:sort(Servers) == lists:sort(proplists:get_value(online, Info)))
+ end),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
+
+max_segment_size_bytes_policy(Config) ->
[Server | _] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
Ch = rabbit_ct_client_helpers:open_channel(Config, Server),
@@ -1397,18 +1942,17 @@ max_segment_size_policy(Config) ->
?assertEqual({'queue.declare_ok', Q, 0, 0},
declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
ok = rabbit_ct_broker_helpers:set_policy(
- Config, 0, <<"segment">>, <<"max_segment_size.*">>, <<"queues">>,
- [{<<"max-segment-size">>, 5000}]),
+ Config, 0, <<"segment">>, <<"max_segment_size_bytes.*">>, <<"queues">>,
+ [{<<"stream-max-segment-size-bytes">>, 5000}]),
- [Info] = rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_amqqueue,
- info_all, [<<"/">>, [policy, operator_policy,
- effective_policy_definition]]),
+ Info = find_queue_info(Config, [policy, operator_policy, effective_policy_definition]),
?assertEqual(<<"segment">>, proplists:get_value(policy, Info)),
?assertEqual('', proplists:get_value(operator_policy, Info)),
- ?assertEqual([{<<"max-segment-size">>, 5000}],
+ ?assertEqual([{<<"stream-max-segment-size-bytes">>, 5000}],
proplists:get_value(effective_policy_definition, Info)),
- ok = rabbit_ct_broker_helpers:clear_policy(Config, 0, <<"segment">>).
+ ok = rabbit_ct_broker_helpers:clear_policy(Config, 0, <<"segment">>),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
purge(Config) ->
Server = rabbit_ct_broker_helpers:get_node_config(Config, 0, nodename),
@@ -1419,7 +1963,8 @@ purge(Config) ->
declare(Ch, Q, [{<<"x-queue-type">>, longstr, <<"stream">>}])),
?assertExit({{shutdown, {connection_closing, {server_initiated_close, 540, _}}}, _},
- amqp_channel:call(Ch, #'queue.purge'{queue = Q})).
+ amqp_channel:call(Ch, #'queue.purge'{queue = Q})),
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, delete_testcase_queue, [Q]).
%%----------------------------------------------------------------------------
@@ -1427,6 +1972,15 @@ delete_queues() ->
[{ok, _} = rabbit_amqqueue:delete(Q, false, false, <<"dummy">>)
|| Q <- rabbit_amqqueue:list()].
+delete_testcase_queue(Name) ->
+ QName = rabbit_misc:r(<<"/">>, queue, Name),
+ case rabbit_amqqueue:lookup(QName) of
+ {ok, Q} ->
+ {ok, _} = rabbit_amqqueue:delete(Q, false, false, <<"dummy">>);
+ _ ->
+ ok
+ end.
+
declare(Ch, Q) ->
declare(Ch, Q, []).
@@ -1444,17 +1998,17 @@ get_queue_type(Server, Q0) ->
{ok, Q1} = rpc:call(Server, rabbit_amqqueue, lookup, [QNameRes]),
amqqueue:get_type(Q1).
-check_leader_and_replicas(Config, Name, Leader, Replicas0) ->
- QNameRes = rabbit_misc:r(<<"/">>, queue, Name),
- [Info] = lists:filter(
- fun(Props) ->
- lists:member({name, QNameRes}, Props)
- end,
- rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_amqqueue,
- info_all, [<<"/">>, [name, leader, members]])),
- ?assertEqual(Leader, proplists:get_value(leader, Info)),
- Replicas = lists:sort(Replicas0),
- ?assertEqual(Replicas, lists:sort(proplists:get_value(members, Info))).
+check_leader_and_replicas(Config, Members) ->
+ check_leader_and_replicas(Config, Members, online).
+
+check_leader_and_replicas(Config, Members, Tag) ->
+ rabbit_ct_helpers:await_condition(
+ fun() ->
+ Info = find_queue_info(Config, [leader, Tag]),
+ ct:pal("~s members ~w ~p", [?FUNCTION_NAME, Members, Info]),
+ lists:member(proplists:get_value(leader, Info), Members)
+ andalso (lists:sort(Members) == lists:sort(proplists:get_value(Tag, Info)))
+ end, 60000).
publish(Ch, Queue) ->
publish(Ch, Queue, <<"msg">>).
@@ -1465,14 +2019,23 @@ publish(Ch, Queue, Msg) ->
#amqp_msg{props = #'P_basic'{delivery_mode = 2},
payload = Msg}).
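+%% Enables publisher confirms on Ch, publishes each binary in Msgs to Q and
+%% waits (with a short timeout) for the broker to confirm the whole batch.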
+publish_confirm(Ch, Q, Msgs) ->
+ #'confirm.select_ok'{} = amqp_channel:call(Ch, #'confirm.select'{}),
+ amqp_channel:register_confirm_handler(Ch, self()),
+ [publish(Ch, Q, Msg) || Msg <- Msgs],
+ amqp_channel:wait_for_confirms(Ch, 5).
+
subscribe(Ch, Queue, NoAck, Offset) ->
+ subscribe(Ch, Queue, NoAck, Offset, <<"ctag">>).
+
+subscribe(Ch, Queue, NoAck, Offset, CTag) ->
amqp_channel:subscribe(Ch, #'basic.consume'{queue = Queue,
no_ack = NoAck,
- consumer_tag = <<"ctag">>,
+ consumer_tag = CTag,
arguments = [{<<"x-stream-offset">>, long, Offset}]},
self()),
receive
- #'basic.consume_ok'{consumer_tag = <<"ctag">>} ->
+ #'basic.consume_ok'{consumer_tag = CTag} ->
ok
end.
@@ -1481,6 +2044,52 @@ qos(Ch, Prefetch, Global) ->
amqp_channel:call(Ch, #'basic.qos'{global = Global,
prefetch_count = Prefetch})).
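+%% Consumes and acks messages, asserting that the integer payloads form the
+%% exact sequence N..M, i.e. nothing was lost or duplicated by an internal resend.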
+validate_dedupe(Ch, N, N) ->
+ receive
+ {#'basic.deliver'{delivery_tag = DeliveryTag},
+ #amqp_msg{payload = B}} ->
+ I = binary_to_integer(B),
+ ?assertEqual(N, I),
+ ok = amqp_channel:cast(Ch, #'basic.ack'{delivery_tag = DeliveryTag,
+ multiple = false})
+ after 60000 ->
+ flush(),
+ exit({missing_record, N})
+ end;
+validate_dedupe(Ch, N, M) ->
+ receive
+ {#'basic.deliver'{delivery_tag = DeliveryTag},
+ #amqp_msg{payload = B}} ->
+ I = binary_to_integer(B),
+ ?assertEqual(N, I),
+ ok = amqp_channel:cast(Ch, #'basic.ack'{delivery_tag = DeliveryTag,
+ multiple = false}),
+ validate_dedupe(Ch, N + 1, M)
+ after 60000 ->
+ flush(),
+ exit({missing_record, N})
+ end.
+
+receive_batch_min_offset(Ch, N, M) ->
+ %% We are expecting values from the last committed offset - which might have increased
+ %% since we queried it. Accept as first offset anything greater than the last known
+ %% committed offset
+ receive
+ {_,
+ #amqp_msg{props = #'P_basic'{headers = [{<<"x-stream-offset">>, long, S}]}}}
+ when S < N ->
+ exit({unexpected_offset, S});
+ {#'basic.deliver'{delivery_tag = DeliveryTag},
+ #amqp_msg{props = #'P_basic'{headers = [{<<"x-stream-offset">>, long, S}]}}} ->
+ ct:pal("Committed offset is ~p but as first offset got ~p", [N, S]),
+ ok = amqp_channel:cast(Ch, #'basic.ack'{delivery_tag = DeliveryTag,
+ multiple = false}),
+ receive_batch(Ch, S + 1, M)
+ after 60000 ->
+ flush(),
+ exit({missing_offset, N})
+ end.
+
receive_batch(Ch, N, N) ->
receive
{#'basic.deliver'{delivery_tag = DeliveryTag},
@@ -1488,7 +2097,8 @@ receive_batch(Ch, N, N) ->
ok = amqp_channel:cast(Ch, #'basic.ack'{delivery_tag = DeliveryTag,
multiple = false})
after 60000 ->
- exit({missing_offset, N})
+ flush(),
+ exit({missing_offset, N})
end;
receive_batch(Ch, N, M) ->
receive
@@ -1502,7 +2112,8 @@ receive_batch(Ch, N, M) ->
multiple = false}),
receive_batch(Ch, N + 1, M)
after 60000 ->
- exit({missing_offset, N})
+ flush(),
+ exit({missing_offset, N})
end.
receive_batch() ->
@@ -1525,3 +2136,22 @@ run_proper(Fun, Args, NumTests) ->
{on_output, fun(".", _) -> ok; % don't print the '.'s on new lines
(F, A) -> ct:pal(?LOW_IMPORTANCE, F, A)
end}])).
+
+flush() ->
+ receive
+ Any ->
+ ct:pal("flush ~p", [Any]),
+ flush()
+ after 0 ->
+ ok
+ end.
+
+permute([]) -> [[]];
+permute(L) -> [[H|T] || H <- L, T <- permute(L--[H])].
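+%% e.g. permute([a,b,c]) -> [[a,b,c],[a,c,b],[b,a,c],[b,c,a],[c,a,b],[c,b,a]]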
+
+ensure_retention_applied(Config, Server) ->
+ %% Retention is asynchronous, so committing all messages doesn't mean old segments have been
+ %% cleared up.
+ %% Let's force a call on the retention gen_server, any pending retention would have been
+ %% processed when this call returns.
+ rabbit_ct_broker_helpers:rpc(Config, Server, gen_server, call, [osiris_retention, test]).
diff --git a/deps/rabbit/test/rabbitmq-env.bats b/deps/rabbit/test/rabbitmq-env.bats
index 4a016960c5..069abb5fdc 100644
--- a/deps/rabbit/test/rabbitmq-env.bats
+++ b/deps/rabbit/test/rabbitmq-env.bats
@@ -126,3 +126,34 @@ setup() {
echo "expected RABBITMQ_SERVER_ERL_ARGS to contain ' +t 4000000 ', but got: $RABBITMQ_SERVER_ERL_ARGS"
[[ $RABBITMQ_SERVER_ERL_ARGS == *" +t 4000000 "* ]]
}
+
+@test "default Erlang scheduler busy wait threshold" {
+ source "$RABBITMQ_SCRIPTS_DIR/rabbitmq-env"
+ echo $RABBITMQ_SCHEDULER_BUSY_WAIT_THRESHOLD
+
+ echo "expected RABBITMQ_SERVER_ERL_ARGS to contain ' +sbwt none ', but got: $RABBITMQ_SERVER_ERL_ARGS"
+ [[ $RABBITMQ_SERVER_ERL_ARGS == *" +sbwt none "* ]]
+}
+
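+# SCHEDULER_BUSY_WAIT_THRESHOLD in the conf file and the
+# RABBITMQ_SCHEDULER_BUSY_WAIT_THRESHOLD environment variable both end up as
+# the Erlang VM's +sbwt setting; the tests below cover each path and their precedence.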
+@test "can configure Erlang scheduler busy wait threshold via conf file" {
+ echo 'SCHEDULER_BUSY_WAIT_THRESHOLD=medium' > "$RABBITMQ_CONF_ENV_FILE"
+ source "$RABBITMQ_SCRIPTS_DIR/rabbitmq-env"
+
+ echo "expected RABBITMQ_SERVER_ERL_ARGS to contain ' +sbwt medium ', but got: $RABBITMQ_SERVER_ERL_ARGS"
+ [[ $RABBITMQ_SERVER_ERL_ARGS == *" +sbwt medium "* ]]
+}
+
+@test "can configure Erlang scheduler busy wait threshold via env" {
+ RABBITMQ_SCHEDULER_BUSY_WAIT_THRESHOLD=long source "$RABBITMQ_SCRIPTS_DIR/rabbitmq-env"
+
+ echo "expected RABBITMQ_SERVER_ERL_ARGS to contain ' +sbwt long ', but got: $RABBITMQ_SERVER_ERL_ARGS"
+ [[ $RABBITMQ_SERVER_ERL_ARGS == *" +sbwt long "* ]]
+}
+
+@test "Erlang scheduler busy wait threshold env takes precedence over conf file" {
+ echo 'SCHEDULER_BUSY_WAIT_THRESHOLD=medium' > "$RABBITMQ_CONF_ENV_FILE"
+ RABBITMQ_SCHEDULER_BUSY_WAIT_THRESHOLD=short source "$RABBITMQ_SCRIPTS_DIR/rabbitmq-env"
+
+ echo "expected RABBITMQ_SERVER_ERL_ARGS to contain ' +sbwt short ', but got: $RABBITMQ_SERVER_ERL_ARGS"
+ [[ $RABBITMQ_SERVER_ERL_ARGS == *" +sbwt short "* ]]
+}
diff --git a/deps/rabbit/test/rabbitmq_queues_cli_integration_SUITE.erl b/deps/rabbit/test/rabbitmq_queues_cli_integration_SUITE.erl
index bf5e9ee79e..b41c179cde 100644
--- a/deps/rabbit/test/rabbitmq_queues_cli_integration_SUITE.erl
+++ b/deps/rabbit/test/rabbitmq_queues_cli_integration_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2017-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2017-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbitmq_queues_cli_integration_SUITE).
@@ -27,7 +27,7 @@ groups() ->
].
init_per_suite(Config) ->
- case os:getenv("SECONDARY_UMBRELLA") of
+ case rabbit_ct_helpers:is_mixed_versions() of
false ->
rabbit_ct_helpers:log_environment(),
rabbit_ct_helpers:run_setup_steps(Config);
@@ -70,15 +70,25 @@ shrink(Config) ->
NodeConfig = rabbit_ct_broker_helpers:get_node_config(Config, 2),
Nodename2 = ?config(nodename, NodeConfig),
Ch = rabbit_ct_client_helpers:open_channel(Config, Nodename2),
+ #'confirm.select_ok'{} = amqp_channel:call(Ch, #'confirm.select'{}),
%% declare a quorum queue
QName = "shrink1",
#'queue.declare_ok'{} = declare_qq(Ch, QName),
+ publish_confirm(Ch, QName),
{ok, Out1} = rabbitmq_queues(Config, 0, ["shrink", Nodename2]),
?assertMatch(#{{"/", "shrink1"} := {2, ok}}, parse_result(Out1)),
+ %% removing a node can trigger a leader election, give this QQ some time
+ %% to do it
+ timer:sleep(1500),
Nodename1 = rabbit_ct_broker_helpers:get_node_config(Config, 1, nodename),
+ publish_confirm(Ch, QName),
{ok, Out2} = rabbitmq_queues(Config, 0, ["shrink", Nodename1]),
?assertMatch(#{{"/", "shrink1"} := {1, ok}}, parse_result(Out2)),
+ %% removing a node can trigger a leader election, give this QQ some time
+ %% to do it
+ timer:sleep(1500),
Nodename0 = rabbit_ct_broker_helpers:get_node_config(Config, 0, nodename),
+ publish_confirm(Ch, QName),
{ok, Out3} = rabbitmq_queues(Config, 0, ["shrink", Nodename0]),
?assertMatch(#{{"/", "shrink1"} := {1, error}}, parse_result(Out3)),
ok.
@@ -87,17 +97,21 @@ grow(Config) ->
NodeConfig = rabbit_ct_broker_helpers:get_node_config(Config, 2),
Nodename2 = ?config(nodename, NodeConfig),
Ch = rabbit_ct_client_helpers:open_channel(Config, Nodename2),
+ #'confirm.select_ok'{} = amqp_channel:call(Ch, #'confirm.select'{}),
%% declare a quorum queue
QName = "grow1",
Args = [{<<"x-quorum-initial-group-size">>, long, 1}],
#'queue.declare_ok'{} = declare_qq(Ch, QName, Args),
Nodename0 = rabbit_ct_broker_helpers:get_node_config(Config, 0, nodename),
+ publish_confirm(Ch, QName),
{ok, Out1} = rabbitmq_queues(Config, 0, ["grow", Nodename0, "all"]),
?assertMatch(#{{"/", "grow1"} := {2, ok}}, parse_result(Out1)),
+ timer:sleep(500),
Nodename1 = rabbit_ct_broker_helpers:get_node_config(Config, 1, nodename),
+ publish_confirm(Ch, QName),
{ok, Out2} = rabbitmq_queues(Config, 0, ["grow", Nodename1, "all"]),
?assertMatch(#{{"/", "grow1"} := {3, ok}}, parse_result(Out2)),
-
+ publish_confirm(Ch, QName),
{ok, Out3} = rabbitmq_queues(Config, 0, ["grow", Nodename0, "all"]),
?assertNotMatch(#{{"/", "grow1"} := _}, parse_result(Out3)),
ok.
@@ -106,11 +120,13 @@ grow_invalid_node_filtered(Config) ->
NodeConfig = rabbit_ct_broker_helpers:get_node_config(Config, 2),
Nodename2 = ?config(nodename, NodeConfig),
Ch = rabbit_ct_client_helpers:open_channel(Config, Nodename2),
+ #'confirm.select_ok'{} = amqp_channel:call(Ch, #'confirm.select'{}),
%% declare a quorum queue
QName = "grow-err",
Args = [{<<"x-quorum-initial-group-size">>, long, 1}],
#'queue.declare_ok'{} = declare_qq(Ch, QName, Args),
DummyNode = not_really_a_node@nothing,
+ publish_confirm(Ch, QName),
{ok, Out1} = rabbitmq_queues(Config, 0, ["grow", DummyNode, "all"]),
?assertNotMatch(#{{"/", "grow-err"} := _}, parse_result(Out1)),
ok.
@@ -137,3 +153,21 @@ declare_qq(Ch, Q) ->
rabbitmq_queues(Config, N, Args) ->
rabbit_ct_broker_helpers:rabbitmq_queues(Config, N, ["--silent" | Args]).
+
+publish_confirm(Ch, QName) ->
+ ok = amqp_channel:cast(Ch,
+ #'basic.publish'{routing_key = list_to_binary(QName)},
+ #amqp_msg{props = #'P_basic'{delivery_mode = 2},
+ payload = <<"msg">>}),
+ amqp_channel:register_confirm_handler(Ch, self()),
+ ct:pal("waiting for confirms from ~s", [QName]),
+ receive
+ #'basic.ack'{} ->
+ ct:pal("CONFIRMED! ~s", [QName]),
+ ok;
+ #'basic.nack'{} ->
+ ct:pal("NOT CONFIRMED! ~s", [QName]),
+ fail
+ after 10000 ->
+ exit(confirm_timeout)
+ end.
diff --git a/deps/rabbit/test/rabbitmqctl_integration_SUITE.erl b/deps/rabbit/test/rabbitmqctl_integration_SUITE.erl
index 9c689f5667..4e7c7ce3ea 100644
--- a/deps/rabbit/test/rabbitmqctl_integration_SUITE.erl
+++ b/deps/rabbit/test/rabbitmqctl_integration_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbitmqctl_integration_SUITE).
diff --git a/deps/rabbit/test/rabbitmqctl_shutdown_SUITE.erl b/deps/rabbit/test/rabbitmqctl_shutdown_SUITE.erl
index 6365f91d47..bace3291b4 100644
--- a/deps/rabbit/test/rabbitmqctl_shutdown_SUITE.erl
+++ b/deps/rabbit/test/rabbitmqctl_shutdown_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2017-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2017-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbitmqctl_shutdown_SUITE).
diff --git a/deps/rabbit/test/signal_handling_SUITE.erl b/deps/rabbit/test/signal_handling_SUITE.erl
index 551f456039..8cc25fa344 100644
--- a/deps/rabbit/test/signal_handling_SUITE.erl
+++ b/deps/rabbit/test/signal_handling_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(signal_handling_SUITE).
diff --git a/deps/rabbit/test/simple_ha_SUITE.erl b/deps/rabbit/test/simple_ha_SUITE.erl
index 8b2c1d6ebb..4529a801e9 100644
--- a/deps/rabbit/test/simple_ha_SUITE.erl
+++ b/deps/rabbit/test/simple_ha_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(simple_ha_SUITE).
@@ -30,8 +30,7 @@ groups() ->
{cluster_size_2, [], [
rapid_redeclare,
declare_synchrony,
- clean_up_exclusive_queues,
- clean_up_and_redeclare_exclusive_queues_on_other_nodes
+ clean_up_exclusive_queues
]},
{cluster_size_3, [], [
consume_survives_stop,
@@ -150,43 +149,6 @@ clean_up_exclusive_queues(Config) ->
[[],[]] = rabbit_ct_broker_helpers:rpc_all(Config, rabbit_amqqueue, list, []),
ok.
-clean_up_and_redeclare_exclusive_queues_on_other_nodes(Config) ->
- QueueCount = 10,
- QueueNames = lists:map(fun(N) ->
- NBin = erlang:integer_to_binary(N),
- <<"exclusive-q-", NBin/binary>>
- end, lists:seq(1, QueueCount)),
- [A, B] = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
- Conn = rabbit_ct_client_helpers:open_unmanaged_connection(Config, A),
- {ok, Ch} = amqp_connection:open_channel(Conn),
-
- LocationMinMasters = [
- {<<"x-queue-master-locator">>, longstr, <<"min-masters">>}
- ],
- lists:foreach(fun(QueueName) ->
- declare_exclusive(Ch, QueueName, LocationMinMasters),
- subscribe(Ch, QueueName)
- end, QueueNames),
-
- ok = rabbit_ct_broker_helpers:kill_node(Config, B),
-
- Cancels = receive_cancels([]),
- ?assert(length(Cancels) > 0),
-
- RemaniningQueues = rabbit_ct_broker_helpers:rpc(Config, A, rabbit_amqqueue, list, []),
-
- ?assertEqual(length(RemaniningQueues), QueueCount - length(Cancels)),
-
- lists:foreach(fun(QueueName) ->
- declare_exclusive(Ch, QueueName, LocationMinMasters),
- true = rabbit_ct_client_helpers:publish(Ch, QueueName, 1),
- subscribe(Ch, QueueName)
- end, QueueNames),
- Messages = receive_messages([]),
- ?assertEqual(10, length(Messages)),
- ok = rabbit_ct_client_helpers:close_connection(Conn).
-
-
consume_survives_stop(Cf) -> consume_survives(Cf, fun stop/2, true).
consume_survives_sigkill(Cf) -> consume_survives(Cf, fun sigkill/2, true).
consume_survives_policy(Cf) -> consume_survives(Cf, fun policy/2, true).
diff --git a/deps/rabbit/test/single_active_consumer_SUITE.erl b/deps/rabbit/test/single_active_consumer_SUITE.erl
index 59f2b6e83d..2aabc00a8a 100644
--- a/deps/rabbit/test/single_active_consumer_SUITE.erl
+++ b/deps/rabbit/test/single_active_consumer_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2018-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2018-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(single_active_consumer_SUITE).
diff --git a/deps/rabbit/test/sync_detection_SUITE.erl b/deps/rabbit/test/sync_detection_SUITE.erl
index 55a86b7b3d..4ca06fa7ec 100644
--- a/deps/rabbit/test/sync_detection_SUITE.erl
+++ b/deps/rabbit/test/sync_detection_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(sync_detection_SUITE).
diff --git a/deps/rabbit/test/term_to_binary_compat_prop_SUITE.erl b/deps/rabbit/test/term_to_binary_compat_prop_SUITE.erl
index 2f56f56189..2937aed722 100644
--- a/deps/rabbit/test/term_to_binary_compat_prop_SUITE.erl
+++ b/deps/rabbit/test/term_to_binary_compat_prop_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2017-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2017-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
@@ -10,7 +10,7 @@
-compile(export_all).
--include("rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
-include_lib("common_test/include/ct.hrl").
-include_lib("proper/include/proper.hrl").
diff --git a/deps/rabbit/test/topic_permission_SUITE.erl b/deps/rabbit/test/topic_permission_SUITE.erl
index 2f123fd7f6..f6c9974ee3 100644
--- a/deps/rabbit/test/topic_permission_SUITE.erl
+++ b/deps/rabbit/test/topic_permission_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(topic_permission_SUITE).
diff --git a/deps/rabbit/test/unit_access_control_SUITE.erl b/deps/rabbit/test/unit_access_control_SUITE.erl
index af8f481083..23d32fc987 100644
--- a/deps/rabbit/test/unit_access_control_SUITE.erl
+++ b/deps/rabbit/test/unit_access_control_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(unit_access_control_SUITE).
diff --git a/deps/rabbit/test/unit_access_control_authn_authz_context_propagation_SUITE.erl b/deps/rabbit/test/unit_access_control_authn_authz_context_propagation_SUITE.erl
index 9cb1ad7267..758fe23695 100644
--- a/deps/rabbit/test/unit_access_control_authn_authz_context_propagation_SUITE.erl
+++ b/deps/rabbit/test/unit_access_control_authn_authz_context_propagation_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2019-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2019-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(unit_access_control_authn_authz_context_propagation_SUITE).
diff --git a/deps/rabbit/test/unit_access_control_credential_validation_SUITE.erl b/deps/rabbit/test/unit_access_control_credential_validation_SUITE.erl
index 6a6a07836c..a243063865 100644
--- a/deps/rabbit/test/unit_access_control_credential_validation_SUITE.erl
+++ b/deps/rabbit/test/unit_access_control_credential_validation_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(unit_access_control_credential_validation_SUITE).
diff --git a/deps/rabbit/test/unit_amqp091_content_framing_SUITE.erl b/deps/rabbit/test/unit_amqp091_content_framing_SUITE.erl
index d483dbdd06..6eb13bb027 100644
--- a/deps/rabbit/test/unit_amqp091_content_framing_SUITE.erl
+++ b/deps/rabbit/test/unit_amqp091_content_framing_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(unit_amqp091_content_framing_SUITE).
diff --git a/deps/rabbit/test/unit_amqp091_server_properties_SUITE.erl b/deps/rabbit/test/unit_amqp091_server_properties_SUITE.erl
index 036fb8ce28..e2021d1669 100644
--- a/deps/rabbit/test/unit_amqp091_server_properties_SUITE.erl
+++ b/deps/rabbit/test/unit_amqp091_server_properties_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(unit_amqp091_server_properties_SUITE).
diff --git a/deps/rabbit/test/unit_app_management_SUITE.erl b/deps/rabbit/test/unit_app_management_SUITE.erl
index e08f151d57..bd1969f3c6 100644
--- a/deps/rabbit/test/unit_app_management_SUITE.erl
+++ b/deps/rabbit/test/unit_app_management_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(unit_app_management_SUITE).
diff --git a/deps/rabbit/test/unit_classic_mirrored_queue_sync_throttling_SUITE.erl b/deps/rabbit/test/unit_classic_mirrored_queue_sync_throttling_SUITE.erl
new file mode 100644
index 0000000000..a780dcfb89
--- /dev/null
+++ b/deps/rabbit/test/unit_classic_mirrored_queue_sync_throttling_SUITE.erl
@@ -0,0 +1,88 @@
+-module(unit_classic_mirrored_queue_sync_throttling_SUITE).
+
+-include_lib("rabbit_common/include/rabbit.hrl").
+-include_lib("rabbit_common/include/rabbit_framing.hrl").
+
+-include_lib("common_test/include/ct.hrl").
+-include_lib("eunit/include/eunit.hrl").
+
+-compile(export_all).
+
+all() ->
+ [
+ maybe_master_batch_send,
+ get_time_diff,
+ append_to_acc
+ ].
+
+maybe_master_batch_send(_Config) ->
+ SyncBatchSize = 4096,
+ SyncThroughput = 2000,
+ QueueLen = 10000,
+ ?assertEqual(
+ true, %% The batch has reached the last message in the queue
+ rabbit_mirror_queue_sync:maybe_master_batch_send({[], 0, {0, 0, SyncThroughput}, {QueueLen, QueueLen}, 0}, SyncBatchSize)),
+ ?assertEqual(
+ true, %% # messages batched is less than batch size; and total message size has reached the batch size
+ rabbit_mirror_queue_sync:maybe_master_batch_send({[], 0, {0, 0, SyncThroughput}, {SyncBatchSize, QueueLen}, 0}, SyncBatchSize)),
+ TotalBytes0 = SyncThroughput + 1,
+ Curr0 = 1,
+ ?assertEqual(
+ true, %% Total batch size exceed max sync throughput
+ rabbit_mirror_queue_sync:maybe_master_batch_send({[], 0, {TotalBytes0, 0, SyncThroughput}, {Curr0, QueueLen}, 0}, SyncBatchSize)),
+ TotalBytes1 = 1,
+ Curr1 = 1,
+ ?assertEqual(
+ false, %% # messages batched is less than batch size; and total bytes is less than sync throughput
+ rabbit_mirror_queue_sync:maybe_master_batch_send({[], 0, {TotalBytes1, 0, SyncThroughput}, {Curr1, QueueLen}, 0}, SyncBatchSize)),
+ ok.
+
+get_time_diff(_Config) ->
+ TotalBytes0 = 100,
+ Interval0 = 1000, %% ms
+ MaxSyncThroughput0 = 100, %% bytes/s
+ ?assertEqual(%% Used throughput = 100 / 1000 * 1000 = 100 bytes/s; matched max throughput
+ 0, %% => no need to pause queue sync
+ rabbit_mirror_queue_sync:get_time_diff(TotalBytes0, Interval0, MaxSyncThroughput0)),
+
+ TotalBytes1 = 100,
+ Interval1 = 1000, %% ms
+ MaxSyncThroughput1 = 200, %% bytes/s
+ ?assertEqual( %% Used throughput = 100 / 1000 * 1000 = 100 bytes/s; less than max throughput
+ 0, %% => no need to pause queue sync
+ rabbit_mirror_queue_sync:get_time_diff(TotalBytes1, Interval1, MaxSyncThroughput1)),
+
+ TotalBytes2 = 100,
+ Interval2 = 1000, %% ms
+ MaxSyncThroughput2 = 50, %% bytes/s
+ ?assertEqual( %% Used throughput = 100 / 1000 * 1000 = 100 bytes/s; greater than max throughput
+ 1000, %% => pause queue sync for 1000 ms
+ rabbit_mirror_queue_sync:get_time_diff(TotalBytes2, Interval2, MaxSyncThroughput2)),
+ ok.
+
+append_to_acc(_Config) ->
+ Msg = #basic_message{
+ id = 1,
+ content = #content{
+ properties = #'P_basic'{
+ priority = 2
+ },
+ payload_fragments_rev = [[<<"1234567890">>]] %% 10 bytes
+ },
+ is_persistent = true
+ },
+ BQDepth = 10,
+ SyncThroughput_0 = 0,
+ FoldAcc1 = {[], 0, {0, erlang:monotonic_time(), SyncThroughput_0}, {0, BQDepth}, erlang:monotonic_time()},
+ {_, _, {TotalBytes1, _, _}, _, _} = rabbit_mirror_queue_sync:append_to_acc(Msg, {}, false, FoldAcc1),
+ ?assertEqual(0, TotalBytes1), %% TotalBytes is not accumulated for the pending batch when SyncThroughput is 0.
+
+ SyncThroughput = 100,
+ FoldAcc2 = {[], 0, {0, erlang:monotonic_time(), SyncThroughput}, {0, BQDepth}, erlang:monotonic_time()},
+ {_, _, {TotalBytes2, _, _}, _, _} = rabbit_mirror_queue_sync:append_to_acc(Msg, {}, false, FoldAcc2),
+ ?assertEqual(10, TotalBytes2), %% Message size is added to existing TotalBytes
+
+ FoldAcc3 = {[], 0, {TotalBytes2, erlang:monotonic_time(), SyncThroughput}, {0, BQDepth}, erlang:monotonic_time()},
+ {_, _, {TotalBytes3, _, _}, _, _} = rabbit_mirror_queue_sync:append_to_acc(Msg, {}, false, FoldAcc3),
+ ?assertEqual(TotalBytes2 + 10, TotalBytes3), %% Message size is added to existing TotalBytes
+    ok.
\ No newline at end of file
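The get_time_diff cases above all follow from one relationship: the sync process only pauses when the bytes sent during the last interval arrived faster than the configured throughput allows. A minimal sketch of that arithmetic, written as a hypothetical standalone helper (the real rabbit_mirror_queue_sync:get_time_diff/3 may differ in rounding and guards):

    %% pause_ms/3 -- illustrative only.
    %% TotalBytes: bytes synced since the interval started
    %% IntervalMs: elapsed wall-clock time in milliseconds
    %% MaxBytes:   configured max sync throughput, in bytes per second
    pause_ms(TotalBytes, IntervalMs, MaxBytes) when MaxBytes > 0 ->
        %% How long the transfer should have taken at the allowed rate.
        ExpectedMs = TotalBytes * 1000 div MaxBytes,
        %% Pause only if the transfer finished ahead of that schedule.
        max(0, ExpectedMs - IntervalMs).

With the values from the third case (100 bytes in 1000 ms against a 50 bytes/s cap), pause_ms(100, 1000, 50) returns 1000, matching the assertion; the first two cases yield 0.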
diff --git a/deps/rabbit/test/unit_classic_mirrored_queue_throughput_SUITE.erl b/deps/rabbit/test/unit_classic_mirrored_queue_throughput_SUITE.erl
new file mode 100644
index 0000000000..7e10b5f5d9
--- /dev/null
+++ b/deps/rabbit/test/unit_classic_mirrored_queue_throughput_SUITE.erl
@@ -0,0 +1,29 @@
+-module(unit_classic_mirrored_queue_throughput_SUITE).
+
+-include_lib("common_test/include/ct.hrl").
+-include_lib("eunit/include/eunit.hrl").
+
+-compile(export_all).
+
+all() ->
+ [
+ default_max_sync_throughput
+ ].
+
+default_max_sync_throughput(_Config) ->
+ ?assertEqual(
+ 0,
+ rabbit_mirror_queue_misc:default_max_sync_throughput()),
+ application:set_env(rabbit, mirroring_sync_max_throughput, 100),
+ ?assertEqual(
+ 100,
+ rabbit_mirror_queue_misc:default_max_sync_throughput()),
+ application:set_env(rabbit, mirroring_sync_max_throughput, "100MiB"),
+ ?assertEqual(
+ 100*1024*1024,
+ rabbit_mirror_queue_misc:default_max_sync_throughput()),
+ application:set_env(rabbit, mirroring_sync_max_throughput, "100MB"),
+ ?assertEqual(
+ 100000000,
+ rabbit_mirror_queue_misc:default_max_sync_throughput()),
+ ok.
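The last two assertions pin down the unit semantics: a binary suffix is a power of 1024 ("100MiB" -> 104857600 bytes) while a decimal suffix is a power of 1000 ("100MB" -> 100000000 bytes). A rough sketch of a parser with exactly that behaviour, limited to the suffixes exercised here (the real default_max_sync_throughput/0 presumably delegates to a shared information-unit parser, so treat this as illustrative):

    %% to_bytes/1 -- hypothetical helper mirroring the unit handling above.
    to_bytes(Value) when is_integer(Value) ->
        Value;
    to_bytes(Value) when is_list(Value) ->
        case string:to_integer(Value) of
            {N, "MiB"} -> N * 1024 * 1024;  %% binary (IEC) prefix
            {N, "MB"}  -> N * 1000 * 1000;  %% decimal (SI) prefix
            {N, ""}    -> N                 %% plain integer string
        end.

Here to_bytes("100MiB") evaluates to 104857600 and to_bytes("100MB") to 100000000, matching the expected values in the test.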
diff --git a/deps/rabbit/test/unit_cluster_formation_locking_mocks_SUITE.erl b/deps/rabbit/test/unit_cluster_formation_locking_mocks_SUITE.erl
index 41dd685694..d8c95c176d 100644
--- a/deps/rabbit/test/unit_cluster_formation_locking_mocks_SUITE.erl
+++ b/deps/rabbit/test/unit_cluster_formation_locking_mocks_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(unit_cluster_formation_locking_mocks_SUITE).
diff --git a/deps/rabbit/test/unit_collections_SUITE.erl b/deps/rabbit/test/unit_collections_SUITE.erl
index 1cbf65efce..3ff3a5849a 100644
--- a/deps/rabbit/test/unit_collections_SUITE.erl
+++ b/deps/rabbit/test/unit_collections_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(unit_collections_SUITE).
diff --git a/deps/rabbit/test/unit_config_value_encryption_SUITE.erl b/deps/rabbit/test/unit_config_value_encryption_SUITE.erl
index 7536005797..3efbec7a8c 100644
--- a/deps/rabbit/test/unit_config_value_encryption_SUITE.erl
+++ b/deps/rabbit/test/unit_config_value_encryption_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(unit_config_value_encryption_SUITE).
@@ -12,6 +12,10 @@
-compile(export_all).
+%% This cipher is listed as supported, but doesn't actually work.
+%% OTP bug: https://bugs.erlang.org/browse/ERL-1478
+-define(SKIPPED_CIPHERS, [aes_ige256]).
+
all() ->
[
{group, sequential_tests}
@@ -48,7 +52,7 @@ end_per_testcase(_TC, _Config) ->
decrypt_config(_Config) ->
%% Take all available block ciphers.
Hashes = rabbit_pbe:supported_hashes(),
- Ciphers = rabbit_pbe:supported_ciphers(),
+ Ciphers = rabbit_pbe:supported_ciphers() -- ?SKIPPED_CIPHERS,
Iterations = [1, 10, 100, 1000],
%% Loop through all hashes, ciphers and iterations.
_ = [begin
@@ -106,7 +110,7 @@ decrypt_start_app_file(Config) ->
do_decrypt_start_app(Config, Passphrase) ->
%% Configure rabbit for decrypting configuration.
application:set_env(rabbit, config_entry_decoder, [
- {cipher, aes_cbc256},
+ {cipher, aes_256_cbc},
{hash, sha512},
{iterations, 1000},
{passphrase, Passphrase}
@@ -118,8 +122,9 @@ do_decrypt_start_app(Config, Passphrase) ->
%% We expect a failure *after* the decrypting has been done.
try
rabbit:start_apps([rabbit_shovel_test], #{rabbit => temporary})
- catch _:_ ->
- ok
+ catch _:Err ->
+            ct:pal("start_apps failed with: ~p", [Err]),
+ ok
end,
%% Check if the values have been decrypted.
{ok, Shovels} = application:get_env(rabbit_shovel_test, shovels),
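The cipher change above tracks the newer OTP crypto naming (aes_256_cbc rather than the legacy aes_cbc256 alias). For context, an advanced.config stanza equivalent to the environment that do_decrypt_start_app/2 sets up could look like the following sketch; the passphrase value is a placeholder and would not be inlined like this in a real deployment:

    %% advanced.config (illustrative values only)
    [
      {rabbit, [
        {config_entry_decoder, [
          {cipher,     aes_256_cbc},
          {hash,       sha512},
          {iterations, 1000},
          {passphrase, "placeholder-passphrase"}
        ]}
      ]}
    ].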
diff --git a/deps/rabbit/test/unit_connection_tracking_SUITE.erl b/deps/rabbit/test/unit_connection_tracking_SUITE.erl
index 4ea1744fa7..2e0f31a05a 100644
--- a/deps/rabbit/test/unit_connection_tracking_SUITE.erl
+++ b/deps/rabbit/test/unit_connection_tracking_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(unit_connection_tracking_SUITE).
diff --git a/deps/rabbit/test/unit_credit_flow_SUITE.erl b/deps/rabbit/test/unit_credit_flow_SUITE.erl
index ffad444dde..aae84f0077 100644
--- a/deps/rabbit/test/unit_credit_flow_SUITE.erl
+++ b/deps/rabbit/test/unit_credit_flow_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(unit_credit_flow_SUITE).
diff --git a/deps/rabbit/test/unit_disk_monitor_SUITE.erl b/deps/rabbit/test/unit_disk_monitor_SUITE.erl
index bc21114c12..7a746349ca 100644
--- a/deps/rabbit/test/unit_disk_monitor_SUITE.erl
+++ b/deps/rabbit/test/unit_disk_monitor_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(unit_disk_monitor_SUITE).
@@ -67,6 +67,12 @@ set_disk_free_limit_command(Config) ->
?MODULE, set_disk_free_limit_command1, [Config]).
set_disk_free_limit_command1(_Config) ->
+ F = fun () ->
+ DiskFree = rabbit_disk_monitor:get_disk_free(),
+ DiskFree =/= unknown
+ end,
+ rabbit_ct_helpers:await_condition(F),
+
%% Use an integer
rabbit_disk_monitor:set_disk_free_limit({mem_relative, 1}),
disk_free_limit_to_total_memory_ratio_is(1),
@@ -84,7 +90,8 @@ set_disk_free_limit_command1(_Config) ->
passed.
disk_free_limit_to_total_memory_ratio_is(MemRatio) ->
+ DiskFreeLimit = rabbit_disk_monitor:get_disk_free_limit(),
ExpectedLimit = MemRatio * vm_memory_monitor:get_total_memory(),
% Total memory is unstable, so checking order
- true = ExpectedLimit/rabbit_disk_monitor:get_disk_free_limit() < 1.2,
- true = ExpectedLimit/rabbit_disk_monitor:get_disk_free_limit() > 0.98.
+ true = ExpectedLimit/DiskFreeLimit < 1.2,
+ true = ExpectedLimit/DiskFreeLimit > 0.98.
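The guard added at the top of set_disk_free_limit_command1/1 just waits for the disk monitor to produce its first reading, since get_disk_free/0 now returns unknown until the first poll completes. A plain retry loop equivalent in spirit to that await_condition call (a sketch, not the actual helper):

    %% wait_for_disk_free/1 -- illustrative stand-in for the await_condition guard.
    wait_for_disk_free(0) ->
        error(disk_free_still_unknown);
    wait_for_disk_free(Retries) ->
        case rabbit_disk_monitor:get_disk_free() of
            unknown -> timer:sleep(100),
                       wait_for_disk_free(Retries - 1);
            _Bytes  -> ok
        end.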
diff --git a/deps/rabbit/test/unit_disk_monitor_mocks_SUITE.erl b/deps/rabbit/test/unit_disk_monitor_mocks_SUITE.erl
index af78d0d134..ae16cbb379 100644
--- a/deps/rabbit/test/unit_disk_monitor_mocks_SUITE.erl
+++ b/deps/rabbit/test/unit_disk_monitor_mocks_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(unit_disk_monitor_mocks_SUITE).
@@ -88,7 +88,7 @@ disk_monitor_enable1(_Config) ->
application:set_env(rabbit, disk_monitor_failure_retry_interval, 100),
ok = rabbit_sup:stop_child(rabbit_disk_monitor_sup),
ok = rabbit_sup:start_delayed_restartable_child(rabbit_disk_monitor, [1000]),
- undefined = rabbit_disk_monitor:get_disk_free(),
+ unknown = rabbit_disk_monitor:get_disk_free(),
Cmd = case os:type() of
{win32, _} -> " Le volume dans le lecteur C n’a pas de nom.\n"
" Le numéro de série du volume est 707D-5BDC\n"
diff --git a/deps/rabbit/test/unit_file_handle_cache_SUITE.erl b/deps/rabbit/test/unit_file_handle_cache_SUITE.erl
index f2252aa2b5..489cf948c8 100644
--- a/deps/rabbit/test/unit_file_handle_cache_SUITE.erl
+++ b/deps/rabbit/test/unit_file_handle_cache_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(unit_file_handle_cache_SUITE).
diff --git a/deps/rabbit/test/unit_gen_server2_SUITE.erl b/deps/rabbit/test/unit_gen_server2_SUITE.erl
index babd340f19..65e13818ec 100644
--- a/deps/rabbit/test/unit_gen_server2_SUITE.erl
+++ b/deps/rabbit/test/unit_gen_server2_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(unit_gen_server2_SUITE).
diff --git a/deps/rabbit/test/unit_gm_SUITE.erl b/deps/rabbit/test/unit_gm_SUITE.erl
index 74400ddaa5..6ce552f039 100644
--- a/deps/rabbit/test/unit_gm_SUITE.erl
+++ b/deps/rabbit/test/unit_gm_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(unit_gm_SUITE).
diff --git a/deps/rabbit/test/unit_log_config_SUITE.erl b/deps/rabbit/test/unit_log_config_SUITE.erl
deleted file mode 100644
index 6be403fd3e..0000000000
--- a/deps/rabbit/test/unit_log_config_SUITE.erl
+++ /dev/null
@@ -1,837 +0,0 @@
-%% This Source Code Form is subject to the terms of the Mozilla Public
-%% License, v. 2.0. If a copy of the MPL was not distributed with this
-%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
-%%
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
-%%
-
--module(unit_log_config_SUITE).
-
--include_lib("common_test/include/ct.hrl").
--include_lib("eunit/include/eunit.hrl").
-
--compile(export_all).
-
-all() ->
- [
- default,
- env_var_tty,
- config_file_handler,
- config_file_handler_level,
- config_file_handler_rotation,
- config_console_handler,
- config_exchange_handler,
- config_syslog_handler,
- config_syslog_handler_options,
- config_multiple_handlers,
-
- env_var_overrides_config,
- env_var_disable_log,
-
- config_sinks_level,
- config_sink_file,
- config_sink_file_override_config_handler_file,
-
- config_handlers_merged_with_lager_handlers,
- sink_handlers_merged_with_lager_extra_sinks_handlers,
- sink_file_rewrites_file_backends
- ].
-
-init_per_testcase(_, Config) ->
- application:load(rabbit),
- application:load(lager),
- application:unset_env(rabbit, log),
- application:unset_env(rabbit, lager_log_root),
- application:unset_env(rabbit, lager_default_file),
- application:unset_env(rabbit, lager_upgrade_file),
- application:unset_env(lager, handlers),
- application:unset_env(lager, rabbit_handlers),
- application:unset_env(lager, extra_sinks),
- unset_logs_var_origin(),
- Config.
-
-end_per_testcase(_, Config) ->
- application:unset_env(rabbit, log),
- application:unset_env(rabbit, lager_log_root),
- application:unset_env(rabbit, lager_default_file),
- application:unset_env(rabbit, lager_upgrade_file),
- application:unset_env(lager, handlers),
- application:unset_env(lager, rabbit_handlers),
- application:unset_env(lager, extra_sinks),
- unset_logs_var_origin(),
- application:unload(rabbit),
- application:unload(lager),
- Config.
-
-sink_file_rewrites_file_backends(_) ->
- application:set_env(rabbit, log, [
- %% Disable rabbit file handler
- {file, [{file, false}]},
- {categories, [{federation, [{file, "federation.log"}, {level, warning}]}]}
- ]),
-
- LagerHandlers = [
- {lager_file_backend, [{file, "lager_file.log"}, {level, error}]},
- {lager_file_backend, [{file, "lager_file_1.log"}, {level, error}]},
- {lager_console_backend, [{level, info}]},
- {lager_exchange_backend, [{level, info}]}
- ],
- application:set_env(lager, handlers, LagerHandlers),
- rabbit_lager:configure_lager(),
-
- ExpectedSinks = sort_sinks(sink_rewrite_sinks()),
- ?assertEqual(ExpectedSinks, sort_sinks(application:get_env(lager, extra_sinks, undefined))).
-
-sink_rewrite_sinks() ->
- [{error_logger_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_channel_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_connection_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_feature_flags_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_federation_lager_event,
- [{handlers,[
- {lager_file_backend,
- [{date, ""},
- {file, "federation.log"},
- {formatter_config, formatter_config(file)},
- {level, warning},
- {size, 0}]},
- {lager_console_backend, [{level, warning}]},
- {lager_exchange_backend, [{level, warning}]}
- ]},
- {rabbit_handlers,[
- {lager_file_backend,
- [{date, ""},
- {file, "federation.log"},
- {formatter_config, formatter_config(file)},
- {level, warning},
- {size, 0}]},
- {lager_console_backend, [{level, warning}]},
- {lager_exchange_backend, [{level, warning}]}
- ]}]},
- {rabbit_log_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_ldap_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,
- [{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_mirroring_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_osiris_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,
- [{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_prelaunch_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_queue_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_ra_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,
- [{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_shovel_lager_event,
- [{handlers, [{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,
- [{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_upgrade_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,info]}]}]}
- ].
-
-sink_handlers_merged_with_lager_extra_sinks_handlers(_) ->
- DefaultLevel = debug,
- application:set_env(rabbit, log, [
- {file, [{file, "rabbit_file.log"}, {level, DefaultLevel}]},
- {console, [{enabled, true}, {level, error}]},
- {exchange, [{enabled, true}, {level, error}]},
- {categories, [
- {connection, [{level, debug}]},
- {channel, [{level, warning}, {file, "channel_log.log"}]}
- ]}
- ]),
-
- LagerSinks = [
- {rabbit_log_connection_lager_event,
- [{handlers,
- [{lager_file_backend,
- [{file, "connection_lager.log"},
- {level, info}]}]}]},
- {rabbit_log_channel_lager_event,
- [{handlers,
- [{lager_console_backend, [{level, debug}]},
- {lager_exchange_backend, [{level, debug}]},
- {lager_file_backend, [{level, error},
- {file, "channel_lager.log"}]}]}]}],
-
- application:set_env(lager, extra_sinks, LagerSinks),
- rabbit_lager:configure_lager(),
-
- ExpectedSinks = sort_sinks([
- {error_logger_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]}]},
- {rabbit_log_channel_lager_event,
- [{handlers,[
- {lager_console_backend, [{level, error},
- {formatter_config, formatter_config(console)}]},
- {lager_exchange_backend, [{level, error},
- {formatter_config, formatter_config(exchange)}]},
- {lager_file_backend,
- [{date, ""},
- {file, "channel_log.log"},
- {formatter_config, formatter_config(file)},
- {level, warning},
- {size, 0}]},
- {lager_console_backend, [{level, debug}]},
- {lager_exchange_backend, [{level, debug}]},
- {lager_file_backend, [{level, error},
- {file, "channel_lager.log"}]}
- ]},
- {rabbit_handlers,[
- {lager_console_backend, [{level, error},
- {formatter_config, formatter_config(console)}]},
- {lager_exchange_backend, [{level, error},
- {formatter_config, formatter_config(exchange)}]},
- {lager_file_backend,
- [{date, ""},
- {file, "channel_log.log"},
- {formatter_config, formatter_config(file)},
- {level, warning},
- {size, 0}]}]}
- ]},
- {rabbit_log_connection_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,debug]},
- {lager_file_backend, [{file, "connection_lager.log"}, {level, info}]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,debug]}]}]},
- {rabbit_log_feature_flags_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]}]},
- {rabbit_log_federation_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]}]},
- {rabbit_log_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]}]},
- {rabbit_log_ldap_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]},
- {rabbit_handlers,
- [{lager_forwarder_backend,[lager_event,DefaultLevel]}]}]},
- {rabbit_log_mirroring_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]}]},
- {rabbit_log_osiris_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]},
- {rabbit_handlers,
- [{lager_forwarder_backend,[lager_event,DefaultLevel]}]}]},
- {rabbit_log_prelaunch_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]}]},
- {rabbit_log_queue_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]}]},
- {rabbit_log_ra_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]},
- {rabbit_handlers,
- [{lager_forwarder_backend,[lager_event,DefaultLevel]}]}]},
- {rabbit_log_shovel_lager_event,
- [{handlers, [{lager_forwarder_backend,[lager_event,DefaultLevel]}]},
- {rabbit_handlers,
- [{lager_forwarder_backend,[lager_event,DefaultLevel]}]}]},
- {rabbit_log_upgrade_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]}]}]),
-
- ?assertEqual(ExpectedSinks, sort_sinks(application:get_env(lager, extra_sinks, undefined))).
-
-config_handlers_merged_with_lager_handlers(_) ->
- application:set_env(rabbit, log, [
- {file, [{file, "rabbit_file.log"}, {level, debug}]},
- {console, [{enabled, true}, {level, error}]},
- {exchange, [{enabled, true}, {level, error}]},
- {syslog, [{enabled, true}]}
- ]),
-
- LagerHandlers = [
- {lager_file_backend, [{file, "lager_file.log"}, {level, info}]},
- {lager_console_backend, [{level, info}]},
- {lager_exchange_backend, [{level, info}]},
- {lager_exchange_backend, [{level, info}]}
- ],
- application:set_env(lager, handlers, LagerHandlers),
- rabbit_lager:configure_lager(),
-
- FileHandlers = default_expected_handlers("rabbit_file.log", debug),
- ConsoleHandlers = expected_console_handler(error),
- RabbitHandlers = expected_rabbit_handler(error),
- SyslogHandlers = expected_syslog_handler(),
-
- ExpectedRabbitHandlers = sort_handlers(FileHandlers ++ ConsoleHandlers ++ RabbitHandlers ++ SyslogHandlers),
- ExpectedHandlers = sort_handlers(ExpectedRabbitHandlers ++ LagerHandlers),
-
- ?assertEqual(ExpectedRabbitHandlers, sort_handlers(application:get_env(lager, rabbit_handlers, undefined))),
- ?assertEqual(ExpectedHandlers, sort_handlers(application:get_env(lager, handlers, undefined))).
-
-config_sinks_level(_) ->
- DefaultLogFile = "rabbit_default.log",
- application:set_env(rabbit, lager_default_file, DefaultLogFile),
-
- application:set_env(rabbit, log, [
- {categories, [
- {connection, [{level, warning}]},
- {channel, [{level, debug}]},
- {mirroring, [{level, error}]}
- ]}
- ]),
-
- rabbit_lager:configure_lager(),
-
- ExpectedSinks = sort_sinks(level_sinks()),
- ?assertEqual(ExpectedSinks, sort_sinks(application:get_env(lager, extra_sinks, undefined))).
-
-level_sinks() ->
- [{error_logger_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_channel_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,debug]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,debug]}]}]},
- {rabbit_log_connection_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,warning]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,warning]}]}]},
- {rabbit_log_feature_flags_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_federation_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_ldap_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,
- [{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_mirroring_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,error]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,error]}]}]},
- {rabbit_log_osiris_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,
- [{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_prelaunch_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_queue_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_ra_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,
- [{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_shovel_lager_event,
- [{handlers, [{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,
- [{lager_forwarder_backend,
- [lager_event,info]}]}]},
- {rabbit_log_upgrade_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,info]}]}]}
- ].
-
-config_sink_file(_) ->
- DefaultLogFile = "rabbit_default.log",
- application:set_env(rabbit, lager_default_file, DefaultLogFile),
-
- DefaultLevel = error,
- application:set_env(rabbit, log, [
- {console, [{enabled, true}]},
- {exchange, [{enabled, true}]},
- {file, [{level, DefaultLevel}]},
- {categories, [
- {connection, [{file, "connection.log"}, {level, warning}]}
- ]}
- ]),
-
- rabbit_lager:configure_lager(),
-
- ExpectedSinks = sort_sinks(file_sinks(DefaultLevel)),
- ?assertEqual(ExpectedSinks, sort_sinks(application:get_env(lager, extra_sinks, undefined))).
-
-config_sink_file_override_config_handler_file(_) ->
- DefaultLogFile = "rabbit_default.log",
- application:set_env(rabbit, lager_default_file, DefaultLogFile),
-
- NonDefaultLogFile = "rabbit_not_default.log",
-
- DefaultLevel = error,
- application:set_env(rabbit, log, [
- {file, [{file, NonDefaultLogFile}, {level, DefaultLevel}]},
- {console, [{enabled, true}]},
- {exchange, [{enabled, true}]},
- {categories, [
- {connection, [{file, "connection.log"}, {level, warning}]}
- ]}
- ]),
-
- rabbit_lager:configure_lager(),
-
- ExpectedSinks = sort_sinks(file_sinks(DefaultLevel)),
- ?assertEqual(ExpectedSinks, sort_sinks(application:get_env(lager, extra_sinks, undefined))).
-
-file_sinks() ->
- file_sinks(info).
-
-file_sinks(DefaultLevel) ->
- [{error_logger_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]}]},
- {rabbit_log_channel_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]}]},
- {rabbit_log_connection_lager_event,
- [{handlers,[
- {lager_console_backend, [{level, warning},
- {formatter_config, formatter_config(console)}]},
- {lager_exchange_backend, [{level, warning},
- {formatter_config, formatter_config(exchange)}]},
- {lager_file_backend,
- [{date, ""},
- {file, "connection.log"},
- {formatter_config, formatter_config(file)},
- {level, error},
- {size, 0}]}]},
- {rabbit_handlers,[
- {lager_console_backend, [{level, warning},
- {formatter_config, formatter_config(console)}]},
- {lager_exchange_backend, [{level, warning},
- {formatter_config, formatter_config(exchange)}]},
- {lager_file_backend,
- [{date, ""},
- {file, "connection.log"},
- {formatter_config, formatter_config(backend)},
- {level, error},
- {size, 0}]}]}
- ]},
- {rabbit_log_feature_flags_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]}]},
- {rabbit_log_federation_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]}]},
- {rabbit_log_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]}]},
- {rabbit_log_ldap_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]},
- {rabbit_handlers,
- [{lager_forwarder_backend,[lager_event,DefaultLevel]}]}]},
- {rabbit_log_mirroring_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]}]},
- {rabbit_log_osiris_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]},
- {rabbit_handlers,
- [{lager_forwarder_backend,[lager_event,DefaultLevel]}]}]},
- {rabbit_log_prelaunch_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]}]},
- {rabbit_log_queue_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]}]},
- {rabbit_log_ra_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]},
- {rabbit_handlers,
- [{lager_forwarder_backend,[lager_event,DefaultLevel]}]}]},
- {rabbit_log_shovel_lager_event,
- [{handlers, [{lager_forwarder_backend,[lager_event,DefaultLevel]}]},
- {rabbit_handlers,
- [{lager_forwarder_backend,[lager_event,DefaultLevel]}]}]},
- {rabbit_log_upgrade_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,DefaultLevel]}]}]}
- ].
-
-config_multiple_handlers(_) ->
- DefaultLogFile = "rabbit_default.log",
- application:set_env(rabbit, lager_default_file, DefaultLogFile),
-
- application:set_env(rabbit, log, [
- %% Disable file output
- {file, [{file, false}]},
- %% Enable console output
- {console, [{enabled, true}]},
- %% Enable exchange output
- {exchange, [{enabled, true}]},
- %% Enable a syslog output
- {syslog, [{enabled, true}, {level, error}]}]),
-
- rabbit_lager:configure_lager(),
-
- ConsoleHandlers = expected_console_handler(),
- RabbitHandlers = expected_rabbit_handler(),
- SyslogHandlers = expected_syslog_handler(error),
-
- ExpectedHandlers = sort_handlers(SyslogHandlers ++ ConsoleHandlers ++ RabbitHandlers),
-
- ?assertEqual(ExpectedHandlers, sort_handlers(application:get_env(lager, handlers, undefined))),
- ?assertEqual(ExpectedHandlers, sort_handlers(application:get_env(lager, rabbit_handlers, undefined))).
-
-config_console_handler(_) ->
- DefaultLogFile = "rabbit_default.log",
- application:set_env(rabbit, lager_default_file, DefaultLogFile),
- application:set_env(rabbit, log, [{console, [{enabled, true}]}]),
-
- rabbit_lager:configure_lager(),
-
- FileHandlers = default_expected_handlers(DefaultLogFile),
- ConsoleHandlers = expected_console_handler(),
-
- ExpectedHandlers = sort_handlers(FileHandlers ++ ConsoleHandlers),
-
- ?assertEqual(ExpectedHandlers, sort_handlers(application:get_env(lager, handlers, undefined))),
- ?assertEqual(ExpectedHandlers, sort_handlers(application:get_env(lager, rabbit_handlers, undefined))).
-
-config_exchange_handler(_) ->
- DefaultLogFile = "rabbit_default.log",
- application:set_env(rabbit, lager_default_file, DefaultLogFile),
- application:set_env(rabbit, log, [{exchange, [{enabled, true}]}]),
-
- rabbit_lager:configure_lager(),
-
- FileHandlers = default_expected_handlers(DefaultLogFile),
- ExchangeHandlers = expected_rabbit_handler(),
-
- ExpectedHandlers = sort_handlers(FileHandlers ++ ExchangeHandlers),
-
- ?assertEqual(ExpectedHandlers, sort_handlers(application:get_env(lager, handlers, undefined))),
- ?assertEqual(ExpectedHandlers, sort_handlers(application:get_env(lager, rabbit_handlers, undefined))).
-
-expected_console_handler() ->
- expected_console_handler(debug).
-
-expected_console_handler(Level) ->
- [{lager_console_backend, [{level, Level},
- {formatter_config, formatter_config(console)}]}].
-
-expected_rabbit_handler() ->
- expected_rabbit_handler(debug).
-
-expected_rabbit_handler(Level) ->
- [{lager_exchange_backend, [{level, Level},
- {formatter_config, formatter_config(exchange)}]}].
-
-config_syslog_handler(_) ->
- DefaultLogFile = "rabbit_default.log",
- application:set_env(rabbit, lager_default_file, DefaultLogFile),
- application:set_env(rabbit, log, [{syslog, [{enabled, true}]}]),
-
- rabbit_lager:configure_lager(),
-
- FileHandlers = default_expected_handlers(DefaultLogFile),
- SyslogHandlers = expected_syslog_handler(),
-
- ExpectedHandlers = sort_handlers(FileHandlers ++ SyslogHandlers),
-
- ?assertEqual(ExpectedHandlers, sort_handlers(application:get_env(lager, handlers, undefined))),
- ?assertEqual(ExpectedHandlers, sort_handlers(application:get_env(lager, rabbit_handlers, undefined))).
-
-config_syslog_handler_options(_) ->
- DefaultLogFile = "rabbit_default.log",
- application:set_env(rabbit, lager_default_file, DefaultLogFile),
- application:set_env(rabbit, log, [{syslog, [{enabled, true},
- {level, warning}]}]),
-
- rabbit_lager:configure_lager(),
-
- FileHandlers = default_expected_handlers(DefaultLogFile),
- SyslogHandlers = expected_syslog_handler(warning),
-
- ExpectedHandlers = sort_handlers(FileHandlers ++ SyslogHandlers),
-
- ?assertEqual(ExpectedHandlers, sort_handlers(application:get_env(lager, handlers, undefined))),
- ?assertEqual(ExpectedHandlers, sort_handlers(application:get_env(lager, rabbit_handlers, undefined))).
-
-expected_syslog_handler() ->
- expected_syslog_handler(debug).
-
-expected_syslog_handler(Level) ->
- [{syslog_lager_backend, [Level,
- {},
- {lager_default_formatter, syslog_formatter_config()}]}].
-
-env_var_overrides_config(_) ->
- EnvLogFile = "rabbit_default.log",
- application:set_env(rabbit, lager_default_file, EnvLogFile),
-
- ConfigLogFile = "rabbit_not_default.log",
- application:set_env(rabbit, log, [{file, [{file, ConfigLogFile}]}]),
-
- set_logs_var_origin(environment),
- rabbit_lager:configure_lager(),
-
- ExpectedHandlers = default_expected_handlers(EnvLogFile),
- ?assertEqual(ExpectedHandlers, sort_handlers(application:get_env(lager, handlers, undefined))),
- ?assertEqual(ExpectedHandlers, sort_handlers(application:get_env(lager, rabbit_handlers, undefined))).
-
-env_var_disable_log(_) ->
- application:set_env(rabbit, lager_default_file, false),
-
- ConfigLogFile = "rabbit_not_default.log",
- application:set_env(rabbit, log, [{file, [{file, ConfigLogFile}]}]),
-
- set_logs_var_origin(environment),
- rabbit_lager:configure_lager(),
-
- ExpectedHandlers = [],
- ?assertEqual(ExpectedHandlers, sort_handlers(application:get_env(lager, handlers, undefined))),
- ?assertEqual(ExpectedHandlers, sort_handlers(application:get_env(lager, rabbit_handlers, undefined))).
-
-config_file_handler(_) ->
- DefaultLogFile = "rabbit_default.log",
- application:set_env(rabbit, lager_default_file, DefaultLogFile),
-
- NonDefaultLogFile = "rabbit_not_default.log",
- application:set_env(rabbit, log, [{file, [{file, NonDefaultLogFile}]}]),
-
- rabbit_lager:configure_lager(),
-
- ExpectedHandlers = default_expected_handlers(NonDefaultLogFile),
- ?assertEqual(ExpectedHandlers, sort_handlers(application:get_env(lager, handlers, undefined))),
- ?assertEqual(ExpectedHandlers, sort_handlers(application:get_env(lager, rabbit_handlers, undefined))).
-
-config_file_handler_level(_) ->
- DefaultLogFile = "rabbit_default.log",
- application:set_env(rabbit, lager_default_file, DefaultLogFile),
-
- application:set_env(rabbit, log, [{file, [{level, warning}]}]),
- rabbit_lager:configure_lager(),
-
- ExpectedHandlers = default_expected_handlers(DefaultLogFile, warning),
- ?assertEqual(ExpectedHandlers, sort_handlers(application:get_env(lager, handlers, undefined))),
- ?assertEqual(ExpectedHandlers, sort_handlers(application:get_env(lager, rabbit_handlers, undefined))).
-
-config_file_handler_rotation(_) ->
- DefaultLogFile = "rabbit_default.log",
- application:set_env(rabbit, lager_default_file, DefaultLogFile),
-
- application:set_env(rabbit, log, [{file, [{date, "$D0"}, {size, 5000}, {count, 10}]}]),
- rabbit_lager:configure_lager(),
-
- ExpectedHandlers = sort_handlers(default_expected_handlers(DefaultLogFile, debug, 5000, "$D0", [{count, 10}])),
- ?assertEqual(ExpectedHandlers, sort_handlers(application:get_env(lager, handlers, undefined))),
- ?assertEqual(ExpectedHandlers, sort_handlers(application:get_env(lager, rabbit_handlers, undefined))).
-
-default(_) ->
- LogRoot = "/tmp/log_base",
- application:set_env(rabbit, lager_log_root, LogRoot),
- LogFile = "rabbit_default.log",
- application:set_env(rabbit, lager_default_file, LogFile),
- LogUpgradeFile = "rabbit_default_upgrade.log",
- application:set_env(rabbit, lager_upgrade_file, LogUpgradeFile),
-
- rabbit_lager:configure_lager(),
-
- ExpectedHandlers = default_expected_handlers(LogFile),
- LogRoot = application:get_env(lager, log_root, undefined),
- ?assertEqual(ExpectedHandlers, sort_handlers(application:get_env(lager, handlers, undefined))),
- ?assertEqual(ExpectedHandlers, sort_handlers(application:get_env(lager, rabbit_handlers, undefined))),
-
- ExpectedSinks = default_expected_sinks(LogUpgradeFile),
- ?assertEqual(ExpectedSinks, sort_sinks(application:get_env(lager, extra_sinks, undefined))).
-
-default_expected_handlers(File) ->
- default_expected_handlers(File, debug, 0, "").
-default_expected_handlers(File, Level) ->
- default_expected_handlers(File, Level, 0, "").
-default_expected_handlers(File, Level, RotSize, RotDate) ->
- default_expected_handlers(File, Level, RotSize, RotDate, []).
-default_expected_handlers(File, Level, RotSize, RotDate, Extra) ->
- [{lager_file_backend,
- [{date, RotDate},
- {file, File},
- {formatter_config, formatter_config(file)},
- {level, Level},
- {size, RotSize}] ++ Extra}].
-
-default_expected_sinks(UpgradeFile) ->
- [{error_logger_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_channel_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_connection_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_feature_flags_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_federation_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_ldap_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,
- [{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_mirroring_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_osiris_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,
- [{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_prelaunch_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_queue_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_ra_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,
- [{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_shovel_lager_event,
- [{handlers, [{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,
- [{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_upgrade_lager_event,
- [{handlers,
- [{lager_file_backend,
- [{date,[]},
- {file, UpgradeFile},
- {formatter_config, formatter_config(file)},
- {level,info},
- {size,0}]}]},
- {rabbit_handlers,
- [{lager_file_backend,
- [{date,[]},
- {file, UpgradeFile},
- {formatter_config, formatter_config(file)},
- {level,info},
- {size,0}]}]}]}].
-
-env_var_tty(_) ->
- application:set_env(rabbit, lager_log_root, "/tmp/log_base"),
- application:set_env(rabbit, lager_default_file, tty),
- application:set_env(rabbit, lager_upgrade_file, tty),
- %% tty can only be set explicitly
- set_logs_var_origin(environment),
-
- rabbit_lager:configure_lager(),
-
- ExpectedHandlers = tty_expected_handlers(),
- ?assertEqual(ExpectedHandlers, sort_handlers(application:get_env(lager, handlers, undefined))),
- ?assertEqual(ExpectedHandlers, sort_handlers(application:get_env(lager, rabbit_handlers, undefined))),
-
- %% Upgrade sink will be different.
- ExpectedSinks = tty_expected_sinks(),
- ?assertEqual(ExpectedSinks, sort_sinks(application:get_env(lager, extra_sinks, undefined))).
-
-set_logs_var_origin(Origin) ->
- Context = #{var_origins => #{main_log_file => Origin}},
- rabbit_prelaunch:store_context(Context),
- ok.
-
-unset_logs_var_origin() ->
- rabbit_prelaunch:clear_context_cache(),
- ok.
-
-tty_expected_handlers() ->
- [{lager_console_backend,
- [{formatter_config, formatter_config(console)},
- {level, debug}]}].
-
-tty_expected_sinks() ->
- [{error_logger_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_channel_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_connection_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_feature_flags_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_federation_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_lager_event,
- [{handlers, [{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers, [{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_ldap_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,
- [{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_mirroring_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_osiris_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,
- [{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_prelaunch_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_queue_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_ra_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,
- [{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_shovel_lager_event,
- [{handlers, [{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,
- [{lager_forwarder_backend,[lager_event,info]}]}]},
- {rabbit_log_upgrade_lager_event,
- [{handlers,[{lager_forwarder_backend,[lager_event,info]}]},
- {rabbit_handlers,[{lager_forwarder_backend,[lager_event,info]}]}]}].
-
-sort_sinks(Sinks) ->
- lists:ukeysort(1,
- lists:map(
- fun({Name, Config}) ->
- Handlers = proplists:get_value(handlers, Config),
- RabbitHandlers = proplists:get_value(rabbit_handlers, Config),
- {Name, lists:ukeymerge(1,
- [{handlers, sort_handlers(Handlers)},
- {rabbit_handlers, sort_handlers(RabbitHandlers)}],
- lists:ukeysort(1, Config))}
- end,
- Sinks)).
-
-sort_handlers(Handlers) ->
- lists:keysort(1,
- lists:map(
- fun
- ({Name, [{Atom, _}|_] = Config}) when is_atom(Atom) ->
- {Name, lists:ukeysort(1, Config)};
- %% Non-proplist configuration. forwarder backend
- (Other) ->
- Other
- end,
- Handlers)).
-
-formatter_config(console) ->
- [date," ",time," ",color,"[",severity, "] ", {pid,[]}, " ",message,"\r\n"];
-formatter_config(_) ->
- [date," ",time," ",color,"[",severity, "] ", {pid,[]}, " ",message,"\n"].
-
-syslog_formatter_config() ->
- [color,"[",severity, "] ", {pid,[]}, " ",message,"\n"].
diff --git a/deps/rabbit/test/unit_log_management_SUITE.erl b/deps/rabbit/test/unit_log_management_SUITE.erl
index 9fc9c7839d..195d872bc9 100644
--- a/deps/rabbit/test/unit_log_management_SUITE.erl
+++ b/deps/rabbit/test/unit_log_management_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(unit_log_management_SUITE).
@@ -25,7 +25,6 @@ all() ->
groups() ->
[
{non_parallel_tests, [], [
- log_management,
log_file_initialised_during_startup,
log_file_fails_to_initialise_during_startup,
externally_rotated_logs_are_automatically_reopened
@@ -113,94 +112,6 @@ wait_for_application(Application, Time) ->
%% Log management.
%% -------------------------------------------------------------------
-log_management(Config) ->
- passed = rabbit_ct_broker_helpers:rpc(Config, 0,
- ?MODULE, log_management1, [Config]).
-
-log_management1(_Config) ->
- [LogFile|_] = rabbit:log_locations(),
- Suffix = ".0",
-
- ok = test_logs_working([LogFile]),
-
- %% prepare basic logs
- file:delete(LogFile ++ Suffix),
- ok = test_logs_working([LogFile]),
-
- %% simple log rotation
- ok = rabbit:rotate_logs(),
- %% rabbit:rotate_logs/0 is asynchronous due to a limitation in
- %% Lager. Therefore, we have no choice but to wait an arbitrary
- %% amount of time.
- ok = rabbit_ct_helpers:await_condition(
- fun() ->
- [true, true] =:=
- non_empty_files([LogFile ++ Suffix, LogFile])
- end, 5000),
- ok = test_logs_working([LogFile]),
-
- %% log rotation on empty files
- ok = clean_logs([LogFile], Suffix),
- ok = rabbit:rotate_logs(),
- ok = rabbit_ct_helpers:await_condition(
- fun() ->
- [true, true] =:=
- non_empty_files([LogFile ++ Suffix, LogFile])
- end, 5000),
-
- %% logs with suffix are not writable
- ok = rabbit:rotate_logs(),
- ok = rabbit_ct_helpers:await_condition(
- fun() ->
- ok =:= make_files_non_writable([LogFile ++ Suffix])
- end, 5000),
- ok = rabbit:rotate_logs(),
- ok = rabbit_ct_helpers:await_condition(
- fun() ->
- ok =:= test_logs_working([LogFile])
- end, 5000),
-
- %% rotate when original log files are not writable
- ok = make_files_non_writable([LogFile]),
- ok = rabbit:rotate_logs(),
- timer:sleep(2000),
-
- %% logging directed to tty (first, remove handlers)
- ok = rabbit:stop(),
- ok = make_files_writable([LogFile ++ Suffix]),
- ok = clean_logs([LogFile], Suffix),
- ok = application:set_env(rabbit, lager_default_file, tty),
- application:unset_env(rabbit, log),
- application:unset_env(lager, handlers),
- application:unset_env(lager, extra_sinks),
- ok = rabbit:start(),
- timer:sleep(200),
- rabbit_log:info("test info"),
-
- %% rotate logs when logging is turned off
- ok = rabbit:stop(),
- ok = clean_logs([LogFile], Suffix),
- ok = application:set_env(rabbit, lager_default_file, false),
- application:unset_env(rabbit, log),
- application:unset_env(lager, handlers),
- application:unset_env(lager, extra_sinks),
- ok = rabbit:start(),
- timer:sleep(200),
- rabbit_log:error("test error"),
- timer:sleep(200),
- ?assertEqual([{error,enoent}], empty_files([LogFile])),
-
- %% cleanup
- ok = rabbit:stop(),
- ok = clean_logs([LogFile], Suffix),
- ok = application:set_env(rabbit, lager_default_file, LogFile),
- application:unset_env(rabbit, log),
- application:unset_env(lager, handlers),
- application:unset_env(lager, extra_sinks),
- ok = rabbit:start(),
- ok = test_logs_working([LogFile]),
- passed.
-
log_file_initialised_during_startup(Config) ->
passed = rabbit_ct_broker_helpers:rpc(Config, 0,
?MODULE, log_file_initialised_during_startup1, [Config]).
@@ -212,10 +123,7 @@ log_file_initialised_during_startup1(_Config) ->
%% start application with simple tty logging
ok = rabbit:stop(),
ok = clean_logs([LogFile], Suffix),
- ok = application:set_env(rabbit, lager_default_file, tty),
- application:unset_env(rabbit, log),
- application:unset_env(lager, handlers),
- application:unset_env(lager, extra_sinks),
+ os:putenv("RABBITMQ_LOGS", "-"),
ok = rabbit:start(),
%% start application with logging to non-existing directory
@@ -224,18 +132,12 @@ log_file_initialised_during_startup1(_Config) ->
delete_file(NonExistent),
delete_file(filename:dirname(NonExistent)),
ok = rabbit:stop(),
- ct:pal("Setting lager_default_file to \"~s\"", [NonExistent]),
- ok = application:set_env(rabbit, lager_default_file, NonExistent),
- application:unset_env(rabbit, log),
- application:unset_env(lager, handlers),
- application:unset_env(lager, extra_sinks),
+ io:format("Setting log file to \"~s\"~n", [NonExistent]),
+ os:putenv("RABBITMQ_LOGS", NonExistent),
ok = rabbit:start(),
%% clean up
- ok = application:set_env(rabbit, lager_default_file, LogFile),
- application:unset_env(rabbit, log),
- application:unset_env(lager, handlers),
- application:unset_env(lager, extra_sinks),
+ os:unsetenv("RABBITMQ_LOGS"),
ok = rabbit:start(),
passed.
@@ -277,13 +179,8 @@ log_file_fails_to_initialise_during_startup1(_Config, NonWritableDir) ->
delete_file(filename:dirname(NoPermission1)),
ok = rabbit:stop(),
- ct:pal("Setting lager_default_file to \"~s\"", [NoPermission1]),
- ok = application:set_env(rabbit, lager_default_file, NoPermission1),
- application:unset_env(rabbit, log),
- application:unset_env(lager, handlers),
- application:unset_env(lager, extra_sinks),
-
- ct:pal("`rabbit` application env.: ~p", [application:get_all_env(rabbit)]),
+ io:format("Setting log file to \"~s\"~n", [NoPermission1]),
+ os:putenv("RABBITMQ_LOGS", NoPermission1),
?assertThrow(
{error, {rabbit, {{cannot_log_to_file, _, _}, _}}},
rabbit:start()),
@@ -296,22 +193,14 @@ log_file_fails_to_initialise_during_startup1(_Config, NonWritableDir) ->
delete_file(NoPermission2),
delete_file(filename:dirname(NoPermission2)),
- ct:pal("Setting lager_default_file to \"~s\"", [NoPermission2]),
- ok = application:set_env(rabbit, lager_default_file, NoPermission2),
- application:unset_env(rabbit, log),
- application:unset_env(lager, handlers),
- application:unset_env(lager, extra_sinks),
-
- ct:pal("`rabbit` application env.: ~p", [application:get_all_env(rabbit)]),
+ io:format("Setting log file to \"~s\"~n", [NoPermission2]),
+ os:putenv("RABBITMQ_LOGS", NoPermission2),
?assertThrow(
{error, {rabbit, {{cannot_log_to_file, _, _}, _}}},
rabbit:start()),
%% clean up
- ok = application:set_env(rabbit, lager_default_file, LogFile),
- application:unset_env(rabbit, log),
- application:unset_env(lager, handlers),
- application:unset_env(lager, extra_sinks),
+ os:unsetenv("RABBITMQ_LOGS"),
ok = rabbit:start(),
passed.
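The rewritten startup cases above configure logging solely through the RABBITMQ_LOGS environment variable ("-" keeps logging on the console, as in the tty setup it replaces) instead of the removed lager application keys. The idiom they repeat can be summarised as a small wrapper; this is a sketch of the test pattern, not a helper defined in the suite:

    %% with_log_target/2 -- illustrative wrapper over the os:putenv/os:unsetenv
    %% pattern used in the hunks above.
    with_log_target(Target, Fun) ->
        os:putenv("RABBITMQ_LOGS", Target),   %% e.g. "-" or a file path
        try
            ok = rabbit:start(),
            Fun()
        after
            ok = rabbit:stop(),
            os:unsetenv("RABBITMQ_LOGS")
        end.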
diff --git a/deps/rabbit/test/unit_operator_policy_SUITE.erl b/deps/rabbit/test/unit_operator_policy_SUITE.erl
index ae3285bb55..dedd6c82af 100644
--- a/deps/rabbit/test/unit_operator_policy_SUITE.erl
+++ b/deps/rabbit/test/unit_operator_policy_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(unit_operator_policy_SUITE).
@@ -21,7 +21,8 @@ all() ->
groups() ->
[
{parallel_tests, [parallel], [
- merge_operator_policy_definitions
+ merge_operator_policy_definitions,
+ conflict_resolution_for_booleans
]}
].
@@ -102,6 +103,54 @@ merge_operator_policy_definitions(_Config) ->
[{definition, [
{<<"message-ttl">>, 3000}
]}])
- ),
+ ).
+
- passed.
+ conflict_resolution_for_booleans(_Config) ->
+ ?assertEqual(
+ [
+ {<<"remote-dc-replicate">>, true}
+ ],
+ rabbit_policy:merge_operator_definitions(
+ #{definition => #{
+ <<"remote-dc-replicate">> => true
+ }},
+ [{definition, [
+ {<<"remote-dc-replicate">>, true}
+ ]}])),
+
+ ?assertEqual(
+ [
+ {<<"remote-dc-replicate">>, false}
+ ],
+ rabbit_policy:merge_operator_definitions(
+ #{definition => #{
+ <<"remote-dc-replicate">> => false
+ }},
+ [{definition, [
+ {<<"remote-dc-replicate">>, false}
+ ]}])),
+
+ ?assertEqual(
+ [
+ {<<"remote-dc-replicate">>, true}
+ ],
+ rabbit_policy:merge_operator_definitions(
+ #{definition => #{
+ <<"remote-dc-replicate">> => false
+ }},
+ [{definition, [
+ {<<"remote-dc-replicate">>, true}
+ ]}])),
+
+ ?assertEqual(
+ [
+ {<<"remote-dc-replicate">>, false}
+ ],
+ rabbit_policy:merge_operator_definitions(
+ #{definition => #{
+ <<"remote-dc-replicate">> => true
+ }},
+ [{definition, [
+ {<<"remote-dc-replicate">>, false}
+    ]}])).
\ No newline at end of file
diff --git a/deps/rabbit/test/unit_pg_local_SUITE.erl b/deps/rabbit/test/unit_pg_local_SUITE.erl
index 54fafdd340..9819d78e49 100644
--- a/deps/rabbit/test/unit_pg_local_SUITE.erl
+++ b/deps/rabbit/test/unit_pg_local_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(unit_pg_local_SUITE).
diff --git a/deps/rabbit/test/unit_plugin_directories_SUITE.erl b/deps/rabbit/test/unit_plugin_directories_SUITE.erl
index 1195434fae..d857f99419 100644
--- a/deps/rabbit/test/unit_plugin_directories_SUITE.erl
+++ b/deps/rabbit/test/unit_plugin_directories_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(unit_plugin_directories_SUITE).
diff --git a/deps/rabbit/test/unit_plugin_versioning_SUITE.erl b/deps/rabbit/test/unit_plugin_versioning_SUITE.erl
index 8032becedd..127d11d8c4 100644
--- a/deps/rabbit/test/unit_plugin_versioning_SUITE.erl
+++ b/deps/rabbit/test/unit_plugin_versioning_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(unit_plugin_versioning_SUITE).
diff --git a/deps/rabbit/test/unit_policy_validators_SUITE.erl b/deps/rabbit/test/unit_policy_validators_SUITE.erl
index c340d172af..54dbdd56cf 100644
--- a/deps/rabbit/test/unit_policy_validators_SUITE.erl
+++ b/deps/rabbit/test/unit_policy_validators_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(unit_policy_validators_SUITE).
diff --git a/deps/rabbit/test/unit_priority_queue_SUITE.erl b/deps/rabbit/test/unit_priority_queue_SUITE.erl
index 5587e7d61f..41b7717e27 100644
--- a/deps/rabbit/test/unit_priority_queue_SUITE.erl
+++ b/deps/rabbit/test/unit_priority_queue_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(unit_priority_queue_SUITE).
diff --git a/deps/rabbit/test/unit_queue_consumers_SUITE.erl b/deps/rabbit/test/unit_queue_consumers_SUITE.erl
index 0f48ea65b4..533991dc8e 100644
--- a/deps/rabbit/test/unit_queue_consumers_SUITE.erl
+++ b/deps/rabbit/test/unit_queue_consumers_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2018-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2018-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(unit_queue_consumers_SUITE).
diff --git a/deps/rabbit/test/unit_stats_and_metrics_SUITE.erl b/deps/rabbit/test/unit_stats_and_metrics_SUITE.erl
index 2ffed514e1..3cfe4ff1ce 100644
--- a/deps/rabbit/test/unit_stats_and_metrics_SUITE.erl
+++ b/deps/rabbit/test/unit_stats_and_metrics_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(unit_stats_and_metrics_SUITE).
diff --git a/deps/rabbit/test/unit_supervisor2_SUITE.erl b/deps/rabbit/test/unit_supervisor2_SUITE.erl
index 50633984e2..235ff64c09 100644
--- a/deps/rabbit/test/unit_supervisor2_SUITE.erl
+++ b/deps/rabbit/test/unit_supervisor2_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(unit_supervisor2_SUITE).
diff --git a/deps/rabbit/test/unit_vm_memory_monitor_SUITE.erl b/deps/rabbit/test/unit_vm_memory_monitor_SUITE.erl
index 193df1f956..c09cd91deb 100644
--- a/deps/rabbit/test/unit_vm_memory_monitor_SUITE.erl
+++ b/deps/rabbit/test/unit_vm_memory_monitor_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(unit_vm_memory_monitor_SUITE).
@@ -21,7 +21,9 @@ groups() ->
[
{sequential_tests, [], [
parse_line_linux,
- set_vm_memory_high_watermark_command
+ set_vm_memory_high_watermark_relative1,
+ set_vm_memory_high_watermark_relative2,
+ set_vm_memory_high_watermark_absolute
]}
].
@@ -75,21 +77,46 @@ parse_line_linux(_Config) ->
{"MemTotal 50296866 ", {'MemTotal', 50296866}}]),
ok.
-set_vm_memory_high_watermark_command(Config) ->
+set_vm_memory_high_watermark_relative1(Config) ->
rabbit_ct_broker_helpers:rpc(Config, 0,
- ?MODULE, set_vm_memory_high_watermark_command1, [Config]).
+ ?MODULE, set_and_verify_vm_memory_high_watermark_relative, [1.0]).
-set_vm_memory_high_watermark_command1(_Config) ->
- MemLimitRatio = 1.0,
+%% an alternative way of setting it via advanced.config, equivalent to the relative1 case above
+set_vm_memory_high_watermark_relative2(Config) ->
+ rabbit_ct_broker_helpers:rpc(Config, 0,
+ ?MODULE, set_and_verify_vm_memory_high_watermark_relative, [{relative, 1.0}]).
+
+set_vm_memory_high_watermark_absolute(Config) ->
+ rabbit_ct_broker_helpers:rpc(Config, 0,
+ %% note: we cannot use 100M here because this function won't do any
+ %% parsing of the argument
+ ?MODULE, set_and_verify_vm_memory_high_watermark_absolute, [{absolute, 104857600}]).
+
+
+set_and_verify_vm_memory_high_watermark_relative(MemLimitRatio) ->
MemTotal = vm_memory_monitor:get_total_memory(),
vm_memory_monitor:set_vm_memory_high_watermark(MemLimitRatio),
MemLimit = vm_memory_monitor:get_memory_limit(),
case MemLimit of
MemTotal -> ok;
- _ -> MemTotalToMemLimitRatio = MemLimit * 100.0 / MemTotal / 100,
+ _ -> MemTotalToMemLimitRatio = (MemLimit * 100) / (MemTotal * 100),
ct:fail(
- "Expected memory high watermark to be ~p (~s), but it was ~p (~.1f)",
+ "Expected memory high watermark to be ~p (~p), but it was ~p (~.1f)",
[MemTotal, MemLimitRatio, MemLimit, MemTotalToMemLimitRatio]
)
end.
+
+set_and_verify_vm_memory_high_watermark_absolute(MemLimit0) ->
+ MemTotal = vm_memory_monitor:get_total_memory(),
+ Interpreted = vm_memory_monitor:interpret_limit(MemLimit0, MemTotal),
+
+ vm_memory_monitor:set_vm_memory_high_watermark(MemLimit0),
+ MemLimit = vm_memory_monitor:get_memory_limit(),
+ case MemLimit of
+ MemTotal -> ok;
+ Interpreted -> ok;
+ _ ->
+ ct:fail("Expected memory high watermark to be ~p but it was ~p", [Interpreted, MemLimit])
+ end,
+ vm_memory_monitor:set_vm_memory_high_watermark(0.4).
\ No newline at end of file
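Note: the relative ({relative, Ratio} or a bare float) and absolute ({absolute, Bytes}) forms exercised by the new test cases are the same shapes an operator can put in node configuration, as the advanced.config comment above suggests. A minimal illustrative sketch, not part of this diff (classic Erlang-terms config; the vm_memory_high_watermark key and its {absolute, Bytes} form are documented RabbitMQ settings):

    %% advanced.config (sketch): equivalent of
    %% set_vm_memory_high_watermark({absolute, 104857600}) in the suite above
    [
     {rabbit, [
       {vm_memory_high_watermark, {absolute, 104857600}}
       %% or a relative limit, e.g. {vm_memory_high_watermark, 1.0}
     ]}
    ].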
diff --git a/deps/rabbit/test/upgrade_preparation_SUITE.erl b/deps/rabbit/test/upgrade_preparation_SUITE.erl
index 880238515a..90d0bddb49 100644
--- a/deps/rabbit/test/upgrade_preparation_SUITE.erl
+++ b/deps/rabbit/test/upgrade_preparation_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(upgrade_preparation_SUITE).
@@ -38,13 +38,21 @@ end_per_suite(Config) ->
rabbit_ct_helpers:run_teardown_steps(Config).
init_per_group(Group, Config) ->
- Config1 = rabbit_ct_helpers:set_config(Config, [
- {rmq_nodes_count, 3},
- {rmq_nodename_suffix, Group}
- ]),
- rabbit_ct_helpers:run_steps(Config1,
- rabbit_ct_broker_helpers:setup_steps() ++
- rabbit_ct_client_helpers:setup_steps()).
+ case rabbit_ct_helpers:is_mixed_versions() of
+ true ->
+ %% in a 3.8/3.9 mixed cluster, ra will not cluster across versions,
+ %% so quorum plus one will not be achieved
+ {skip, "not mixed versions compatible"};
+ _ ->
+ Config1 = rabbit_ct_helpers:set_config(Config,
+ [
+ {rmq_nodes_count, 3},
+ {rmq_nodename_suffix, Group}
+ ]),
+ rabbit_ct_helpers:run_steps(Config1,
+ rabbit_ct_broker_helpers:setup_steps() ++
+ rabbit_ct_client_helpers:setup_steps())
+ end.
end_per_group(_Group, Config) ->
rabbit_ct_helpers:run_steps(Config,
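Note: the mixed-versions guard added above relies on a standard Common Test mechanism: when init_per_group/2 returns {skip, Reason}, every case in that group is reported as skipped instead of run. A minimal standalone sketch of that mechanism (hypothetical suite and environment variable, not part of this diff):

    -module(skip_example_SUITE).
    -export([all/0, groups/0, init_per_group/2, end_per_group/2, a_case/1]).

    all() -> [{group, guarded}].
    groups() -> [{guarded, [], [a_case]}].

    %% Returning {skip, Reason} skips the whole group, exactly like the
    %% is_mixed_versions() check in upgrade_preparation_SUITE above.
    init_per_group(guarded, Config) ->
        case os:getenv("RUN_GUARDED_GROUP") of
            false -> {skip, "RUN_GUARDED_GROUP not set"};
            _ -> Config
        end.

    end_per_group(_Group, Config) -> Config.

    a_case(_Config) -> ok.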
diff --git a/deps/rabbit/test/vhost_SUITE.erl b/deps/rabbit/test/vhost_SUITE.erl
index 4e6ffe0d74..a86d90f50a 100644
--- a/deps/rabbit/test/vhost_SUITE.erl
+++ b/deps/rabbit/test/vhost_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(vhost_SUITE).
@@ -25,7 +25,8 @@ groups() ->
ClusterSize1Tests = [
single_node_vhost_deletion_forces_connection_closure,
vhost_failure_forces_connection_closure,
- vhost_creation_idempotency
+ vhost_creation_idempotency,
+ parse_tags
],
ClusterSize2Tests = [
cluster_vhost_deletion_forces_connection_closure,
@@ -318,6 +319,17 @@ vhost_creation_idempotency(Config) ->
rabbit_ct_broker_helpers:delete_vhost(Config, VHost)
end.
+parse_tags(Config) ->
+ rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, parse_tags1, [Config]).
+
+parse_tags1(_Config) ->
+ ?assertEqual([], rabbit_vhost:parse_tags(<<"">>)),
+ ?assertEqual([], rabbit_vhost:parse_tags("")),
+ ?assertEqual([], rabbit_vhost:parse_tags([])),
+ ?assertEqual([a, b], rabbit_vhost:parse_tags(<<"a,b">>)),
+ ?assertEqual([a3, b3], rabbit_vhost:parse_tags("a3,b3")),
+ ?assertEqual([tag1, tag2], rabbit_vhost:parse_tags([<<"tag1">>, <<"tag2">>])).
+
%% -------------------------------------------------------------------
%% Helpers
%% -------------------------------------------------------------------
diff --git a/deps/rabbit_common/BUILD.bazel b/deps/rabbit_common/BUILD.bazel
new file mode 100644
index 0000000000..7ecddea9fc
--- /dev/null
+++ b/deps/rabbit_common/BUILD.bazel
@@ -0,0 +1,239 @@
+load(
+ "@bazel-erlang//:bazel_erlang_lib.bzl",
+ "app_file",
+ "bazel_erlang_lib",
+ "erlc",
+)
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze", "plt")
+load(
+ "//:rabbitmq.bzl",
+ "APP_VERSION",
+ "RABBITMQ_ERLC_OPTS",
+ "RABBITMQ_TEST_ERLC_OPTS",
+ "assert_suites",
+ "rabbitmq_suite",
+)
+
+py_binary(
+ name = "codegen",
+ srcs = [
+ "codegen.py",
+ ],
+ imports = ["../../deps/rabbitmq_codegen"],
+ deps = [
+ "//deps/rabbitmq_codegen:amqp_codegen",
+ ],
+)
+
+genrule(
+ name = "generated_headers",
+ srcs = [
+ "//deps/rabbitmq_codegen:amqp-rabbitmq-0.9.1.json",
+ "//deps/rabbitmq_codegen:credit_extension.json",
+ "//deps/rabbitmq_codegen:amqp-rabbitmq-0.8.json",
+ ],
+ outs = ["include/rabbit_framing.hrl"],
+ cmd = "$(location :codegen) --ignore-conflicts header $(SRCS) $@",
+ tools = [":codegen"],
+)
+
+genrule(
+ name = "rabbit_framing_amqp_0_9_1.erl",
+ srcs = [
+ "//deps/rabbitmq_codegen:amqp-rabbitmq-0.9.1.json",
+ "//deps/rabbitmq_codegen:credit_extension.json",
+ ],
+ outs = ["src/rabbit_framing_amqp_0_9_1.erl"],
+ cmd = "$(location :codegen) body $(SRCS) $@",
+ tools = [":codegen"],
+)
+
+genrule(
+ name = "rabbit_framing_amqp_0_8.erl",
+ srcs = [
+ "//deps/rabbitmq_codegen:amqp-rabbitmq-0.8.json",
+ ],
+ outs = ["src/rabbit_framing_amqp_0_8.erl"],
+ cmd = "$(location :codegen) body $(SRCS) $@",
+ tools = [":codegen"],
+)
+
+DEPS = []
+
+RUNTIME_DEPS = [
+ "@jsx//:bazel_erlang_lib",
+ "@recon//:bazel_erlang_lib",
+ "@credentials_obfuscation//:bazel_erlang_lib",
+]
+
+APP_NAME = "rabbit_common"
+
+EXTRA_APPS = [
+ "compiler",
+ "crypto",
+ "public_key",
+ "sasl",
+ "ssl",
+ "syntax_tools",
+ "tools",
+ "xmerl",
+]
+
+HDRS = glob(["include/*.hrl"]) + ["include/rabbit_framing.hrl"]
+
+app_file(
+ name = "app_file",
+ app_description = "Modules shared by rabbitmq-server and rabbitmq-erlang-client",
+ app_name = APP_NAME,
+ app_version = APP_VERSION,
+ extra_apps = EXTRA_APPS,
+ modules = [":beam_files"],
+ deps = DEPS + RUNTIME_DEPS,
+)
+
+FIRST_SRCS = [
+ "src/gen_server2.erl",
+ "src/rabbit_authn_backend.erl",
+ "src/rabbit_authz_backend.erl",
+ "src/rabbit_registry_class.erl",
+]
+
+erlc(
+ name = "first_beam_files",
+ srcs = glob(FIRST_SRCS),
+ hdrs = HDRS,
+ dest = "ebin",
+ erlc_opts = RABBITMQ_ERLC_OPTS,
+ deps = DEPS,
+)
+
+erlc(
+ name = "beam_files",
+ srcs = glob(
+ ["src/*.erl"],
+ exclude = FIRST_SRCS,
+ ) + [
+ "src/rabbit_framing_amqp_0_8.erl",
+ "src/rabbit_framing_amqp_0_9_1.erl",
+ ],
+ hdrs = HDRS,
+ beam = [":first_beam_files"],
+ dest = "ebin",
+ erlc_opts = RABBITMQ_ERLC_OPTS,
+ deps = DEPS,
+)
+
+bazel_erlang_lib(
+ name = "bazel_erlang_lib",
+ hdrs = HDRS,
+ app = ":app_file",
+ app_name = APP_NAME,
+ beam = [
+ ":first_beam_files",
+ ":beam_files",
+ ],
+ visibility = ["//visibility:public"],
+ deps = DEPS + RUNTIME_DEPS,
+)
+
+erlc(
+ name = "first_test_beam_files",
+ testonly = True,
+ srcs = glob(FIRST_SRCS),
+ hdrs = HDRS,
+ dest = "src",
+ erlc_opts = RABBITMQ_TEST_ERLC_OPTS,
+ deps = DEPS,
+)
+
+erlc(
+ name = "test_beam_files",
+ testonly = True,
+ srcs = glob(
+ ["src/*.erl"],
+ exclude = FIRST_SRCS,
+ ) + [
+ "src/rabbit_framing_amqp_0_8.erl",
+ "src/rabbit_framing_amqp_0_9_1.erl",
+ ],
+ hdrs = HDRS,
+ beam = [":first_test_beam_files"],
+ dest = "src",
+ erlc_opts = RABBITMQ_TEST_ERLC_OPTS,
+ deps = DEPS,
+)
+
+bazel_erlang_lib(
+ name = "test_bazel_erlang_lib",
+ testonly = True,
+ hdrs = HDRS,
+ app = ":app_file",
+ app_name = APP_NAME,
+ beam = [
+ ":first_test_beam_files",
+ ":test_beam_files",
+ ],
+ visibility = ["//:__subpackages__"],
+ deps = DEPS + RUNTIME_DEPS,
+)
+
+xref(
+ additional_libs = [
+ "@ranch//:bazel_erlang_lib",
+ ],
+ tags = ["xref"],
+)
+
+plt(
+ name = "base_plt",
+ apps = [
+ "mnesia",
+ "crypto",
+ "ssl",
+ ] + EXTRA_APPS,
+ plt = "//:base_plt",
+)
+
+dialyze(
+ plt = ":base_plt",
+ tags = ["dialyze"],
+)
+
+suites = [
+ rabbitmq_suite(
+ name = "rabbit_env_SUITE",
+ size = "small",
+ deps = [
+ "@proper//:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_suite(
+ name = "supervisor2_SUITE",
+ size = "small",
+ ),
+ rabbitmq_suite(
+ name = "unit_priority_queue_SUITE",
+ size = "small",
+ ),
+ rabbitmq_suite(
+ name = "unit_SUITE",
+ size = "medium",
+ additional_srcs = [
+ "test/gen_server2_test_server.erl",
+ ],
+ deps = [
+ "@credentials_obfuscation//:bazel_erlang_lib",
+ "@proper//:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_suite(
+ name = "worker_pool_SUITE",
+ size = "small",
+ ),
+]
+
+assert_suites(
+ suites,
+ glob(["test/**/*_SUITE.erl"]),
+)
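Note: the new BUILD.bazel generates the rabbit_framing sources via the codegen genrules, compiles the behaviour modules in FIRST_SRCS ahead of the remaining sources, and registers the CT suites through rabbitmq_suite. As a hedged sketch of how such targets are typically invoked (the library label is taken verbatim from the file above; it is an assumption that rabbitmq_suite produces a test target named after each suite, and the repository's BAZEL.md describes the actual workflow):

    # build the rabbit_common library target defined above
    bazel build //deps/rabbit_common:bazel_erlang_lib
    # run one of the registered Common Test suites
    bazel test //deps/rabbit_common:unit_SUITE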
diff --git a/deps/rabbit_common/CONTRIBUTING.md b/deps/rabbit_common/CONTRIBUTING.md
index 23a92fef9c..9722e973fb 100644
--- a/deps/rabbit_common/CONTRIBUTING.md
+++ b/deps/rabbit_common/CONTRIBUTING.md
@@ -13,7 +13,7 @@ The process is fairly standard:
* Create a branch with a descriptive name in the relevant repositories
* Make your changes, run tests, commit with a [descriptive message](https://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
* Submit pull requests with an explanation what has been changed and **why**
- * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Submit a filled out and signed [Contributor Agreement](https://cla.pivotal.io/) if needed (see below)
* Be patient. We will get to your pull request eventually
If what you are going to work on is a substantial change, please first ask the core team
@@ -28,7 +28,7 @@ See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
## Contributor Agreement
If you want to contribute a non-trivial change, please submit a signed copy of our
-[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+[Contributor Agreement](https://cla.pivotal.io/) around the time
you submit your pull request. This will make it much easier (in some cases, possible)
for the RabbitMQ team at Pivotal to merge your contribution.
diff --git a/deps/rabbit_common/Makefile b/deps/rabbit_common/Makefile
index 6a31a9ccbf..6400266753 100644
--- a/deps/rabbit_common/Makefile
+++ b/deps/rabbit_common/Makefile
@@ -16,9 +16,9 @@ define PROJECT_APP_EXTRA_KEYS
endef
LOCAL_DEPS = compiler crypto public_key sasl ssl syntax_tools tools xmerl
-DEPS = lager jsx ranch recon credentials_obfuscation
+DEPS = jsx recon credentials_obfuscation
-dep_credentials_obfuscation = hex 2.2.0
+dep_credentials_obfuscation = git https://github.com/rabbitmq/credentials-obfuscation.git master
# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be
# reviewed and merged.
@@ -47,7 +47,7 @@ WITHOUT = plugins/proper
PLT_APPS += mnesia crypto ssl
-include mk/rabbitmq-components.mk
-include erlang.mk
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
-include development.post.mk
diff --git a/deps/rabbit_common/erlang.mk b/deps/rabbit_common/erlang.mk
deleted file mode 100644
index defddc4865..0000000000
--- a/deps/rabbit_common/erlang.mk
+++ /dev/null
@@ -1,7746 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
-ERLANG_MK_WITHOUT = plugins/proper
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
-
-# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
-
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
-
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of statc BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another a key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstact time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour D NS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simply writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating Github-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = erlang library for JSON Web Token
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple, non-intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is a pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elasticsearch's REST interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = redis erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += esh_mk
-pkg_esh_mk_name = esh_mk
-pkg_esh_mk_description = esh template engine plugin for erlang.mk
-pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
-pkg_esh_mk_fetch = git
-pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
-pkg_esh_mk_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = TOML language erlang parser
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = http://fixprotocol.org/ implementation.
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - an Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = The guards and for Erlang parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
-pkg_gun_homepage = http://ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing (aka scaffolding) tool for Erlang, like rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang irc client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library for efficiently decoding and encoding JSON, written in C.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-Erlang, open-source implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = erlang driver for rethinkdb
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = library to handle mimetypes
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map an internal port to an external one using UPnP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang Oauth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transform for using new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between record and proplist
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = automatic Erlang node discovery via redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = pipelined erlang redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang Rest Client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy as nif for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an ID generator for message services.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elastic Search
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX style broken HTML parser in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = transit format for erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU based collation Erlang module
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = a simple erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler svn repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired by rebar xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based yaml loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = ZAB protocol implementation in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang.
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
-
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
-
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# They both use the core_dep_plugin macro.
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
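-
-# Illustrative sketch (not part of erlang.mk): based on the macro above,
-# DEP_EARLY_PLUGINS accepts either a dependency name, in which case
-# $(DEPS_DIR)/<name>/early-plugins.mk is included, or a <name>/<path>.mk
-# entry pointing at a specific file inside that dependency. The names
-# below are hypothetical.
-#
-# DEP_EARLY_PLUGINS = my_build_helpers
-# DEP_EARLY_PLUGINS = my_build_helpers/mk/early.mk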
-
-# Query functions.
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
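-
-# Illustrative sketch (not part of erlang.mk): given a hypothetical
-# dependency specification such as
-#
-#   dep_cowboy = git https://github.com/ninenines/cowboy 2.9.0
-#
-# the query functions above resolve to:
-#
-#   $(call query_fetch_method,cowboy)  -> git
-#   $(call query_repo,cowboy)          -> https://github.com/ninenines/cowboy
-#   $(call query_version,cowboy)       -> 2.9.0
-#   $(call query_absolute_path,cowboy) -> $(DEPS_DIR)/cowboy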
-
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
-
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly we don't want to include it here,
-# otherwise it'll be treated both as an app and as a top-level project.
-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
-
-export NO_AUTOPATCH
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
-
-# Optimization: don't recompile deps unless truly necessary.
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create an ebin directory for all apps to make sure Erlang recognizes them
-# as proper OTP applications when using -include_lib. This is a temporary
-# fix; a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
-
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies after they have been compiled
-# once. A developer working on the top-level project and on some of its
-# dependencies at the same time may want to change this behavior.
-# There are two solutions (see the usage sketch below):
-#     1. Set `FULL=1` so that all dependencies are visited and
-#        recursively recompiled if necessary.
-#     2. Set `FORCE_REBUILD=` to the specific list of dependencies that
-#        should be recompiled (instead of the whole set).
-
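-# Usage sketch (not part of erlang.mk): both knobs are typically passed on
-# the command line, e.g.
-#
-#   make FULL=1
-#   make FORCE_REBUILD="cowlib ranch"
-#
-# where "cowlib ranch" stands for whichever dependencies should be revisited.
-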
-FORCE_REBUILD ?=
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
-
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
-
-# Deps related targets.
-
-# @todo rename GNUmakefile and makefile into Makefile first, if they exist
-# While the Makefile could also be named GNUmakefile or makefile,
-# in practice only the name Makefile has been needed so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
-# if given. Do it for all 3 possible Makefile file names.
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
-
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
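-
-# Illustrative sketch (not part of erlang.mk): a git dependency is declared in
-# the project Makefile as "dep_<name> = git <repo> <commit>", which is what the
-# dep_repo/dep_commit lookups above consume. The dependency below is hypothetical.
-#
-# DEPS += my_dep
-# dep_my_dep = git https://github.com/example/my_dep 1.0.0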
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-# Hex only has a package version. No need to look in the Erlang.mk packages.
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
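-
-# Illustrative sketch (not part of erlang.mk): a Hex dependency is declared as
-# "dep_<name> = hex <version> [<package name>]"; the optional third word is the
-# Hex package name used in the tarball URL above when it differs from <name>.
-# The dependency below is hypothetical.
-#
-# DEPS += my_dep
-# dep_my_dep = hex 1.2.3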
-
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility purposes with older Erlang.mk configuration.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
-
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
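-
-# Usage sketch (not part of erlang.mk): autopatching of a given dependency can
-# be skipped through the NO_AUTOPATCH variable checked in the target above,
-# e.g. with a hypothetical dependency name:
-#
-# NO_AUTOPATCH = my_dep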
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
-
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- Output0 = [
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ],
- Output = case "é" of
- [233] -> unicode:characters_to_binary(Output0);
- _ -> Output0
- end,
- ok = file:write_file("$(1)", Output),
- halt()
-endef
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
- echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
-
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
- @:
-
-test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
-test_erlc_verbose_2 = set -x;
-test_erlc_verbose = $(test_erlc_verbose_$(V))
-
-define compile_test_erl
- $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(1)
-endef
-
-ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
-$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
- $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want to fail due to
-# warnings when used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
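-
-# Illustrative sketch (not part of erlang.mk): for a hypothetical project with
-#
-#   DEPS = my_dep
-#   dep_my_dep = git https://github.com/example/my_dep 1.0.0
-#   ERLC_OPTS = +debug_info
-#
-# `make rebar.config` renders roughly the following compatibility file:
-#
-#   {deps, [
-#   {my_dep,".*",{git,"https://github.com/example/my_dep","1.0.0"}}
-#   ]}.
-#   {erl_opts, [debug_info]}.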
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
- " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
- " list-templates List available templates"
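-
-# Usage sketch (not part of erlang.mk), with hypothetical names:
-#
-#   make bootstrap                            # OTP application skeleton
-#   make new t=gen_server n=my_server         # module from the tpl_gen_server template
-#   make new t=cowboy_rest n=my_handler in=my_app
-#   make list-templates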
-
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
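The ranch_protocol template above uses ranch:accept_ack/1, i.e. the pre-2.0 Ranch API. A hedged sketch of plugging a module rendered from it into a TCP listener, assuming the module was named my_protocol and Ranch 1.4+ (five-argument start_listener) is in use; the listener name and port are arbitrary:

    1> {ok, _} = application:ensure_all_started(ranch).
    2> {ok, _} = ranch:start_listener(my_listener, ranch_tcp,
                                      [{port, 5555}], my_protocol, []).

Each accepted connection then runs my_protocol:init/4, which acknowledges the socket and enters the placeholder loop/1.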
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
-
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
-
-list-templates:
- $(verbose) @echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code. The "gcc" MSYS2 package also doesn't.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
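Once `make new-nif n=my_nif` has rendered the two templates above (my_nif being a placeholder name), the Erlang stub loads the NIF from priv/ when the module is loaded and hello/1 is replaced by the C implementation. An illustrative shell session:

    1> my_nif:hello(world).
    {hello,world}
    2> my_nif:hello("not an atom").
    {error,badarg}

The C stub returns the {error,badarg} tuple rather than raising; if the NIF fails to load, hello/1 instead raises {not_loaded, my_nif} via erlang:nif_error/1.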
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
- "The CI_OTP variable must be defined with the Erlang versions" \
- "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
-
-# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifdef CONCUERROR_TESTS
-
-.PHONY: concuerror distclean-concuerror
-
-# Configuration
-
-CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
-CONCUERROR_OPTS ?=
-
-# Core targets.
-
-check:: concuerror
-
-ifndef KEEP_LOGS
-distclean:: distclean-concuerror
-endif
-
-# Plugin-specific targets.
-
-$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
- $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
- $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
-
-$(CONCUERROR_LOGS_DIR):
- $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
-
-define concuerror_html_report
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="utf-8">
-<title>Concuerror HTML report</title>
-</head>
-<body>
-<h1>Concuerror HTML report</h1>
-<p>Generated on $(concuerror_date)</p>
-<ul>
-$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
-</ul>
-</body>
-</html>
-endef
-
-concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
- $(eval concuerror_date := $(shell date))
- $(eval concuerror_targets := $^)
- $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
-
-define concuerror_target
-.PHONY: concuerror-$1-$2
-
-concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
- $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
- --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
- -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
- $$(CONCUERROR_OPTS) -m $1 -t $2
-endef
-
-$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
-
-distclean-concuerror:
- $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
-
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ct apps-ct distclean-ct
-
-# Configuration.
-
-CT_OPTS ?=
-
-ifneq ($(wildcard $(TEST_DIR)),)
-ifndef CT_SUITES
-CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
-endif
-endif
-CT_SUITES ?=
-CT_LOGS_DIR ?= $(CURDIR)/logs
-
-# Core targets.
-
-tests:: ct
-
-ifndef KEEP_LOGS
-distclean:: distclean-ct
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Common_test targets:" \
- " ct Run all the common_test suites for this project" \
- "" \
- "All your common_test suites have their associated targets." \
-	"A suite named http_SUITE can be run using the ct-http target."
-
-# Plugin-specific targets.
-
-CT_RUN = ct_run \
- -no_auto_compile \
- -noinput \
- -pa $(CURDIR)/ebin $(TEST_DIR) \
- -dir $(TEST_DIR) \
- -logdir $(CT_LOGS_DIR)
-
-ifeq ($(CT_SUITES),)
-ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
-else
-# We do not run tests if we are in an apps/* with no test directory.
-ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
-ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
-endif
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-define ct_app_target
-apps-ct-$1: test-build
- $$(MAKE) -C $1 ct IS_APP=1
-endef
-
-$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
-
-apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
-endif
-
-ifdef t
-ifeq (,$(findstring :,$t))
-CT_EXTRA = -group $t
-else
-t_words = $(subst :, ,$t)
-CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
-endif
-else
-ifdef c
-CT_EXTRA = -case $c
-else
-CT_EXTRA =
-endif
-endif
-
-define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
-endef
-
-$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
-
-distclean-ct:
- $(gen_verbose) rm -rf $(CT_LOGS_DIR)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: plt distclean-plt dialyze
-
-# Configuration.
-
-DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
-export DIALYZER_PLT
-
-PLT_APPS ?=
-DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-DIALYZER_PLT_OPTS ?=
-
-# Core targets.
-
-check:: dialyze
-
-distclean:: distclean-plt
-
-help::
- $(verbose) printf "%s\n" "" \
- "Dialyzer targets:" \
- " plt Build a PLT file for this project" \
- " dialyze Analyze the project using Dialyzer"
-
-# Plugin-specific targets.
-
-define filter_opts.erl
- Opts = init:get_plain_arguments(),
- {Filtered, _} = lists:foldl(fun
- (O, {Os, true}) -> {[O|Os], false};
- (O = "-D", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-I", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-pa", {Os, _}) -> {[O|Os], true};
- (_, Acc) -> Acc
- end, {[], false}, Opts),
- io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
- halt().
-endef
-
-# DIALYZER_PLT is a variable understood directly by Dialyzer.
-#
-# We append the path to erts at the end of the PLT. This works
-# because the PLT file is in the external term format and the
-# function binary_to_term/1 ignores any trailing data.
-$(DIALYZER_PLT): deps app
- $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
- while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
- $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
- erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
- $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
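The trailing-data behaviour this relies on can be reproduced from an Erlang shell; the appended path below is only an example string:

    1> Plt = term_to_binary({fake, plt}).
    2> binary_to_term(<<Plt/binary, "\n/usr/lib/erlang/lib/erts-12.2\n">>).
    {fake,plt}

binary_to_term/1 decodes the leading term and, as the comment above states, ignores the extra bytes, so Dialyzer still reads the PLT while the dialyze target below can compare the appended erts path against the current one.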
-
-plt: $(DIALYZER_PLT)
-
-distclean-plt:
- $(gen_verbose) rm -f $(DIALYZER_PLT)
-
-ifneq ($(wildcard $(DIALYZER_PLT)),)
-dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
- $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
- grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
- rm $(DIALYZER_PLT); \
- $(MAKE) plt; \
- fi
-else
-dialyze: $(DIALYZER_PLT)
-endif
- $(verbose) dialyzer --no_native `$(ERL) \
- -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
- -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
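With the defaults above (empty DTL_PREFIX, DTL_SUFFIX = _dtl, DTL_FULL_PATH unset), a template at templates/greeting.dtl is compiled into a greeting_dtl module. Rendering then follows the usual ErlyDTL convention; the template and variable names here are invented for illustration:

    1> {ok, Output} = greeting_dtl:render([{name, <<"RabbitMQ">>}]).
    2> io:put_chars(Output).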
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
- " escript Build an executable escript archive" \
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
- $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
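Because ESCRIPT_EMU_ARGS passes -escript main $(ESCRIPT_NAME), the resulting escript calls main/1 in the module named after the project. A hedged sketch of such an entry point, assuming PROJECT = my_tool:

    -module(my_tool).
    -export([main/1]).

    %% Called by the escript runtime with the command-line arguments.
    main(Args) ->
        io:format("arguments: ~p~n", [Args]).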
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: eunit apps-eunit
-
-# Configuration
-
-EUNIT_OPTS ?=
-EUNIT_ERL_OPTS ?=
-
-# Core targets.
-
-tests:: eunit
-
-help::
- $(verbose) printf "%s\n" "" \
- "EUnit targets:" \
- " eunit Run all the EUnit tests for this project"
-
-# Plugin-specific targets.
-
-define eunit.erl
- $(call cover.erl)
- CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
- ok -> ok;
- error -> halt(2)
- end,
- CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
- halt()
-endef
-
-EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
-else
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
-endif
-else
-EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
-EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
-
-EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
- $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
-
-eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
-ifneq ($(wildcard src/ $(TEST_DIR)),)
- $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-apps-eunit: test-build
- $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
- [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
- exit $$eunit_retcode
-endif
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Verbosity.
-
-proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
-proto_verbose = $(proto_verbose_$(V))
-
-# Core targets.
-
-ifneq ($(wildcard src/),)
-ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
-PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
-ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
-
-ifeq ($(PROTO_FILES),)
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
- $(verbose) :
-else
-# Rebuild proto files when the Makefile changes.
-# We exclude $(PROJECT).d to avoid a circular dependency.
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(PROTO_FILES); \
- fi
- $(verbose) touch $@
-
-$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
-endif
-
-ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
-define compile_proto.erl
- [begin
- protobuffs_compile:generate_source(F, [
- {output_include_dir, "./include"},
- {output_src_dir, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-else
-define compile_proto.erl
- [begin
- gpb_compile:file(F, [
- {include_as_lib, true},
- {module_name_suffix, "_pb"},
- {o_hrl, "./include"},
- {o_erl, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-endif
-
-ifneq ($(PROTO_FILES),)
-$(PROJECT).d:: $(PROTO_FILES)
- $(verbose) mkdir -p ebin/ include/
- $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
-endif
-endif
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
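For reference, this snippet only looks at two terms in relx.config; a minimal, purely illustrative file it would accept (release and application names are placeholders):

    {release, {my_release, "1.0.0"}, [my_app, sasl]}.
    {extended_start_script, true}.

With such a file, RELX_REL_NAME becomes my_release, RELX_REL_VSN becomes 1.0.0, and the third word makes RELX_REL_CMD expand to console for the run target below.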
-
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
-	" shell Run an Erlang shell with SHELL_OPTS or a reasonable default"
-
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
-
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
-	"ReST sources and the 'conf.py' file are expected in the directory pointed to by" \
-	"SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists the formats to build (only" \
-	"the 'html' format is generated by default); the target directory can be specified by" \
- 'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
- "Additional Sphinx options can be set in SPHINX_OPTS."
-
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
-
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
-.PHONY: triq
-
-# Targets.
-
-tests:: triq
-
-define triq_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
- module -> triq:check($(2));
- function -> triq:check($(2))
- end,
- CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
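The check above simply calls triq:check/1, so properties only need Triq's usual shape. An illustrative property module (all names invented):

    -module(prop_lists).
    -include_lib("triq/include/triq.hrl").
    -export([prop_reverse_twice/0]).

    prop_reverse_twice() ->
        ?FORALL(L, list(int()),
                lists:reverse(lists:reverse(L)) =:= L).

`make triq t=prop_lists` would then exercise only this module through the module branch above.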
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-triq: test-build cover-data-dir
- $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
-else
-triq: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
-endif
-else
-triq: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
- ' xref Run Xrefr using $$XREF_CONFIG as config file if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-COVER_REPORT_DIR ?= cover
-COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
-
-ifdef COVER
-COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
-COVER_DEPS ?=
-endif
-
-# Code coverage for Common Test.
-
-ifdef COVER
-ifdef CT_RUN
-ifneq ($(wildcard $(TEST_DIR)),)
-test-build:: $(TEST_DIR)/ct.cover.spec
-
-$(TEST_DIR)/ct.cover.spec: cover-data-dir
- $(gen_verbose) printf "%s\n" \
- "{incl_app, '$(PROJECT)', details}." \
- "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
- $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
- $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
-
-CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
-endif
-endif
-endif
-
-# Code coverage for other tools.
-
-ifdef COVER
-define cover.erl
- CoverSetup = fun() ->
- Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
- $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
- $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
- end
- end || Dir <- Dirs]
- end,
- CoverExport = fun(Filename) -> cover:export(Filename) end,
-endef
-else
-define cover.erl
- CoverSetup = fun() -> ok end,
- CoverExport = fun(_) -> ok end,
-endef
-endif
-
-# Core targets
-
-ifdef COVER
-ifneq ($(COVER_REPORT_DIR),)
-tests::
- $(verbose) $(MAKE) --no-print-directory cover-report
-endif
-
-cover-data-dir: | $(COVER_DATA_DIR)
-
-$(COVER_DATA_DIR):
- $(verbose) mkdir -p $(COVER_DATA_DIR)
-else
-cover-data-dir:
-endif
-
-clean:: coverdata-clean
-
-ifneq ($(COVER_REPORT_DIR),)
-distclean:: cover-report-clean
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Cover targets:" \
-	" cover-report Generate an HTML coverage report from previously collected" \
- " cover data." \
- " all.coverdata Merge all coverdata files into all.coverdata." \
- "" \
-	"If COVER=1 is set, coverage data is generated by the eunit and ct targets. The" \
-	"tests target additionally generates an HTML coverage report from the combined" \
- "coverdata files from each of these testing tools. HTML reports can be disabled" \
- "by setting COVER_REPORT_DIR to empty."
-
-# Plugin specific targets
-
-COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
-
-.PHONY: coverdata-clean
-coverdata-clean:
- $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
-
-# Merge all coverdata files into one.
-define cover_export.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
-endef
-
-all.coverdata: $(COVERDATA) cover-data-dir
- $(gen_verbose) $(call erlang,$(cover_export.erl))
-
-# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
-# empty if you want the coverdata files but not the HTML report.
-ifneq ($(COVER_REPORT_DIR),)
-
-.PHONY: cover-report-clean cover-report
-
-cover-report-clean:
- $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
-ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
- $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
-endif
-
-ifeq ($(COVERDATA),)
-cover-report:
-else
-
-# Modules which include eunit.hrl always contain one line without coverage
-# because eunit defines test/0 which is never called. We compensate for this.
-EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
- grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
- | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
-
-define cover_report.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- Ms = cover:imported_modules(),
- [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
- ++ ".COVER.html", [html]) || M <- Ms],
- Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
- EunitHrlMods = [$(EUNIT_HRL_MODS)],
- Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
- true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
- TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
- TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
- Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
- TotalPerc = Perc(TotalY, TotalN),
- {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
- io:format(F, "<!DOCTYPE html><html>~n"
- "<head><meta charset=\"UTF-8\">~n"
- "<title>Coverage report</title></head>~n"
- "<body>~n", []),
- io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
- io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
- [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
- "<td>~p%</td></tr>~n",
- [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
- How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
- Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
- io:format(F, "</table>~n"
- "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
- "</body></html>", [How, Date]),
- halt().
-endef
-
-cover-report:
- $(verbose) mkdir -p $(COVER_REPORT_DIR)
- $(gen_verbose) $(call erlang,$(cover_report.erl))
-
-endif
-endif # ifneq ($(COVER_REPORT_DIR),)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
-
-endif
-endif
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
-
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
-
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are also included no
-# matter the type of requested dependencies.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
-
-# Allow fetch-deps to be used together with $(DEP_TYPES) to fetch multiple
-# types of dependencies with a single target.
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
-
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
diff --git a/deps/rabbit_common/include/logging.hrl b/deps/rabbit_common/include/logging.hrl
new file mode 100644
index 0000000000..1ad0f040fd
--- /dev/null
+++ b/deps/rabbit_common/include/logging.hrl
@@ -0,0 +1,19 @@
+-define(RMQLOG_SUPER_DOMAIN_NAME, rabbitmq).
+-define(RMQLOG_DOMAIN_GLOBAL, [?RMQLOG_SUPER_DOMAIN_NAME]).
+-define(DEFINE_RMQLOG_DOMAIN(Domain), [?RMQLOG_SUPER_DOMAIN_NAME, Domain]).
+
+-define(RMQLOG_DOMAIN_CHAN, ?DEFINE_RMQLOG_DOMAIN(channel)).
+-define(RMQLOG_DOMAIN_CONN, ?DEFINE_RMQLOG_DOMAIN(connection)).
+-define(RMQLOG_DOMAIN_FEAT_FLAGS, ?DEFINE_RMQLOG_DOMAIN(feature_flags)).
+-define(RMQLOG_DOMAIN_MIRRORING, ?DEFINE_RMQLOG_DOMAIN(mirroring)).
+-define(RMQLOG_DOMAIN_PRELAUNCH, ?DEFINE_RMQLOG_DOMAIN(prelaunch)).
+-define(RMQLOG_DOMAIN_QUEUE, ?DEFINE_RMQLOG_DOMAIN(queue)).
+-define(RMQLOG_DOMAIN_UPGRADE, ?DEFINE_RMQLOG_DOMAIN(upgrade)).
+
+-define(DEFAULT_LOG_LEVEL, info).
+-define(FILTER_NAME, rmqlog_filter).
+
+-define(IS_STD_H_COMPAT(Mod),
+ Mod =:= logger_std_h orelse Mod =:= rabbit_logger_std_h).
+-define(IS_STDDEV(DevName),
+ DevName =:= standard_io orelse DevName =:= standard_error).
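These macros are plain OTP logger domains, so callers attach them through standard logger metadata. A hedged usage sketch (the module, function and message are invented):

    -module(rabbit_queue_event_logger).   %% hypothetical module name
    -include_lib("kernel/include/logger.hrl").
    -include_lib("rabbit_common/include/logging.hrl").
    -export([log_queue_ready/1]).

    log_queue_ready(QName) ->
        ?LOG_INFO("queue ~ts is ready", [QName],
                  #{domain => ?RMQLOG_DOMAIN_QUEUE}).

A domain-based filter (such as one registered under ?FILTER_NAME) can then select or drop events by their [rabbitmq, queue] prefix.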
diff --git a/deps/rabbit_common/include/rabbit.hrl b/deps/rabbit_common/include/rabbit.hrl
index 707f8099e0..1fb3d4e6ea 100644
--- a/deps/rabbit_common/include/rabbit.hrl
+++ b/deps/rabbit_common/include/rabbit.hrl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-include("resource.hrl").
@@ -168,6 +168,7 @@
node,
vhost,
name,
+ %% Main connection process pid
pid,
protocol,
%% network or direct
@@ -208,7 +209,7 @@
}).
%%----------------------------------------------------------------------------
--define(COPYRIGHT_MESSAGE, "Copyright (c) 2007-2020 VMware, Inc. or its affiliates.").
+-define(COPYRIGHT_MESSAGE, "Copyright (c) 2007-2021 VMware, Inc. or its affiliates.").
-define(INFORMATION_MESSAGE, "Licensed under the MPL 2.0. Website: https://rabbitmq.com").
%% EMPTY_FRAME_SIZE, 8 = 1 + 2 + 4 + 1
@@ -224,9 +225,11 @@
-define(SUPERVISOR_WAIT,
rabbit_misc:get_env(rabbit, supervisor_shutdown_timeout, infinity)).
-define(WORKER_WAIT,
- rabbit_misc:get_env(rabbit, worker_shutdown_timeout, 30000)).
+ rabbit_misc:get_env(rabbit, worker_shutdown_timeout, 300000)).
-define(MSG_STORE_WORKER_WAIT,
rabbit_misc:get_env(rabbit, msg_store_shutdown_timeout, 600000)).
+-define(CLASSIC_QUEUE_WORKER_WAIT,
+ rabbit_misc:get_env(rabbit, classic_queue_shutdown_timeout, 600000)).
-define(HIBERNATE_AFTER_MIN, 1000).
-define(DESIRED_HIBERNATE, 10000).
@@ -265,3 +268,6 @@
%% Execution timeout of connection and channel tracking operations
-define(TRACKING_EXECUTION_TIMEOUT,
rabbit_misc:get_env(rabbit, tracking_execution_timeout, 5000)).
+
+%% 3.6, 3.7, early 3.8
+-define(LEGACY_INDEX_SEGMENT_ENTRY_COUNT, 16384).
diff --git a/deps/rabbit_common/include/rabbit_core_metrics.hrl b/deps/rabbit_common/include/rabbit_core_metrics.hrl
index 17ffa2535b..c3609a655e 100644
--- a/deps/rabbit_common/include/rabbit_core_metrics.hrl
+++ b/deps/rabbit_common/include/rabbit_core_metrics.hrl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% These tables contain the raw metrics as stored by RabbitMQ core
diff --git a/deps/rabbit_common/include/rabbit_memory.hrl b/deps/rabbit_common/include/rabbit_memory.hrl
index c9991550fb..0663330b2f 100644
--- a/deps/rabbit_common/include/rabbit_memory.hrl
+++ b/deps/rabbit_common/include/rabbit_memory.hrl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-define(DEFAULT_MEMORY_CHECK_INTERVAL, 1000).
diff --git a/deps/rabbit_common/include/rabbit_misc.hrl b/deps/rabbit_common/include/rabbit_misc.hrl
index 98d4051a27..edc49a5843 100644
--- a/deps/rabbit_common/include/rabbit_misc.hrl
+++ b/deps/rabbit_common/include/rabbit_misc.hrl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-define(RPC_TIMEOUT, 15000).
diff --git a/deps/rabbit_common/include/rabbit_msg_store.hrl b/deps/rabbit_common/include/rabbit_msg_store.hrl
index 9d184ae153..97a64b242e 100644
--- a/deps/rabbit_common/include/rabbit_msg_store.hrl
+++ b/deps/rabbit_common/include/rabbit_msg_store.hrl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-include("rabbit.hrl").
diff --git a/deps/rabbit_common/include/resource.hrl b/deps/rabbit_common/include/resource.hrl
index 5b2697f4d0..1bd9b0a421 100644
--- a/deps/rabbit_common/include/resource.hrl
+++ b/deps/rabbit_common/include/resource.hrl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-record(resource, {
diff --git a/deps/rabbit_common/mk/rabbitmq-build.mk b/deps/rabbit_common/mk/rabbitmq-build.mk
index 2fedcf629b..9e17a5badc 100644
--- a/deps/rabbit_common/mk/rabbitmq-build.mk
+++ b/deps/rabbit_common/mk/rabbitmq-build.mk
@@ -18,24 +18,7 @@ ifneq ($(filter-out rabbit_common amqp_client,$(PROJECT)),)
RMQ_ERLC_OPTS += -pa $(DEPS_DIR)/rabbitmq_cli/_build/dev/lib/rabbitmqctl/ebin
endif
-# Add Lager parse_transform module and our default Lager extra sinks.
-LAGER_EXTRA_SINKS += rabbit_log \
- rabbit_log_channel \
- rabbit_log_connection \
- rabbit_log_feature_flags \
- rabbit_log_federation \
- rabbit_log_ldap \
- rabbit_log_mirroring \
- rabbit_log_osiris \
- rabbit_log_prelaunch \
- rabbit_log_queue \
- rabbit_log_ra \
- rabbit_log_shovel \
- rabbit_log_upgrade
-lager_extra_sinks = $(subst $(space),$(comma),$(LAGER_EXTRA_SINKS))
-
-RMQ_ERLC_OPTS += +'{parse_transform,lager_transform}' \
- +'{lager_extra_sinks,[$(lager_extra_sinks)]}'
+RMQ_ERLC_OPTS += +deterministic
# Push our compilation options to both the normal and test ERLC_OPTS.
ERLC_OPTS += $(RMQ_ERLC_OPTS)
diff --git a/deps/rabbit_common/mk/rabbitmq-components.mk b/deps/rabbit_common/mk/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/rabbit_common/mk/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Define the default goal as `all` because this file defines some targets
-# before the inclusion of erlang.mk, which would otherwise cause the wrong
-# target to become the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
-
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to check out branches which match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch or fallback to `stable` or `master` whichever was the
-# base of the topic branch.
-
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
-
-# Third-party dependencies version pinning.
-#
-# We do that in this file, which is copied in all projects, to ensure
-# all projects use the same versions. It avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# Erlang.mk does not rebuild dependencies by default, once they were
-# compiled once, except for those listed in the `$(FORCE_REBUILD)`
-# variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this eases
-# the work on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project
-# repository URL, if it's a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name is replaced by the
-# target's properties:
-# eg. rabbitmq-common is replaced by rabbitmq-codegen
-# eg. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fallback to RabbitMQ
-# upstream which is GitHub.
-
-# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following pattern:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
-
-# Macro to replace both the project's name (eg. "rabbit_common") and
-# repository name (eg. "rabbitmq-common") by the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespaces in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level's one.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is a coincidence that we found a
-# `rabbitmq-components.mk` up upper directories.
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
diff --git a/deps/rabbit_common/mk/rabbitmq-dist.mk b/deps/rabbit_common/mk/rabbitmq-dist.mk
index 3e17a27939..bc126e963b 100644
--- a/deps/rabbit_common/mk/rabbitmq-dist.mk
+++ b/deps/rabbit_common/mk/rabbitmq-dist.mk
@@ -152,8 +152,7 @@ $(error DIST_PLUGINS_LIST ($(DIST_PLUGINS_LIST)) is missing)
endif
$(eval $(foreach path, \
- $(filter-out %/looking_glass %/lz4, \
- $(sort $(shell cat $(DIST_PLUGINS_LIST))) $(CURDIR)), \
+ $(sort $(shell cat $(DIST_PLUGINS_LIST))) $(CURDIR), \
$(call ez_target,$(if $(filter $(path),$(CURDIR)),$(PROJECT),$(notdir $(path))),$(path))))
endif
endif
@@ -186,10 +185,13 @@ $(ERLANGMK_DIST_EZS):
$(call core_unix_path,$(SRC_DIR))/ $(call core_unix_path,$(EZ_DIR))/
@# Give a chance to the application to make any modification it
@# wants to the tree before we make an archive.
- $(verbose) ! (test -f $(SRC_DIR)/rabbitmq-components.mk \
- && grep -q '^prepare-dist::' $(SRC_DIR)/Makefile) || \
+ifneq ($(RABBITMQ_COMPONENTS),)
+ifneq ($(filter $(PROJECT),$(RABBITMQ_COMPONENTS)),)
+ $(verbose) ! (grep -q '^prepare-dist::' $(SRC_DIR)/Makefile) || \
$(MAKE) --no-print-directory -C $(SRC_DIR) prepare-dist \
APP=$(APP) VSN=$(VSN) EZ_DIR=$(EZ_DIR)
+endif
+endif
ifneq ($(DIST_AS_EZS),)
$(verbose) (cd $(DIST_DIR) && \
find "$(basename $(notdir $@))" | LC_COLLATE=C sort \
@@ -263,7 +265,7 @@ do-dist:: $(DIST_EZS)
CLI_SCRIPTS_LOCK = $(CLI_SCRIPTS_DIR).lock
CLI_ESCRIPTS_LOCK = $(CLI_ESCRIPTS_DIR).lock
-ifneq ($(filter-out rabbit_common amqp10_common,$(PROJECT)),)
+ifneq ($(filter-out rabbit_common amqp10_common rabbitmq_stream_common,$(PROJECT)),)
dist:: install-cli
test-build:: install-cli
endif
diff --git a/deps/rabbit_common/mk/rabbitmq-early-test.mk b/deps/rabbit_common/mk/rabbitmq-early-test.mk
index f4f00173b3..9f4f325d4d 100644
--- a/deps/rabbit_common/mk/rabbitmq-early-test.mk
+++ b/deps/rabbit_common/mk/rabbitmq-early-test.mk
@@ -109,7 +109,6 @@ endif
ifneq ("$(RABBITMQ_TRACER)","")
BUILD_DEPS += looking_glass
-dep_looking_glass = git https://github.com/rabbitmq/looking-glass master
ERL_LIBS := "$(ERL_LIBS):../looking_glass:../lz4"
export RABBITMQ_TRACER
endif
diff --git a/deps/rabbit_common/mk/rabbitmq-hexpm.mk b/deps/rabbit_common/mk/rabbitmq-hexpm.mk
index 24281b1321..bfe559608d 100644
--- a/deps/rabbit_common/mk/rabbitmq-hexpm.mk
+++ b/deps/rabbit_common/mk/rabbitmq-hexpm.mk
@@ -30,35 +30,30 @@ define RABBITMQ_HEXPM_DEFAULT_FILES
"src"
endef
-ifeq ($(PROJECT),rabbit_common)
-RMQ_COMPONENTS_PREFIX = mk
-RMQ_COMPONENTS_HEXPM = mk/rabbitmq-components.hexpm.mk
-else
-RMQ_COMPONENTS_PREFIX = .
+RMQ_COMPONENTS_PLAIN = $(DEPS_DIR)/../rabbitmq-components.mk
RMQ_COMPONENTS_HEXPM = $(DEPS_DIR)/rabbit_common/mk/rabbitmq-components.hexpm.mk
-endif
hex-publish: $(HEXPM_CLI) app rebar.config
$(gen_verbose) echo "$(PROJECT_DESCRIPTION) $(PROJECT_VERSION)" \
> git-revisions.txt
$(verbose) mv \
- $(RMQ_COMPONENTS_PREFIX)/rabbitmq-components.mk \
+ $(RMQ_COMPONENTS_PLAIN) \
rabbitmq-components.mk.not-hexpm
$(verbose) cp \
$(RMQ_COMPONENTS_HEXPM) \
- $(RMQ_COMPONENTS_PREFIX)/rabbitmq-components.mk
+ $(RMQ_COMPONENTS_PLAIN)
$(verbose) grep -E '^dep.* = hex' \
rabbitmq-components.mk.not-hexpm \
- >> $(RMQ_COMPONENTS_PREFIX)/rabbitmq-components.mk
+ >> $(RMQ_COMPONENTS_PLAIN)
$(verbose) touch -r \
rabbitmq-components.mk.not-hexpm \
- $(RMQ_COMPONENTS_PREFIX)/rabbitmq-components.mk
+ $(RMQ_COMPONENTS_PLAIN)
$(verbose) trap '\
rm -f git-revisions.txt rebar.lock; \
if test -f rabbitmq-components.mk.not-hexpm; then \
mv \
rabbitmq-components.mk.not-hexpm \
- $(RMQ_COMPONENTS_PREFIX)/rabbitmq-components.mk; \
+ $(RMQ_COMPONENTS_PLAIN); \
fi' EXIT INT; \
$(HEXPM_CLI) publish
diff --git a/deps/rabbit_common/mk/rabbitmq-run.mk b/deps/rabbit_common/mk/rabbitmq-run.mk
index bef62c03f7..1c9edc8074 100644
--- a/deps/rabbit_common/mk/rabbitmq-run.mk
+++ b/deps/rabbit_common/mk/rabbitmq-run.mk
@@ -67,9 +67,13 @@ node_enabled_plugins_file = $(call node_tmpdir,$(1))/enabled_plugins
# Broker startup variables for the test environment.
ifeq ($(PLATFORM),msys2)
-HOSTNAME := $(COMPUTERNAME)
+HOSTNAME = $(COMPUTERNAME)
else
-HOSTNAME := $(shell hostname -s)
+ifeq ($(PLATFORM),solaris)
+HOSTNAME = $(shell hostname | sed 's@\..*@@')
+else
+HOSTNAME = $(shell hostname -s)
+endif
endif
RABBITMQ_NODENAME ?= rabbit@$(HOSTNAME)
@@ -104,6 +108,8 @@ else
RMQ_PLUGINS_DIR=$(CURDIR)/$(DIST_DIR)
endif
+node_plugins_dir = $(if $(RABBITMQ_PLUGINS_DIR),$(RABBITMQ_PLUGINS_DIR),$(if $(EXTRA_PLUGINS_DIR),$(EXTRA_PLUGINS_DIR):$(RMQ_PLUGINS_DIR),$(RMQ_PLUGINS_DIR)))
+
define basic_script_env_settings
MAKE="$(MAKE)" \
ERL_LIBS="$(DIST_ERL_LIBS)" \
@@ -118,7 +124,7 @@ RABBITMQ_MNESIA_DIR="$(call node_mnesia_dir,$(2))" \
RABBITMQ_QUORUM_DIR="$(call node_quorum_dir,$(2))" \
RABBITMQ_STREAM_DIR="$(call node_stream_dir,$(2))" \
RABBITMQ_FEATURE_FLAGS_FILE="$(call node_feature_flags_file,$(2))" \
-RABBITMQ_PLUGINS_DIR="$(if $(RABBITMQ_PLUGINS_DIR),$(RABBITMQ_PLUGINS_DIR),$(RMQ_PLUGINS_DIR))" \
+RABBITMQ_PLUGINS_DIR="$(call node_plugins_dir)" \
RABBITMQ_PLUGINS_EXPAND_DIR="$(call node_plugins_expand_dir,$(2))" \
RABBITMQ_SERVER_START_ARGS="-ra wal_sync_method sync $(RABBITMQ_SERVER_START_ARGS)" \
RABBITMQ_ENABLED_PLUGINS="$(RABBITMQ_ENABLED_PLUGINS)"
@@ -164,9 +170,7 @@ define test_rabbitmq_config
[
{rabbit, [
$(if $(RABBITMQ_NODE_PORT), {tcp_listeners$(comma) [$(RABBITMQ_NODE_PORT)]}$(comma),)
- {loopback_users, []},
- {log, [{file, [{level, debug}]},
- {console, [{level, debug}]}]}
+ {loopback_users, []}
]},
{rabbitmq_management, [
$(if $(RABBITMQ_NODE_PORT), {listener$(comma) [{port$(comma) $(shell echo "$$(($(RABBITMQ_NODE_PORT) + 10000))")}]},)
@@ -174,26 +178,31 @@ $(if $(RABBITMQ_NODE_PORT), {listener$(comma) [{port$(comma) $(shell echo "
{rabbitmq_mqtt, [
$(if $(RABBITMQ_NODE_PORT), {tcp_listeners$(comma) [$(shell echo "$$((1883 + $(RABBITMQ_NODE_PORT) - 5672))")]},)
]},
+ {rabbitmq_web_mqtt, [
+$(if $(RABBITMQ_NODE_PORT), {tcp_config$(comma) [{port$(comma) $(shell echo "$$((15675 + $(RABBITMQ_NODE_PORT) - 5672))")}]},)
+ ]},
+ {rabbitmq_web_mqtt_examples, [
+$(if $(RABBITMQ_NODE_PORT), {listener$(comma) [{port$(comma) $(shell echo "$$((15670 + $(RABBITMQ_NODE_PORT) - 5672))")}]},)
+ ]},
{rabbitmq_stomp, [
$(if $(RABBITMQ_NODE_PORT), {tcp_listeners$(comma) [$(shell echo "$$((61613 + $(RABBITMQ_NODE_PORT) - 5672))")]},)
]},
+ {rabbitmq_web_stomp, [
+$(if $(RABBITMQ_NODE_PORT), {tcp_config$(comma) [{port$(comma) $(shell echo "$$((15674 + $(RABBITMQ_NODE_PORT) - 5672))")}]},)
+ ]},
+ {rabbitmq_web_stomp_examples, [
+$(if $(RABBITMQ_NODE_PORT), {listener$(comma) [{port$(comma) $(shell echo "$$((15670 + $(RABBITMQ_NODE_PORT) - 5672))")}]},)
+ ]},
+ {rabbitmq_stream, [
+$(if $(RABBITMQ_NODE_PORT), {tcp_listeners$(comma) [$(shell echo "$$((5552 + $(RABBITMQ_NODE_PORT) - 5672))")]},)
+ ]},
+ {rabbitmq_prometheus, [
+$(if $(RABBITMQ_NODE_PORT), {tcp_config$(comma) [{port$(comma) $(shell echo "$$((15692 + $(RABBITMQ_NODE_PORT) - 5672))")}]},)
+ ]},
{ra, [
{data_dir, "$(RABBITMQ_QUORUM_DIR)"},
{wal_sync_method, sync}
]},
- {lager, [
- {colors, [
- %% https://misc.flogisoft.com/bash/tip_colors_and_formatting
- {debug, "\\\e[0;34m" },
- {info, "\\\e[1;37m" },
- {notice, "\\\e[1;36m" },
- {warning, "\\\e[1;33m" },
- {error, "\\\e[1;31m" },
- {critical, "\\\e[1;35m" },
- {alert, "\\\e[1;44m" },
- {emergency, "\\\e[1;41m" }
- ]}
- ]},
{osiris, [
{data_dir, "$(RABBITMQ_STREAM_DIR)"}
]}
@@ -206,8 +215,6 @@ define test_rabbitmq_config_with_tls
[
{rabbit, [
{loopback_users, []},
- {log, [{file, [{level, debug}]},
- {console, [{level, debug}]}]},
{ssl_listeners, [5671]},
{ssl_options, [
{cacertfile, "$(TEST_TLS_CERTS_DIR_in_config)/testca/cacert.pem"},
@@ -234,19 +241,6 @@ define test_rabbitmq_config_with_tls
{data_dir, "$(RABBITMQ_QUORUM_DIR)"},
{wal_sync_method, sync}
]},
- {lager, [
- {colors, [
- %% https://misc.flogisoft.com/bash/tip_colors_and_formatting
- {debug, "\\\e[0;34m" },
- {info, "\\\e[1;37m" },
- {notice, "\\\e[1;36m" },
- {warning, "\\\e[1;33m" },
- {error, "\\\e[1;31m" },
- {critical, "\\\e[1;35m" },
- {alert, "\\\e[1;44m" },
- {emergency, "\\\e[1;41m" }
- ]}
- ]},
{osiris, [
{data_dir, "$(RABBITMQ_STREAM_DIR)"}
]}
@@ -286,8 +280,8 @@ endif
endif
run-broker run-tls-broker: RABBITMQ_CONFIG_FILE := $(basename $(TEST_CONFIG_FILE))
-run-broker: config := $(test_rabbitmq_config)
-run-tls-broker: config := $(test_rabbitmq_config_with_tls)
+run-broker: config = $(test_rabbitmq_config)
+run-tls-broker: config = $(test_rabbitmq_config_with_tls)
run-tls-broker: $(TEST_TLS_CERTS_DIR)
run-broker run-tls-broker: node-tmpdir $(DIST_TARGET) $(TEST_CONFIG_FILE)
@@ -377,7 +371,7 @@ stop-node:
# Start a RabbitMQ cluster in the background.
# --------------------------------------------------------------------
-NODES ?= 2
+NODES ?= 3
start-brokers start-cluster: $(DIST_TARGET)
@for n in $$(seq $(NODES)); do \
@@ -390,9 +384,13 @@ start-brokers start-cluster: $(DIST_TARGET)
-rabbit loopback_users [] \
-rabbitmq_management listener [{port,$$((15672 + $$n - 1))}] \
-rabbitmq_mqtt tcp_listeners [$$((1883 + $$n - 1))] \
+ -rabbitmq_web_mqtt tcp_config [{port,$$((1893 + $$n - 1))}] \
+ -rabbitmq_web_mqtt_examples listener [{port,$$((1903 + $$n - 1))}] \
-rabbitmq_stomp tcp_listeners [$$((61613 + $$n - 1))] \
+ -rabbitmq_web_stomp tcp_config [{port,$$((61623 + $$n - 1))}] \
+ -rabbitmq_web_stomp_examples listener [{port,$$((61633 + $$n - 1))}] \
-rabbitmq_prometheus tcp_config [{port,$$((15692 + $$n - 1))}] \
- -rabbitmq_stream tcp_listeners [$$((5555 + $$n - 1))] \
+ -rabbitmq_stream tcp_listeners [$$((5552 + $$n - 1))] \
"; \
if test '$@' = 'start-cluster' && test "$$nodename1"; then \
ERL_LIBS="$(DIST_ERL_LIBS)" \
diff --git a/deps/rabbit_common/mk/rabbitmq-tools.mk b/deps/rabbit_common/mk/rabbitmq-tools.mk
index 6672153cb0..480d8c5d07 100644
--- a/deps/rabbit_common/mk/rabbitmq-tools.mk
+++ b/deps/rabbit_common/mk/rabbitmq-tools.mk
@@ -1,7 +1,11 @@
ifeq ($(PLATFORM),msys2)
-HOSTNAME := $(COMPUTERNAME)
+HOSTNAME = $(COMPUTERNAME)
else
-HOSTNAME := $(shell hostname -s)
+ifeq ($(PLATFORM),solaris)
+HOSTNAME = $(shell hostname | sed 's@\..*@@')
+else
+HOSTNAME = $(shell hostname -s)
+endif
endif
READY_DEPS = $(foreach DEP,\
@@ -25,67 +29,6 @@ show-current-git-fetch-url:
show-current-git-push-url:
@echo $(RABBITMQ_CURRENT_PUSH_URL)
-.PHONY: update-erlang-mk update-rabbitmq-components.mk
-
-update-erlang-mk: erlang-mk
- $(verbose) if test "$(DO_COMMIT)" = 'yes'; then \
- git diff --quiet -- erlang.mk \
- || git commit -m 'Update erlang.mk' -- erlang.mk; \
- fi
- $(verbose) for repo in $(READY_DEPS:%=$(DEPS_DIR)/%); do \
- ! test -f $$repo/erlang.mk \
- || $(MAKE) -C $$repo erlang-mk; \
- if test "$(DO_COMMIT)" = 'yes'; then \
- (cd $$repo; \
- git diff --quiet -- erlang.mk \
- || git commit -m 'Update erlang.mk' -- erlang.mk); \
- fi; \
- done
-
-# --------------------------------------------------------------------
-# rabbitmq-components.mk checks.
-# --------------------------------------------------------------------
-
-UPSTREAM_RMQ_COMPONENTS_MK = $(DEPS_DIR)/rabbit_common/mk/rabbitmq-components.mk
-
-ifeq ($(PROJECT),rabbit_common)
-check-rabbitmq-components.mk:
- @:
-else
-check-rabbitmq-components.mk:
- $(verbose) cmp -s rabbitmq-components.mk \
- $(UPSTREAM_RMQ_COMPONENTS_MK) || \
- (echo "error: rabbitmq-components.mk must be updated!" 1>&2; \
- false)
-endif
-
-ifeq ($(PROJECT),rabbit_common)
-rabbitmq-components-mk:
- @:
-else
-rabbitmq-components-mk:
-ifeq ($(FORCE),yes)
- $(gen_verbose) cp -a $(UPSTREAM_RMQ_COMPONENTS_MK) .
-else
- $(gen_verbose) if test -d .git && test -d $(DEPS_DIR)/rabbit_common/.git; then \
- upstream_branch=$$(LANG=C git -C $(DEPS_DIR)/rabbit_common branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}'); \
- local_branch=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}'); \
- test "$$local_branch" = "$$upstream_branch" || exit 0; \
- fi; \
- cp -a $(UPSTREAM_RMQ_COMPONENTS_MK) .
-endif
-ifeq ($(DO_COMMIT),yes)
- $(verbose) git diff --quiet rabbitmq-components.mk \
- || git commit -m 'Update rabbitmq-components.mk' rabbitmq-components.mk
-endif
-endif
-
-update-rabbitmq-components-mk: rabbitmq-components-mk
- $(verbose) for repo in $(READY_DEPS:%=$(DEPS_DIR)/%); do \
- ! test -f $$repo/rabbitmq-components.mk \
- || $(MAKE) -C $$repo rabbitmq-components-mk; \
- done
-
update-contributor-code-of-conduct:
$(verbose) for repo in $(READY_DEPS:%=$(DEPS_DIR)/%); do \
cp $(DEPS_DIR)/rabbit_common/CODE_OF_CONDUCT.md $$repo/CODE_OF_CONDUCT.md; \
diff --git a/deps/rabbit_common/src/app_utils.erl b/deps/rabbit_common/src/app_utils.erl
index df965575be..d1476f3596 100644
--- a/deps/rabbit_common/src/app_utils.erl
+++ b/deps/rabbit_common/src/app_utils.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(app_utils).
diff --git a/deps/rabbit_common/src/code_version.erl b/deps/rabbit_common/src/code_version.erl
index 76e9c75c7f..16889e3c2e 100644
--- a/deps/rabbit_common/src/code_version.erl
+++ b/deps/rabbit_common/src/code_version.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(code_version).
diff --git a/deps/rabbit_common/src/credit_flow.erl b/deps/rabbit_common/src/credit_flow.erl
index da1d9606c1..f8adc788f1 100644
--- a/deps/rabbit_common/src/credit_flow.erl
+++ b/deps/rabbit_common/src/credit_flow.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(credit_flow).
diff --git a/deps/rabbit_common/src/delegate.erl b/deps/rabbit_common/src/delegate.erl
index a73d5e64b1..b2ea3c499c 100644
--- a/deps/rabbit_common/src/delegate.erl
+++ b/deps/rabbit_common/src/delegate.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(delegate).
@@ -36,6 +36,20 @@
%% the pool is configurable, the aim is to make sure we don't have too
%% few delegates and thus limit performance on many-CPU machines.
+%% There are some optimisations applied.
+%% If a message is sent to only one queue (a common scenario),
+%% sending it over the delegate mechanism is redundant.
+%% This optimisation is applied to gen_server2 module calls when the
+%% delegate name matches the default, ?DEFAULT_NAME.
+%%
+%% Consider two examples:
+%%
+%% 1. "delegate:invoke(Pids, {erlang, process_info, [memory]})": here
+%%    "erlang:process_info/1" must run on the node hosting each target pid.
+%% 2. "{Results, Errors} = delegate:invoke(MemberPids, ?DELEGATE_PREFIX, FunOrMFA)":
+%%    since this call specifies an explicit delegate name rather than relying on
+%%    ?DEFAULT_NAME, it is invoked through the delegate mechanism.
+
-behaviour(gen_server2).
-export([start_link/1, start_link/2, invoke_no_result/2,
@@ -77,11 +91,21 @@ start_link(Name, Num) ->
Name1 = delegate_name(Name, Num),
gen_server2:start_link({local, Name1}, ?MODULE, [Name1], []).
+invoke(Pid, FunOrMFA = {gen_server2, _F, _A}) when is_pid(Pid) -> %% optimisation
+ case safe_invoke(Pid, FunOrMFA) of
+ {ok, _, Result} -> Result;
+ {error, _, {Class, Reason, StackTrace}} -> erlang:raise(Class, Reason, StackTrace)
+ end;
invoke(Pid, FunOrMFA) ->
invoke(Pid, ?DEFAULT_NAME, FunOrMFA).
invoke(Pid, _Name, FunOrMFA) when is_pid(Pid) andalso node(Pid) =:= node() ->
apply1(FunOrMFA, Pid);
+invoke(Pid, ?DEFAULT_NAME, FunOrMFA = {gen_server2, _F, _A}) when is_pid(Pid) -> %% optimisation
+ case safe_invoke(Pid, FunOrMFA) of
+ {ok, _, Result} -> Result;
+ {error, _, {Class, Reason, StackTrace}} -> erlang:raise(Class, Reason, StackTrace)
+ end;
invoke(Pid, Name, FunOrMFA) when is_pid(Pid) ->
case invoke([Pid], Name, FunOrMFA) of
{[{Pid, Result}], []} ->
@@ -92,13 +116,24 @@ invoke(Pid, Name, FunOrMFA) when is_pid(Pid) ->
invoke([], _Name, _FunOrMFA) -> %% optimisation
{[], []};
+invoke([Pid], ?DEFAULT_NAME, FunOrMFA = {gen_server2, _F, _A}) when is_pid(Pid) -> %% optimisation
+ case safe_invoke(Pid, FunOrMFA) of
+ {ok, _, Result} -> {[{Pid, Result}], []};
+ {error, _, Error} -> {[], [{Pid, Error}]}
+ end;
invoke([Pid], _Name, FunOrMFA) when node(Pid) =:= node() -> %% optimisation
case safe_invoke(Pid, FunOrMFA) of
{ok, _, Result} -> {[{Pid, Result}], []};
{error, _, Error} -> {[], [{Pid, Error}]}
end;
+invoke(Pids, Name = ?DEFAULT_NAME, FunOrMFA = {gen_server2, _F, _A}) when is_list(Pids) ->
+ {LocalCallPids, Grouped} = group_local_call_pids_by_node(Pids),
+ invoke(Pids, Name, FunOrMFA, LocalCallPids, Grouped);
invoke(Pids, Name, FunOrMFA) when is_list(Pids) ->
{LocalPids, Grouped} = group_pids_by_node(Pids),
+ invoke(Pids, Name, FunOrMFA, LocalPids, Grouped).
+
+invoke(Pids, Name, FunOrMFA, LocalCallPids, Grouped) when is_list(Pids) ->
%% The use of multi_call is only safe because the timeout is
%% infinity, and thus there is no process spawned in order to do
%% the sending. Thus calls can't overtake preceding calls/casts.
@@ -112,7 +147,7 @@ invoke(Pids, Name, FunOrMFA) when is_list(Pids) ->
BadPids = [{Pid, {exit, {nodedown, BadNode}, []}} ||
BadNode <- BadNodes,
Pid <- maps:get(BadNode, Grouped)],
- ResultsNoNode = lists:append([safe_invoke(LocalPids, FunOrMFA) |
+ ResultsNoNode = lists:append([safe_invoke(LocalCallPids, FunOrMFA) |
[Results || {_Node, Results} <- Replies]]),
lists:foldl(
fun ({ok, Pid, Result}, {Good, Bad}) -> {[{Pid, Result} | Good], Bad};
@@ -134,6 +169,9 @@ demonitor(Ref) when is_reference(Ref) ->
demonitor({Name, Pid}) ->
gen_server2:cast(Name, {demonitor, self(), Pid}).
+invoke_no_result(Pid, FunOrMFA = {gen_server2, _F, _A}) when is_pid(Pid) ->
+ _ = safe_invoke(Pid, FunOrMFA), %% we don't care about any error
+ ok;
invoke_no_result(Pid, FunOrMFA) when is_pid(Pid) andalso node(Pid) =:= node() ->
%% Optimization, avoids calling invoke_no_result/3.
%%
@@ -154,6 +192,9 @@ invoke_no_result(Pid, FunOrMFA) when is_pid(Pid) ->
ok;
invoke_no_result([], _FunOrMFA) -> %% optimisation
ok;
+invoke_no_result([Pid], FunOrMFA = {gen_server2, _F, _A}) when is_pid(Pid) -> %% optimisation
+ _ = safe_invoke(Pid, FunOrMFA), %% must not die
+ ok;
invoke_no_result([Pid], FunOrMFA) when node(Pid) =:= node() -> %% optimisation
_ = safe_invoke(Pid, FunOrMFA), %% must not die
ok;
@@ -163,15 +204,21 @@ invoke_no_result([Pid], FunOrMFA) ->
{invoke, FunOrMFA,
maps:from_list([{RemoteNode, [Pid]}])}),
ok;
+invoke_no_result(Pids, FunOrMFA = {gen_server2, _F, _A}) when is_list(Pids) ->
+ {LocalCallPids, Grouped} = group_local_call_pids_by_node(Pids),
+ invoke_no_result(Pids, FunOrMFA, LocalCallPids, Grouped);
invoke_no_result(Pids, FunOrMFA) when is_list(Pids) ->
{LocalPids, Grouped} = group_pids_by_node(Pids),
+ invoke_no_result(Pids, FunOrMFA, LocalPids, Grouped).
+
+invoke_no_result(Pids, FunOrMFA, LocalCallPids, Grouped) when is_list(Pids) ->
case maps:keys(Grouped) of
[] -> ok;
RemoteNodes -> gen_server2:abcast(
RemoteNodes, delegate(self(), ?DEFAULT_NAME, RemoteNodes),
{invoke, FunOrMFA, Grouped})
end,
- _ = safe_invoke(LocalPids, FunOrMFA), %% must not die
+ _ = safe_invoke(LocalCallPids, FunOrMFA), %% must not die
ok.
%%----------------------------------------------------------------------------
@@ -187,6 +234,19 @@ group_pids_by_node(Pids) ->
node(Pid), fun (List) -> [Pid | List] end, [Pid], Remote)}
end, {[], maps:new()}, Pids).
+group_local_call_pids_by_node(Pids) ->
+ {LocalPids0, Grouped0} = group_pids_by_node(Pids),
+ maps:fold(fun(K, V, {AccIn, MapsIn}) ->
+ case V of
+ %% just one Pid for the node
+ [SinglePid] -> {[SinglePid | AccIn], MapsIn};
+                     %% If the value is a list of more than one pid, the (K, V)
+                     %% pair is kept in the new map; those pids will be invoked
+                     %% through the delegate to reduce inter-node communication.
+ _ -> {AccIn, maps:update_with(K, fun(V1) -> V1 end, V, MapsIn)}
+ end
+ end, {LocalPids0, maps:new()}, Grouped0).
+
delegate_name(Name, Hash) ->
list_to_atom(Name ++ integer_to_list(Hash)).
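To make the new fast path concrete, here is a minimal sketch of the two call shapes the added clauses distinguish; QPid stands in for a hypothetical local queue process pid and is not part of this diff:

    %% gen_server2 call under the default delegate name: applied directly,
    %% with no extra hop through a delegate process
    Info = delegate:invoke(QPid, {gen_server2, call, [info, infinity]}),

    %% any other MFA: grouped by node as before, with remote pids still
    %% dispatched through the delegate processes
    {Results, Errors} = delegate:invoke([QPid], {erlang, process_info, [memory]}).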
diff --git a/deps/rabbit_common/src/delegate_sup.erl b/deps/rabbit_common/src/delegate_sup.erl
index b92e1eaa46..d20fb038c5 100644
--- a/deps/rabbit_common/src/delegate_sup.erl
+++ b/deps/rabbit_common/src/delegate_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(delegate_sup).
diff --git a/deps/rabbit_common/src/file_handle_cache.erl b/deps/rabbit_common/src/file_handle_cache.erl
index 9220f40ce4..50ea181d14 100644
--- a/deps/rabbit_common/src/file_handle_cache.erl
+++ b/deps/rabbit_common/src/file_handle_cache.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(file_handle_cache).
@@ -1083,8 +1083,8 @@ init([AlarmSet, AlarmClear]) ->
end
end,
ObtainLimit = obtain_limit(Limit),
- error_logger:info_msg("Limiting to approx ~p file handles (~p sockets)~n",
- [Limit, ObtainLimit]),
+ logger:info("Limiting to approx ~p file handles (~p sockets)",
+ [Limit, ObtainLimit]),
Clients = ets:new(?CLIENT_ETS_TABLE, [set, private, {keypos, #cstate.pid}]),
Elders = ets:new(?ELDERS_ETS_TABLE, [set, private]),
{ok, #fhc_state { elders = Elders,
diff --git a/deps/rabbit_common/src/file_handle_cache_stats.erl b/deps/rabbit_common/src/file_handle_cache_stats.erl
index e36a4b38dc..c5122112fa 100644
--- a/deps/rabbit_common/src/file_handle_cache_stats.erl
+++ b/deps/rabbit_common/src/file_handle_cache_stats.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(file_handle_cache_stats).
diff --git a/deps/rabbit_common/src/gen_server2.erl b/deps/rabbit_common/src/gen_server2.erl
index b80e921a89..2b7f03ef10 100644
--- a/deps/rabbit_common/src/gen_server2.erl
+++ b/deps/rabbit_common/src/gen_server2.erl
@@ -93,7 +93,7 @@
%%
%% 11) Internal buffer length is emitted as a core [RabbitMQ] metric.
-%% All modifications are (C) 2009-2020 VMware, Inc. or its affiliates.
+%% All modifications are (C) 2009-2021 VMware, Inc. or its affiliates.
%% ``The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -1365,7 +1365,7 @@ format_status(Opt, StatusData) ->
Name
end,
Header = lists:concat(["Status for generic server ", NameTag]),
- Log = sys:get_debug(log, Debug, []),
+ Log = sys:get_log(Debug),
Specfic = callback(Mod, format_status, [Opt, [PDict, State]],
fun () -> [{data, [{"State", State}]}] end),
Messages = callback(Mod, format_message_queue, [Opt, Queue],
diff --git a/deps/rabbit_common/src/lager_forwarder_backend.erl b/deps/rabbit_common/src/lager_forwarder_backend.erl
deleted file mode 100644
index 936a1259ce..0000000000
--- a/deps/rabbit_common/src/lager_forwarder_backend.erl
+++ /dev/null
@@ -1,120 +0,0 @@
-%% This Source Code Form is subject to the terms of the Mozilla Public
-%% License, v. 2.0. If a copy of the MPL was not distributed with this
-%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
-%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
-%%
-
--module(lager_forwarder_backend).
-
--behaviour(gen_event).
-
--export([init/1, handle_call/2, handle_event/2, handle_info/2, terminate/2,
- code_change/3]).
-
--record(state, {
- next_sink :: atom(),
- level :: {'mask', integer()} | inherit
- }).
-
-%% @private
-init(Sink) when is_atom(Sink) ->
- init([Sink]);
-init([Sink]) when is_atom(Sink) ->
- init([Sink, inherit]);
-init([Sink, inherit]) when is_atom(Sink) ->
- {ok, #state{
- next_sink = Sink,
- level = inherit
- }};
-init([Sink, Level]) when is_atom(Sink) ->
- try
- Mask = lager_util:config_to_mask(Level),
- {ok, #state{
- next_sink = Sink,
- level = Mask
- }}
- catch
- _:_ ->
- {error, {fatal, bad_log_level}}
- end;
-init(_) ->
- {error, {fatal, bad_config}}.
-
-%% @private
-handle_call(get_loglevel, #state{next_sink = Sink, level = inherit} = State) ->
- SinkPid = whereis(Sink),
- Mask = case self() of
- SinkPid ->
- %% Avoid direct loops, defaults to 'info'.
- 127;
- _ ->
- try
- Levels = [gen_event:call(SinkPid, Handler, get_loglevel,
- infinity)
- || Handler <- gen_event:which_handlers(SinkPid)],
- lists:foldl(fun
- ({mask, Mask}, Acc) ->
- Mask bor Acc;
- (Level, Acc) when is_integer(Level) ->
- {mask, Mask} = lager_util:config_to_mask(
- lager_util:num_to_level(Level)),
- Mask bor Acc;
- (_, Acc) ->
- Acc
- end, 0, Levels)
- catch
- exit:noproc ->
- 127
- end
- end,
- {ok, {mask, Mask}, State};
-handle_call(get_loglevel, #state{level = Mask} = State) ->
- {ok, Mask, State};
-handle_call({set_loglevel, inherit}, State) ->
- {ok, ok, State#state{level = inherit}};
-handle_call({set_loglevel, Level}, State) ->
- try lager_util:config_to_mask(Level) of
- Mask ->
- {ok, ok, State#state{level = Mask}}
- catch
- _:_ ->
- {ok, {error, bad_log_level}, State}
- end;
-handle_call(_Request, State) ->
- {ok, ok, State}.
-
-%% @private
-handle_event({log, LagerMsg}, #state{next_sink = Sink, level = Mask} = State) ->
- SinkPid = whereis(Sink),
- case self() of
- SinkPid ->
- %% Avoid direct loops.
- ok;
- _ ->
- case Mask =:= inherit orelse
- lager_util:is_loggable(LagerMsg, Mask, ?MODULE) of
- true ->
- case lager_config:get({Sink, async}, false) of
- true -> gen_event:notify(SinkPid, {log, LagerMsg});
- false -> gen_event:sync_notify(SinkPid, {log, LagerMsg})
- end;
- false ->
- ok
- end
- end,
- {ok, State};
-handle_event(_Event, State) ->
- {ok, State}.
-
-%% @private
-handle_info(_Info, State) ->
- {ok, State}.
-
-%% @private
-terminate(_Reason, _State) ->
- ok.
-
-%% @private
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
diff --git a/deps/rabbit_common/src/mirrored_supervisor.erl b/deps/rabbit_common/src/mirrored_supervisor.erl
index 61ddc068b6..2ef2bfda6e 100644
--- a/deps/rabbit_common/src/mirrored_supervisor.erl
+++ b/deps/rabbit_common/src/mirrored_supervisor.erl
@@ -2,14 +2,11 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(mirrored_supervisor).
-%% pg2 is deprecated in OTP 23.
--compile(nowarn_deprecated_function).
-
%% Mirrored Supervisor
%% ===================
%%
@@ -211,6 +208,7 @@ start_link0(Prefix, Group, TxFun, Init) ->
end.
init(Mod, Args) ->
+ _ = pg:start_link(),
case Mod:init(Args) of
{ok, {{Bad, _, _}, _ChildSpecs}} when
Bad =:= simple_one_for_one -> erlang:error(badarg);
@@ -247,7 +245,7 @@ fold(FunAtom, Sup, AggFun) ->
Group = call(Sup, group),
lists:foldl(AggFun, [],
[apply(?SUPERVISOR, FunAtom, [D]) ||
- M <- pg2:get_members(Group),
+ M <- pg:get_members(Group),
D <- [delegate(M)]]).
child(Sup, Id) ->
@@ -279,11 +277,17 @@ handle_call({init, Overall}, _From,
tx_fun = TxFun,
initial_childspecs = ChildSpecs}) ->
process_flag(trap_exit, true),
- pg2:create(Group),
- ok = pg2:join(Group, Overall),
- Rest = pg2:get_members(Group) -- [Overall],
+ LockId = mirrored_supervisor_locks:lock(Group),
+ maybe_log_lock_acquisition_failure(LockId, Group),
+ ok = pg:join(Group, Overall),
+ rabbit_log:debug("Mirrored supervisor: initializing, overall supervisor ~p joined group ~p", [Overall, Group]),
+ Rest = pg:get_members(Group) -- [Overall],
+ Nodes = [node(M) || M <- Rest],
+ rabbit_log:debug("Mirrored supervisor: known group ~p members: ~p on nodes ~p", [Group, Rest, Nodes]),
case Rest of
- [] -> TxFun(fun() -> delete_all(Group) end);
+ [] ->
+ rabbit_log:debug("Mirrored supervisor: no known peer members in group ~p, will delete all child records for it", [Group]),
+ TxFun(fun() -> delete_all(Group) end);
_ -> ok
end,
[begin
@@ -293,8 +297,9 @@ handle_call({init, Overall}, _From,
Delegate = delegate(Overall),
erlang:monitor(process, Delegate),
State1 = State#state{overall = Overall, delegate = Delegate},
- case errors([maybe_start(Group, TxFun, Overall, Delegate, S)
- || S <- ChildSpecs]) of
+ Results = [maybe_start(Group, TxFun, Overall, Delegate, S) || S <- ChildSpecs],
+ mirrored_supervisor_locks:unlock(LockId),
+ case errors(Results) of
[] -> {reply, ok, State1};
Errors -> {stop, {shutdown, Errors}, State1}
end;
@@ -304,11 +309,25 @@ handle_call({start_child, ChildSpec}, _From,
delegate = Delegate,
group = Group,
tx_fun = TxFun}) ->
- {reply, case maybe_start(Group, TxFun, Overall, Delegate, ChildSpec) of
- already_in_mnesia -> {error, already_present};
- {already_in_mnesia, Pid} -> {error, {already_started, Pid}};
- Else -> Else
- end, State};
+ LockId = mirrored_supervisor_locks:lock(Group),
+ maybe_log_lock_acquisition_failure(LockId, Group),
+ rabbit_log:debug("Mirrored supervisor: asked to consider starting a child, group: ~p", [Group]),
+ Result = case maybe_start(Group, TxFun, Overall, Delegate, ChildSpec) of
+ already_in_mnesia ->
+ rabbit_log:debug("Mirrored supervisor: maybe_start for group ~p,"
+ " overall ~p returned 'record already present'", [Group, Overall]),
+ {error, already_present};
+ {already_in_mnesia, Pid} ->
+ rabbit_log:debug("Mirrored supervisor: maybe_start for group ~p,"
+ " overall ~p returned 'already running: ~p'", [Group, Overall, Pid]),
+ {error, {already_started, Pid}};
+ Else ->
+ rabbit_log:debug("Mirrored supervisor: maybe_start for group ~p,"
+ " overall ~p returned ~p", [Group, Overall, Else]),
+ Else
+ end,
+ mirrored_supervisor_locks:unlock(LockId),
+ {reply, Result, State};
handle_call({delete_child, Id}, _From, State = #state{delegate = Delegate,
group = Group,
@@ -355,9 +374,8 @@ handle_info({'DOWN', _Ref, process, Pid, _Reason},
tx_fun = TxFun,
overall = O,
child_order = ChildOrder}) ->
- %% TODO load balance this
- %% No guarantee pg2 will have received the DOWN before us.
- R = case lists:sort(pg2:get_members(Group)) -- [Pid] of
+ %% No guarantee pg will have received the DOWN before us.
+ R = case lists:sort(pg:get_members(Group)) -- [Pid] of
[O | _] -> ChildSpecs =
TxFun(fun() -> update_all(O, Pid) end),
[start(Delegate, ChildSpec)
@@ -382,31 +400,53 @@ code_change(_OldVsn, State, _Extra) ->
%%----------------------------------------------------------------------------
tell_all_peers_to_die(Group, Reason) ->
- [cast(P, {die, Reason}) || P <- pg2:get_members(Group) -- [self()]].
+ [cast(P, {die, Reason}) || P <- pg:get_members(Group) -- [self()]].
maybe_start(Group, TxFun, Overall, Delegate, ChildSpec) ->
+ rabbit_log:debug("Mirrored supervisor: asked to consider starting, group: ~p", [Group]),
try TxFun(fun() -> check_start(Group, Overall, Delegate, ChildSpec) end) of
- start -> start(Delegate, ChildSpec);
- undefined -> already_in_mnesia;
- Pid -> {already_in_mnesia, Pid}
+ start ->
+ rabbit_log:debug("Mirrored supervisor: check_start for group ~p,"
+ " overall ~p returned 'do start'", [Group, Overall]),
+ start(Delegate, ChildSpec);
+ undefined ->
+ rabbit_log:debug("Mirrored supervisor: check_start for group ~p,"
+ " overall ~p returned 'undefined'", [Group, Overall]),
+ already_in_mnesia;
+ Pid ->
+ rabbit_log:debug("Mirrored supervisor: check_start for group ~p,"
+ " overall ~p returned 'already running (~p)'", [Group, Overall, Pid]),
+ {already_in_mnesia, Pid}
catch
%% If we are torn down while in the transaction...
{error, E} -> {error, E}
end.
check_start(Group, Overall, Delegate, ChildSpec) ->
- case mnesia:wread({?TABLE, {Group, id(ChildSpec)}}) of
+ rabbit_log:debug("Mirrored supervisor: check_start for group ~p, id: ~p, overall: ~p",
+ [Group, id(ChildSpec), Overall]),
+ ReadResult = mnesia:wread({?TABLE, {Group, id(ChildSpec)}}),
+ rabbit_log:debug("Mirrored supervisor: check_start table ~s read for key ~p returned ~p",
+ [?TABLE, {Group, id(ChildSpec)}, ReadResult]),
+ case ReadResult of
[] -> _ = write(Group, Overall, ChildSpec),
start;
[S] -> #mirrored_sup_childspec{key = {Group, Id},
mirroring_pid = Pid} = S,
case Overall of
- Pid -> child(Delegate, Id);
- _ -> case supervisor(Pid) of
- dead -> _ = write(Group, Overall, ChildSpec),
- start;
- Delegate0 -> child(Delegate0, Id)
- end
+ Pid ->
+ rabbit_log:debug("Mirrored supervisor: overall matched mirrored pid ~p", [Pid]),
+ child(Delegate, Id);
+ _ ->
+ rabbit_log:debug("Mirrored supervisor: overall ~p did not match mirrored pid ~p", [Overall, Pid]),
+ rabbit_log:debug("Mirrored supervisor: supervisor(~p) returned ~p", [Pid, supervisor(Pid)]),
+ case supervisor(Pid) of
+ dead ->
+ _ = write(Group, Overall, ChildSpec),
+ start;
+ Delegate0 ->
+ child(Delegate0, Id)
+ end
end
end.
@@ -511,3 +551,8 @@ restore_child_order(ChildSpecs, ChildOrder) ->
proplists:get_value(id(A), ChildOrder)
< proplists:get_value(id(B), ChildOrder)
end, ChildSpecs).
+
+maybe_log_lock_acquisition_failure(undefined = _LockId, Group) ->
+ rabbit_log:warning("Mirrored supervisor: could not acquire lock for group ~s", [Group]);
+maybe_log_lock_acquisition_failure(_, _) ->
+ ok.
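The group membership that used to go through the deprecated pg2 module now uses OTP's pg (OTP 23+), started lazily in init/2 above. A minimal sketch of the pg calls the supervisor relies on; the group name is illustrative:

    _ = pg:start_link(),                      %% default scope; result ignored if already running, as in init/2 above
    ok = pg:join(my_sup_group, self()),
    Members = pg:get_members(my_sup_group),   %% [self()] plus any peers that joined
    _ = pg:leave(my_sup_group, self()).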
diff --git a/deps/rabbit_common/src/mirrored_supervisor_locks.erl b/deps/rabbit_common/src/mirrored_supervisor_locks.erl
new file mode 100644
index 0000000000..fc66a9eef3
--- /dev/null
+++ b/deps/rabbit_common/src/mirrored_supervisor_locks.erl
@@ -0,0 +1,33 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(mirrored_supervisor_locks).
+
+-export([lock/1, unlock/1]).
+
+-define(KEY_PREFIX, mirrored_supervisor).
+
+%%
+%% API
+%%
+
+lock(Group) ->
+ Nodes = nodes(),
+ %% about 300s, same as rabbit_nodes:lock_retries/0 default
+ LockId = case global:set_lock({?KEY_PREFIX, Group}, Nodes, 80) of
+ true -> Group;
+ false -> undefined
+ end,
+ LockId.
+
+unlock(LockId) ->
+ Nodes = nodes(),
+ case LockId of
+ undefined -> ok;
+ Value -> global:del_lock({?KEY_PREFIX, Value}, Nodes)
+ end,
+ ok.
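A minimal usage sketch of the new module, mirroring how the mirrored_supervisor changes above wrap their critical sections; the group name and the work function are hypothetical:

    Group  = my_sup_group,
    LockId = mirrored_supervisor_locks:lock(Group),   %% 'undefined' when the lock could not be acquired
    try
        do_exclusive_work(Group)                      %% hypothetical critical section
    after
        mirrored_supervisor_locks:unlock(LockId)      %% unlock(undefined) is a no-op
    end.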
diff --git a/deps/rabbit_common/src/mnesia_sync.erl b/deps/rabbit_common/src/mnesia_sync.erl
index 2287436849..081e897009 100644
--- a/deps/rabbit_common/src/mnesia_sync.erl
+++ b/deps/rabbit_common/src/mnesia_sync.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(mnesia_sync).
diff --git a/deps/rabbit_common/src/pmon.erl b/deps/rabbit_common/src/pmon.erl
index f44168dfcf..d9cbe463cb 100644
--- a/deps/rabbit_common/src/pmon.erl
+++ b/deps/rabbit_common/src/pmon.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(pmon).
diff --git a/deps/rabbit_common/src/priority_queue.erl b/deps/rabbit_common/src/priority_queue.erl
index 4a7867129d..0c8580cc7c 100644
--- a/deps/rabbit_common/src/priority_queue.erl
+++ b/deps/rabbit_common/src/priority_queue.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% Priority queues have essentially the same interface as ordinary
diff --git a/deps/rabbit_common/src/rabbit_amqp_connection.erl b/deps/rabbit_common/src/rabbit_amqp_connection.erl
index 58486bd239..058fa7d590 100644
--- a/deps/rabbit_common/src/rabbit_amqp_connection.erl
+++ b/deps/rabbit_common/src/rabbit_amqp_connection.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_amqp_connection).
@@ -14,11 +14,11 @@ amqp_params(ConnPid, Timeout) ->
P = try
gen_server:call(ConnPid, {info, [amqp_params]}, Timeout)
catch exit:{noproc, Error} ->
- rabbit_log:debug("file ~p, line ~p - connection process ~p not alive: ~p~n",
+ rabbit_log:debug("file ~p, line ~p - connection process ~p not alive: ~p",
[?FILE, ?LINE, ConnPid, Error]),
[];
_:Error ->
- rabbit_log:debug("file ~p, line ~p - failed to get amqp_params from connection process ~p: ~p~n",
+ rabbit_log:debug("file ~p, line ~p - failed to get amqp_params from connection process ~p: ~p",
[?FILE, ?LINE, ConnPid, Error]),
[]
end,
diff --git a/deps/rabbit_common/src/rabbit_amqqueue_common.erl b/deps/rabbit_common/src/rabbit_amqqueue_common.erl
index a45356de78..958c2ce9c3 100644
--- a/deps/rabbit_common/src/rabbit_amqqueue_common.erl
+++ b/deps/rabbit_common/src/rabbit_amqqueue_common.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_amqqueue_common).
diff --git a/deps/rabbit_common/src/rabbit_auth_backend_dummy.erl b/deps/rabbit_common/src/rabbit_auth_backend_dummy.erl
index 8d30fdca1b..0c18a93745 100644
--- a/deps/rabbit_common/src/rabbit_auth_backend_dummy.erl
+++ b/deps/rabbit_common/src/rabbit_auth_backend_dummy.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_auth_backend_dummy).
diff --git a/deps/rabbit_common/src/rabbit_auth_mechanism.erl b/deps/rabbit_common/src/rabbit_auth_mechanism.erl
index 38d21f3a5a..e9415780c8 100644
--- a/deps/rabbit_common/src/rabbit_auth_mechanism.erl
+++ b/deps/rabbit_common/src/rabbit_auth_mechanism.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_auth_mechanism).
diff --git a/deps/rabbit_common/src/rabbit_authn_backend.erl b/deps/rabbit_common/src/rabbit_authn_backend.erl
index e600ec884f..61d230c69a 100644
--- a/deps/rabbit_common/src/rabbit_authn_backend.erl
+++ b/deps/rabbit_common/src/rabbit_authn_backend.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_authn_backend).
diff --git a/deps/rabbit_common/src/rabbit_authz_backend.erl b/deps/rabbit_common/src/rabbit_authz_backend.erl
index 367aa8d1ef..91b125f766 100644
--- a/deps/rabbit_common/src/rabbit_authz_backend.erl
+++ b/deps/rabbit_common/src/rabbit_authz_backend.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_authz_backend).
diff --git a/deps/rabbit_common/src/rabbit_basic_common.erl b/deps/rabbit_common/src/rabbit_basic_common.erl
index e88f1172af..4facb1f1d0 100644
--- a/deps/rabbit_common/src/rabbit_basic_common.erl
+++ b/deps/rabbit_common/src/rabbit_basic_common.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_basic_common).
diff --git a/deps/rabbit_common/src/rabbit_binary_generator.erl b/deps/rabbit_common/src/rabbit_binary_generator.erl
index 7a56cb92b6..1df98e50c4 100644
--- a/deps/rabbit_common/src/rabbit_binary_generator.erl
+++ b/deps/rabbit_common/src/rabbit_binary_generator.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_binary_generator).
@@ -223,7 +223,7 @@ lookup_amqp_exception(#amqp_error{name = Name,
ExplBin = amqp_exception_explanation(Text, Expl),
{ShouldClose, Code, ExplBin, Method};
lookup_amqp_exception(Other, Protocol) ->
- rabbit_log:warning("Non-AMQP exit reason '~p'~n", [Other]),
+ rabbit_log:warning("Non-AMQP exit reason '~p'", [Other]),
{ShouldClose, Code, Text} = Protocol:lookup_amqp_exception(internal_error),
{ShouldClose, Code, Text, none}.
diff --git a/deps/rabbit_common/src/rabbit_binary_parser.erl b/deps/rabbit_common/src/rabbit_binary_parser.erl
index 478b0f0cd2..cfe6f64833 100644
--- a/deps/rabbit_common/src/rabbit_binary_parser.erl
+++ b/deps/rabbit_common/src/rabbit_binary_parser.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_binary_parser).
diff --git a/deps/rabbit_common/src/rabbit_cert_info.erl b/deps/rabbit_common/src/rabbit_cert_info.erl
index 08e6f03c6c..537915ca44 100644
--- a/deps/rabbit_common/src/rabbit_cert_info.erl
+++ b/deps/rabbit_common/src/rabbit_cert_info.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_cert_info).
@@ -17,11 +17,17 @@
extensions/1
]).
+-export([sanitize_other_name/1]).
+
%%--------------------------------------------------------------------------
-export_type([certificate/0]).
--type certificate() :: binary().
+-type certificate() :: public_key:der_encoded().
+
+%% X.509 certificate extensions usually look like key/value pairs but can
+%% be just about any value.
+-type certificate_extension_value() :: any().
%%--------------------------------------------------------------------------
%% High-level functions used by reader
@@ -66,7 +72,7 @@ extensions(Cert) ->
Extensions
end, Cert).
--spec subject_alternative_names(certificate()) -> [{atom(), string()}].
+-spec subject_alternative_names(certificate()) -> [certificate_extension_value()].
subject_alternative_names(Cert) ->
Extensions = extensions(Cert),
try lists:keyfind(?'id-ce-subjectAltName', #'Extension'.extnID, Extensions) of
@@ -103,6 +109,15 @@ find_by_type(Type, {rdnSequence, RDNs}) ->
%% Formatting functions
%%--------------------------------------------------------------------------
+sanitize_other_name(Bin) when is_binary(Bin) ->
+ %% We make a wild assumption about the types here
+ %% but ASN.1 decoding functions in OTP only offer so much and SAN values
+ %% are expected to be "string-like" by RabbitMQ
+ case 'OTP-PUB-KEY':decode('DirectoryString', Bin) of
+ {ok, {_, Val}} -> Val;
+ Other -> Other
+ end.
+
%% Format an rdnSequence as an RFC 4514 subject string.
format_rdn_sequence({rdnSequence, Seq}) ->
string:join(lists:reverse([format_complex_rdn(RDN) || RDN <- Seq]), ",").
diff --git a/deps/rabbit_common/src/rabbit_channel_common.erl b/deps/rabbit_common/src/rabbit_channel_common.erl
index a21e17b2e7..9b7e144c02 100644
--- a/deps/rabbit_common/src/rabbit_channel_common.erl
+++ b/deps/rabbit_common/src/rabbit_channel_common.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_channel_common).
diff --git a/deps/rabbit_common/src/rabbit_command_assembler.erl b/deps/rabbit_common/src/rabbit_command_assembler.erl
index ea6b19d083..80cb70fc27 100644
--- a/deps/rabbit_common/src/rabbit_command_assembler.erl
+++ b/deps/rabbit_common/src/rabbit_command_assembler.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_command_assembler).
diff --git a/deps/rabbit_common/src/rabbit_control_misc.erl b/deps/rabbit_common/src/rabbit_control_misc.erl
index 0fff88a2cd..9abb20a27c 100644
--- a/deps/rabbit_common/src/rabbit_control_misc.erl
+++ b/deps/rabbit_common/src/rabbit_control_misc.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_control_misc).
diff --git a/deps/rabbit_common/src/rabbit_core_metrics.erl b/deps/rabbit_common/src/rabbit_core_metrics.erl
index 3a6732c0d2..0bd61d1e07 100644
--- a/deps/rabbit_common/src/rabbit_core_metrics.erl
+++ b/deps/rabbit_common/src/rabbit_core_metrics.erl
@@ -2,13 +2,14 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_core_metrics).
-include("rabbit_core_metrics.hrl").
+-export([create_table/1]).
-export([init/0]).
-export([terminate/0]).
@@ -104,9 +105,13 @@
%%----------------------------------------------------------------------------
%% API
%%----------------------------------------------------------------------------
+
+create_table({Table, Type}) ->
+ ets:new(Table, [Type, public, named_table, {write_concurrency, true},
+ {read_concurrency, true}]).
+
init() ->
- _ = [ets:new(Table, [Type, public, named_table, {write_concurrency, true},
- {read_concurrency, true}])
+ _ = [create_table({Table, Type})
|| {Table, Type} <- ?CORE_TABLES ++ ?CORE_EXTRA_TABLES],
ok.
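
The extracted create_table/1 lets callers create a single core metrics ETS table on demand, while init/0 keeps building the full set. A minimal sketch, assuming a {Table, Type} pair as defined in rabbit_core_metrics.hrl (the pair below is illustrative):

    %% Creates one named, public ETS table with read/write concurrency
    %% enabled, exactly as init/0 would for the same pair.
    rabbit_core_metrics:create_table({connection_created, set}).
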
diff --git a/deps/rabbit_common/src/rabbit_data_coercion.erl b/deps/rabbit_common/src/rabbit_data_coercion.erl
index 9d2b39da94..35f6171abf 100644
--- a/deps/rabbit_common/src/rabbit_data_coercion.erl
+++ b/deps/rabbit_common/src/rabbit_data_coercion.erl
@@ -2,13 +2,13 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_data_coercion).
-export([to_binary/1, to_list/1, to_atom/1, to_integer/1, to_proplist/1, to_map/1]).
--export([to_atom/2]).
+-export([to_atom/2, atomize_keys/1]).
-spec to_binary(Val :: binary() | list() | atom() | integer()) -> binary().
to_binary(Val) when is_list(Val) -> list_to_binary(Val);
@@ -45,3 +45,10 @@ to_proplist(Val) when is_map(Val) -> maps:to_list(Val).
-spec to_map(Val :: map() | list()) -> map().
to_map(Val) when is_map(Val) -> Val;
to_map(Val) when is_list(Val) -> maps:from_list(Val).
+
+
+-spec atomize_keys(Val :: map() | list()) -> map() | list().
+atomize_keys(Val) when is_list(Val) ->
+ [{to_atom(K), V} || {K, V} <- Val];
+atomize_keys(Val) when is_map(Val) ->
+ maps:from_list(atomize_keys(maps:to_list(Val))). \ No newline at end of file
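
A quick sketch of the new atomize_keys/1 helper, assuming keys that to_atom/1 can convert (binaries or strings); proplists keep their shape and maps are rebuilt with atom keys:

    1> rabbit_data_coercion:atomize_keys([{<<"max-length">>, 10}, {"overflow", <<"reject-publish">>}]).
    [{'max-length',10},{overflow,<<"reject-publish">>}]
    2> rabbit_data_coercion:atomize_keys(#{<<"max-length">> => 10}).
    #{'max-length' => 10}
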
diff --git a/deps/rabbit_common/src/rabbit_date_time.erl b/deps/rabbit_common/src/rabbit_date_time.erl
new file mode 100644
index 0000000000..e4a56ad783
--- /dev/null
+++ b/deps/rabbit_common/src/rabbit_date_time.erl
@@ -0,0 +1,48 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(rabbit_date_time).
+
+-export([parse_duration/1]).
+
+-type datetime_plist() :: list({atom(), integer()}).
+
+% from https://github.com/erlsci/iso8601/blob/main/src/iso8601.erl
+-spec gi(string()) -> integer().
+gi(DS) ->
+ {Int, _Rest} = string:to_integer(DS),
+ case Int of
+ error ->
+ 0;
+ _ ->
+ Int
+ end.
+
+-spec parse_duration(string()) -> datetime_plist().
+parse_duration(Bin)
+ when is_binary(Bin) -> %TODO extended format
+ parse_duration(binary_to_list(Bin));
+parse_duration(Str) ->
+ case re:run(Str,
+ "^(?<sign>-|\\+)?P(?:(?<years>[0-9]+)Y)?(?:(?<months>[0"
+ "-9]+)M)?(?:(?<days>[0-9]+)D)?(T(?:(?<hours>[0-9]+)H)?("
+ "?:(?<minutes>[0-9]+)M)?(?:(?<seconds>[0-9]+(?:\\.[0-9]"
+ "+)?)S)?)?$",
+ [{capture, [sign, years, months, days, hours, minutes, seconds],
+ list}])
+ of
+ {match, [Sign, Years, Months, Days, Hours, Minutes, Seconds]} ->
+ {ok, [{sign, Sign},
+ {years, gi(Years)},
+ {months, gi(Months)},
+ {days, gi(Days)},
+ {hours, gi(Hours)},
+ {minutes, gi(Minutes)},
+ {seconds, gi(Seconds)}]};
+ nomatch ->
+ error
+ end.
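
The new parse_duration/1 accepts ISO 8601 durations (binary or string) and returns a proplist with one entry per component, or the atom error when the input does not match. A sketch of the expected shape:

    1> rabbit_date_time:parse_duration(<<"PT10M">>).
    {ok,[{sign,[]},{years,0},{months,0},{days,0},{hours,0},{minutes,10},{seconds,0}]}
    2> rabbit_date_time:parse_duration("not-a-duration").
    error
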
diff --git a/deps/rabbit_common/src/rabbit_env.erl b/deps/rabbit_common/src/rabbit_env.erl
index 8817103e81..d6f60f9ae6 100644
--- a/deps/rabbit_common/src/rabbit_env.erl
+++ b/deps/rabbit_common/src/rabbit_env.erl
@@ -2,12 +2,15 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2019-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2019-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_env).
-include_lib("kernel/include/file.hrl").
+-include_lib("kernel/include/logger.hrl").
+
+-include("logging.hrl").
-export([get_context/0,
get_context/1,
@@ -16,6 +19,9 @@
get_context_after_logging_init/1,
get_context_after_reloading_env/1,
dbg_config/0,
+ env_vars/0,
+ has_var_been_overridden/1,
+ has_var_been_overridden/2,
get_used_env_vars/0,
log_process_env/0,
log_context/1,
@@ -28,6 +34,14 @@
value_is_yes/1]).
-endif.
+%% These vary from one OTP version to another.
+-ignore_xref([
+ {os, env, 0},
+ {os, list_env_vars, 0}
+]).
+%% Relies on functions only available in certain OTP versions.
+-dialyzer({nowarn_function, [env_vars/0]}).
+
-define(USED_ENV_VARS,
[
"RABBITMQ_ALLOW_INPUT",
@@ -37,9 +51,13 @@
"RABBITMQ_CONFIG_FILE",
"RABBITMQ_CONFIG_FILES",
"RABBITMQ_DBG",
+ "RABBITMQ_DEFAULT_PASS",
+ "RABBITMQ_DEFAULT_USER",
+ "RABBITMQ_DEFAULT_VHOST",
"RABBITMQ_DIST_PORT",
"RABBITMQ_ENABLED_PLUGINS",
"RABBITMQ_ENABLED_PLUGINS_FILE",
+ "RABBITMQ_ERLANG_COOKIE",
"RABBITMQ_FEATURE_FLAGS",
"RABBITMQ_FEATURE_FLAGS_FILE",
"RABBITMQ_HOME",
@@ -66,6 +84,10 @@
"SYS_PREFIX"
]).
+-export_type([context/0]).
+
+-type context() :: map().
+
get_context() ->
Context0 = get_context_before_logging_init(),
Context1 = get_context_after_logging_init(Context0),
@@ -134,6 +156,10 @@ get_context_after_reloading_env(Context) ->
fun plugins_expand_dir/1,
fun enabled_plugins_file/1,
fun enabled_plugins/1,
+ fun default_vhost/1,
+ fun default_user/1,
+ fun default_pass/1,
+ fun erlang_cookie/1,
fun maybe_stop_dist_for_remote_query/1,
fun amqp_ipaddr/1,
fun amqp_tcp_port/1,
@@ -210,33 +236,50 @@ update_context(Context, Key, Value, Origin)
Context#{Key => Value,
var_origins => #{Key => Origin}}.
+env_vars() ->
+ case erlang:function_exported(os, list_env_vars, 0) of
+ true -> os:list_env_vars(); %% OTP < 24
+ false -> os:env() %% OTP >= 24
+ end.
+
+has_var_been_overridden(Var) ->
+ has_var_been_overridden(get_context(), Var).
+
+has_var_been_overridden(#{var_origins := Origins}, Var) ->
+ case maps:get(Var, Origins, default) of
+ default -> false;
+ _ -> true
+ end.
+
get_used_env_vars() ->
lists:filter(
fun({Var, _}) -> var_is_used(Var) end,
- lists:sort(os:list_env_vars())).
+ lists:sort(env_vars())).
log_process_env() ->
- rabbit_log_prelaunch:debug("Process environment:"),
+ ?LOG_DEBUG("Process environment:"),
lists:foreach(
fun({Var, Value}) ->
- rabbit_log_prelaunch:debug(" - ~s = ~ts", [Var, Value])
- end, lists:sort(os:list_env_vars())).
+ ?LOG_DEBUG(" - ~s = ~ts", [Var, Value])
+ end, lists:sort(env_vars())).
log_context(Context) ->
- rabbit_log_prelaunch:debug("Context (based on environment variables):"),
+ ?LOG_DEBUG("Context (based on environment variables):"),
lists:foreach(
fun(Key) ->
Value = maps:get(Key, Context),
- rabbit_log_prelaunch:debug(" - ~s: ~p", [Key, Value])
+ ?LOG_DEBUG(" - ~s: ~p", [Key, Value])
end,
lists:sort(maps:keys(Context))).
context_to_app_env_vars(Context) ->
- rabbit_log_prelaunch:debug(
- "Setting default application environment variables:"),
+ ?LOG_DEBUG(
+ "Setting default application environment variables:",
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
Fun = fun({App, Param, Value}) ->
- rabbit_log_prelaunch:debug(
- " - ~s:~s = ~p", [App, Param, Value]),
+ ?LOG_DEBUG(
+ " - ~s:~s = ~p", [App, Param, Value],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
ok = application:set_env(
App, Param, Value, [{persistent, true}])
end,
@@ -608,6 +651,18 @@ parse_log_levels([CategoryValue | Rest], Result) ->
["-color"] ->
Result1 = Result#{color => false},
parse_log_levels(Rest, Result1);
+ ["+json"] ->
+ Result1 = Result#{json => true},
+ parse_log_levels(Rest, Result1);
+ ["-json"] ->
+ Result1 = Result#{json => false},
+ parse_log_levels(Rest, Result1);
+ ["+single_line"] ->
+ Result1 = Result#{single_line => true},
+ parse_log_levels(Rest, Result1);
+ ["-single_line"] ->
+ Result1 = Result#{single_line => false},
+ parse_log_levels(Rest, Result1);
[CategoryOrLevel] ->
case parse_level(CategoryOrLevel) of
undefined ->
@@ -662,8 +717,14 @@ main_log_file(#{nodename := Nodename,
File= filename:join(LogBaseDir,
atom_to_list(Nodename) ++ ".log"),
update_context(Context, main_log_file, File, default);
- "-" ->
- update_context(Context, main_log_file, "-", environment);
+ "-" = Value ->
+ update_context(Context, main_log_file, Value, environment);
+ "-stderr" = Value ->
+ update_context(Context, main_log_file, Value, environment);
+ "exchange:" ++ _ = Value ->
+ update_context(Context, main_log_file, Value, environment);
+ "syslog:" ++ _ = Value ->
+ update_context(Context, main_log_file, Value, environment);
Value ->
File = normalize_path(Value),
update_context(Context, main_log_file, File, environment)
@@ -1171,9 +1232,10 @@ amqp_tcp_port(Context) ->
update_context(Context, amqp_tcp_port, TcpPort, environment)
catch
_:badarg ->
- rabbit_log_prelaunch:error(
- "Invalid value for $RABBITMQ_NODE_PORT: ~p",
- [TcpPortStr]),
+ ?LOG_ERROR(
+ "Invalid value for $RABBITMQ_NODE_PORT: ~p",
+ [TcpPortStr],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
throw({exit, ex_config})
end
end.
@@ -1190,9 +1252,10 @@ erlang_dist_tcp_port(#{amqp_tcp_port := AmqpTcpPort} = Context) ->
erlang_dist_tcp_port, TcpPort, environment)
catch
_:badarg ->
- rabbit_log_prelaunch:error(
- "Invalid value for $RABBITMQ_DIST_PORT: ~p",
- [TcpPortStr]),
+ ?LOG_ERROR(
+ "Invalid value for $RABBITMQ_DIST_PORT: ~p",
+ [TcpPortStr],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
throw({exit, ex_config})
end
end.
@@ -1391,6 +1454,62 @@ motd_file_from_node(#{from_remote_node := Remote} = Context) ->
end.
%% -------------------------------------------------------------------
+%%
+%% RABBITMQ_DEFAULT_VHOST
+%% Override the default virtual host.
+%% Default: unset (i.e. <<"/">>)
+%%
+%% RABBITMQ_DEFAULT_USER
+%% Override the default username.
+%% Default: unset (i.e. <<"guest">>).
+%%
+%% RABBITMQ_DEFAULT_PASS
+%% Override the default user's password.
+%% Default: unset (i.e. <<"guest">>).
+
+default_vhost(Context) ->
+ case get_prefixed_env_var("RABBITMQ_DEFAULT_VHOST") of
+ false ->
+ update_context(Context, default_vhost, undefined, default);
+ Value ->
+ VHost = list_to_binary(Value),
+ update_context(Context, default_vhost, VHost, environment)
+ end.
+
+default_user(Context) ->
+ case get_prefixed_env_var("RABBITMQ_DEFAULT_USER") of
+ false ->
+ update_context(Context, default_user, undefined, default);
+ Value ->
+ Username = list_to_binary(Value),
+ update_context(Context, default_user, Username, environment)
+ end.
+
+default_pass(Context) ->
+ case get_prefixed_env_var("RABBITMQ_DEFAULT_PASS") of
+ false ->
+ update_context(Context, default_pass, undefined, default);
+ Value ->
+ Password = list_to_binary(Value),
+ update_context(Context, default_pass, Password, environment)
+ end.
+
+%% -------------------------------------------------------------------
+%%
+%% RABBITMQ_ERLANG_COOKIE
+%% Override the on-disk Erlang cookie.
+%% Default: unset (i.e. defaults to the content of ~/.erlang.cookie)
+
+erlang_cookie(Context) ->
+ case get_prefixed_env_var("RABBITMQ_ERLANG_COOKIE") of
+ false ->
+ update_context(Context, erlang_cookie, undefined, default);
+ Value ->
+ Cookie = list_to_atom(Value),
+ update_context(Context, erlang_cookie, Cookie, environment)
+ end.
+
+%% -------------------------------------------------------------------
%% Loading of rabbitmq-env.conf.
%% -------------------------------------------------------------------
@@ -1410,8 +1529,9 @@ load_conf_env_file(#{os_type := {unix, _},
true ->
case filelib:is_regular(ConfEnvFile) of
false ->
- rabbit_log_prelaunch:debug(
- "No $RABBITMQ_CONF_ENV_FILE (~ts)", [ConfEnvFile]),
+ ?LOG_DEBUG(
+ "No $RABBITMQ_CONF_ENV_FILE (~ts)", [ConfEnvFile],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
Context1;
true ->
case os:find_executable("sh") of
@@ -1422,9 +1542,10 @@ load_conf_env_file(#{os_type := {unix, _},
end
end;
false ->
- rabbit_log_prelaunch:debug(
- "Loading of $RABBITMQ_CONF_ENV_FILE (~ts) is disabled",
- [ConfEnvFile]),
+ ?LOG_DEBUG(
+ "Loading of $RABBITMQ_CONF_ENV_FILE (~ts) is disabled",
+ [ConfEnvFile],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
Context1
end;
load_conf_env_file(#{os_type := {win32, _},
@@ -1442,8 +1563,9 @@ load_conf_env_file(#{os_type := {win32, _},
true ->
case filelib:is_regular(ConfEnvFile) of
false ->
- rabbit_log_prelaunch:debug(
- "No $RABBITMQ_CONF_ENV_FILE (~ts)", [ConfEnvFile]),
+ ?LOG_DEBUG(
+ "No $RABBITMQ_CONF_ENV_FILE (~ts)", [ConfEnvFile],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
Context1;
true ->
case os:find_executable("cmd.exe") of
@@ -1463,9 +1585,10 @@ load_conf_env_file(#{os_type := {win32, _},
end
end;
false ->
- rabbit_log_prelaunch:debug(
- "Loading of $RABBITMQ_CONF_ENV_FILE (~ts) is disabled",
- [ConfEnvFile]),
+ ?LOG_DEBUG(
+ "Loading of $RABBITMQ_CONF_ENV_FILE (~ts) is disabled",
+ [ConfEnvFile],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
Context1
end;
load_conf_env_file(Context) ->
@@ -1487,8 +1610,9 @@ loading_conf_env_file_enabled(_) ->
-endif.
do_load_conf_env_file(#{os_type := {unix, _}} = Context, Sh, ConfEnvFile) ->
- rabbit_log_prelaunch:debug(
- "Sourcing $RABBITMQ_CONF_ENV_FILE: ~ts", [ConfEnvFile]),
+ ?LOG_DEBUG(
+ "Sourcing $RABBITMQ_CONF_ENV_FILE: ~ts", [ConfEnvFile],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
%% The script below sources the `CONF_ENV_FILE` file, then it shows a
%% marker line and all environment variables.
@@ -1530,8 +1654,9 @@ do_load_conf_env_file(#{os_type := {unix, _}} = Context, Sh, ConfEnvFile) ->
collect_conf_env_file_output(Context, Port, Marker, <<>>);
do_load_conf_env_file(#{os_type := {win32, _}} = Context, Cmd, ConfEnvFile) ->
%% rabbitmq/rabbitmq-common#392
- rabbit_log_prelaunch:debug(
- "Executing $RABBITMQ_CONF_ENV_FILE: ~ts", [ConfEnvFile]),
+ ?LOG_DEBUG(
+ "Executing $RABBITMQ_CONF_ENV_FILE: ~ts", [ConfEnvFile],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
%% The script below executes the `CONF_ENV_FILE` file, then it shows a
%% marker line and all environment variables.
@@ -1592,17 +1717,20 @@ collect_conf_env_file_output(Context, Port, Marker, Output) ->
end.
post_port_cmd_output(#{os_type := {OSType, _}}, Output, ExitStatus) ->
- rabbit_log_prelaunch:debug(
- "$RABBITMQ_CONF_ENV_FILE exit status: ~b",
- [ExitStatus]),
+ ?LOG_DEBUG(
+ "$RABBITMQ_CONF_ENV_FILE exit status: ~b",
+ [ExitStatus],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
DecodedOutput = unicode:characters_to_list(Output),
LineSep = case OSType of
win32 -> "\r\n";
_ -> "\n"
end,
Lines = string:split(string:trim(DecodedOutput), LineSep, all),
- rabbit_log_prelaunch:debug("$RABBITMQ_CONF_ENV_FILE output:"),
- [rabbit_log_prelaunch:debug(" ~ts", [Line]) || Line <- Lines],
+ ?LOG_DEBUG(
+ "$RABBITMQ_CONF_ENV_FILE output:~n~ts",
+ [string:join([io_lib:format(" ~ts", [Line]) || Line <- Lines], "\n")],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
Lines.
parse_conf_env_file_output(Context, _, []) ->
@@ -1622,9 +1750,10 @@ parse_conf_env_file_output1(Context, Lines) ->
IsSet = var_is_set(Var),
case IsUsed andalso not IsSet of
true ->
- rabbit_log_prelaunch:debug(
- "$RABBITMQ_CONF_ENV_FILE: re-exporting variable $~s",
- [Var]),
+ ?LOG_DEBUG(
+ "$RABBITMQ_CONF_ENV_FILE: re-exporting variable $~s",
+ [Var],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
os:putenv(Var, maps:get(Var, Vars));
false ->
ok
@@ -1650,31 +1779,56 @@ parse_conf_env_file_output2([Line | Lines], Vars) ->
parse_conf_env_file_output2(Lines1, Vars1);
_ ->
%% Parsing failed somehow.
- rabbit_log_prelaunch:warning(
- "Failed to parse $RABBITMQ_CONF_ENV_FILE output: ~p",
- [Line]),
+ ?LOG_WARNING(
+ "Failed to parse $RABBITMQ_CONF_ENV_FILE output: ~p",
+ [Line],
+ #{domain => ?RMQLOG_DOMAIN_PRELAUNCH}),
#{}
end
end.
is_sh_set_x_output(Line) ->
- re:run(Line, "^\\++ ", [{capture, none}]) =:= match.
+ UnicodeLine = unicode:characters_to_binary(Line),
+ re:run(UnicodeLine, "^\\++ ", [{capture, none}]) =:= match.
is_sh_function(_, []) ->
false;
is_sh_function(Line, Lines) ->
- re:run(Line, "\\s\\(\\)\\s*$", [{capture, none}]) =:= match
+ UnicodeLine1 = unicode:characters_to_binary(Line),
+ UnicodeLine2 = unicode:characters_to_binary(hd(Lines)),
+ re:run(UnicodeLine1, "\\s\\(\\)\\s*$", [{capture, none}]) =:= match
andalso
- re:run(hd(Lines), "^\\s*\\{\\s*$", [{capture, none}]) =:= match.
+ re:run(UnicodeLine2, "^\\s*\\{\\s*$", [{capture, none}]) =:= match.
-parse_sh_literal("'" ++ SingleQuoted, Lines, Literal) ->
+parse_sh_literal([$' | SingleQuoted], Lines, Literal) ->
parse_single_quoted_literal(SingleQuoted, Lines, Literal);
-parse_sh_literal("\"" ++ DoubleQuoted, Lines, Literal) ->
+parse_sh_literal([$" | DoubleQuoted], Lines, Literal) ->
parse_double_quoted_literal(DoubleQuoted, Lines, Literal);
-parse_sh_literal("$'" ++ DollarSingleQuoted, Lines, Literal) ->
+parse_sh_literal([$$, $' | DollarSingleQuoted], Lines, Literal) ->
parse_dollar_single_quoted_literal(DollarSingleQuoted, Lines, Literal);
+parse_sh_literal([], Lines, Literal) ->
+ %% We reached the end of the literal.
+ {lists:reverse(Literal), Lines};
parse_sh_literal(Unquoted, Lines, Literal) ->
- {lists:reverse(Literal) ++ Unquoted, Lines}.
+ parse_unquoted_literal(Unquoted, Lines, Literal).
+
+parse_unquoted_literal([$\\], [Line | Lines], Literal) ->
+ %% The newline character is escaped: it means line continuation.
+ parse_unquoted_literal(Line, Lines, Literal);
+parse_unquoted_literal([$\\, C | Rest], Lines, Literal) ->
+ %% This is an escaped character, so we "eat" the two characters but append
+ %% only the escaped one.
+ parse_unquoted_literal(Rest, Lines, [C | Literal]);
+parse_unquoted_literal([C | _] = Rest, Lines, Literal)
+ when C =:= $' orelse C =:= $" ->
+ %% We reached the end of the unquoted literal and the beginning of a quoted
+ %% literal. Both are concatenated.
+ parse_sh_literal(Rest, Lines, Literal);
+parse_unquoted_literal([C | Rest], Lines, Literal) ->
+ parse_unquoted_literal(Rest, Lines, [C | Literal]);
+parse_unquoted_literal([], Lines, Literal) ->
+ %% We reached the end of the unquoted literal.
+ parse_sh_literal([], Lines, Literal).
parse_single_quoted_literal([$' | Rest], Lines, Literal) ->
%% We reached the closing single quote.
@@ -1687,6 +1841,14 @@ parse_single_quoted_literal([], [Line | Lines], Literal) ->
parse_single_quoted_literal([C | Rest], Lines, Literal) ->
parse_single_quoted_literal(Rest, Lines, [C | Literal]).
+parse_double_quoted_literal([$\\], [Line | Lines], Literal) ->
+ %% The newline character is escaped: it means line continuation.
+ parse_double_quoted_literal(Line, Lines, Literal);
+parse_double_quoted_literal([$\\, C | Rest], Lines, Literal)
+ when C =:= $$ orelse C =:= $` orelse C =:= $" orelse C =:= $\\ ->
+ %% This is an escaped character, so we "eat" the two characters but append
+ %% only the escaped one.
+ parse_double_quoted_literal(Rest, Lines, [C | Literal]);
parse_double_quoted_literal([$" | Rest], Lines, Literal) ->
%% We reached the closing double quote.
parse_sh_literal(Rest, Lines, Literal);
@@ -1698,6 +1860,59 @@ parse_double_quoted_literal([], [Line | Lines], Literal) ->
parse_double_quoted_literal([C | Rest], Lines, Literal) ->
parse_double_quoted_literal(Rest, Lines, [C | Literal]).
+-define(IS_OCTAL(C), C >= $0 andalso C < $8).
+-define(IS_HEX(C),
+ (C >= $0 andalso C =< $9) orelse
+ (C >= $a andalso C =< $f) orelse
+ (C >= $A andalso C =< $F)).
+
+parse_dollar_single_quoted_literal([$\\, C1, C2, C3 | Rest], Lines, Literal)
+ when ?IS_OCTAL(C1) andalso ?IS_OCTAL(C2) andalso ?IS_OCTAL(C3) ->
+ %% An octal-based escaped character.
+ C = octal_to_character([C1, C2, C3]),
+ parse_dollar_single_quoted_literal(Rest, Lines, [C | Literal]);
+parse_dollar_single_quoted_literal([$\\, $x, C1, C2 | Rest], Lines, Literal)
+ when ?IS_HEX(C1) andalso ?IS_HEX(C2) ->
+ %% A hex-based escaped character.
+ C = hex_to_character([C1, C2]),
+ parse_dollar_single_quoted_literal(Rest, Lines, [C | Literal]);
+parse_dollar_single_quoted_literal([$\\, $u,
+ C1, C2, C3, C4 | Rest],
+ Lines, Literal)
+ when ?IS_HEX(C1) andalso ?IS_HEX(C2) andalso
+ ?IS_HEX(C3) andalso ?IS_HEX(C4) ->
+ %% A hex-based escaped character.
+ C = hex_to_character([C1, C2, C3, C4]),
+ parse_dollar_single_quoted_literal(Rest, Lines, [C | Literal]);
+parse_dollar_single_quoted_literal([$\\, $U,
+ C1, C2, C3, C4,
+ C5, C6, C7, C8 | Rest],
+ Lines, Literal)
+ when ?IS_HEX(C1) andalso ?IS_HEX(C2) andalso
+ ?IS_HEX(C3) andalso ?IS_HEX(C4) andalso
+ ?IS_HEX(C5) andalso ?IS_HEX(C6) andalso
+ ?IS_HEX(C7) andalso ?IS_HEX(C8) ->
+ %% A hex-based escaped character.
+ C = hex_to_character([C1, C2, C3, C4, C5, C6, C7, C8]),
+ parse_dollar_single_quoted_literal(Rest, Lines, [C | Literal]);
+parse_dollar_single_quoted_literal([$\\, C1 | Rest], Lines, Literal)
+ when C1 =:= $a orelse
+ C1 =:= $b orelse
+ C1 =:= $e orelse
+ C1 =:= $E orelse
+ C1 =:= $f orelse
+ C1 =:= $n orelse
+ C1 =:= $r orelse
+ C1 =:= $t orelse
+ C1 =:= $v orelse
+ C1 =:= $\\ orelse
+ C1 =:= $' orelse
+ C1 =:= $" orelse
+ C1 =:= $? ->
+ %% This is an escaped character, so we "eat" the two characters but append
+ %% only the escaped one.
+ C = esc_to_character(C1),
+ parse_dollar_single_quoted_literal(Rest, Lines, [C | Literal]);
parse_dollar_single_quoted_literal([$'], Lines, Literal) ->
%% We reached the closing single quote.
{lists:reverse(Literal), Lines};
@@ -1709,6 +1924,37 @@ parse_dollar_single_quoted_literal([], [Line | Lines], Literal) ->
parse_dollar_single_quoted_literal([C | Rest], Lines, Literal) ->
parse_dollar_single_quoted_literal(Rest, Lines, [C | Literal]).
+octal_to_character(List) ->
+ octal_to_character(List, 0).
+
+octal_to_character([D | Rest], C) when ?IS_OCTAL(D) ->
+ octal_to_character(Rest, C * 8 + D - $0);
+octal_to_character([], C) ->
+ C.
+
+hex_to_character(List) ->
+ hex_to_character(List, 0).
+
+hex_to_character([D | Rest], C) ->
+ hex_to_character(Rest, C * 16 + hex_to_int(D));
+hex_to_character([], C) ->
+ C.
+
+hex_to_int(C) when C >= $0 andalso C =< $9 -> C - $0;
+hex_to_int(C) when C >= $a andalso C =< $f -> 10 + C - $a;
+hex_to_int(C) when C >= $A andalso C =< $F -> 10 + C - $A.
+
+esc_to_character($a) -> 7; % Bell
+esc_to_character($b) -> 8; % Backspace
+esc_to_character($e) -> 27; % Esc
+esc_to_character($E) -> 27; % Esc
+esc_to_character($f) -> 12; % Form feed
+esc_to_character($n) -> $\n; % Newline
+esc_to_character($r) -> 13; % Carriage return
+esc_to_character($t) -> 9; % Horizontal tab
+esc_to_character($v) -> 11; % Vertical tab
+esc_to_character(C) -> C.
+
skip_sh_function(["}" | Lines], Vars) ->
parse_conf_env_file_output2(Lines, Vars);
skip_sh_function([_ | Lines], Vars) ->
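
Among the additions above, has_var_been_overridden/1,2 reports whether a context value was taken from the environment rather than computed as a default. A minimal sketch using the default_user key introduced in this change (handle_custom_default_user/0 is a placeholder for caller code):

    %% true when RABBITMQ_DEFAULT_USER (or its prefixed variant) was set,
    %% false when the context fell back to the built-in default.
    case rabbit_env:has_var_been_overridden(default_user) of
        true  -> handle_custom_default_user();
        false -> ok
    end.
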
diff --git a/deps/rabbit_common/src/rabbit_error_logger_handler.erl b/deps/rabbit_common/src/rabbit_error_logger_handler.erl
index 714790a449..8ebadd6e85 100644
--- a/deps/rabbit_common/src/rabbit_error_logger_handler.erl
+++ b/deps/rabbit_common/src/rabbit_error_logger_handler.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_error_logger_handler).
diff --git a/deps/rabbit_common/src/rabbit_event.erl b/deps/rabbit_common/src/rabbit_event.erl
index 152335958a..99e03e9c67 100644
--- a/deps/rabbit_common/src/rabbit_event.erl
+++ b/deps/rabbit_common/src/rabbit_event.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_event).
diff --git a/deps/rabbit_common/src/rabbit_exchange_type.erl b/deps/rabbit_common/src/rabbit_exchange_type.erl
index ebd5cadbdb..2c2f180779 100644
--- a/deps/rabbit_common/src/rabbit_exchange_type.erl
+++ b/deps/rabbit_common/src/rabbit_exchange_type.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_exchange_type).
diff --git a/deps/rabbit_common/src/rabbit_heartbeat.erl b/deps/rabbit_common/src/rabbit_heartbeat.erl
index 8dbc7f3887..4e99285706 100644
--- a/deps/rabbit_common/src/rabbit_heartbeat.erl
+++ b/deps/rabbit_common/src/rabbit_heartbeat.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_heartbeat).
diff --git a/deps/rabbit_common/src/rabbit_json.erl b/deps/rabbit_common/src/rabbit_json.erl
index a10569135b..484512efc6 100644
--- a/deps/rabbit_common/src/rabbit_json.erl
+++ b/deps/rabbit_common/src/rabbit_json.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_json).
diff --git a/deps/rabbit_common/src/rabbit_log.erl b/deps/rabbit_common/src/rabbit_log.erl
index 22b4619d1c..d774e6dcc2 100644
--- a/deps/rabbit_common/src/rabbit_log.erl
+++ b/deps/rabbit_common/src/rabbit_log.erl
@@ -2,12 +2,12 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_log).
--export([log/2, log/3, log/4]).
+-export([log/3, log/4]).
-export([debug/1, debug/2, debug/3,
info/1, info/2, info/3,
notice/1, notice/2, notice/3,
@@ -17,23 +17,14 @@
alert/1, alert/2, alert/3,
emergency/1, emergency/2, emergency/3,
none/1, none/2, none/3]).
--export([make_internal_sink_name/1]).
--include("rabbit_log.hrl").
+-include("logging.hrl").
+
+-compile({no_auto_import, [error/2, error/3]}).
+
%%----------------------------------------------------------------------------
--type category() :: channel |
- connection |
- federation |
- feature_flags |
- ldap |
- mirroring |
- osiris |
- prelaunch |
- queue |
- ra |
- shovel |
- upgrade.
+-type category() :: atom().
-spec debug(string()) -> 'ok'.
-spec debug(string(), [any()]) -> 'ok'.
@@ -65,100 +56,63 @@
%%----------------------------------------------------------------------------
--spec log(category(), lager:log_level(), string()) -> 'ok'.
+-spec log(category(), logger:level(), string()) -> 'ok'.
log(Category, Level, Fmt) -> log(Category, Level, Fmt, []).
--spec log(category(), lager:log_level(), string(), [any()]) -> 'ok'.
+-spec log(category(), logger:level(), string(), [any()]) -> 'ok'.
+log(default, Level, Fmt, Args) when is_list(Args) ->
+ logger:log(Level, Fmt, Args, #{domain => ?RMQLOG_DOMAIN_GLOBAL});
log(Category, Level, Fmt, Args) when is_list(Args) ->
- Sink = case Category of
- default -> ?LAGER_SINK;
- _ -> make_internal_sink_name(Category)
- end,
- lager:log(Sink, Level, self(), Fmt, Args).
-
-%% logger(3) handler.
-log(#{level := Level,
- msg := Msg,
- meta := #{pid := Pid}} = _LogEvent,
- _Config) ->
- case Msg of
- {report, #{label := {error_logger, _}}} ->
- %% Avoid recursive loop.
- ok;
- {report, #{label := {application_controller, progress}}} ->
- %% Already logged by Lager.
- ok;
- {report, #{label := {supervisor, progress}}} ->
- %% Already logged by Lager.
- ok;
- {report, #{report := Report}} ->
- %% FIXME: Is this code reached?
- error_logger:info_report(Report);
- {report, #{format := Format, args := Args}} when is_list(Format) ->
- lager:log(?LAGER_SINK, Level, Pid, Format, Args);
- {string, String} ->
- lager:log(?LAGER_SINK, Level, Pid, "~ts", [String]);
- {Format, Args} when is_list(Format) ->
- lager:log(?LAGER_SINK, Level, Pid, Format, Args)
- end.
-
-make_internal_sink_name(channel) -> rabbit_log_channel_lager_event;
-make_internal_sink_name(connection) -> rabbit_log_connection_lager_event;
-make_internal_sink_name(default) -> rabbit_log_lager_event;
-make_internal_sink_name(feature_flags) -> rabbit_log_feature_flags_lager_event;
-make_internal_sink_name(federation) -> rabbit_log_federation_lager_event;
-make_internal_sink_name(ldap) -> rabbit_log_ldap_lager_event;
-make_internal_sink_name(mirroring) -> rabbit_log_mirroring_lager_event;
-make_internal_sink_name(osiris) -> rabbit_log_osiris_lager_event;
-make_internal_sink_name(prelaunch) -> rabbit_log_prelaunch_lager_event;
-make_internal_sink_name(queue) -> rabbit_log_queue_lager_event;
-make_internal_sink_name(ra) -> rabbit_log_ra_lager_event;
-make_internal_sink_name(shovel) -> rabbit_log_shovel_lager_event;
-make_internal_sink_name(upgrade) -> rabbit_log_upgrade_lager_event;
-make_internal_sink_name(Category) ->
- erlang:error({unknown_category, Category}).
+ logger:log(Level, Fmt, Args, #{domain => ?DEFINE_RMQLOG_DOMAIN(Category)}).
debug(Format) -> debug(Format, []).
debug(Format, Args) -> debug(self(), Format, Args).
-debug(Metadata, Format, Args) ->
- lager:log(?LAGER_SINK, debug, Metadata, Format, Args).
+debug(Pid, Format, Args) ->
+ logger:debug(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_GLOBAL}).
info(Format) -> info(Format, []).
info(Format, Args) -> info(self(), Format, Args).
-info(Metadata, Format, Args) ->
- lager:log(?LAGER_SINK, info, Metadata, Format, Args).
+info(Pid, Format, Args) ->
+ logger:info(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_GLOBAL}).
notice(Format) -> notice(Format, []).
notice(Format, Args) -> notice(self(), Format, Args).
-notice(Metadata, Format, Args) ->
- lager:log(?LAGER_SINK, notice, Metadata, Format, Args).
+notice(Pid, Format, Args) ->
+ logger:notice(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_GLOBAL}).
warning(Format) -> warning(Format, []).
warning(Format, Args) -> warning(self(), Format, Args).
-warning(Metadata, Format, Args) ->
- lager:log(?LAGER_SINK, warning, Metadata, Format, Args).
+warning(Pid, Format, Args) ->
+ logger:warning(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_GLOBAL}).
-error(Format) -> ?MODULE:error(Format, []).
-error(Format, Args) -> ?MODULE:error(self(), Format, Args).
-error(Metadata, Format, Args) ->
- lager:log(?LAGER_SINK, error, Metadata, Format, Args).
+error(Format) -> error(Format, []).
+error(Format, Args) -> error(self(), Format, Args).
+error(Pid, Format, Args) ->
+ logger:error(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_GLOBAL}).
critical(Format) -> critical(Format, []).
critical(Format, Args) -> critical(self(), Format, Args).
-critical(Metadata, Format, Args) ->
- lager:log(?LAGER_SINK, critical, Metadata, Format, Args).
+critical(Pid, Format, Args) ->
+ logger:critical(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_GLOBAL}).
alert(Format) -> alert(Format, []).
alert(Format, Args) -> alert(self(), Format, Args).
-alert(Metadata, Format, Args) ->
- lager:log(?LAGER_SINK, alert, Metadata, Format, Args).
+alert(Pid, Format, Args) ->
+ logger:alert(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_GLOBAL}).
emergency(Format) -> emergency(Format, []).
emergency(Format, Args) -> emergency(self(), Format, Args).
-emergency(Metadata, Format, Args) ->
- lager:log(?LAGER_SINK, emergency, Metadata, Format, Args).
+emergency(Pid, Format, Args) ->
+ logger:emergency(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_GLOBAL}).
-none(Format) -> none(Format, []).
-none(Format, Args) -> none(self(), Format, Args).
-none(Metadata, Format, Args) ->
- lager:log(?LAGER_SINK, none, Metadata, Format, Args).
+none(_Format) -> ok.
+none(_Format, _Args) -> ok.
+none(_Pid, _Format, _Args) -> ok.
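
With the Lager sinks removed, rabbit_log is now a thin wrapper over the standard OTP logger. A rough sketch of the equivalence, assuming rabbit_common's logging.hrl is on the include path:

    -include_lib("rabbit_common/include/logging.hrl").

    log_low_disk(Bytes) ->
        %% Both calls go through the same logger handlers and carry
        %% the global RabbitMQ domain in their metadata.
        rabbit_log:warning("Disk free space low: ~p bytes", [Bytes]),
        logger:warning("Disk free space low: ~p bytes", [Bytes],
                       #{pid => self(), domain => ?RMQLOG_DOMAIN_GLOBAL}).
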
diff --git a/deps/rabbit_common/src/rabbit_log_osiris_shim.erl b/deps/rabbit_common/src/rabbit_log_osiris_shim.erl
deleted file mode 100644
index 09d6a63431..0000000000
--- a/deps/rabbit_common/src/rabbit_log_osiris_shim.erl
+++ /dev/null
@@ -1,26 +0,0 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at https://www.mozilla.org/MPL/
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
-%%
-
--module(rabbit_log_osiris_shim).
-
-%% just a shim to redirect logs from ra to rabbit_log
-
--export([log/4]).
-
--spec log(lager:log_level(), string(), [any()], _) -> ok.
-log(Level, Format, Args, _Meta) ->
- rabbit_log:log(osiris, Level, Format, Args),
- ok.
diff --git a/deps/rabbit_common/src/rabbit_log_ra_shim.erl b/deps/rabbit_common/src/rabbit_log_ra_shim.erl
deleted file mode 100644
index 3d35ff6a07..0000000000
--- a/deps/rabbit_common/src/rabbit_log_ra_shim.erl
+++ /dev/null
@@ -1,16 +0,0 @@
-%% This Source Code Form is subject to the terms of the Mozilla Public
-%% License, v. 2.0. If a copy of the MPL was not distributed with this
-%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
-%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
-%%
-
--module(rabbit_log_ra_shim).
-
-%% just a shim to redirect logs from ra to rabbit_log
-
--export([log/4]).
-
-log(Level, Format, Args, _Meta) ->
- rabbit_log:log(ra, Level, Format, Args),
- ok.
diff --git a/deps/rabbit_common/src/rabbit_misc.erl b/deps/rabbit_common/src/rabbit_misc.erl
index c5fd86dcbb..112647c581 100644
--- a/deps/rabbit_common/src/rabbit_misc.erl
+++ b/deps/rabbit_common/src/rabbit_misc.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_misc).
@@ -327,7 +327,7 @@ assert_args_equivalence1(Orig, New, Name, Key) ->
%%
%% Fixes rabbitmq/rabbitmq-common#341
%%
-assert_field_equivalence(_Orig, _Orig, _Name, _Key) ->
+assert_field_equivalence(Current, Current, _Name, _Key) ->
ok;
assert_field_equivalence(undefined, {longstr, <<"classic">>}, _Name, <<"x-queue-type">>) ->
ok;
diff --git a/deps/rabbit_common/src/rabbit_msg_store_index.erl b/deps/rabbit_common/src/rabbit_msg_store_index.erl
index ce9abe97a6..e51ed43182 100644
--- a/deps/rabbit_common/src/rabbit_msg_store_index.erl
+++ b/deps/rabbit_common/src/rabbit_msg_store_index.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_msg_store_index).
diff --git a/deps/rabbit_common/src/rabbit_net.erl b/deps/rabbit_common/src/rabbit_net.erl
index 7685687ff0..865ca6f253 100644
--- a/deps/rabbit_common/src/rabbit_net.erl
+++ b/deps/rabbit_common/src/rabbit_net.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_net).
@@ -15,7 +15,7 @@
setopts/2, send/2, close/1, fast_close/1, sockname/1, peername/1,
peercert/1, connection_string/2, socket_ends/2, is_loopback/1,
tcp_host/1, unwrap_socket/1, maybe_get_proxy_socket/1,
- hostname/0, getifaddrs/0]).
+ hostname/0, getifaddrs/0, proxy_ssl_info/2]).
%%---------------------------------------------------------------------------
@@ -34,6 +34,7 @@
% -type host_or_ip() :: binary() | inet:ip_address().
-spec is_ssl(socket()) -> boolean().
-spec ssl_info(socket()) -> 'nossl' | ok_val_or_error([{atom(), any()}]).
+-spec proxy_ssl_info(socket(), ranch_proxy:proxy_socket()) -> 'nossl' | ok_val_or_error([{atom(), any()}]).
-spec controlling_process(socket(), pid()) -> ok_or_any_error().
-spec getstat(socket(), [stat_option()]) ->
ok_val_or_error([{stat_option(), integer()}]).
@@ -98,6 +99,19 @@ ssl_info(Sock) when ?IS_SSL(Sock) ->
ssl_info(_Sock) ->
nossl.
+proxy_ssl_info(Sock, {rabbit_proxy_socket, _, ProxyInfo}) ->
+ ConnInfo = ranch_proxy_header:to_connection_info(ProxyInfo),
+ case lists:keymember(protocol, 1, ConnInfo) andalso
+ lists:keymember(selected_cipher_suite, 1, ConnInfo) of
+ true ->
+ {ok, ConnInfo};
+ false ->
+ ssl_info(Sock)
+ end;
+proxy_ssl_info(Sock, _) ->
+ ssl_info(Sock).
+
+
controlling_process(Sock, Pid) when ?IS_SSL(Sock) ->
ssl:controlling_process(Sock, Pid);
controlling_process(Sock, Pid) when is_port(Sock) ->
@@ -228,19 +242,15 @@ socket_ends(Sock, Direction) when ?IS_SSL(Sock);
{_, {error, _Reason} = Error} ->
Error
end;
-socket_ends({rabbit_proxy_socket, CSocket, ProxyInfo}, Direction = inbound) ->
+socket_ends({rabbit_proxy_socket, _, ProxyInfo}, _) ->
#{
- src_address := FromAddress,
- src_port := FromPort
- } = ProxyInfo,
- {_From, To} = sock_funs(Direction),
- case To(CSocket) of
- {ok, {ToAddress, ToPort}} ->
- {ok, {rdns(FromAddress), FromPort,
- rdns(ToAddress), ToPort}};
- {error, _Reason} = Error ->
- Error
- end.
+ src_address := FromAddress,
+ src_port := FromPort,
+ dest_address := ToAddress,
+ dest_port := ToPort
+ } = ProxyInfo,
+ {ok, {rdns(FromAddress), FromPort,
+ rdns(ToAddress), ToPort}}.
maybe_ntoab(Addr) when is_tuple(Addr) -> rabbit_misc:ntoab(Addr);
maybe_ntoab(Host) -> Host.
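
The new proxy_ssl_info/2 prefers TLS details carried in a PROXY protocol header and falls back to querying the socket otherwise. A small sketch of the intended call pattern (Sock and ProxySock are placeholders for a connection's socket and its proxy socket, if any):

    case rabbit_net:proxy_ssl_info(Sock, ProxySock) of
        {ok, Info} -> proplists:get_value(protocol, Info);
        nossl      -> undefined;
        {error, _} -> undefined
    end.
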
diff --git a/deps/rabbit_common/src/rabbit_nodes_common.erl b/deps/rabbit_common/src/rabbit_nodes_common.erl
index 7e87ce2ea4..dc00eaa300 100644
--- a/deps/rabbit_common/src/rabbit_nodes_common.erl
+++ b/deps/rabbit_common/src/rabbit_nodes_common.erl
@@ -2,12 +2,13 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_nodes_common).
--define(EPMD_TIMEOUT, 30000).
+-define(EPMD_OPERATION_TIMEOUT, 6000).
+-define(NAME_LOOKUP_ATTEMPTS, 10).
-define(TCP_DIAGNOSTIC_TIMEOUT, 5000).
-define(ERROR_LOGGER_HANDLER, rabbit_error_logger_handler).
@@ -17,16 +18,20 @@
%% API
%%
--export([make/1, parts/1, names/1, name_type/1, ensure_epmd/0, is_running/2, is_process_running/2]).
+-export([make/1, make/2, parts/1, names/1, name_type/1, ensure_epmd/0, is_running/2, is_process_running/2]).
-export([cookie_hash/0, epmd_port/0, diagnostics/1]).
-spec make({string(), string()} | string()) -> node().
+-spec make(string(), string()) -> node().
-spec parts(node() | string()) -> {string(), string()}.
-spec ensure_epmd() -> 'ok'.
-spec epmd_port() -> string().
-spec names(string()) ->
rabbit_types:ok_or_error2([{string(), integer()}], term()).
+-spec epmd_names(string()) ->
+ rabbit_types:ok_or_error2([{string(), integer()}], term()).
+
-spec diagnostics([node()]) -> string().
-spec cookie_hash() -> string().
@@ -36,12 +41,31 @@
%% Therefore we disable this specific warning.
-dialyzer({nowarn_function, diagnostics_node/1}).
+%% In some cases hostname resolution can take a moment.
+%% In K8s, for example, *.nodes.default needs a few seconds.
+
names(Hostname) ->
+ names(Hostname, ?NAME_LOOKUP_ATTEMPTS).
+
+names(Hostname, 0) ->
+ epmd_names(Hostname);
+names(Hostname, RetriesLeft) ->
+ rabbit_log:debug("Getting epmd names for hostname '~s', ~b retries left",
+ [Hostname, RetriesLeft]),
+ case catch epmd_names(Hostname) of
+ {ok, R } -> {ok, R};
+ noport ->
+ names(Hostname, RetriesLeft - 1);
+ {error, _} ->
+ names(Hostname, RetriesLeft - 1)
+ end.
+
+epmd_names(Hostname) ->
Self = self(),
Ref = make_ref(),
{Pid, MRef} = spawn_monitor(
fun () -> Self ! {Ref, net_adm:names(Hostname)} end),
- _ = timer:exit_after(?EPMD_TIMEOUT, Pid, timeout),
+ _ = timer:exit_after(?EPMD_OPERATION_TIMEOUT, Pid, timeout),
receive
{Ref, Names} -> erlang:demonitor(MRef, [flush]),
Names;
@@ -54,6 +78,8 @@ make({Prefix, Suffix}) -> rabbit_data_coercion:to_atom(
rabbit_data_coercion:to_list(Suffix)]));
make(NodeStr) -> make(parts(NodeStr)).
+make(Prefix, Suffix) -> make({Prefix, Suffix}).
+
parts(Node) when is_atom(Node) ->
parts(atom_to_list(Node));
parts(NodeStr) ->
@@ -90,7 +116,17 @@ ensure_epmd() ->
port_shutdown_loop(Port) ->
receive
{Port, {exit_status, _Rc}} -> ok;
- {Port, _} -> port_shutdown_loop(Port)
+ {Port, closed} -> ok;
+ {Port, {data, _}} -> port_shutdown_loop(Port);
+ {'EXIT', Port, Reason} ->
+ rabbit_log:error("Failed to start a one-off Erlang VM to keep epmd alive: ~p", [Reason])
+ after 15000 ->
+ %% ensure the port is closed
+ Port ! {self(), close},
+ receive
+ {Port, closed } -> ok
+ after 5000 -> ok
+ end
end.
cookie_hash() ->
@@ -115,7 +151,7 @@ verbose_erlang_distribution(false) ->
current_node_details() ->
[{"~nCurrent node details:~n * node name: ~w", [node()]},
case init:get_argument(home) of
- {ok, [[Home]]} -> {" * effective user's home directory: ~s", [Home]};
+ {ok, [[Home]]} -> {" * effective user's home directory: ~s", [filename:absname(Home)]};
Other -> {" * effective user has no home directory: ~p", [Other]}
end,
{" * Erlang cookie hash: ~s", [cookie_hash()]}].
@@ -190,7 +226,7 @@ connection_succeeded_diagnostics() ->
case gen_event:call(error_logger, ?ERROR_LOGGER_HANDLER, get_connection_report) of
[] ->
[{" * TCP connection succeeded but Erlang distribution failed ~n"
- " * suggestion: check if the Erlang cookie identical for all server nodes and CLI tools~n"
+ " * suggestion: check if the Erlang cookie is identical for all server nodes and CLI tools~n"
" * suggestion: check if all server nodes and CLI tools use consistent hostnames when addressing each other~n"
" * suggestion: check if inter-node connections may be configured to use TLS. If so, all nodes and CLI tools must do that~n"
" * suggestion: see the CLI, clustering and networking guides on https://rabbitmq.com/documentation.html to learn more~n", []}];
diff --git a/deps/rabbit_common/src/rabbit_password_hashing.erl b/deps/rabbit_common/src/rabbit_password_hashing.erl
index 53d4d04e10..035936361c 100644
--- a/deps/rabbit_common/src/rabbit_password_hashing.erl
+++ b/deps/rabbit_common/src/rabbit_password_hashing.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_password_hashing).
diff --git a/deps/rabbit_common/src/rabbit_pbe.erl b/deps/rabbit_common/src/rabbit_pbe.erl
index d999d520a4..7ad8af5035 100644
--- a/deps/rabbit_common/src/rabbit_pbe.erl
+++ b/deps/rabbit_common/src/rabbit_pbe.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_pbe).
diff --git a/deps/rabbit_common/src/rabbit_peer_discovery_backend.erl b/deps/rabbit_common/src/rabbit_peer_discovery_backend.erl
index af3683e72b..14132ae528 100644
--- a/deps/rabbit_common/src/rabbit_peer_discovery_backend.erl
+++ b/deps/rabbit_common/src/rabbit_peer_discovery_backend.erl
@@ -2,7 +2,7 @@
%% from rabbitmq-autocluster by Gavin Roy.
%%
%% Copyright (c) 2014-2015 AWeber Communications
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates
%% All rights reserved.
%%
%% Redistribution and use in source and binary forms, with or without modification,
@@ -32,7 +32,7 @@
%%
%% The Initial Developer of the Original Code is AWeber Communications.
%% Copyright (c) 2014-2015 AWeber Communications
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_peer_discovery_backend).
@@ -54,6 +54,6 @@
-callback lock(Node :: atom()) -> {ok, Data :: term()} | not_supported | {error, Reason :: string()}.
--callback unlock(Data :: term()) -> ok | {error, Reason :: string()}.
+-callback unlock(Data :: term()) -> ok.
-optional_callbacks([init/0]).
diff --git a/deps/rabbit_common/src/rabbit_policy_validator.erl b/deps/rabbit_common/src/rabbit_policy_validator.erl
index 32b7a44fd9..f78c5af7a6 100644
--- a/deps/rabbit_common/src/rabbit_policy_validator.erl
+++ b/deps/rabbit_common/src/rabbit_policy_validator.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_policy_validator).
diff --git a/deps/rabbit_common/src/rabbit_queue_collector.erl b/deps/rabbit_common/src/rabbit_queue_collector.erl
index ffc94ba6fb..1781d83e71 100644
--- a/deps/rabbit_common/src/rabbit_queue_collector.erl
+++ b/deps/rabbit_common/src/rabbit_queue_collector.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_queue_collector).
diff --git a/deps/rabbit_common/src/rabbit_registry.erl b/deps/rabbit_common/src/rabbit_registry.erl
index e68574828c..1731b86add 100644
--- a/deps/rabbit_common/src/rabbit_registry.erl
+++ b/deps/rabbit_common/src/rabbit_registry.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_registry).
diff --git a/deps/rabbit_common/src/rabbit_registry_class.erl b/deps/rabbit_common/src/rabbit_registry_class.erl
index c302dc2311..b651060619 100644
--- a/deps/rabbit_common/src/rabbit_registry_class.erl
+++ b/deps/rabbit_common/src/rabbit_registry_class.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_registry_class).
diff --git a/deps/rabbit_common/src/rabbit_resource_monitor_misc.erl b/deps/rabbit_common/src/rabbit_resource_monitor_misc.erl
index 6661706998..ea6a80edcc 100644
--- a/deps/rabbit_common/src/rabbit_resource_monitor_misc.erl
+++ b/deps/rabbit_common/src/rabbit_resource_monitor_misc.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
diff --git a/deps/rabbit_common/src/rabbit_runtime.erl b/deps/rabbit_common/src/rabbit_runtime.erl
index 94a5a5fcfe..ac0ac0aef2 100644
--- a/deps/rabbit_common/src/rabbit_runtime.erl
+++ b/deps/rabbit_common/src/rabbit_runtime.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% This module provides access to runtime metrics that are exposed
@@ -56,8 +56,8 @@ msacc_stats(TimeInMs) ->
% get the full path to the erl executable used to start this VM
-spec get_erl_path() -> file:filename_all().
get_erl_path() ->
- {ok, [[Root]]} = init:get_argument(root),
- Bin = filename:join(Root, "bin"),
+ ERTSDir = rabbit_misc:format("erts-~ts", [erlang:system_info(version)]),
+ Bin = filename:join([code:root_dir(), ERTSDir, "bin"]),
case os:type() of
{win32, _} ->
filename:join(Bin, "erl.exe");
diff --git a/deps/rabbit_common/src/rabbit_runtime_parameter.erl b/deps/rabbit_common/src/rabbit_runtime_parameter.erl
index 5f9970d25d..4c3803e1e7 100644
--- a/deps/rabbit_common/src/rabbit_runtime_parameter.erl
+++ b/deps/rabbit_common/src/rabbit_runtime_parameter.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_runtime_parameter).
diff --git a/deps/rabbit_common/src/rabbit_ssl_options.erl b/deps/rabbit_common/src/rabbit_ssl_options.erl
index 4c2967df97..e5302d4420 100644
--- a/deps/rabbit_common/src/rabbit_ssl_options.erl
+++ b/deps/rabbit_common/src/rabbit_ssl_options.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_ssl_options).
@@ -43,7 +43,7 @@ make_verify_fun(Module, Function, InitialUserState) ->
Module:module_info()
catch
_:Exception ->
- rabbit_log:error("SSL verify_fun: module ~s missing: ~p~n",
+ rabbit_log:error("TLS verify_fun: module ~s missing: ~p",
[Module, Exception]),
throw({error, {invalid_verify_fun, missing_module}})
end,
@@ -66,7 +66,7 @@ make_verify_fun(Module, Function, InitialUserState) ->
Module:Function(Args)
end;
_ ->
- rabbit_log:error("SSL verify_fun: no ~s:~s/3 exported~n",
+ rabbit_log:error("TLS verify_fun: no ~s:~s/3 exported",
[Module, Function]),
throw({error, {invalid_verify_fun, function_not_exported}})
end.
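
The two hunks above only reword log messages (SSL to TLS, dropping the trailing ~n), but the surrounding make_verify_fun/3 code documents a contract: the configured module must exist and export the verify function, with a 3-arity form preferred and a 1-arity fallback called as Module:Function(Args). The sketch below shows the common 3-arity shape, assuming the usual OTP ssl verify_fun arguments of certificate, event and user state; it is illustrative and not part of the diff.

    -module(my_verify_fun).
    -export([verify/3]).

    %% Accept the chain unless OTP reports a bad certificate; pass unknown
    %% extensions through unchanged (standard OTP ssl verify_fun return values).
    verify(_Cert, {bad_cert, _Reason} = Event, _UserState) -> {fail, Event};
    verify(_Cert, {extension, _}, UserState)               -> {unknown, UserState};
    verify(_Cert, valid, UserState)                        -> {valid, UserState};
    verify(_Cert, valid_peer, UserState)                   -> {valid, UserState}.
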
diff --git a/deps/rabbit_common/src/rabbit_types.erl b/deps/rabbit_common/src/rabbit_types.erl
index c11004fdf4..0f7782aef6 100644
--- a/deps/rabbit_common/src/rabbit_types.erl
+++ b/deps/rabbit_common/src/rabbit_types.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_types).
diff --git a/deps/rabbit_common/src/rabbit_writer.erl b/deps/rabbit_common/src/rabbit_writer.erl
index 5bce50c87a..476f54ce0c 100644
--- a/deps/rabbit_common/src/rabbit_writer.erl
+++ b/deps/rabbit_common/src/rabbit_writer.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_writer).
diff --git a/deps/rabbit_common/src/supervisor2.erl b/deps/rabbit_common/src/supervisor2.erl
index 08c764d0d8..d6e8b82262 100644
--- a/deps/rabbit_common/src/supervisor2.erl
+++ b/deps/rabbit_common/src/supervisor2.erl
@@ -40,7 +40,7 @@
%% 5) normal, and {shutdown, _} exit reasons are all treated the same
%% (i.e. are regarded as normal exits)
%%
-%% All modifications are (C) 2010-2020 VMware, Inc. or its affiliates.
+%% All modifications are (C) 2010-2021 VMware, Inc. or its affiliates.
%%
%% %CopyrightBegin%
%%
diff --git a/deps/rabbit_common/src/vm_memory_monitor.erl b/deps/rabbit_common/src/vm_memory_monitor.erl
index 73b5a23b78..efb2a63776 100644
--- a/deps/rabbit_common/src/vm_memory_monitor.erl
+++ b/deps/rabbit_common/src/vm_memory_monitor.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% In practice Erlang shouldn't be allowed to grow to more than a half
@@ -32,7 +32,9 @@
get_process_memory/0,
get_process_memory/1,
get_memory_calculation_strategy/0,
- get_rss_memory/0]).
+ get_rss_memory/0,
+ interpret_limit/2
+ ]).
%% for tests
-export([parse_line_linux/1, parse_mem_limit/1]).
@@ -57,7 +59,7 @@
%%----------------------------------------------------------------------------
-type memory_calculation_strategy() :: rss | erlang | allocated.
--type vm_memory_high_watermark() :: (float() | {'absolute', integer() | string()}).
+-type vm_memory_high_watermark() :: float() | {'absolute', integer() | string()} | {'relative', float() | integer() | string()}.
-spec start_link(float()) -> rabbit_types:ok_pid_or_error().
-spec start_link(float(), fun ((any()) -> 'ok'),
fun ((any()) -> 'ok')) -> rabbit_types:ok_pid_or_error().
@@ -75,6 +77,7 @@
-spec get_rss_memory() -> non_neg_integer().
-export_type([memory_calculation_strategy/0]).
+
%%----------------------------------------------------------------------------
%% Public API
%%----------------------------------------------------------------------------
@@ -88,7 +91,7 @@ get_total_memory() ->
{error, parse_error} ->
rabbit_log:warning(
"The override value for the total memmory available is "
- "not a valid value: ~p, getting total from the system.~n",
+ "not a valid value: ~p, getting total from the system.",
[Value]),
get_total_memory_from_os()
end;
@@ -122,9 +125,11 @@ get_memory_use(bytes) ->
end};
get_memory_use(ratio) ->
{ProcessMemory, MemoryLimit} = get_cached_process_memory_and_limit(),
- case MemoryLimit > 0.0 of
- true -> ProcessMemory / MemoryLimit;
- false -> infinity
+ case MemoryLimit of
+ infinity -> 0.0;
+ Num when is_number(Num) andalso Num > 0.0 ->
+ ProcessMemory / MemoryLimit;
+ _ -> infinity
end.
%% Memory reported by erlang:memory(total) is not supposed to
@@ -247,7 +252,7 @@ get_cached_process_memory_and_limit() ->
try
gen_server:call(?MODULE, get_cached_process_memory_and_limit, infinity)
catch exit:{noproc, Error} ->
- rabbit_log:warning("Memory monitor process not yet started: ~p~n", [Error]),
+ rabbit_log:warning("Memory monitor process not yet started: ~p", [Error]),
ProcessMemory = get_process_memory_uncached(),
{ProcessMemory, infinity}
end.
@@ -302,11 +307,13 @@ get_total_memory_from_os() ->
get_total_memory(os:type())
catch _:Error:Stacktrace ->
rabbit_log:warning(
- "Failed to get total system memory: ~n~p~n~p~n",
+ "Failed to get total system memory: ~n~p~n~p",
[Error, Stacktrace]),
unknown
end.
+set_mem_limits(State, {relative, MemLimit}) ->
+ set_mem_limits(State, MemLimit);
set_mem_limits(State, MemLimit) ->
case erlang:system_info(wordsize) of
4 ->
@@ -325,7 +332,7 @@ set_mem_limits(State, MemLimit) ->
memory_limit = undefined } ->
rabbit_log:warning(
"Unknown total memory size for your OS ~p. "
- "Assuming memory size is ~p MiB (~p bytes).~n",
+ "Assuming memory size is ~p MiB (~p bytes).",
[os:type(),
trunc(?MEMORY_SIZE_FOR_UNKNOWN_OS/?ONE_MiB),
?MEMORY_SIZE_FOR_UNKNOWN_OS]);
@@ -342,7 +349,7 @@ set_mem_limits(State, MemLimit) ->
"Only ~p MiB (~p bytes) of ~p MiB (~p bytes) memory usable due to "
"limited address space.~n"
"Crashes due to memory exhaustion are possible - see~n"
- "https://www.rabbitmq.com/memory.html#address-space~n",
+ "https://www.rabbitmq.com/memory.html#address-space",
[trunc(Limit/?ONE_MiB), Limit, trunc(TotalMemory/?ONE_MiB),
TotalMemory]),
Limit;
@@ -352,7 +359,7 @@ set_mem_limits(State, MemLimit) ->
MemLim = interpret_limit(parse_mem_limit(MemLimit), UsableMemory),
rabbit_log:info(
"Memory high watermark set to ~p MiB (~p bytes)"
- " of ~p MiB (~p bytes) total~n",
+ " of ~p MiB (~p bytes) total",
[trunc(MemLim/?ONE_MiB), MemLim,
trunc(TotalMemory/?ONE_MiB), TotalMemory]
),
@@ -360,11 +367,16 @@ set_mem_limits(State, MemLimit) ->
memory_limit = MemLim,
memory_config_limit = MemLimit}).
-interpret_limit({'absolute', MemLim}, UsableMemory) ->
+
+-spec interpret_limit(vm_memory_high_watermark(), non_neg_integer()) -> non_neg_integer().
+interpret_limit({absolute, MemLim}, UsableMemory) ->
erlang:min(MemLim, UsableMemory);
+interpret_limit({relative, MemLim}, UsableMemory) ->
+ interpret_limit(MemLim, UsableMemory);
interpret_limit(MemFraction, UsableMemory) ->
trunc(MemFraction * UsableMemory).
+-spec parse_mem_limit(vm_memory_high_watermark()) -> float().
parse_mem_limit({absolute, Limit}) ->
case rabbit_resource_monitor_misc:parse_information_unit(Limit) of
{ok, ParsedLimit} -> {absolute, ParsedLimit};
@@ -372,19 +384,21 @@ parse_mem_limit({absolute, Limit}) ->
rabbit_log:error("Unable to parse vm_memory_high_watermark value ~p", [Limit]),
?DEFAULT_VM_MEMORY_HIGH_WATERMARK
end;
+parse_mem_limit({relative, MemLimit}) ->
+ parse_mem_limit(MemLimit);
parse_mem_limit(MemLimit) when is_integer(MemLimit) ->
parse_mem_limit(float(MemLimit));
parse_mem_limit(MemLimit) when is_float(MemLimit), MemLimit =< ?MAX_VM_MEMORY_HIGH_WATERMARK ->
MemLimit;
parse_mem_limit(MemLimit) when is_float(MemLimit), MemLimit > ?MAX_VM_MEMORY_HIGH_WATERMARK ->
rabbit_log:warning(
- "Memory high watermark of ~p is above the allowed maximum, falling back to ~p~n",
+ "Memory high watermark of ~p is above the allowed maximum, falling back to ~p",
[MemLimit, ?MAX_VM_MEMORY_HIGH_WATERMARK]
),
?MAX_VM_MEMORY_HIGH_WATERMARK;
parse_mem_limit(MemLimit) ->
rabbit_log:warning(
- "Memory high watermark of ~p is invalid, defaulting to ~p~n",
+ "Memory high watermark of ~p is invalid, defaulting to ~p",
[MemLimit, ?DEFAULT_VM_MEMORY_HIGH_WATERMARK]
),
?DEFAULT_VM_MEMORY_HIGH_WATERMARK.
@@ -406,7 +420,7 @@ internal_update(State0 = #state{memory_limit = MemLimit,
emit_update_info(AlarmState, MemUsed, MemLimit) ->
rabbit_log:info(
- "vm_memory_high_watermark ~p. Memory used:~p allowed:~p~n",
+ "vm_memory_high_watermark ~p. Memory used:~p allowed:~p",
[AlarmState, MemUsed, MemLimit]).
%% According to https://msdn.microsoft.com/en-us/library/aa366778(VS.85).aspx
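
The vm_memory_monitor.erl changes above add a {relative, Fraction} form to vm_memory_high_watermark(), export interpret_limit/2, and make get_memory_use(ratio) return 0.0 when the limit is infinity. Below is a minimal standalone restatement of how the three accepted watermark shapes resolve to a byte limit; it is a sketch of the interpret_limit/2 clauses shown in the hunk, not the full monitor.

    -module(watermark_sketch).
    -export([resolve_limit/2]).

    %% Resolve a configured high watermark against usable memory, in bytes.
    resolve_limit({absolute, Bytes}, Usable) when is_integer(Bytes) ->
        erlang:min(Bytes, Usable);                % an absolute limit never exceeds what the OS reports
    resolve_limit({relative, Fraction}, Usable) ->
        resolve_limit(Fraction, Usable);          % {relative, F} behaves like a bare fraction
    resolve_limit(Fraction, Usable) when is_number(Fraction) ->
        trunc(Fraction * Usable).

For example, resolve_limit({relative, 0.4}, 8 * 1024 * 1024 * 1024) and resolve_limit(0.4, 8 * 1024 * 1024 * 1024) both yield 40% of 8 GiB, while an {absolute, N} setting is capped at the usable total.
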
diff --git a/deps/rabbit_common/src/worker_pool.erl b/deps/rabbit_common/src/worker_pool.erl
index f81e924653..663ac99267 100644
--- a/deps/rabbit_common/src/worker_pool.erl
+++ b/deps/rabbit_common/src/worker_pool.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(worker_pool).
diff --git a/deps/rabbit_common/src/worker_pool_sup.erl b/deps/rabbit_common/src/worker_pool_sup.erl
index 96dbbb2357..d3f53d2e2a 100644
--- a/deps/rabbit_common/src/worker_pool_sup.erl
+++ b/deps/rabbit_common/src/worker_pool_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(worker_pool_sup).
diff --git a/deps/rabbit_common/src/worker_pool_worker.erl b/deps/rabbit_common/src/worker_pool_worker.erl
index 79436e0773..b90a878225 100644
--- a/deps/rabbit_common/src/worker_pool_worker.erl
+++ b/deps/rabbit_common/src/worker_pool_worker.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(worker_pool_worker).
diff --git a/deps/rabbit_common/test/gen_server2_test_server.erl b/deps/rabbit_common/test/gen_server2_test_server.erl
index 0d68df8f7e..0bc8d360e9 100644
--- a/deps/rabbit_common/test/gen_server2_test_server.erl
+++ b/deps/rabbit_common/test/gen_server2_test_server.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2017-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2017-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(gen_server2_test_server).
diff --git a/deps/rabbit_common/test/rabbit_env_SUITE.erl b/deps/rabbit_common/test/rabbit_env_SUITE.erl
index a881097e6b..1ca7c5ca96 100644
--- a/deps/rabbit_common/test/rabbit_env_SUITE.erl
+++ b/deps/rabbit_common/test/rabbit_env_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2019-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2019-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_env_SUITE).
@@ -28,9 +28,13 @@
check_RABBITMQ_ADVANCED_CONFIG_FILE/1,
check_RABBITMQ_CONFIG_FILE/1,
check_RABBITMQ_CONFIG_FILES/1,
+ check_RABBITMQ_DEFAULT_PASS/1,
+ check_RABBITMQ_DEFAULT_USER/1,
+ check_RABBITMQ_DEFAULT_VHOST/1,
check_RABBITMQ_DIST_PORT/1,
check_RABBITMQ_ENABLED_PLUGINS/1,
check_RABBITMQ_ENABLED_PLUGINS_FILE/1,
+ check_RABBITMQ_ERLANG_COOKIE/1,
check_RABBITMQ_FEATURE_FLAGS_FILE/1,
check_RABBITMQ_KEEP_PID_FILE_ON_EXIT/1,
check_RABBITMQ_LOG/1,
@@ -69,9 +73,13 @@ all() ->
check_RABBITMQ_ADVANCED_CONFIG_FILE,
check_RABBITMQ_CONFIG_FILE,
check_RABBITMQ_CONFIG_FILES,
+ check_RABBITMQ_DEFAULT_PASS,
+ check_RABBITMQ_DEFAULT_USER,
+ check_RABBITMQ_DEFAULT_VHOST,
check_RABBITMQ_DIST_PORT,
check_RABBITMQ_ENABLED_PLUGINS,
check_RABBITMQ_ENABLED_PLUGINS_FILE,
+ check_RABBITMQ_ERLANG_COOKIE,
check_RABBITMQ_FEATURE_FLAGS_FILE,
check_RABBITMQ_KEEP_PID_FILE_ON_EXIT,
check_RABBITMQ_LOG,
@@ -167,8 +175,12 @@ check_default_values(_) ->
amqp_ipaddr => default,
amqp_tcp_port => default,
conf_env_file => default,
+ default_user => default,
+ default_pass => default,
+ default_vhost => default,
enabled_plugins => default,
enabled_plugins_file => default,
+ erlang_cookie => default,
erlang_dist_tcp_port => default,
feature_flags_file => default,
forced_feature_flags_on_init => RFFOrigin,
@@ -207,8 +219,12 @@ check_default_values(_) ->
data_dir => "/var/lib/rabbitmq",
dbg_mods => [],
dbg_output => stdout,
+ default_user => undefined,
+ default_pass => undefined,
+ default_vhost => undefined,
enabled_plugins => undefined,
enabled_plugins_file => "/etc/rabbitmq/enabled_plugins",
+ erlang_cookie => undefined,
erlang_dist_tcp_port => 25672,
feature_flags_file =>
"/var/lib/rabbitmq/mnesia/" ++ NodeS ++ "-feature_flags",
@@ -256,8 +272,12 @@ check_default_values(_) ->
data_dir => "%APPDATA%/RabbitMQ",
dbg_mods => [],
dbg_output => stdout,
+ default_user => undefined,
+ default_pass => undefined,
+ default_vhost => undefined,
enabled_plugins => undefined,
enabled_plugins_file => "%APPDATA%/RabbitMQ/enabled_plugins",
+ erlang_cookie => undefined,
erlang_dist_tcp_port => 25672,
feature_flags_file =>
"%APPDATA%/RabbitMQ/db/" ++ NodeS ++ "-feature_flags",
@@ -332,10 +352,11 @@ check_values_from_reachable_remote_node(Config) ->
{atom_to_list(?FUNCTION_NAME), "localhost"}),
NodeS = atom_to_list(Node),
true = os:putenv("RABBITMQ_NODENAME", NodeS),
- RabbitCommonEbinDir = filename:dirname(code:which(rabbit_env)),
Args = ["-noinput",
"-sname", atom_to_list(Node),
- "-pa", RabbitCommonEbinDir,
+ "-pa", filename:dirname(code:which(rabbit_env)),
+ "-pa", filename:dirname(code:where_is_file("rabbit_common.app")),
+ "-pa", filename:dirname(code:which(rabbit)),
"-pa", RabbitEbinDir,
"-mnesia", "dir",
rabbit_misc:format("~p", [MnesiaDir]),
@@ -379,8 +400,12 @@ check_values_from_reachable_remote_node(Config) ->
amqp_ipaddr => default,
amqp_tcp_port => default,
conf_env_file => default,
+ default_user => default,
+ default_pass => default,
+ default_vhost => default,
enabled_plugins => default,
enabled_plugins_file => remote_node,
+ erlang_cookie => default,
erlang_dist_tcp_port => default,
feature_flags_file => remote_node,
forced_feature_flags_on_init => RFFOrigin,
@@ -419,8 +444,12 @@ check_values_from_reachable_remote_node(Config) ->
data_dir => "/var/lib/rabbitmq",
dbg_mods => [],
dbg_output => stdout,
+ default_user => undefined,
+ default_pass => undefined,
+ default_vhost => undefined,
enabled_plugins => undefined,
enabled_plugins_file => EnabledPluginsFile,
+ erlang_cookie => undefined,
erlang_dist_tcp_port => 25672,
feature_flags_file => FeatureFlagsFile,
forced_feature_flags_on_init => RFFValue,
@@ -496,8 +525,12 @@ check_values_from_offline_remote_node(_) ->
amqp_ipaddr => default,
amqp_tcp_port => default,
conf_env_file => default,
+ default_user => default,
+ default_pass => default,
+ default_vhost => default,
enabled_plugins => default,
enabled_plugins_file => default,
+ erlang_cookie => default,
erlang_dist_tcp_port => default,
feature_flags_file => default,
forced_feature_flags_on_init => RFFOrigin,
@@ -536,8 +569,12 @@ check_values_from_offline_remote_node(_) ->
data_dir => "/var/lib/rabbitmq",
dbg_mods => [],
dbg_output => stdout,
+ default_user => undefined,
+ default_pass => undefined,
+ default_vhost => undefined,
enabled_plugins => undefined,
enabled_plugins_file => undefined,
+ erlang_cookie => undefined,
erlang_dist_tcp_port => 25672,
feature_flags_file => undefined,
forced_feature_flags_on_init => RFFValue,
@@ -734,6 +771,24 @@ check_RABBITMQ_CONFIG_FILES(_) ->
Value1, Value1,
Value2, Value2).
+check_RABBITMQ_DEFAULT_PASS(_) ->
+ Value1 = random_string(),
+ check_variable("RABBITMQ_DEFAULT_PASS",
+ default_pass,
+ Value1, list_to_binary(Value1)).
+
+check_RABBITMQ_DEFAULT_USER(_) ->
+ Value1 = random_string(),
+ check_variable("RABBITMQ_DEFAULT_USER",
+ default_user,
+ Value1, list_to_binary(Value1)).
+
+check_RABBITMQ_DEFAULT_VHOST(_) ->
+ Value1 = random_string(),
+ check_variable("RABBITMQ_DEFAULT_VHOST",
+ default_vhost,
+ Value1, list_to_binary(Value1)).
+
check_RABBITMQ_DIST_PORT(_) ->
Value1 = random_int(),
Value2 = random_int(),
@@ -766,6 +821,12 @@ check_RABBITMQ_ENABLED_PLUGINS_FILE(_) ->
Value1, Value1,
Value2, Value2).
+check_RABBITMQ_ERLANG_COOKIE(_) ->
+ Value1 = random_atom(),
+ check_variable("RABBITMQ_ERLANG_COOKIE",
+ erlang_cookie,
+ atom_to_list(Value1), Value1).
+
check_RABBITMQ_FEATURE_FLAGS_FILE(_) ->
Value1 = random_string(),
check_variable("RABBITMQ_FEATURE_FLAGS_FILE",
@@ -1075,24 +1136,57 @@ check_parse_conf_env_file_output(_) ->
)),
?assertEqual(
#{"UNQUOTED" => "a",
+ "UNICODE" => [43, 43, 32, 1550, 32],
"SINGLE_QUOTED" => "b",
"DOUBLE_QUOTED" => "c",
"SINGLE_DOLLAR" => "d"},
rabbit_env:parse_conf_env_file_output2(
- ["UNQUOTED=a",
+ %% a relatively rarely used Unicode character
+                ["++ ؎ ",
+ "UNQUOTED=a",
+                 "UNICODE='++ ؎ '",
"SINGLE_QUOTED='b'",
"DOUBLE_QUOTED=\"c\"",
"SINGLE_DOLLAR=$'d'"],
#{}
)),
?assertEqual(
+ #{"DOUBLE_QUOTED" => "\\' \" \\v",
+ "SINGLE_DOLLAR" => "' \" \\ \007 z z z z"},
+ rabbit_env:parse_conf_env_file_output2(
+ ["DOUBLE_QUOTED=\"\\' \\\" \\v\"",
+ "SINGLE_DOLLAR=$'\\' \\\" \\\\ \\a \\172 \\x7a \\u007A \\U0000007a'"
+ ],
+ #{}
+ )),
+ ?assertEqual(
#{"A" => "a",
"B" => "b",
- "MULTI_LINE" => "\n'foobar'"},
+ "SINGLE_QUOTED_MULTI_LINE" => "\n'foobar'",
+ "DOUBLE_QUOTED_MULTI_LINE" => "Line1\nLine2"},
rabbit_env:parse_conf_env_file_output2(
["A=a",
- "MULTI_LINE='",
+ "SINGLE_QUOTED_MULTI_LINE='",
"'\"'\"'foobar'\"'\"",
+ "DOUBLE_QUOTED_MULTI_LINE=\"Line1",
+ "Line\\",
+ "2\"",
"B=b"],
#{}
+ )),
+ ?assertEqual(
+ #{"shellHook" =>
+ "\n"
+ "function isShellInteractive {\n"
+ " # shell is interactive if $- contains 'i'\n"
+ " [[ $- == *i* ]]\n"
+ "}\n"},
+ rabbit_env:parse_conf_env_file_output2(
+ ["shellHook='",
+ "function isShellInteractive {",
+ " # shell is interactive if $- contains '\\''i'\\''",
+ " [[ $- == *i* ]]",
+ "}",
+ "'"],
+ #{}
)).
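
The rabbit_env_SUITE.erl additions above cover four more environment variables (RABBITMQ_DEFAULT_USER, RABBITMQ_DEFAULT_PASS, RABBITMQ_DEFAULT_VHOST, RABBITMQ_ERLANG_COOKIE) and extend the conf_env_file parsing tests with Unicode, escape sequences and multi-line quoting. The suite's check_variable/4 helper is defined outside this hunk; the following is a hedged sketch of the shape such a check takes, assuming rabbit_env:get_context/0 is the context builder being exercised.

    -module(env_check_sketch).
    -export([check_env_maps_to/4]).

    %% Set a variable, build the context, assert the resulting key, clean up.
    check_env_maps_to(Var, Key, Raw, Expected) ->
        true = os:putenv(Var, Raw),
        try
            Context = rabbit_env:get_context(),   % assumed entry point; see note above
            Expected = maps:get(Key, Context)
        after
            os:unsetenv(Var)
        end.

For instance, check_env_maps_to("RABBITMQ_DEFAULT_USER", default_user, "guest", <<"guest">>) mirrors check_RABBITMQ_DEFAULT_USER/1 above: a string taken from the environment is expected to surface as a binary under default_user, while RABBITMQ_ERLANG_COOKIE is expected as an atom under erlang_cookie.
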
diff --git a/deps/rabbit_common/test/supervisor2_SUITE.erl b/deps/rabbit_common/test/supervisor2_SUITE.erl
index 7b89363999..3ac62c151f 100644
--- a/deps/rabbit_common/test/supervisor2_SUITE.erl
+++ b/deps/rabbit_common/test/supervisor2_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(supervisor2_SUITE).
diff --git a/deps/rabbit_common/test/unit_SUITE.erl b/deps/rabbit_common/test/unit_SUITE.erl
index 925155211f..f00df8787b 100644
--- a/deps/rabbit_common/test/unit_SUITE.erl
+++ b/deps/rabbit_common/test/unit_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(unit_SUITE).
@@ -16,6 +16,10 @@
-compile(export_all).
+%% This cipher is listed as supported, but doesn't actually work.
+%% OTP bug: https://bugs.erlang.org/browse/ERL-1478
+-define(SKIPPED_CIPHERS, [aes_ige256]).
+
all() ->
[
{group, parallel_tests},
@@ -29,6 +33,8 @@ groups() ->
data_coercion_to_proplist,
data_coercion_to_list,
data_coercion_to_map,
+ data_coercion_atomize_keys_proplist,
+ data_coercion_atomize_keys_map,
pget,
encrypt_decrypt,
encrypt_decrypt_term,
@@ -38,7 +44,8 @@ groups() ->
frame_encoding_does_not_fail_with_empty_binary_payload,
amqp_table_conversion,
name_type,
- get_erl_path
+ get_erl_path,
+ date_time_parse_duration
]},
{parse_mem_limit, [parallel], [
parse_mem_limit_relative_exactly_max,
@@ -295,6 +302,16 @@ data_coercion_to_proplist(_Config) ->
?assertEqual([{a, 1}], rabbit_data_coercion:to_proplist([{a, 1}])),
?assertEqual([{a, 1}], rabbit_data_coercion:to_proplist(#{a => 1})).
+data_coercion_atomize_keys_map(_Config) ->
+ A = #{a => 1, b => 2, c => 3},
+ B = rabbit_data_coercion:atomize_keys(#{a => 1, "b" => 2, <<"c">> => 3}),
+ ?assertEqual(A, B).
+
+data_coercion_atomize_keys_proplist(_Config) ->
+ A = [{a, 1}, {b, 2}, {c, 3}],
+ B = rabbit_data_coercion:atomize_keys([{a, 1}, {"b", 2}, {<<"c">>, 3}]),
+ ?assertEqual(lists:usort(A), lists:usort(B)).
+
data_coercion_to_list(_Config) ->
?assertEqual([{a, 1}], rabbit_data_coercion:to_list([{a, 1}])),
?assertEqual([{a, 1}], rabbit_data_coercion:to_list(#{a => 1})).
@@ -318,7 +335,7 @@ pid_decompose_compose(_Config) ->
encrypt_decrypt(_Config) ->
%% Take all available block ciphers.
Hashes = rabbit_pbe:supported_hashes(),
- Ciphers = rabbit_pbe:supported_ciphers(),
+ Ciphers = rabbit_pbe:supported_ciphers() -- ?SKIPPED_CIPHERS,
%% For each cipher, try to encrypt and decrypt data sizes from 0 to 64 bytes
%% with a random passphrase.
_ = [begin
@@ -336,7 +353,7 @@ encrypt_decrypt(_Config) ->
encrypt_decrypt_term(_Config) ->
%% Take all available block ciphers.
Hashes = rabbit_pbe:supported_hashes(),
- Ciphers = rabbit_pbe:supported_ciphers(),
+ Ciphers = rabbit_pbe:supported_ciphers() -- ?SKIPPED_CIPHERS,
%% Different Erlang terms to try encrypting.
DataSet = [
10000,
@@ -444,3 +461,23 @@ get_erl_path(_) ->
?assertNotMatch(nomatch, string:find(Exe, "erl"))
end,
ok.
+
+date_time_parse_duration(_) ->
+ ?assertEqual(
+ {ok, [{sign, "+"}, {years, 6}, {months, 3}, {days, 1}, {hours, 1}, {minutes, 1}, {seconds, 1}]},
+ rabbit_date_time:parse_duration("+P6Y3M1DT1H1M1.1S")
+ ),
+ ?assertEqual(
+ {ok, [{sign, []}, {years, 0}, {months, 0}, {days, 0}, {hours, 0}, {minutes, 6}, {seconds, 0}]},
+ rabbit_date_time:parse_duration("PT6M")
+ ),
+ ?assertEqual(
+ {ok, [{sign, []}, {years, 0}, {months, 0}, {days, 0}, {hours, 0}, {minutes, 10}, {seconds, 30}]},
+ rabbit_date_time:parse_duration("PT10M30S")
+ ),
+ ?assertEqual(
+ {ok, [{sign, []}, {years, 0}, {months, 0}, {days, 5}, {hours, 8}, {minutes, 0}, {seconds, 0}]},
+ rabbit_date_time:parse_duration("P5DT8H")
+ ),
+ ?assertEqual(error, rabbit_date_time:parse_duration("foo")),
+    ok.
\ No newline at end of file
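
The unit_SUITE.erl changes above skip the aes_ige256 cipher (broken in OTP, per the linked bug), add ISO 8601 duration parsing assertions for rabbit_date_time:parse_duration/1, and add coverage for rabbit_data_coercion:atomize_keys/1, whose implementation is not part of this diff. Below is a hedged sketch of the behaviour those atomize_keys assertions imply; it is a simplified illustration using list_to_atom/1 and binary_to_atom/2, not the real module.

    -module(atomize_keys_sketch).
    -export([atomize_keys/1]).

    %% Normalise string and binary keys to atoms, for maps and proplists.
    atomize_keys(Map) when is_map(Map) ->
        maps:fold(fun(K, V, Acc) -> maps:put(to_atom(K), V, Acc) end, #{}, Map);
    atomize_keys(Proplist) when is_list(Proplist) ->
        [{to_atom(K), V} || {K, V} <- Proplist].

    to_atom(K) when is_atom(K)   -> K;
    to_atom(K) when is_binary(K) -> binary_to_atom(K, utf8);
    to_atom(K) when is_list(K)   -> list_to_atom(K).

With that shape, #{a => 1, "b" => 2, <<"c">> => 3} becomes #{a => 1, b => 2, c => 3}, which is what data_coercion_atomize_keys_map/1 asserts.
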
diff --git a/deps/rabbit_common/test/unit_priority_queue_SUITE.erl b/deps/rabbit_common/test/unit_priority_queue_SUITE.erl
index 8d58c72f10..f650fc84d7 100644
--- a/deps/rabbit_common/test/unit_priority_queue_SUITE.erl
+++ b/deps/rabbit_common/test/unit_priority_queue_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2018-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2018-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(unit_priority_queue_SUITE).
diff --git a/deps/rabbit_common/test/worker_pool_SUITE.erl b/deps/rabbit_common/test/worker_pool_SUITE.erl
index a50104f6c7..c0df03062e 100644
--- a/deps/rabbit_common/test/worker_pool_SUITE.erl
+++ b/deps/rabbit_common/test/worker_pool_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(worker_pool_SUITE).
diff --git a/deps/rabbitmq_amqp1_0/BUILD.bazel b/deps/rabbitmq_amqp1_0/BUILD.bazel
new file mode 100644
index 0000000000..bd260f0f6c
--- /dev/null
+++ b/deps/rabbitmq_amqp1_0/BUILD.bazel
@@ -0,0 +1,107 @@
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze")
+load(
+ "//:rabbitmq.bzl",
+ "RABBITMQ_DIALYZER_OPTS",
+ "assert_suites",
+ "broker_for_integration_suites",
+ "rabbitmq_integration_suite",
+ "rabbitmq_lib",
+ "rabbitmq_suite",
+)
+
+APP_ENV = """[
+ {default_user, "guest"},
+ {default_vhost, <<"/">>},
+ {protocol_strict_mode, false}
+ ]"""
+
+APP_NAME = "rabbitmq_amqp1_0"
+
+APP_DESCRIPTION = "AMQP 1.0 support for RabbitMQ"
+
+BUILD_DEPS = [
+ "//deps/rabbitmq_cli:rabbitmqctl",
+]
+
+DEPS = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+ "//deps/amqp_client:bazel_erlang_lib",
+ "//deps/amqp10_common:bazel_erlang_lib",
+]
+
+RUNTIME_DEPS = [
+ "//deps/rabbit:bazel_erlang_lib",
+]
+
+rabbitmq_lib(
+ app_description = APP_DESCRIPTION,
+ app_env = APP_ENV,
+ app_name = APP_NAME,
+ build_deps = BUILD_DEPS,
+ runtime_deps = RUNTIME_DEPS,
+ deps = DEPS,
+)
+
+xref(
+ size = "small",
+ tags = ["xref"],
+)
+
+dialyze(
+ size = "medium",
+ dialyzer_opts = RABBITMQ_DIALYZER_OPTS,
+ plt = "//:base_plt",
+ tags = ["dialyze"],
+)
+
+broker_for_integration_suites()
+
+PACKAGE = "deps/rabbitmq_amqp1_0"
+
+suites = [
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "amqp10_client_SUITE",
+ size = "medium",
+ runtime_deps = [
+ "//deps/amqp10_client:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "command_SUITE",
+ size = "medium",
+ deps = [
+ "//deps/amqp10_common:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "proxy_protocol_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "system_SUITE",
+ flaky = True,
+ tags = [
+ "dotnet",
+ ],
+ test_env = {
+ "TMPDIR": "$TEST_TMPDIR",
+ },
+ ),
+ rabbitmq_suite(
+ name = "unit_SUITE",
+ size = "small",
+ deps = [
+ "//deps/amqp10_common:bazel_erlang_lib",
+ ],
+ ),
+]
+
+assert_suites(
+ suites,
+ glob(["test/**/*_SUITE.erl"]),
+)
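
The new BUILD.bazel above wires the plugin into the Bazel build: the rabbitmq_lib rule declares the application together with its APP_ENV defaults, xref and dialyze add static checks, and the declared suites are asserted against test/**/*_SUITE.erl. Assuming the generated .app file carries APP_ENV as the application environment, those defaults surface at runtime roughly as in this sketch.

    -module(amqp1_0_env_sketch).
    -export([defaults/0]).

    %% Read the defaults declared in APP_ENV once the application is loaded.
    defaults() ->
        {ok, User}   = application:get_env(rabbitmq_amqp1_0, default_user),
        {ok, VHost}  = application:get_env(rabbitmq_amqp1_0, default_vhost),
        {ok, Strict} = application:get_env(rabbitmq_amqp1_0, protocol_strict_mode),
        {User, VHost, Strict}.

With the values above this returns {"guest", <<"/">>, false}.
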
diff --git a/deps/rabbitmq_amqp1_0/CONTRIBUTING.md b/deps/rabbitmq_amqp1_0/CONTRIBUTING.md
index 45bbcbe62e..339d097deb 100644
--- a/deps/rabbitmq_amqp1_0/CONTRIBUTING.md
+++ b/deps/rabbitmq_amqp1_0/CONTRIBUTING.md
@@ -13,7 +13,7 @@ The process is fairly standard:
* Create a branch with a descriptive name in the relevant repositories
* Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
* Submit pull requests with an explanation what has been changed and **why**
- * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Submit a filled out and signed [Contributor Agreement](https://cla.pivotal.io/) if needed (see below)
* Be patient. We will get to your pull request eventually
If what you are going to work on is a substantial change, please first ask the core team
@@ -28,7 +28,7 @@ See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
## Contributor Agreement
If you want to contribute a non-trivial change, please submit a signed copy of our
-[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+[Contributor Agreement](https://cla.pivotal.io/) around the time
you submit your pull request. This will make it much easier (in some cases, possible)
for the RabbitMQ team at Pivotal to merge your contribution.
diff --git a/deps/rabbitmq_amqp1_0/Makefile b/deps/rabbitmq_amqp1_0/Makefile
index 5d9eddbb24..e764ed1f4a 100644
--- a/deps/rabbitmq_amqp1_0/Makefile
+++ b/deps/rabbitmq_amqp1_0/Makefile
@@ -29,8 +29,8 @@ $(PROJECT).d:: $(EXTRA_SOURCES)
ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
ERLANG_MK_COMMIT = rabbitmq-tmp
-include rabbitmq-components.mk
-include erlang.mk
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
# --------------------------------------------------------------------
# Framing sources generation.
diff --git a/deps/rabbitmq_amqp1_0/erlang.mk b/deps/rabbitmq_amqp1_0/erlang.mk
deleted file mode 100644
index fce4be0b0a..0000000000
--- a/deps/rabbitmq_amqp1_0/erlang.mk
+++ /dev/null
@@ -1,7808 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
-ERLANG_MK_WITHOUT =
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
-
-# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
-
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
-
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of statc BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another a key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstact time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simplify writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating Github-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = erlang library for JSON Web Token
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple non intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is a pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's REST interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = redis erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += esh_mk
-pkg_esh_mk_name = esh_mk
-pkg_esh_mk_description = esh template engine plugin for erlang.mk
-pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
-pkg_esh_mk_fetch = git
-pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
-pkg_esh_mk_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = TOML language erlang parser
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = http://fixprotocol.org/ implementation.
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - an Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = The guards and for Erlang parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
-pkg_gun_homepage = http://ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang irc client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding and decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library for efficiently decoding and encoding JSON, written in C.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = erlang driver for rethinkdb
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = library to handle mimetypes
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang OAuth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transformer for using new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between record and proplist
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = automatic Erlang node discovery via redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = pipelined erlang redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang Rest Client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy as a NIF for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an ID generator for message services.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elastic Search
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX style broken HTML parser in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = transit format for erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU based collation Erlang module
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtension for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = a simple erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler svn repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired by rebar xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based yaml loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = Zab protocol implemented in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
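-
-# Editor's usage sketch (annotation, not part of the original file): the
-# search target above is typically invoked with a query string, e.g.
-#
-#   make search q=websocket
-#
-# which prints every package whose name or description contains "websocket";
-# without q= it prints the whole index.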
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
-
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
-
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# They both use the core_dep_plugin macro.
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
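-
-# Editor's illustrative sketch (hypothetical dependency names): a project
-# Makefile can load early plugins either from a dependency's default
-# early-plugins.mk or from an explicit path (relative to DEPS_DIR):
-#
-#   DEP_EARLY_PLUGINS = my_plugin_dep
-#   DEP_EARLY_PLUGINS += other_dep/mk/early-plugins.mk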
-
-# Query functions.
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
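-
-# Editor's sketch (the dependency name is just an example): these query
-# functions are meant to be expanded with $(call ...), for instance:
-#
-#   $(info $(call query_repo,cowboy) $(call query_version,cowboy))
-#
-# which resolves the repository and commit/version for "cowboy", whether it
-# comes from a dep_* line or from the packages index above.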
-
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
-
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly we don't want to include it here,
-# otherwise it'll be treated both as an app and as a top-level project.
-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
-
-export NO_AUTOPATCH
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
-
-# Optimization: don't recompile deps unless truly necessary.
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create an ebin directory for all apps to make sure Erlang recognizes them
-# as proper OTP applications when using -include_lib. This is a temporary
-# fix; a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
-
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies after they have been compiled
-# once. Developers working on the top-level project and on some of its
-# dependencies at the same time may want to change this behavior.
-# There are two ways to do so (see the usage sketch below):
-# 1. Set `FULL=1` so that all dependencies are visited and
-#    recursively recompiled if necessary.
-# 2. Set `FORCE_REBUILD=` to the specific list of dependencies that
-#    should be recompiled (instead of the whole set).
-
-FORCE_REBUILD ?=
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
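-
-# Editor's usage sketch: both knobs are usually passed on the command line
-# (dependency names below are illustrative):
-#
-#   make FULL=1                        # visit and rebuild all deps if needed
-#   make FORCE_REBUILD="cowlib ranch"  # only force these two deps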
-
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
-
-# Deps related targets.
-
-# @todo rename GNUmakefile and makefile into Makefile first, if they exist
-# While the Makefile could also be named GNUmakefile or makefile,
-# in practice only Makefile has been needed so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
-# if given. Do it for all 3 possible Makefile file names.
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
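-
-# Editor's note: setting NO_AUTOPATCH_ERLANG_MK to any non-empty value in the
-# project Makefile, e.g.
-#
-#   NO_AUTOPATCH_ERLANG_MK = 1
-#
-# selects the no-op variant above and leaves dependencies' own "include
-# erlang.mk" lines untouched.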
-
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
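-
-# Editor's sketch (hypothetical names): a git dependency is declared in the
-# project Makefile as "dep_NAME = git REPO COMMIT", e.g.
-#
-#   DEPS += my_dep
-#   dep_my_dep = git https://github.com/example/my_dep 1.0.0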
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
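-
-# Editor's sketch (hypothetical names): git-subfolder takes a fourth word, the
-# subdirectory that gets linked into DEPS_DIR, e.g.
-#
-#   DEPS += my_app
-#   dep_my_app = git-subfolder https://github.com/example/monorepo main apps/my_app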
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-# Hex dependencies only carry a package version. No need to look them up in
-# the Erlang.mk packages index.
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
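-
-# Editor's sketch (hypothetical package and version): a Hex dependency is
-# declared as "dep_NAME = hex VERSION [HEX_PACKAGE]", e.g.
-#
-#   DEPS += cowlib
-#   dep_cowlib = hex 2.11.0
-#
-# The optional third word is only needed when the Hex package name differs
-# from the Erlang application name.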
-
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility purposes with older Erlang.mk configuration.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
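-
-# Editor's sketch of the deprecated format handled above (hypothetical names):
-#
-#   dep_my_dep = https://github.com/example/my_dep 1.0.0
-#
-# i.e. a repository URL followed by an optional commit, with no fetch method.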
-
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
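-
-# Editor's sketch: these variables are normally overridden in the project
-# Makefile before erlang.mk is included (module names are illustrative):
-#
-#   ERLC_OPTS = +debug_info +warn_export_vars
-#   COMPILE_FIRST = my_behaviour
-#   ERLC_EXCLUDE = my_generated_module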
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
-
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- Output0 = [
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ],
- Output = case "é" of
- [233] -> unicode:characters_to_binary(Output0);
- _ -> Output0
- end,
- ok = file:write_file("$(1)", Output),
- halt()
-endef
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
- echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
-
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
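
The doc-deps, rel-deps and test-deps targets above fetch and build their respective dependency lists, and test deps are skipped on later runs once ebin/dep_built exists unless FULL is set. A hedged example of the three variables in a project Makefile; proper and asciideck are dependencies the plugins further below look for, while recon is purely hypothetical:

    TEST_DEPS = proper
    DOC_DEPS = asciideck
    REL_DEPS = recon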
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
- @:
-
-test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
-test_erlc_verbose_2 = set -x;
-test_erlc_verbose = $(test_erlc_verbose_$(V))
-
-define compile_test_erl
- $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(1)
-endef
-
-ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
-$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
- $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want to fail due to
-# warnings when used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
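
The compat_rebar_config template above translates the erlang.mk DEPS list into rebar {deps, ...} terms, emitting {git, Repo, Commit} tuples for git-fetched dependencies and filtering -Werror out of erl_opts so consumers do not fail on warnings. Assuming the usual dep_NAME = git URL REF convention (the tag below is illustrative), a declaration such as this would be reflected in the generated file:

    DEPS = cowlib
    dep_cowlib = git https://github.com/ninenines/cowlib 2.12.1
    # $ make rebar.config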
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
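
The AsciiDoc plugin only activates when asciideck appears in DEPS or DOC_DEPS; it then builds doc/src/guide/book.asciidoc into a PDF and chunked HTML, and turns doc/src/manual/*.asciidoc into gzipped man pages sorted into doc/manN/. A small configuration sketch (the install path override is illustrative):

    DOC_DEPS = asciideck
    # Man sections to generate (these are the defaults) and where install-asciidoc copies them:
    MAN_SECTIONS = 3 7
    MAN_INSTALL_PATH = $(HOME)/.local/share/man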
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
- " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
- " list-templates List available templates"
-
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
-
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
-
-list-templates:
- $(verbose) @echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
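
In practice the bootstrap and template targets above are driven entirely from the command line; the module and application names below are placeholders:

    # $ make bootstrap                     # OTP application skeleton (Makefile, *_app.erl, *_sup.erl)
    # $ make bootstrap-lib                 # library skeleton without an application callback module
    # $ make bootstrap-rel                 # relx.config plus config/sys.config and config/vm.args
    # $ make new t=gen_server n=my_server  # render tpl_gen_server into src/my_server.erl
    # $ make new-app in=my_component       # new application under $(APPS_DIR)
    # $ make list-templates                # list every tpl_* template defined above
    # Optional: generate files indented with 4 spaces instead of a tab.
    SP = 4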
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code. The "gcc" MSYS2 package also doesn't.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
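
The C plugin either delegates to c_src/Makefile when one exists or compiles every *.c/*.C/*.cc/*.cpp it finds, with flags derived from the platform and the erts/erl_interface paths recorded in $(C_SRC_ENV). A hedged configuration sketch; note that the conditional above only distinguishes the value "shared" from anything else, so "executable" is conventional rather than enforced:

    # $ make new-nif n=my_nif   # creates c_src/my_nif.c and src/my_nif.erl (name illustrative)
    # Anything other than "shared" selects the executable extension instead of .so/.dll:
    C_SRC_TYPE = executable
    # Illustrative output basename; the platform-specific extension is appended automatically.
    C_SRC_OUTPUT = $(CURDIR)/priv/my_port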
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
- "The CI_OTP variable must be defined with the Erlang versions" \
- "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
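
As the help text above states, CI_OTP drives one ci-<version> target per Erlang release, each installed through kerl and then exercised with "make ci-setup tests". The version names below are illustrative kerl release names:

    CI_OTP = OTP-23.3.4 OTP-24.2
    # $ make ci-prepare   # install the listed releases under $(KERL_INSTALL_DIR)
    # $ make ci           # run "make tests" once per release, labelling each CT run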
-
-# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifdef CONCUERROR_TESTS
-
-.PHONY: concuerror distclean-concuerror
-
-# Configuration
-
-CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
-CONCUERROR_OPTS ?=
-
-# Core targets.
-
-check:: concuerror
-
-ifndef KEEP_LOGS
-distclean:: distclean-concuerror
-endif
-
-# Plugin-specific targets.
-
-$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
- $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
- $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
-
-$(CONCUERROR_LOGS_DIR):
- $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
-
-define concuerror_html_report
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="utf-8">
-<title>Concuerror HTML report</title>
-</head>
-<body>
-<h1>Concuerror HTML report</h1>
-<p>Generated on $(concuerror_date)</p>
-<ul>
-$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
-</ul>
-</body>
-</html>
-endef
-
-concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
- $(eval concuerror_date := $(shell date))
- $(eval concuerror_targets := $^)
- $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
-
-define concuerror_target
-.PHONY: concuerror-$1-$2
-
-concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
- $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
- --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
- -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
- $$(CONCUERROR_OPTS) -m $1 -t $2
-endef
-
-$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
-
-distclean-concuerror:
- $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
-
-endif
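
Each CONCUERROR_TESTS entry is a module:function pair that becomes its own concuerror-<module>-<function> target, with the textual report collected under $(CONCUERROR_LOGS_DIR) and indexed by the generated HTML page. The module and function names here are hypothetical:

    CONCUERROR_TESTS = my_mod:interleaving_test other_mod:shutdown_test
    # $ make concuerror                           # run both tests and build concuerror.html
    # $ make concuerror-my_mod-interleaving_test  # run a single test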
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ct apps-ct distclean-ct
-
-# Configuration.
-
-CT_OPTS ?=
-
-ifneq ($(wildcard $(TEST_DIR)),)
-ifndef CT_SUITES
-CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
-endif
-endif
-CT_SUITES ?=
-CT_LOGS_DIR ?= $(CURDIR)/logs
-
-# Core targets.
-
-tests:: ct
-
-ifndef KEEP_LOGS
-distclean:: distclean-ct
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Common_test targets:" \
- " ct Run all the common_test suites for this project" \
- "" \
- "All your common_test suites have their associated targets." \
- "A suite named http_SUITE can be ran using the ct-http target."
-
-# Plugin-specific targets.
-
-CT_RUN = ct_run \
- -no_auto_compile \
- -noinput \
- -pa $(CURDIR)/ebin $(TEST_DIR) \
- -dir $(TEST_DIR) \
- -logdir $(CT_LOGS_DIR)
-
-ifeq ($(CT_SUITES),)
-ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
-else
-# We do not run tests if we are in an apps/* with no test directory.
-ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
-ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
-endif
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-define ct_app_target
-apps-ct-$1: test-build
- $$(MAKE) -C $1 ct IS_APP=1
-endef
-
-$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
-
-apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
-endif
-
-ifdef t
-ifeq (,$(findstring :,$t))
-CT_EXTRA = -group $t
-else
-t_words = $(subst :, ,$t)
-CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
-endif
-else
-ifdef c
-CT_EXTRA = -case $c
-else
-CT_EXTRA =
-endif
-endif
-
-define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
-endef
-
-$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
-
-distclean-ct:
- $(gen_verbose) rm -rf $(CT_LOGS_DIR)
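
Besides the blanket ct target, every *_SUITE.erl found in $(TEST_DIR) gets its own ct-<suite> target, and the t= and c= variables narrow a run to a group and/or case. The group, case and label names below are placeholders:

    # Extra arguments appended to every ct_run invocation:
    CT_OPTS = -label nightly
    # $ make ct                          # every suite found in $(TEST_DIR)
    # $ make ct-http                     # http_SUITE only; one such target exists per suite
    # $ make ct-http t=my_group          # limit to a group
    # $ make ct-http t=my_group:my_case  # limit to one case within a group
    # $ make ct-http c=my_case           # limit to one case without naming a group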
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: plt distclean-plt dialyze
-
-# Configuration.
-
-DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
-export DIALYZER_PLT
-
-PLT_APPS ?=
-DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-DIALYZER_PLT_OPTS ?=
-
-# Core targets.
-
-check:: dialyze
-
-distclean:: distclean-plt
-
-help::
- $(verbose) printf "%s\n" "" \
- "Dialyzer targets:" \
- " plt Build a PLT file for this project" \
- " dialyze Analyze the project using Dialyzer"
-
-# Plugin-specific targets.
-
-define filter_opts.erl
- Opts = init:get_plain_arguments(),
- {Filtered, _} = lists:foldl(fun
- (O, {Os, true}) -> {[O|Os], false};
- (O = "-D", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-I", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-pa", {Os, _}) -> {[O|Os], true};
- (_, Acc) -> Acc
- end, {[], false}, Opts),
- io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
- halt().
-endef
-
-# DIALYZER_PLT is a variable understood directly by Dialyzer.
-#
-# We append the path to erts at the end of the PLT. This works
-# because the PLT file is in the external term format and the
-# function binary_to_term/1 ignores any trailing data.
-$(DIALYZER_PLT): deps app
- $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
- while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
- $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
- erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
- $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
-
-plt: $(DIALYZER_PLT)
-
-distclean-plt:
- $(gen_verbose) rm -f $(DIALYZER_PLT)
-
-ifneq ($(wildcard $(DIALYZER_PLT)),)
-dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
- $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
- grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
- rm $(DIALYZER_PLT); \
- $(MAKE) plt; \
- fi
-else
-dialyze: $(DIALYZER_PLT)
-endif
- $(verbose) dialyzer --no_native `$(ERL) \
- -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
- -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
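
The PLT built above always contains erts, kernel and stdlib plus whatever PLT_APPS, OTP_DEPS, LOCAL_DEPS and the fetched deps add, and the dialyze target rebuilds it automatically when the erts path recorded at its end no longer matches the running Erlang. A typical override, with illustrative application names:

    PLT_APPS = crypto public_key ssl inets
    DIALYZER_OPTS = -Werror_handling -Wunmatched_returns -Wunderspecs
    # $ make plt       # build .$(PROJECT).plt (kept until distclean-plt)
    # $ make dialyze   # analyse the project, rebuilding the PLT first if erts moved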
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
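
Templates found under $(DTL_PATH) are compiled straight into ebin/ as modules named <prefix><name><suffix>, and DTL_FULL_PATH switches the name from the basename to the full relative path with slashes turned into underscores. Under the illustrative settings below (the suffix shown is the default), templates/users/list.dtl would become the module users_list_dtl:

    DTL_PATH = templates/
    DTL_SUFFIX = _dtl
    DTL_FULL_PATH = 1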
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
- " escript Build an executable escript archive" \
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
- $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
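
escript-zip packs ebin/ from the project and every fetched dependency into a zip archive, and the escript target prepends the shebang, comment and emulator-argument header so the result runs directly. One override a project might set, with an illustrative name:

    # Name of both the generated escript and the module whose main/1 it calls
    # (the default emulator args are "-escript main $(ESCRIPT_NAME)").
    ESCRIPT_NAME = my_tool
    # $ make escript && ./my_tool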
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: eunit apps-eunit
-
-# Configuration
-
-EUNIT_OPTS ?=
-EUNIT_ERL_OPTS ?=
-
-# Core targets.
-
-tests:: eunit
-
-help::
- $(verbose) printf "%s\n" "" \
- "EUnit targets:" \
- " eunit Run all the EUnit tests for this project"
-
-# Plugin-specific targets.
-
-define eunit.erl
- $(call cover.erl)
- CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
- ok -> ok;
- error -> halt(2)
- end,
- CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
- halt()
-endef
-
-EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
-else
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
-endif
-else
-EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
-EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
-
-EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
- $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
-
-eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
-ifneq ($(wildcard src/ $(TEST_DIR)),)
- $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-apps-eunit: test-build
- $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
- [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
- exit $$eunit_retcode
-endif
-endif
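
The eunit target either runs everything it can find (all modules in ebin/ plus the $(TEST_DIR) modules that are not already covered as <module>_tests companions) or, when t= is given, a single module or a single zero-arity test function. Illustrative invocations, with hypothetical module names:

    EUNIT_OPTS = verbose
    # $ make eunit                          # every module, coverdata exported when COVER=1
    # $ make eunit t=my_module              # eunit:test(my_module)
    # $ make eunit t=my_module:some_test_   # runs fun my_module:some_test_/0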
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper)
-.PHONY: proper
-
-# Targets.
-
-tests:: proper
-
-define proper_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- Module = fun(M) ->
- [true] =:= lists:usort([
- case atom_to_list(F) of
- "prop_" ++ _ ->
- io:format("Testing ~p:~p/0~n", [M, F]),
- proper:quickcheck(M:F(), nocolors);
- _ ->
- true
- end
- || {F, 0} <- M:module_info(exports)])
- end,
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]);
- module -> Module($(2));
- function -> proper:quickcheck($(2), nocolors)
- end,
- CoverExport("$(COVER_DATA_DIR)/proper.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-proper: test-build cover-data-dir
- $(verbose) $(call erlang,$(call proper_check.erl,module,$(t)))
-else
-proper: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)()))
-endif
-else
-proper: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES)))
-endif
-endif
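
When proper is among the dependencies, the plugin scans every compiled module for zero-arity prop_* exports and quickchecks each one; t= narrows this to one module or one property. A sketch with hypothetical names:

    TEST_DEPS += proper
    # $ make proper                                 # every exported prop_* property
    # $ make proper t=prop_my_codec                 # one module
    # $ make proper t=prop_my_codec:prop_roundtrip  # one property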
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Verbosity.
-
-proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
-proto_verbose = $(proto_verbose_$(V))
-
-# Core targets.
-
-ifneq ($(wildcard src/),)
-ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
-PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
-ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
-
-ifeq ($(PROTO_FILES),)
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
- $(verbose) :
-else
-# Rebuild proto files when the Makefile changes.
-# We exclude $(PROJECT).d to avoid a circular dependency.
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(PROTO_FILES); \
- fi
- $(verbose) touch $@
-
-$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
-endif
-
-ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
-define compile_proto.erl
- [begin
- protobuffs_compile:generate_source(F, [
- {output_include_dir, "./include"},
- {output_src_dir, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-else
-define compile_proto.erl
- [begin
- gpb_compile:file(F, [
- {include_as_lib, true},
- {module_name_suffix, "_pb"},
- {o_hrl, "./include"},
- {o_erl, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-endif
-
-ifneq ($(PROTO_FILES),)
-$(PROJECT).d:: $(PROTO_FILES)
- $(verbose) mkdir -p ebin/ include/
- $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
-endif
-endif
-endif
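
Which .proto compiler runs depends purely on the dependency list: gpb anywhere in BUILD_DEPS or DEPS selects gpb_compile, otherwise protobuffs is used, and in both cases the generated _pb modules and headers land in src/ and include/. Minimal opt-in:

    # Compile the project's .proto files with gpb instead of protobuffs:
    BUILD_DEPS += gpb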
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
-
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
-endif
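
The relx plugin downloads a pinned relx executable, builds the release described by relx.config (plus a tarball unless RELX_TAR is disabled), and the run target parses that config to find the release name and whether an extended start script is used. Typical knobs and invocations; the behaviour of rel/run follows the rules above, the values are illustrative:

    # Build the release but skip the tarball step:
    RELX_TAR = 0
    # $ make rel           # assemble the release under $(RELX_OUTPUT_DIR), then run relx-post-rel hooks
    # $ make run           # build everything, then exec _rel/<name>/bin/<name> (console with an extended start script)
    # $ make rel RELOAD=1  # also ping the running node and hot-load changed modules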
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
- " shell Run an erlang shell with SHELL_OPTS or reasonable default"
-
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
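
SHELL_DEPS are built once (cached via ebin/dep_built like test deps) and the shell target then simply starts erl with ebin/ and the test directory on the code path plus whatever SHELL_OPTS adds. The dependency and node options below are hypothetical:

    SHELL_DEPS = recon
    SHELL_OPTS = -sname $(PROJECT)_shell -setcookie dev
    # $ make shell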
-
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
- "ReST sources and the 'conf.py' file are expected in the directory pointed to by" \
- "SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists the formats to build (only the" \
- "'html' format is generated by default); the output directory can be specified by" \
- 'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
- "Additional Sphinx options can be set in SPHINX_OPTS."
-
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
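
Each entry in SPHINX_FORMATS is passed to sphinx-build as a builder, writing into a directory named after the format unless a matching sphinx_<format>_output variable overrides it, exactly as the help text above describes. An illustrative setup:

    SPHINX_FORMATS = html man
    # Redirect each builder's output (defaults to a directory named after the format):
    sphinx_html_output = doc/output/html
    sphinx_man_output = doc/output/man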
-
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
-.PHONY: triq
-
-# Targets.
-
-tests:: triq
-
-define triq_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
- module -> triq:check($(2));
- function -> triq:check($(2))
- end,
- CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-triq: test-build cover-data-dir
- $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
-else
-triq: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
-endif
-else
-triq: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
- ' xref Run Xrefr using $$XREF_CONFIG as config file if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
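
The xref target fetches the xrefr escript once and runs it, passing -c only when XREF_CONFIG is set; the path below is an assumed local file name:

    XREF_CONFIG = $(CURDIR)/xref.config
    # $ make xref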
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-COVER_REPORT_DIR ?= cover
-COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
-
-ifdef COVER
-COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
-COVER_DEPS ?=
-endif
-
-# Code coverage for Common Test.
-
-ifdef COVER
-ifdef CT_RUN
-ifneq ($(wildcard $(TEST_DIR)),)
-test-build:: $(TEST_DIR)/ct.cover.spec
-
-$(TEST_DIR)/ct.cover.spec: cover-data-dir
- $(gen_verbose) printf "%s\n" \
- "{incl_app, '$(PROJECT)', details}." \
- "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
- $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
- $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
-
-CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
-endif
-endif
-endif
-
-# Code coverage for other tools.
-
-ifdef COVER
-define cover.erl
- CoverSetup = fun() ->
- Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
- $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
- $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
- end
- end || Dir <- Dirs]
- end,
- CoverExport = fun(Filename) -> cover:export(Filename) end,
-endef
-else
-define cover.erl
- CoverSetup = fun() -> ok end,
- CoverExport = fun(_) -> ok end,
-endef
-endif
-
-# Core targets
-
-ifdef COVER
-ifneq ($(COVER_REPORT_DIR),)
-tests::
- $(verbose) $(MAKE) --no-print-directory cover-report
-endif
-
-cover-data-dir: | $(COVER_DATA_DIR)
-
-$(COVER_DATA_DIR):
- $(verbose) mkdir -p $(COVER_DATA_DIR)
-else
-cover-data-dir:
-endif
-
-clean:: coverdata-clean
-
-ifneq ($(COVER_REPORT_DIR),)
-distclean:: cover-report-clean
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Cover targets:" \
- " cover-report Generate a HTML coverage report from previously collected" \
- " cover data." \
- " all.coverdata Merge all coverdata files into all.coverdata." \
- "" \
- "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
- "target tests additionally generates a HTML coverage report from the combined" \
- "coverdata files from each of these testing tools. HTML reports can be disabled" \
- "by setting COVER_REPORT_DIR to empty."
-
-# Plugin specific targets
-
-COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
-
-.PHONY: coverdata-clean
-coverdata-clean:
- $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
-
-# Merge all coverdata files into one.
-define cover_export.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
-endef
-
-all.coverdata: $(COVERDATA) cover-data-dir
- $(gen_verbose) $(call erlang,$(cover_export.erl))
-
-# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
-# empty if you want the coverdata files but not the HTML report.
-ifneq ($(COVER_REPORT_DIR),)
-
-.PHONY: cover-report-clean cover-report
-
-cover-report-clean:
- $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
-ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
- $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
-endif
-
-ifeq ($(COVERDATA),)
-cover-report:
-else
-
-# Modules which include eunit.hrl always contain one line without coverage
-# because eunit defines test/0 which is never called. We compensate for this.
-EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
- grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
- | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
-
-define cover_report.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- Ms = cover:imported_modules(),
- [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
- ++ ".COVER.html", [html]) || M <- Ms],
- Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
- EunitHrlMods = [$(EUNIT_HRL_MODS)],
- Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
- true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
- TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
- TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
- Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
- TotalPerc = Perc(TotalY, TotalN),
- {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
- io:format(F, "<!DOCTYPE html><html>~n"
- "<head><meta charset=\"UTF-8\">~n"
- "<title>Coverage report</title></head>~n"
- "<body>~n", []),
- io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
- io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
- [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
- "<td>~p%</td></tr>~n",
- [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
- How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
- Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
- io:format(F, "</table>~n"
- "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
- "</body></html>", [How, Date]),
- halt().
-endef
-
-cover-report:
- $(verbose) mkdir -p $(COVER_REPORT_DIR)
- $(gen_verbose) $(call erlang,$(cover_report.erl))
-
-endif
-endif # ifneq ($(COVER_REPORT_DIR),)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
-
-endif
-endif
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
-
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
-
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are also included no
-# matter the type of requested dependencies.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
-
-# Allow using fetch-deps together with $(DEP_TYPES) to fetch multiple
-# types of dependencies with a single target.
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
-
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
diff --git a/deps/rabbitmq_amqp1_0/rabbitmq-components.mk b/deps/rabbitmq_amqp1_0/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/rabbitmq_amqp1_0/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Define the default goal as `all` because this file defines some targets
-# before the inclusion of erlang.mk, which would otherwise lead to the
-# wrong target becoming the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
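The same fallback order, restated as a rough Erlang sketch for readability (illustrative module name; the environment lookup, the simplified parsing of git-revisions.txt and the reduced prefix stripping only approximate what the make fragment above does):

    -module(project_version_sketch).
    -export([version/0]).

    version() ->
        case os:getenv("RABBITMQ_VERSION") of
            Vsn when is_list(Vsn), Vsn =/= "" ->
                Vsn;                             %% 1. version exported by the release
            _ ->
                case file:read_file("git-revisions.txt") of
                    {ok, Bin} ->                 %% 2. version recorded in git-revisions.txt
                        [First | _] = string:split(Bin, "\n"),
                        %% simplified: take the last token of the first line
                        lists:last(string:lexemes(binary_to_list(First), " \t"));
                    {error, _} ->                %% 3. git describe, else 4. 0.0.0
                        Out = os:cmd("git describe --dirty --abbrev=7 --tags"
                                     " --always --first-parent 2>/dev/null"
                                     " || echo v0.0.0"),
                        string:trim(string:trim(Out), leading, "v")
                end
        end.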
-
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to checkout branches which match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch or fall back to `stable` or `master`, whichever was the
-# base of the topic branch.
-
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
-
-# Third-party dependencies version pinning.
-#
-# We do that in this file, which is copied in all projects, to ensure
-# all projects use the same versions. It avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# Erlang.mk does not rebuild dependencies by default once they have been
-# compiled, except for those listed in the `$(FORCE_REBUILD)` variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this makes it
-# easier to work on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project
-# repository URL, if it's a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name are replaced by the
-# target's properties:
-# eg. rabbitmq-common is replaced by rabbitmq-codegen
-# eg. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fall back to the
-# RabbitMQ upstream on GitHub.
-
-# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following pattern:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
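A rough Erlang restatement of that substitution (illustrative names; the make macro relies on patsubst suffix matching, which this sketch approximates with plain string operations):

    -module(repo_url_subst_sketch).
    -export([subst_repo_name/3]).

    %% Rewrite ".../Old.git", a trailing ".../Old" or a mid-path ".../Old/..."
    %% into the New repository name, e.g.
    %%   subst_repo_name("rabbitmq-common", "rabbitmq-codegen",
    %%                   "https://github.com/rabbitmq/rabbitmq-common.git")
    %%   %% => "https://github.com/rabbitmq/rabbitmq-codegen.git"
    subst_repo_name(Old, New, Url) ->
        Rules = [{"/" ++ Old ++ ".git", "/" ++ New ++ ".git", suffix},
                 {"/" ++ Old,           "/" ++ New,           suffix},
                 {"/" ++ Old ++ "/",    "/" ++ New ++ "/",    anywhere}],
        lists:foldl(fun apply_rule/2, Url, Rules).

    apply_rule({From, To, suffix}, Url) ->
        case lists:suffix(From, Url) of
            true  -> lists:sublist(Url, length(Url) - length(From)) ++ To;
            false -> Url
        end;
    apply_rule({From, To, anywhere}, Url) ->
        lists:flatten(string:replace(Url, From, To, all)).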
-
-# Macro to replace both the project's name (eg. "rabbit_common") and
-# repository name (eg. "rabbitmq-common") by the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespace in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
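The shell in dep_fetch_git_rmq is dense, so here is a rough Erlang sketch of its two fallback chains (illustrative names and a simplified os:cmd wrapper, not the actual fetch mechanism; the push-URL handling is omitted): clone from the first URL that answers, then check out the first candidate ref that exists.

    -module(dep_fetch_sketch).
    -export([fetch/3]).

    %% Urls are tried in order (origin-derived first, GitHub upstream second);
    %% Refs are tried in order (current ref, base ref, master).
    fetch(Dir, Urls, Refs) ->
        case clone_first(Dir, Urls) of
            {ok, Url} ->
                case checkout_first(Dir, Refs) of
                    {ok, Ref} -> {ok, Url, Ref};
                    Error     -> Error
                end;
            error ->
                {error, no_reachable_url}
        end.

    clone_first(_Dir, []) -> error;
    clone_first(Dir, [Url | Rest]) ->
        case run(["git clone -q -n -- '", Url, "' '", Dir, "'"]) of
            0 -> {ok, Url};
            _ -> clone_first(Dir, Rest)
        end.

    checkout_first(_Dir, []) -> {error, no_valid_ref};
    checkout_first(Dir, [Ref | Rest]) ->
        case run(["git -C '", Dir, "' checkout -q '", Ref, "'"]) of
            0 -> {ok, Ref};
            _ -> checkout_first(Dir, Rest)
        end.

    %% os:cmd/1 does not return the exit status, so append it to the output.
    run(Cmd) ->
        Out = os:cmd(lists:flatten([Cmd, " >/dev/null 2>&1; echo $?"])),
        list_to_integer(string:trim(Out)).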
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level project's one.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is a coincidence that we found a
-# `rabbitmq-components.mk` in upper directories.
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
diff --git a/deps/rabbitmq_amqp1_0/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ListAmqp10ConnectionsCommand.erl b/deps/rabbitmq_amqp1_0/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ListAmqp10ConnectionsCommand.erl
index 77c9c46d5f..24a60b2ce1 100644
--- a/deps/rabbitmq_amqp1_0/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ListAmqp10ConnectionsCommand.erl
+++ b/deps/rabbitmq_amqp1_0/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ListAmqp10ConnectionsCommand.erl
@@ -2,13 +2,22 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
-module('Elixir.RabbitMQ.CLI.Ctl.Commands.ListAmqp10ConnectionsCommand').
-behaviour('Elixir.RabbitMQ.CLI.CommandBehaviour').
-include("rabbit_amqp1_0.hrl").
+-ignore_xref([
+ {'Elixir.RabbitMQ.CLI.DefaultOutput', output, 1},
+ {'Elixir.RabbitMQ.CLI.Core.Helpers', nodes_in_cluster, 1},
+ {'Elixir.RabbitMQ.CLI.Ctl.InfoKeys', prepare_info_keys, 1},
+ {'Elixir.RabbitMQ.CLI.Ctl.RpcStream', receive_list_items, 7},
+ {'Elixir.RabbitMQ.CLI.Ctl.InfoKeys', validate_info_keys, 2},
+ {'Elixir.Enum', join, 2}
+]).
+
-export([formatter/0,
scopes/0,
switches/0,
diff --git a/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0.erl b/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0.erl
index c130e1a33f..c02c28d827 100644
--- a/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0.erl
+++ b/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_amqp1_0).
diff --git a/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_channel.erl b/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_channel.erl
index de896b44ab..e22461695d 100644
--- a/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_channel.erl
+++ b/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_channel.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_amqp1_0_channel).
diff --git a/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_incoming_link.erl b/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_incoming_link.erl
index 6ae8615589..a38557e3e1 100644
--- a/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_incoming_link.erl
+++ b/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_incoming_link.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_amqp1_0_incoming_link).
@@ -132,7 +132,7 @@ transfer(#'v1_0.transfer'{delivery_id = DeliveryId0,
send_settle_mode = SSM,
recv_settle_mode = RSM} = Link, BCh) ->
MsgBin = iolist_to_binary(lists:reverse([MsgPart | MsgAcc])),
- ?DEBUG("Inbound content:~n ~p~n",
+ ?DEBUG("Inbound content:~n ~p",
[[amqp10_framing:pprint(Section) ||
Section <- amqp10_framing:decode_bin(MsgBin)]]),
{MsgRKey, Msg} = rabbit_amqp1_0_message:assemble(MsgBin),
diff --git a/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_link_util.erl b/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_link_util.erl
index 2ddfec4dbf..818c95dd76 100644
--- a/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_link_util.erl
+++ b/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_link_util.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_amqp1_0_link_util).
diff --git a/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_message.erl b/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_message.erl
index d3425bfa17..a3eccc7587 100644
--- a/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_message.erl
+++ b/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_message.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_amqp1_0_message).
@@ -12,6 +12,7 @@
-define(PROPERTIES_HEADER, <<"x-amqp-1.0-properties">>).
-define(APP_PROPERTIES_HEADER, <<"x-amqp-1.0-app-properties">>).
-define(MESSAGE_ANNOTATIONS_HEADER, <<"x-amqp-1.0-message-annotations">>).
+-define(STREAM_OFFSET_HEADER, <<"x-stream-offset">>).
-define(FOOTER, <<"x-amqp-1.0-footer">>).
-include_lib("amqp_client/include/amqp_client.hrl").
@@ -201,11 +202,29 @@ annotated_message(RKey, #'basic.deliver'{redelivered = Redelivered},
first_acquirer = not Redelivered,
delivery_count = undefined},
HeadersBin = amqp10_framing:encode_bin(Header10),
- MsgAnnoBin =
+ MsgAnnoBin0 =
case table_lookup(Headers, ?MESSAGE_ANNOTATIONS_HEADER) of
undefined -> <<>>;
{_, MABin} -> MABin
- end,
+ end,
+ MsgAnnoBin =
+ case table_lookup(Headers, ?STREAM_OFFSET_HEADER) of
+ undefined ->
+ MsgAnnoBin0;
+ {_, StreamOffset} when is_integer(StreamOffset) ->
+ case amqp10_framing:decode_bin(MsgAnnoBin0) of
+ [#'v1_0.message_annotations'{content = C0} = MA] ->
+ Contents = map_add(utf8, ?STREAM_OFFSET_HEADER,
+ ulong, StreamOffset, C0),
+ amqp10_framing:encode_bin(
+ MA#'v1_0.message_annotations'{content = Contents});
+ [] ->
+ Contents = map_add(utf8, ?STREAM_OFFSET_HEADER,
+ ulong, StreamOffset, []),
+ amqp10_framing:encode_bin(
+ #'v1_0.message_annotations'{content = Contents})
+ end
+ end,
PropsBin =
case table_lookup(Headers, ?PROPERTIES_HEADER) of
{_, Props10Bin} ->
@@ -259,3 +278,7 @@ wrap(Type, Val) ->
table_lookup(undefined, _) -> undefined;
table_lookup(Headers, Header) -> rabbit_misc:table_lookup(Headers, Header).
+map_add(_T, _Key, _Type, undefined, Acc) ->
+ Acc;
+map_add(KeyType, Key, Type, Value, Acc) ->
+ [{wrap(KeyType, Key), wrap(Type, Value)} | Acc].
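map_add/5 above silently skips undefined values; the following sketch (illustrative, parameterised on the wrapping fun so it does not depend on this module's wrap/2) restates that behaviour so it can be tried in isolation:

    -module(map_add_sketch).
    -export([map_add/6]).

    %% Same shape as map_add/5 above, e.g.
    %%   map_add(fun(T, V) -> {T, V} end, utf8, <<"x-stream-offset">>, ulong, 42, [])
    %%   %% => [{{utf8,<<"x-stream-offset">>},{ulong,42}}]
    %% Undefined values are skipped and the accumulator is returned unchanged.
    map_add(_Wrap, _KeyType, _Key, _Type, undefined, Acc) ->
        Acc;
    map_add(Wrap, KeyType, Key, Type, Value, Acc) ->
        [{Wrap(KeyType, Key), Wrap(Type, Value)} | Acc].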
diff --git a/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_outgoing_link.erl b/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_outgoing_link.erl
index c83a152f40..2c8da853c7 100644
--- a/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_outgoing_link.erl
+++ b/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_outgoing_link.erl
@@ -2,12 +2,12 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_amqp1_0_outgoing_link).
--export([attach/3, delivery/6, transferred/3, credit_drained/3, flow/3]).
+-export([attach/3, detach/3, delivery/6, transferred/3, credit_drained/3, flow/3]).
-include_lib("amqp_client/include/amqp_client.hrl").
-include("rabbit_amqp1_0.hrl").
@@ -24,6 +24,11 @@
default_outcome,
route_state}).
+detach(#'v1_0.detach'{handle = Handle}, BCh, _Link) ->
+ CTag = handle_to_ctag(Handle),
+ rabbit_amqp1_0_channel:call(BCh, #'basic.cancel'{consumer_tag = CTag}),
+ ok.
+
attach(#'v1_0.attach'{name = Name,
handle = Handle,
source = Source,
@@ -46,6 +51,8 @@ attach(#'v1_0.attach'{name = Name,
DCh) of
{ok, Source1, OutgoingLink = #outgoing_link{queue = QueueName}} ->
CTag = handle_to_ctag(Handle),
+ Args = source_filters_to_consumer_args(Source1),
+
case rabbit_amqp1_0_channel:subscribe(
BCh, #'basic.consume'{
queue = QueueName,
@@ -56,7 +63,8 @@ attach(#'v1_0.attach'{name = Name,
no_ack = false,
%% TODO exclusive?
exclusive = false,
- arguments = [{<<"x-credit">>, table,
+ arguments = Args ++
+ [{<<"x-credit">>, table,
[{<<"credit">>, long, 0},
{<<"drain">>, bool, false}]}]},
self()) of
@@ -140,6 +148,7 @@ default(Thing, _Default) -> Thing.
ensure_source(Source = #'v1_0.source'{address = Address,
dynamic = Dynamic,
durable = Durable,
+ filter = _Filters,
%% TODO
expiry_policy = _ExpiryPolicy,
%% TODO
@@ -199,7 +208,7 @@ delivery(Deliver = #'basic.deliver'{delivery_tag = DeliveryTag,
batchable = false},
Msg1_0 = rabbit_amqp1_0_message:annotated_message(
RKey, Deliver, Msg),
- ?DEBUG("Outbound content:~n ~p~n",
+ ?DEBUG("Outbound content:~n ~p",
[[amqp10_framing:pprint(Section) ||
Section <- amqp10_framing:decode_bin(
iolist_to_binary(Msg1_0))]]),
@@ -238,3 +247,18 @@ transferred(DeliveryTag, Channel,
ok
end,
Link#outgoing_link{delivery_count = serial_add(Count, 1)}.
+
+source_filters_to_consumer_args(#'v1_0.source'{filter = {map, KVList}}) ->
+ Key = {symbol, <<"rabbitmq:stream-offset-spec">>},
+ case lists:keyfind(Key, 1, KVList) of
+ {_, {timestamp, Ts}} ->
+ [{<<"x-stream-offset">>, timestamp, Ts div 1000}]; %% 0.9.1 uses second based timestamps
+ {_, {utf8, Spec}} ->
+ [{<<"x-stream-offset">>, longstr, Spec}]; %% next, last, first and "10m" etc
+ {_, {_, Offset}} when is_integer(Offset) ->
+ [{<<"x-stream-offset">>, long, Offset}]; %% integer offset
+ _ ->
+ []
+ end;
+source_filters_to_consumer_args(_Source) ->
+ [].
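Worked examples of that mapping, written as plain pattern-match assertions that would only compile inside this module (they rely on its includes for the #'v1_0.source'{} record and on the private function above):

    %% Each AMQP 1.0 "rabbitmq:stream-offset-spec" filter value maps onto the
    %% x-stream-offset consumer argument understood by stream queues.
    stream_offset_filter_examples() ->
        Key = {symbol, <<"rabbitmq:stream-offset-spec">>},
        Src = fun(Val) -> #'v1_0.source'{filter = {map, [{Key, Val}]}} end,
        %% symbolic specs: "first", "last", "next", or a time span such as "10m"
        [{<<"x-stream-offset">>, longstr, <<"first">>}] =
            source_filters_to_consumer_args(Src({utf8, <<"first">>})),
        %% absolute offsets are passed through as longs
        [{<<"x-stream-offset">>, long, 42}] =
            source_filters_to_consumer_args(Src({ulong, 42})),
        %% AMQP 1.0 timestamps are milliseconds; AMQP 0-9-1 expects seconds
        [{<<"x-stream-offset">>, timestamp, 1}] =
            source_filters_to_consumer_args(Src({timestamp, 1000})),
        %% no filter (or no recognised key) means no extra consumer arguments
        [] = source_filters_to_consumer_args(#'v1_0.source'{filter = undefined}),
        ok.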
diff --git a/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_reader.erl b/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_reader.erl
index 1d4a5050e1..20a4584fbb 100644
--- a/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_reader.erl
+++ b/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_reader.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_amqp1_0_reader).
@@ -84,7 +84,7 @@ system_terminate(Reason, _Parent, _Deb, _State) ->
system_code_change(Misc, _Module, _OldVsn, _Extra) ->
{ok, Misc}.
-conserve_resources(Pid, Source, Conserve) ->
+conserve_resources(Pid, Source, {_, Conserve, _}) ->
Pid ! {conserve_resources, Source, Conserve},
ok.
@@ -104,8 +104,12 @@ recvloop(Deb, State = #v1{connection_state = blocked}) ->
mainloop(Deb, State);
recvloop(Deb, State = #v1{sock = Sock, recv_len = RecvLen, buf_len = BufLen})
when BufLen < RecvLen ->
- ok = rabbit_net:setopts(Sock, [{active, once}]),
- mainloop(Deb, State#v1{pending_recv = true});
+ case rabbit_net:setopts(Sock, [{active, once}]) of
+ ok ->
+ mainloop(Deb, State#v1{pending_recv = true});
+ {error, Reason} ->
+ throw({inet_error, Reason})
+ end;
recvloop(Deb, State = #v1{recv_len = RecvLen, buf = Buf, buf_len = BufLen}) ->
{Data, Rest} = split_binary(case Buf of
[B] -> B;
@@ -204,7 +208,7 @@ switch_callback(State, Callback, Length) ->
terminate(Reason, State) when ?IS_RUNNING(State) ->
{normal, handle_exception(State, 0,
{?V_1_0_AMQP_ERROR_INTERNAL_ERROR,
- "Connection forced: ~p~n", [Reason]})};
+ "Connection forced: ~p", [Reason]})};
terminate(_Reason, State) ->
{force, State}.
@@ -247,7 +251,7 @@ handle_dependent_exit(ChPid, Reason, State) ->
{Channel, uncontrolled} ->
{RealReason, Trace} = Reason,
R = {?V_1_0_AMQP_ERROR_INTERNAL_ERROR,
- "Session error: ~p~n~p~n", [RealReason, Trace]},
+ "Session error: ~p~n~p", [RealReason, Trace]},
maybe_close(handle_exception(control_throttle(State), Channel, R))
end.
@@ -275,13 +279,13 @@ error_frame(Condition, Fmt, Args) ->
handle_exception(State = #v1{connection_state = closed}, Channel,
#'v1_0.error'{description = {utf8, Desc}}) ->
- rabbit_log_connection:error("AMQP 1.0 connection ~p (~p), channel ~p - error:~n~p~n",
+ rabbit_log_connection:error("Error on AMQP 1.0 connection ~p (~p), channel ~p:~n~p",
[self(), closed, Channel, Desc]),
State;
handle_exception(State = #v1{connection_state = CS}, Channel,
ErrorFrame = #'v1_0.error'{description = {utf8, Desc}})
when ?IS_RUNNING(State) orelse CS =:= closing ->
- rabbit_log_connection:error("AMQP 1.0 connection ~p (~p), channel ~p - error:~n~p~n",
+ rabbit_log_connection:error("Error on AMQP 1.0 connection ~p (~p), channel ~p:~n~p",
[self(), CS, Channel, Desc]),
%% TODO: session errors shouldn't force the connection to close
State1 = close_connection(State),
@@ -317,11 +321,11 @@ handle_1_0_frame(Mode, Channel, Payload, State) ->
%% section 2.8.15 in http://docs.oasis-open.org/amqp/core/v1.0/os/amqp-core-complete-v1.0-os.pdf
handle_exception(State, 0, error_frame(
?V_1_0_AMQP_ERROR_UNAUTHORIZED_ACCESS,
- "Access for user '~s' was refused: insufficient permissions~n", [Username]));
+ "Access for user '~s' was refused: insufficient permissions", [Username]));
_:Reason:Trace ->
handle_exception(State, 0, error_frame(
?V_1_0_AMQP_ERROR_INTERNAL_ERROR,
- "Reader error: ~p~n~p~n",
+ "Reader error: ~p~n~p",
[Reason, Trace]))
end.
@@ -346,12 +350,12 @@ handle_1_0_frame0(Mode, Channel, Payload, State) ->
parse_1_0_frame(Payload, _Channel) ->
{PerfDesc, Rest} = amqp10_binary_parser:parse(Payload),
Perf = amqp10_framing:decode(PerfDesc),
- ?DEBUG("Channel ~p ->~n~p~n~s~n",
+ ?DEBUG("Channel ~p ->~n~p~n~s",
[_Channel, amqp10_framing:pprint(Perf),
case Rest of
<<>> -> <<>>;
_ -> rabbit_misc:format(
- " followed by ~p bytes of content~n", [size(Rest)])
+ " followed by ~p bytes of content", [size(Rest)])
end]),
case Rest of
<<>> -> Perf;
@@ -789,8 +793,8 @@ socket_info(Get, Select, #v1{sock = Sock}) ->
{error, _} -> ''
end.
-ssl_info(F, #v1{sock = Sock}) ->
- case rabbit_net:ssl_info(Sock) of
+ssl_info(F, #v1{sock = Sock, proxy_socket = ProxySock}) ->
+ case rabbit_net:proxy_ssl_info(Sock, ProxySock) of
nossl -> '';
{error, _} -> '';
{ok, Items} ->
diff --git a/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session.erl b/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session.erl
index 6fbd5bb4d1..6b69f38d98 100644
--- a/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session.erl
+++ b/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_amqp1_0_session).
@@ -253,7 +253,7 @@ settle(Disp = #'v1_0.disposition'{first = First0,
Map;
{value, Entry} ->
#outgoing_delivery{delivery_tag = DeliveryTag } = Entry,
- ?DEBUG("Settling ~p with ~p~n", [Delivery, _Outcome]),
+ ?DEBUG("Settling ~p with ~p", [Delivery, _Outcome]),
UpstreamAckFun(DeliveryTag),
gb_trees:delete(Delivery, Map)
end
diff --git a/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session_process.erl b/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session_process.erl
index 5fcb720d3d..b2a466785b 100644
--- a/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session_process.erl
+++ b/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session_process.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_amqp1_0_session_process).
@@ -57,12 +57,12 @@ init({Channel, ReaderPid, WriterPid, #user{username = Username}, VHost,
session = rabbit_amqp1_0_session:init(Channel)
}};
{error, Reason} ->
- rabbit_log:warning("Closing session for connection ~p:~n~p~n",
+ rabbit_log:warning("Closing session for connection ~p:~n~p",
[ReaderPid, Reason]),
{stop, Reason}
end;
{error, Reason} ->
- rabbit_log:warning("Closing session for connection ~p:~n~p~n",
+ rabbit_log:warning("Closing session for connection ~p:~n~p",
[ReaderPid, Reason]),
{stop, Reason}
end.
@@ -85,6 +85,11 @@ handle_info(#'basic.consume_ok'{}, State) ->
%% Handled above
{noreply, State};
+handle_info(#'basic.cancel_ok'{}, State) ->
+ %% Just ignore this for now.
+ %% At some point we should send the detach here, but then we'd need to
+ %% track consumer tag -> link handle mappings somewhere.
+ {noreply, State};
handle_info({#'basic.deliver'{ consumer_tag = ConsumerTag,
delivery_tag = DeliveryTag } = Deliver, Msg},
State = #state{frame_max = FrameMax,
@@ -158,7 +163,7 @@ handle_info({'DOWN', _MRef, process, Ch, Reason},
io_lib:format("~w", [Reason])))}}
end,
End = #'v1_0.end'{ error = Error },
- rabbit_log:warning("Closing session for connection ~p:~n~p~n",
+ rabbit_log:warning("Closing session for connection ~p:~n~p",
[ReaderPid, Reason]),
ok = rabbit_amqp1_0_writer:send_command_sync(Sock, End),
{stop, normal, State};
@@ -186,7 +191,7 @@ handle_cast({frame, Frame, FlowPid},
catch exit:Reason = #'v1_0.error'{} ->
%% TODO shut down nicely like rabbit_channel
End = #'v1_0.end'{ error = Reason },
- rabbit_log:warning("Closing session for connection ~p:~n~p~n",
+ rabbit_log:warning("Closing session for connection ~p:~n~p",
[ReaderPid, Reason]),
ok = rabbit_amqp1_0_writer:send_command_sync(Sock, End),
{stop, normal, State};
@@ -286,7 +291,7 @@ handle_control(#'v1_0.disposition'{state = Outcome,
protocol_error(
?V_1_0_AMQP_ERROR_INVALID_FIELD,
"Unrecognised state: ~p~n"
- "Disposition was: ~p~n", [Outcome, Disp])
+ "Disposition was: ~p", [Outcome, Disp])
end)
end,
case rabbit_amqp1_0_session:settle(Disp, session(State), AckFun) of
@@ -294,11 +299,19 @@ handle_control(#'v1_0.disposition'{state = Outcome,
{Reply, Session1} -> {reply, Reply, state(Session1, State)}
end;
-handle_control(#'v1_0.detach'{ handle = Handle }, State) ->
+handle_control(#'v1_0.detach'{handle = Handle} = Detach,
+ #state{backing_channel = BCh} = State) ->
%% TODO keep the state around depending on the lifetime
%% TODO outgoing links?
+ case get({out, Handle}) of
+ undefined ->
+ ok;
+ Link ->
+ erase({out, Handle}),
+ ok = rabbit_amqp1_0_outgoing_link:detach(Detach, BCh, Link)
+ end,
erase({in, Handle}),
- {reply, #'v1_0.detach'{ handle = Handle }, State};
+ {reply, #'v1_0.detach'{handle = Handle}, State};
handle_control(#'v1_0.end'{}, _State = #state{ writer_pid = Sock }) ->
ok = rabbit_amqp1_0_writer:send_command(Sock, #'v1_0.end'{}),
diff --git a/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session_sup.erl b/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session_sup.erl
index 5f2462e8a0..24fe177476 100644
--- a/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session_sup.erl
+++ b/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_amqp1_0_session_sup).
diff --git a/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session_sup_sup.erl b/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session_sup_sup.erl
index e427064af0..9462b860fd 100644
--- a/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session_sup_sup.erl
+++ b/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session_sup_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_amqp1_0_session_sup_sup).
diff --git a/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_util.erl b/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_util.erl
index 9d9ad5044d..f8ea489241 100644
--- a/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_util.erl
+++ b/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_util.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_amqp1_0_util).
diff --git a/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_writer.erl b/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_writer.erl
index 2921f929db..f9ab546a0a 100644
--- a/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_writer.erl
+++ b/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_writer.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_amqp1_0_writer).
@@ -192,13 +192,13 @@ call(Pid, Msg) ->
%% Begin 1-0
assemble_frame(Channel, Performative, amqp10_framing) ->
- ?DEBUG("Channel ~p <-~n~p~n~n",
+ ?DEBUG("Channel ~p <-~n~p",
[Channel, amqp10_framing:pprint(Performative)]),
PerfBin = amqp10_framing:encode_bin(Performative),
amqp10_binary_generator:build_frame(Channel, PerfBin);
assemble_frame(Channel, Performative, rabbit_amqp1_0_sasl) ->
- ?DEBUG("Channel ~p <-~n~p~n~n",
+ ?DEBUG("Channel ~p <-~n~p",
[Channel, amqp10_framing:pprint(Performative)]),
PerfBin = amqp10_framing:encode_bin(Performative),
amqp10_binary_generator:build_frame(Channel,
@@ -211,7 +211,7 @@ assemble_frame(Channel, Performative, rabbit_amqp1_0_sasl) ->
assemble_frames(Channel, Performative, Content, _FrameMax,
amqp10_framing) ->
- ?DEBUG("Channel ~p <-~n~p~n followed by ~p bytes of content~n~n",
+ ?DEBUG("Channel ~p <-~n~p~n followed by ~p bytes of content",
[Channel, amqp10_framing:pprint(Performative),
iolist_size(Content)]),
PerfBin = amqp10_framing:encode_bin(Performative),
diff --git a/deps/rabbitmq_amqp1_0/test/amqp10_client_SUITE.erl b/deps/rabbitmq_amqp1_0/test/amqp10_client_SUITE.erl
index 922be93a85..939d7480db 100644
--- a/deps/rabbitmq_amqp1_0/test/amqp10_client_SUITE.erl
+++ b/deps/rabbitmq_amqp1_0/test/amqp10_client_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(amqp10_client_SUITE).
diff --git a/deps/rabbitmq_amqp1_0/test/command_SUITE.erl b/deps/rabbitmq_amqp1_0/test/command_SUITE.erl
index 67528393e7..a107e8ddbc 100644
--- a/deps/rabbitmq_amqp1_0/test/command_SUITE.erl
+++ b/deps/rabbitmq_amqp1_0/test/command_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
-module(command_SUITE).
diff --git a/deps/rabbitmq_amqp1_0/test/proxy_protocol_SUITE.erl b/deps/rabbitmq_amqp1_0/test/proxy_protocol_SUITE.erl
index fd0a927bcb..ec42a0d722 100644
--- a/deps/rabbitmq_amqp1_0/test/proxy_protocol_SUITE.erl
+++ b/deps/rabbitmq_amqp1_0/test/proxy_protocol_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2017-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2017-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(proxy_protocol_SUITE).
@@ -64,7 +64,7 @@ proxy_protocol(Config) ->
{ok, _Packet} = gen_tcp:recv(Socket, 0, ?TIMEOUT),
ConnectionName = rabbit_ct_broker_helpers:rpc(Config, 0,
?MODULE, connection_name, []),
- match = re:run(ConnectionName, <<"^192.168.1.1:80 ">>, [{capture, none}]),
+ match = re:run(ConnectionName, <<"^192.168.1.1:80 -> 192.168.1.2:81$">>, [{capture, none}]),
gen_tcp:close(Socket),
ok.
@@ -81,7 +81,7 @@ proxy_protocol_tls(Config) ->
timer:sleep(1000),
ConnectionName = rabbit_ct_broker_helpers:rpc(Config, 0,
?MODULE, connection_name, []),
- match = re:run(ConnectionName, <<"^192.168.1.1:80 ">>, [{capture, none}]),
+ match = re:run(ConnectionName, <<"^192.168.1.1:80 -> 192.168.1.2:81$">>, [{capture, none}]),
gen_tcp:close(Socket),
ok.
diff --git a/deps/rabbitmq_amqp1_0/test/system_SUITE.erl b/deps/rabbitmq_amqp1_0/test/system_SUITE.erl
index b5c92c26ad..24f63a267a 100644
--- a/deps/rabbitmq_amqp1_0/test/system_SUITE.erl
+++ b/deps/rabbitmq_amqp1_0/test/system_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(system_SUITE).
@@ -37,10 +37,15 @@ groups() ->
auth_failure,
access_failure,
access_failure_not_allowed,
- access_failure_send
+ access_failure_send,
+ streams
]},
{java, [], [
roundtrip
+ ]},
+ {streams, [
+ streams
+ ], [
]}
].
@@ -55,6 +60,22 @@ init_per_suite(Config) ->
end_per_suite(Config) ->
Config.
+init_per_group(streams, Config) ->
+ case rabbit_ct_helpers:is_mixed_versions() of
+ false ->
+ Suffix = rabbit_ct_helpers:testcase_absname(Config, "", "-"),
+ Config1 = rabbit_ct_helpers:set_config(Config, [
+ {rmq_nodename_suffix, Suffix},
+ {amqp10_client_library, dotnet}
+ ]),
+ rabbit_ct_helpers:run_setup_steps(Config1, [
+ fun build_dotnet_test_project/1
+ ] ++
+ rabbit_ct_broker_helpers:setup_steps() ++
+ rabbit_ct_client_helpers:setup_steps());
+ _ ->
+ {skip, "stream tests are skipped in mixed mode"}
+ end;
init_per_group(Group, Config) ->
Suffix = rabbit_ct_helpers:testcase_absname(Config, "", "-"),
Config1 = rabbit_ct_helpers:set_config(Config, [
@@ -117,6 +138,15 @@ roundtrip(Config) ->
{java, "RoundTripTest"}
]).
+streams(Config) ->
+ Ch = rabbit_ct_client_helpers:open_channel(Config, 0),
+ amqp_channel:call(Ch, #'queue.declare'{queue = <<"stream_q">>,
+ durable = true,
+ arguments = [{<<"x-queue-type">>, longstr, "stream"}]}),
+ run(Config, [
+ {dotnet, "streams"}
+ ]).
+
roundtrip_to_amqp_091(Config) ->
run(Config, [
{dotnet, "roundtrip_to_amqp_091"}
@@ -152,10 +182,6 @@ data_types(Config) ->
{dotnet, "data_types"}
]).
-%% at_most_once(Config) ->
-%% run(Config, [
-%% ]).
-
reject(Config) ->
run(Config, [
{dotnet, "reject"}
@@ -167,17 +193,50 @@ redelivery(Config) ->
]).
routing(Config) ->
+
+ StreamQT =
+ case rabbit_ct_broker_helpers:enable_feature_flag(Config, stream_queue) of
+ ok ->
+ <<"stream">>;
+ _ ->
+ %% if the feature flag could not be enabled we run the stream
+ %% routing test using a classic queue instead
+ ct:pal("stream feature flag could not be enabled, "
+ "running stream tests against classic queues"),
+ <<"classic">>
+ end,
Ch = rabbit_ct_client_helpers:open_channel(Config, 0),
- amqp_channel:call(Ch, #'queue.declare'{queue = <<"transient_q">>,
+ amqp_channel:call(Ch, #'queue.declare'{queue = <<"transient_q">>,
durable = false}),
- amqp_channel:call(Ch, #'queue.declare'{queue = <<"durable_q">>,
+ amqp_channel:call(Ch, #'queue.declare'{queue = <<"durable_q">>,
durable = true}),
- amqp_channel:call(Ch, #'queue.declare'{queue = <<"autodel_q">>,
+ amqp_channel:call(Ch, #'queue.declare'{queue = <<"quorum_q">>,
+ durable = true,
+ arguments = [{<<"x-queue-type">>, longstr, <<"quorum">>}]}),
+ amqp_channel:call(Ch, #'queue.declare'{queue = <<"stream_q">>,
+ durable = true,
+ arguments = [{<<"x-queue-type">>, longstr, StreamQT}]}),
+ amqp_channel:call(Ch, #'queue.declare'{queue = <<"stream_q2">>,
+ durable = true,
+ arguments = [{<<"x-queue-type">>, longstr, StreamQT}]}),
+ amqp_channel:call(Ch, #'queue.declare'{queue = <<"autodel_q">>,
auto_delete = true}),
run(Config, [
{dotnet, "routing"}
]).
+%% TODO: this test doesn't test anything that the standard routing test
+%% already does. We should test stream-specific things here, like attaching
+%% to a given offset.
+stream_interop_basics(Config) ->
+ Ch = rabbit_ct_client_helpers:open_channel(Config, 0),
+ amqp_channel:call(Ch, #'queue.declare'{queue = <<"stream_q">>,
+ durable = true,
+ arguments = [{<<"x-queue-type">>, longstr, <<"stream">>}]}),
+ run(Config, [
+ {dotnet, "routing"}
+ ]).
+
invalid_routes(Config) ->
run(Config, [
{dotnet, "invalid_routes"}
diff --git a/deps/rabbitmq_amqp1_0/test/system_SUITE_data/fsharp-tests/Program.fs b/deps/rabbitmq_amqp1_0/test/system_SUITE_data/fsharp-tests/Program.fs
index 19aa1009dd..6f083c2309 100755
--- a/deps/rabbitmq_amqp1_0/test/system_SUITE_data/fsharp-tests/Program.fs
+++ b/deps/rabbitmq_amqp1_0/test/system_SUITE_data/fsharp-tests/Program.fs
@@ -75,6 +75,10 @@ module Test =
if a <> b then
failwith (sprintf "Expected: %A\r\nGot: %A" a b)
+ let assertNotNull a =
+ if a = null then
+ failwith (sprintf "Null not expected")
+
let assertTrue b =
if not b then
failwith (sprintf "Expected True got False!")
@@ -142,6 +146,43 @@ module Test =
assertEqual rtd.Properties.CorrelationId corr
()
+ let streams uri =
+ use c = connect uri
+ let name = "streams-test"
+ let address = "/amq/queue/stream_q2"
+ let sender = SenderLink(c.Session, name + "-sender" , address)
+ //for body in sampleTypes do
+ let body = "hi"B :> obj
+
+ let corr = "correlation"
+ new Message(body,
+ Properties = new Properties(CorrelationId = corr))
+ |> sender.Send
+ //TODO wait for settlement
+ let specs = [box("first");
+ box("last");
+ box("10m");
+ box(0)]
+ for spec in specs do
+ printfn "testing streams spec %A" spec
+ let filterSet = Map()
+ filterSet.Add(Symbol "rabbitmq:stream-offset-spec", spec)
+ let source = Source(Address = address,
+ FilterSet = filterSet)
+ let attach = Attach(Source = source)
+ let attached = new OnAttached (fun _ _ -> ())
+ let receiver = ReceiverLink(c.Session, Guid.NewGuid().ToString(), attach, attached)
+ receiver.SetCredit(100, true)
+ let rtd = receiver.Receive()
+ assertNotNull rtd
+ assertTrue (rtd.MessageAnnotations.Map.Count = 1)
+ let (result, _) = rtd.MessageAnnotations.Map.TryGetValue("x-stream-offset")
+ assertTrue result
+ assertEqual body rtd.Body
+ assertEqual rtd.Properties.CorrelationId corr
+ receiver.Close()
+ ()
+
open RabbitMQ.Client
let roundtrip_to_amqp_091 uri =
@@ -313,6 +354,8 @@ module Test =
* from the common_test suite. *)
"/amq/queue/transient_q", "/amq/queue/transient_q", "", true
"/amq/queue/durable_q", "/amq/queue/durable_q", "", true
+ "/amq/queue/quorum_q", "/amq/queue/quorum_q", "", true
+ "/amq/queue/stream_q", "/amq/queue/stream_q", "", true
"/amq/queue/autodel_q", "/amq/queue/autodel_q", "", true] do
let rnd = Random()
@@ -476,6 +519,9 @@ let main argv =
| [AsLower "invalid_routes"; uri] ->
invalidRoutes uri
0
+ | [AsLower "streams"; uri] ->
+ streams uri
+ 0
| _ ->
printfn "test %A not found. usage: <test> <uri>" argv
1
diff --git a/deps/rabbitmq_amqp1_0/test/system_SUITE_data/fsharp-tests/fsharp-tests.fsproj b/deps/rabbitmq_amqp1_0/test/system_SUITE_data/fsharp-tests/fsharp-tests.fsproj
index 85b60b9945..5596623580 100755
--- a/deps/rabbitmq_amqp1_0/test/system_SUITE_data/fsharp-tests/fsharp-tests.fsproj
+++ b/deps/rabbitmq_amqp1_0/test/system_SUITE_data/fsharp-tests/fsharp-tests.fsproj
@@ -1,14 +1,14 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
- <TargetFramework>netcoreapp2.0</TargetFramework>
+ <TargetFramework>net5.0</TargetFramework>
</PropertyGroup>
<ItemGroup>
<Compile Include="Program.fs" />
</ItemGroup>
<ItemGroup>
- <PackageReference Include="RabbitMQ.Client" Version="5.*" />
- <PackageReference Include="AmqpNetLite" Version="2.1.2" />
- <PackageReference Include="AmqpNetLite.Serialization" Version="2.1.2" />
+ <PackageReference Include="RabbitMQ.Client" Version="6.*" />
+ <PackageReference Include="AmqpNetLite" Version="2.4.1" />
+ <PackageReference Include="AmqpNetLite.Serialization" Version="2.4.1" />
</ItemGroup>
</Project>
diff --git a/deps/rabbitmq_amqp1_0/test/system_SUITE_data/fsharp-tests/global.json b/deps/rabbitmq_amqp1_0/test/system_SUITE_data/fsharp-tests/global.json
new file mode 100644
index 0000000000..8f2b1017af
--- /dev/null
+++ b/deps/rabbitmq_amqp1_0/test/system_SUITE_data/fsharp-tests/global.json
@@ -0,0 +1,5 @@
+{
+ "sdk": {
+ "version": "3.1"
+ }
+}
diff --git a/deps/rabbitmq_amqp1_0/test/system_SUITE_data/java-tests/pom.xml b/deps/rabbitmq_amqp1_0/test/system_SUITE_data/java-tests/pom.xml
index d8f01408e6..e49f07974b 100644
--- a/deps/rabbitmq_amqp1_0/test/system_SUITE_data/java-tests/pom.xml
+++ b/deps/rabbitmq_amqp1_0/test/system_SUITE_data/java-tests/pom.xml
@@ -32,16 +32,16 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
- <version>3.8.0</version>
+ <version>3.8.1</version>
<configuration>
- <source>1.6</source>
- <target>1.6</target>
+ <source>1.8</source>
+ <target>1.8</target>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
- <version>2.22.1</version>
+ <version>2.22.2</version>
<configuration>
<systemPropertyVariables>
<rmq_broker_uri>${rmq_broker_uri}</rmq_broker_uri>
diff --git a/deps/rabbitmq_amqp1_0/test/unit_SUITE.erl b/deps/rabbitmq_amqp1_0/test/unit_SUITE.erl
index 0b10141a13..b5f166023d 100644
--- a/deps/rabbitmq_amqp1_0/test/unit_SUITE.erl
+++ b/deps/rabbitmq_amqp1_0/test/unit_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(unit_SUITE).
diff --git a/deps/rabbitmq_auth_backend_cache/BUILD.bazel b/deps/rabbitmq_auth_backend_cache/BUILD.bazel
new file mode 100644
index 0000000000..6a3dff9d77
--- /dev/null
+++ b/deps/rabbitmq_auth_backend_cache/BUILD.bazel
@@ -0,0 +1,81 @@
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze")
+load(
+ "//:rabbitmq.bzl",
+ "RABBITMQ_DIALYZER_OPTS",
+ "assert_suites",
+ "broker_for_integration_suites",
+ "rabbitmq_integration_suite",
+ "rabbitmq_lib",
+ "rabbitmq_suite",
+)
+
+APP_ENV = """[
+ {cache_ttl, 15000},
+ {cache_module, rabbit_auth_cache_ets},
+ {cache_module_args, []},
+ {cached_backend, rabbit_auth_backend_internal},
+ {cache_refusals, false}
+ ]"""
+
+APP_NAME = "rabbitmq_auth_backend_cache"
+
+APP_DESCRIPTION = "RabbitMQ Authentication Backend cache"
+
+APP_MODULE = "rabbit_auth_backend_cache_app"
+
+DEPS = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+]
+
+RUNTIME_DEPS = [
+ "//deps/rabbit:bazel_erlang_lib",
+]
+
+FIRST_SRCS = [
+ "src/rabbit_auth_cache.erl",
+]
+
+rabbitmq_lib(
+ app_description = APP_DESCRIPTION,
+ app_env = APP_ENV,
+ app_module = APP_MODULE,
+ app_name = APP_NAME,
+ first_srcs = FIRST_SRCS,
+ runtime_deps = RUNTIME_DEPS,
+ deps = DEPS,
+)
+
+xref(tags = ["xref"])
+
+dialyze(
+ dialyzer_opts = RABBITMQ_DIALYZER_OPTS,
+ plt = "//:base_plt",
+ tags = ["dialyze"],
+)
+
+broker_for_integration_suites()
+
+PACKAGE = "deps/rabbitmq_auth_backend_cache"
+
+suites = [
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "config_schema_SUITE",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "rabbit_auth_backend_cache_SUITE",
+ ),
+ rabbitmq_suite(
+ name = "rabbit_auth_cache_SUITE",
+ runtime_deps = [
+ "//deps/rabbitmq_ct_helpers:bazel_erlang_lib",
+ ],
+ ),
+]
+
+assert_suites(
+ suites,
+ glob(["test/**/*_SUITE.erl"]),
+)
diff --git a/deps/rabbitmq_auth_backend_cache/Makefile b/deps/rabbitmq_auth_backend_cache/Makefile
index 1b7d375828..d9b1c38763 100644
--- a/deps/rabbitmq_auth_backend_cache/Makefile
+++ b/deps/rabbitmq_auth_backend_cache/Makefile
@@ -28,5 +28,5 @@ DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
ERLANG_MK_COMMIT = rabbitmq-tmp
-include rabbitmq-components.mk
-include erlang.mk
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
diff --git a/deps/rabbitmq_auth_backend_cache/erlang.mk b/deps/rabbitmq_auth_backend_cache/erlang.mk
deleted file mode 100644
index fce4be0b0a..0000000000
--- a/deps/rabbitmq_auth_backend_cache/erlang.mk
+++ /dev/null
@@ -1,7808 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
-ERLANG_MK_WITHOUT =
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
-
-# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
-
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
-
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of statc BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another a key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstact time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simply writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating Github-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = erlang library for JSON Web Token
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple non intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = redis erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += esh_mk
-pkg_esh_mk_name = esh_mk
-pkg_esh_mk_description = esh template engine plugin for erlang.mk
-pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
-pkg_esh_mk_fetch = git
-pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
-pkg_esh_mk_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = TOML language erlang parser
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = http://fixprotocol.org/ implementation.
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - an Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = The guards and for Erlang parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
-pkg_gun_homepage = http://ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang irc client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding and decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library for efficiently decoding and encoding JSON, written in C.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-Erlang, open-source implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = erlang driver for rethinkdb
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = library to handle mimetypes
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map an internal port to an external one using UPnP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang Oauth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transformer for using new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between record and proplist
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = automatic Erlang node discovery via redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = pipelined erlang redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang Rest Client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy as nif for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an ID generator for messaging services.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elastic Search
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX-style parser for broken HTML, written in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = transit format for erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU based collation Erlang module
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtension for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = a simple erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler svn repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based yaml loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = ZAB protocol implemented in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang.
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
-
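As an illustrative sketch of the search target defined above (the query string is arbitrary), a typical invocation from a project using erlang.mk would be:

    # list every package whose name or description mentions "oauth"
    make search q=oauth
    # with no q given, print the entire package index
    make search
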
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
-
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
-
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# They both use the core_dep_plugin macro.
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
-
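As a sketch of how DEP_EARLY_PLUGINS feeds the core_dep_plugin macro above (my_dep and the file path are hypothetical): an entry without a slash loads that dependency's early-plugins.mk, while an entry containing a slash names an explicit file under $(DEPS_DIR) (or under the project itself when the first path component matches $(PROJECT)):

    # loads $(DEPS_DIR)/my_dep/early-plugins.mk
    DEP_EARLY_PLUGINS = my_dep
    # loads $(DEPS_DIR)/my_dep/mk/early.mk
    DEP_EARLY_PLUGINS = my_dep/mk/early.mk
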
-# Query functions.
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
-
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
-
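As a worked sketch of the query functions above, assume a hypothetical dependency line of the usual three-word form (fetch method, repository, commit) in a project Makefile:

    dep_cowboy = git https://github.com/ninenines/cowboy 2.9.0
    # with that line in place:
    #   $(call query_fetch_method,cowboy) -> git
    #   $(call query_repo,cowboy)         -> https://github.com/ninenines/cowboy
    #   $(call query_version,cowboy)      -> 2.9.0
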
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly we don't want to include it here,
-# otherwise it'll be treated both as an app and as a top-level project.
-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
-
-export NO_AUTOPATCH
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
-
-# Optimization: don't recompile deps unless truly necessary.
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create an ebin directory for all apps to make sure Erlang recognizes them
-# as proper OTP applications when using -include_lib. This is a temporary
-# fix; a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
-
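A minimal sketch of the LOCAL_DEPS filtering described in the comments above, assuming two hypothetical applications under $(APPS_DIR); with this setting only those two apps are compiled at the toplevel instead of everything found in apps/:

    # only build apps/app_one and apps/app_two at the toplevel
    LOCAL_DEPS = app_one app_two
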
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies once they have been compiled.
-# A developer working on the top-level project and some of its
-# dependencies at the same time may want to change this behavior.
-# There are two solutions:
-# 1. Set `FULL=1` so that all dependencies are visited and
-# recursively recompiled if necessary.
-# 2. Set `FORCE_REBUILD=` to the specific list of dependencies that
-# should be recompiled (instead of the whole set).
-
-FORCE_REBUILD ?=
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
-
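The two options described above can be exercised directly from the command line; a sketch with hypothetical dependency names:

    # 1. visit all dependencies and recompile them if necessary
    make FULL=1
    # 2. force only the listed dependencies to be rebuilt
    make FORCE_REBUILD="cowboy ranch"
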
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
-
-# Deps related targets.
-
-# @todo rename GNUmakefile and makefile into Makefile first, if they exist
-# While the Makefile could also be named GNUmakefile or makefile,
-# in practice only Makefile has been needed so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
-# if given. Do it for all 3 possible Makefile file names.
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
-
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-# Hex only has a package version. No need to look in the Erlang.mk packages.
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
-
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility purposes with older Erlang.mk configuration.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
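-
-# For illustration, a sketch of how dependency declarations map to the
-# dep_fetch_* rules above (names, URL and versions are hypothetical):
-#
-#     DEPS = cowlib jsx
-#     dep_cowlib = git https://github.com/ninenines/cowlib 2.12.1
-#     dep_jsx = hex 3.1.0
-#
-# The first word of dep_NAME selects the fetch method (git, hex, hg, svn,
-# cp, ln, ...); the remaining words are the repository/version and commit.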
-
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
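-
-# For illustration (the dependency name is hypothetical), autopatching can
-# be skipped for a dependency that already ships a usable Makefile:
-#
-#     NO_AUTOPATCH = my_raw_dep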
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
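-
-# For illustration, a project Makefile could override these before including
-# erlang.mk (the module names are hypothetical):
-#
-#     ERLC_OPTS += +warn_missing_spec
-#     COMPILE_FIRST = my_behaviour
-#     ERLC_EXCLUDE = my_generated_module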
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
-
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- Output0 = [
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ],
- Output = case "é" of
- [233] -> unicode:characters_to_binary(Output0);
- _ -> Output0
- end,
- ok = file:write_file("$(1)", Output),
- halt()
-endef
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
- echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
-
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
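-
-# For illustration (dependency name, version and options are hypothetical):
-#
-#     TEST_DEPS = meck
-#     dep_meck = hex 0.9.2
-#     TEST_ERLC_OPTS += +bin_opt_info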
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
- @:
-
-test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
-test_erlc_verbose_2 = set -x;
-test_erlc_verbose = $(test_erlc_verbose_$(V))
-
-define compile_test_erl
- $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(1)
-endef
-
-ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
-$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
- $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want to fail due to
-# warnings when used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
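-
-# For illustration, with a hypothetical Git dependency declared as
-# "dep_cowlib = git https://github.com/ninenines/cowlib 2.12.1", running
-# `make rebar.config` below generates a {deps, [...]} entry pointing at that
-# repository and commit, with -Werror stripped from the generated erl_opts.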
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
- " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
- " list-templates List available templates"
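-
-# For illustration (application and module names are hypothetical):
-#
-#     make bootstrap                 # skeleton OTP application in the current directory
-#     make new-app in=my_app         # new application under $(APPS_DIR)
-#     make new t=gen_server n=my_server in=my_app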
-
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
-
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
-
-list-templates:
- $(verbose) @echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
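-
-# For illustration, a project building a port program rather than a shared
-# library could set (the output name is hypothetical):
-#
-#     C_SRC_TYPE = executable
-#     C_SRC_OUTPUT = $(CURDIR)/priv/my_port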
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code, and neither does the "gcc" MSYS2 package.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
- "The CI_OTP variable must be defined with the Erlang versions" \
- "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
-
-# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifdef CONCUERROR_TESTS
-
-.PHONY: concuerror distclean-concuerror
-
-# Configuration
-
-CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
-CONCUERROR_OPTS ?=
-
-# Core targets.
-
-check:: concuerror
-
-ifndef KEEP_LOGS
-distclean:: distclean-concuerror
-endif
-
-# Plugin-specific targets.
-
-$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
- $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
- $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
-
-$(CONCUERROR_LOGS_DIR):
- $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
-
-define concuerror_html_report
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="utf-8">
-<title>Concuerror HTML report</title>
-</head>
-<body>
-<h1>Concuerror HTML report</h1>
-<p>Generated on $(concuerror_date)</p>
-<ul>
-$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
-</ul>
-</body>
-</html>
-endef
-
-concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
- $(eval concuerror_date := $(shell date))
- $(eval concuerror_targets := $^)
- $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
-
-define concuerror_target
-.PHONY: concuerror-$1-$2
-
-concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
- $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
- --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
- -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
- $$(CONCUERROR_OPTS) -m $1 -t $2
-endef
-
-$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
-
-distclean-concuerror:
- $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
-
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ct apps-ct distclean-ct
-
-# Configuration.
-
-CT_OPTS ?=
-
-ifneq ($(wildcard $(TEST_DIR)),)
-ifndef CT_SUITES
-CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
-endif
-endif
-CT_SUITES ?=
-CT_LOGS_DIR ?= $(CURDIR)/logs
-
-# Core targets.
-
-tests:: ct
-
-ifndef KEEP_LOGS
-distclean:: distclean-ct
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Common_test targets:" \
- " ct Run all the common_test suites for this project" \
- "" \
- "All your common_test suites have their associated targets." \
-"A suite named http_SUITE can be run using the ct-http target."
-
-# Plugin-specific targets.
-
-CT_RUN = ct_run \
- -no_auto_compile \
- -noinput \
- -pa $(CURDIR)/ebin $(TEST_DIR) \
- -dir $(TEST_DIR) \
- -logdir $(CT_LOGS_DIR)
-
-ifeq ($(CT_SUITES),)
-ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
-else
-# We do not run tests if we are in an apps/* directory that has no test directory.
-ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
-ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
-endif
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-define ct_app_target
-apps-ct-$1: test-build
- $$(MAKE) -C $1 ct IS_APP=1
-endef
-
-$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
-
-apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
-endif
-
-ifdef t
-ifeq (,$(findstring :,$t))
-CT_EXTRA = -group $t
-else
-t_words = $(subst :, ,$t)
-CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
-endif
-else
-ifdef c
-CT_EXTRA = -case $c
-else
-CT_EXTRA =
-endif
-endif
-
-define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
-endef
-
-$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
-
-distclean-ct:
- $(gen_verbose) rm -rf $(CT_LOGS_DIR)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: plt distclean-plt dialyze
-
-# Configuration.
-
-DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
-export DIALYZER_PLT
-
-PLT_APPS ?=
-DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-DIALYZER_PLT_OPTS ?=
-
-# Core targets.
-
-check:: dialyze
-
-distclean:: distclean-plt
-
-help::
- $(verbose) printf "%s\n" "" \
- "Dialyzer targets:" \
- " plt Build a PLT file for this project" \
- " dialyze Analyze the project using Dialyzer"
-
-# Plugin-specific targets.
-
-define filter_opts.erl
- Opts = init:get_plain_arguments(),
- {Filtered, _} = lists:foldl(fun
- (O, {Os, true}) -> {[O|Os], false};
- (O = "-D", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-I", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-pa", {Os, _}) -> {[O|Os], true};
- (_, Acc) -> Acc
- end, {[], false}, Opts),
- io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
- halt().
-endef
-
-# DIALYZER_PLT is a variable understood directly by Dialyzer.
-#
-# We append the path to erts at the end of the PLT. This works
-# because the PLT file is in the external term format and the
-# function binary_to_term/1 ignores any trailing data.
-$(DIALYZER_PLT): deps app
- $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
- while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
- $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
- erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
- $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
-
-plt: $(DIALYZER_PLT)
-
-distclean-plt:
- $(gen_verbose) rm -f $(DIALYZER_PLT)
-
-ifneq ($(wildcard $(DIALYZER_PLT)),)
-dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
- $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
- grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
- rm $(DIALYZER_PLT); \
- $(MAKE) plt; \
- fi
-else
-dialyze: $(DIALYZER_PLT)
-endif
- $(verbose) dialyzer --no_native `$(ERL) \
- -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
- -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
- " escript Build an executable escript archive" \
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
- $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: eunit apps-eunit
-
-# Configuration
-
-EUNIT_OPTS ?=
-EUNIT_ERL_OPTS ?=
-
-# Core targets.
-
-tests:: eunit
-
-help::
- $(verbose) printf "%s\n" "" \
- "EUnit targets:" \
- " eunit Run all the EUnit tests for this project"
-
-# Plugin-specific targets.
-
-define eunit.erl
- $(call cover.erl)
- CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
- ok -> ok;
- error -> halt(2)
- end,
- CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
- halt()
-endef
-
-EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
-else
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
-endif
-else
-EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
-EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
-
-EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
- $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
-
-eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
-ifneq ($(wildcard src/ $(TEST_DIR)),)
- $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-apps-eunit: test-build
- $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
- [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
- exit $$eunit_retcode
-endif
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper)
-.PHONY: proper
-
-# Targets.
-
-tests:: proper
-
-define proper_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- Module = fun(M) ->
- [true] =:= lists:usort([
- case atom_to_list(F) of
- "prop_" ++ _ ->
- io:format("Testing ~p:~p/0~n", [M, F]),
- proper:quickcheck(M:F(), nocolors);
- _ ->
- true
- end
- || {F, 0} <- M:module_info(exports)])
- end,
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]);
- module -> Module($(2));
- function -> proper:quickcheck($(2), nocolors)
- end,
- CoverExport("$(COVER_DATA_DIR)/proper.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-proper: test-build cover-data-dir
- $(verbose) $(call erlang,$(call proper_check.erl,module,$(t)))
-else
-proper: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)()))
-endif
-else
-proper: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Verbosity.
-
-proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
-proto_verbose = $(proto_verbose_$(V))
-
-# Core targets.
-
-ifneq ($(wildcard src/),)
-ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
-PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
-ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
-
-ifeq ($(PROTO_FILES),)
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
- $(verbose) :
-else
-# Rebuild proto files when the Makefile changes.
-# We exclude $(PROJECT).d to avoid a circular dependency.
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(PROTO_FILES); \
- fi
- $(verbose) touch $@
-
-$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
-endif
-
-ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
-define compile_proto.erl
- [begin
- protobuffs_compile:generate_source(F, [
- {output_include_dir, "./include"},
- {output_src_dir, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-else
-define compile_proto.erl
- [begin
- gpb_compile:file(F, [
- {include_as_lib, true},
- {module_name_suffix, "_pb"},
- {o_hrl, "./include"},
- {o_erl, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-endif
-
-ifneq ($(PROTO_FILES),)
-$(PROJECT).d:: $(PROTO_FILES)
- $(verbose) mkdir -p ebin/ include/
- $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
-endif
-endif
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
-
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
- " shell Run an erlang shell with SHELL_OPTS or reasonable default"
-
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
-
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
- "ReST sources and the 'conf.py' file are expected in the directory pointed to by" \
- "SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists the formats to build (only" \
- "the 'html' format is generated by default); the target directory can be specified by" \
- 'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
- "Additional Sphinx options can be set in SPHINX_OPTS."
-
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
-
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
-.PHONY: triq
-
-# Targets.
-
-tests:: triq
-
-define triq_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
- module -> triq:check($(2));
- function -> triq:check($(2))
- end,
- CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-triq: test-build cover-data-dir
- $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
-else
-triq: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
-endif
-else
-triq: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
- ' xref Run Xrefr using $$XREF_CONFIG as config file if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-COVER_REPORT_DIR ?= cover
-COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
-
-ifdef COVER
-COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
-COVER_DEPS ?=
-endif
-
-# Code coverage for Common Test.
-
-ifdef COVER
-ifdef CT_RUN
-ifneq ($(wildcard $(TEST_DIR)),)
-test-build:: $(TEST_DIR)/ct.cover.spec
-
-$(TEST_DIR)/ct.cover.spec: cover-data-dir
- $(gen_verbose) printf "%s\n" \
- "{incl_app, '$(PROJECT)', details}." \
- "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
- $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
- $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
-
-CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
-endif
-endif
-endif
-
-# Code coverage for other tools.
-
-ifdef COVER
-define cover.erl
- CoverSetup = fun() ->
- Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
- $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
- $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
- end
- end || Dir <- Dirs]
- end,
- CoverExport = fun(Filename) -> cover:export(Filename) end,
-endef
-else
-define cover.erl
- CoverSetup = fun() -> ok end,
- CoverExport = fun(_) -> ok end,
-endef
-endif
-
-# Core targets
-
-ifdef COVER
-ifneq ($(COVER_REPORT_DIR),)
-tests::
- $(verbose) $(MAKE) --no-print-directory cover-report
-endif
-
-cover-data-dir: | $(COVER_DATA_DIR)
-
-$(COVER_DATA_DIR):
- $(verbose) mkdir -p $(COVER_DATA_DIR)
-else
-cover-data-dir:
-endif
-
-clean:: coverdata-clean
-
-ifneq ($(COVER_REPORT_DIR),)
-distclean:: cover-report-clean
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Cover targets:" \
- " cover-report Generate an HTML coverage report from previously collected" \
- " cover data." \
- " all.coverdata Merge all coverdata files into all.coverdata." \
- "" \
- "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
- "target tests additionally generates an HTML coverage report from the combined" \
- "coverdata files from each of these testing tools. HTML reports can be disabled" \
- "by setting COVER_REPORT_DIR to empty."
-
-# Plugin-specific targets.
-
-COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
-
-.PHONY: coverdata-clean
-coverdata-clean:
- $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
-
-# Merge all coverdata files into one.
-define cover_export.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
-endef
-
-all.coverdata: $(COVERDATA) cover-data-dir
- $(gen_verbose) $(call erlang,$(cover_export.erl))
-
-# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
-# empty if you want the coverdata files but not the HTML report.
-ifneq ($(COVER_REPORT_DIR),)
-
-.PHONY: cover-report-clean cover-report
-
-cover-report-clean:
- $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
-ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
- $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
-endif
-
-ifeq ($(COVERDATA),)
-cover-report:
-else
-
-# Modules which include eunit.hrl always contain one line without coverage
-# because eunit defines test/0 which is never called. We compensate for this.
-EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
- grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
- | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
-
-define cover_report.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- Ms = cover:imported_modules(),
- [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
- ++ ".COVER.html", [html]) || M <- Ms],
- Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
- EunitHrlMods = [$(EUNIT_HRL_MODS)],
- Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
- true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
- TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
- TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
- Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
- TotalPerc = Perc(TotalY, TotalN),
- {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
- io:format(F, "<!DOCTYPE html><html>~n"
- "<head><meta charset=\"UTF-8\">~n"
- "<title>Coverage report</title></head>~n"
- "<body>~n", []),
- io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
- io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
- [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
- "<td>~p%</td></tr>~n",
- [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
- How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
- Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
- io:format(F, "</table>~n"
- "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
- "</body></html>", [How, Date]),
- halt().
-endef
-
-cover-report:
- $(verbose) mkdir -p $(COVER_REPORT_DIR)
- $(gen_verbose) $(call erlang,$(cover_report.erl))
-
-endif
-endif # ifneq ($(COVER_REPORT_DIR),)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
-
-endif
-endif
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
-
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
-
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are also included no
-# matter the type of requested dependencies.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
-
-# Allow the use of fetch-deps and $(DEP_TYPES) to fetch multiple types of
-# dependencies with a single target.
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
-
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
diff --git a/deps/rabbitmq_auth_backend_cache/include/rabbit_auth_backend_cache.hrl b/deps/rabbitmq_auth_backend_cache/include/rabbit_auth_backend_cache.hrl
new file mode 100644
index 0000000000..e59e65c14c
--- /dev/null
+++ b/deps/rabbitmq_auth_backend_cache/include/rabbit_auth_backend_cache.hrl
@@ -0,0 +1,9 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+%% Same as the default channel operation timeout.
+-define(CACHE_OPERATION_TIMEOUT, 15000).
diff --git a/deps/rabbitmq_auth_backend_cache/rabbitmq-components.mk b/deps/rabbitmq_auth_backend_cache/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/rabbitmq_auth_backend_cache/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Define the default goal as `all` because this file defines some targets
-# before the inclusion of erlang.mk, which would otherwise lead to the wrong
-# target becoming the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
-
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to check out branches which match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch or fall back to `stable` or `master`, whichever was the
-# base of the topic branch.
-
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
-
-# Third-party dependency version pinning.
-#
-# We do that in this file, which is copied in all projects, to ensure
-# all projects use the same versions. It avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# By default, Erlang.mk does not rebuild dependencies once they have been
-# compiled, except for those listed in the `$(FORCE_REBUILD)`
-# variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this eases
-# the work on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project
-# repository URL, if it's a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name are replaced by the
-# target's properties:
-# eg. rabbitmq-common is replaced by rabbitmq-codegen
-# eg. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fall back to the RabbitMQ
-# upstream, which is GitHub.
-
-# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following pattern:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
-
-# Macro to replace both the project's name (eg. "rabbit_common") and
-# repository name (eg. "rabbitmq-common") by the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespace in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level project's deps directory.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is a coincidence that we found a
-# `rabbitmq-components.mk` in upper directories.
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
diff --git a/deps/rabbitmq_auth_backend_cache/src/rabbit_auth_backend_cache.erl b/deps/rabbitmq_auth_backend_cache/src/rabbit_auth_backend_cache.erl
index 8a556723b5..61e934d559 100644
--- a/deps/rabbitmq_auth_backend_cache/src/rabbit_auth_backend_cache.erl
+++ b/deps/rabbitmq_auth_backend_cache/src/rabbit_auth_backend_cache.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_auth_backend_cache).
diff --git a/deps/rabbitmq_auth_backend_cache/src/rabbit_auth_backend_cache_app.erl b/deps/rabbitmq_auth_backend_cache/src/rabbit_auth_backend_cache_app.erl
index c54f95393f..7c8d4d306b 100644
--- a/deps/rabbitmq_auth_backend_cache/src/rabbit_auth_backend_cache_app.erl
+++ b/deps/rabbitmq_auth_backend_cache/src/rabbit_auth_backend_cache_app.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_auth_backend_cache_app).
diff --git a/deps/rabbitmq_auth_backend_cache/src/rabbit_auth_cache.erl b/deps/rabbitmq_auth_backend_cache/src/rabbit_auth_cache.erl
index e1b7418d15..194509a3fc 100644
--- a/deps/rabbitmq_auth_backend_cache/src/rabbit_auth_cache.erl
+++ b/deps/rabbitmq_auth_backend_cache/src/rabbit_auth_cache.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_auth_cache).
diff --git a/deps/rabbitmq_auth_backend_cache/src/rabbit_auth_cache_dict.erl b/deps/rabbitmq_auth_backend_cache/src/rabbit_auth_cache_dict.erl
index ce800a886e..9dc9d4721f 100644
--- a/deps/rabbitmq_auth_backend_cache/src/rabbit_auth_cache_dict.erl
+++ b/deps/rabbitmq_auth_backend_cache/src/rabbit_auth_cache_dict.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_auth_cache_dict).
@@ -12,6 +12,8 @@
-behaviour(rabbit_auth_cache).
+-include("rabbit_auth_backend_cache.hrl").
+
-export([start_link/0,
get/1, put/3, delete/1]).
@@ -20,9 +22,11 @@
start_link() -> gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
-get(Key) -> gen_server:call(?MODULE, {get, Key}).
+get(Key) -> gen_server:call(?MODULE, {get, Key}, ?CACHE_OPERATION_TIMEOUT).
+
put(Key, Value, TTL) -> gen_server:cast(?MODULE, {put, Key, Value, TTL}).
-delete(Key) -> gen_server:call(?MODULE, {delete, Key}).
+
+delete(Key) -> gen_server:call(?MODULE, {delete, Key}, ?CACHE_OPERATION_TIMEOUT).
init(_Args) -> {ok, nostate}.
diff --git a/deps/rabbitmq_auth_backend_cache/src/rabbit_auth_cache_ets.erl b/deps/rabbitmq_auth_backend_cache/src/rabbit_auth_cache_ets.erl
index 4cd36c2b3a..020885ba17 100644
--- a/deps/rabbitmq_auth_backend_cache/src/rabbit_auth_cache_ets.erl
+++ b/deps/rabbitmq_auth_backend_cache/src/rabbit_auth_cache_ets.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_auth_cache_ets).
@@ -10,6 +10,8 @@
-compile({no_auto_import,[get/1]}).
-compile({no_auto_import,[put/2]}).
+-include("rabbit_auth_backend_cache.hrl").
+
-behaviour(rabbit_auth_cache).
-export([start_link/0,
@@ -18,17 +20,23 @@
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
--record(state, {cache, timers, ttl}).
+-record(state, {
+ cache,
+ timers,
+ ttl
+}).
start_link() -> gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
-get(Key) -> gen_server:call(?MODULE, {get, Key}).
+get(Key) -> gen_server:call(?MODULE, {get, Key}, ?CACHE_OPERATION_TIMEOUT).
+
put(Key, Value, TTL) ->
Expiration = rabbit_auth_cache:expiration(TTL),
gen_server:cast(?MODULE, {put, Key, Value, TTL, Expiration}).
-delete(Key) -> gen_server:call(?MODULE, {delete, Key}).
-init(_Args) ->
+delete(Key) -> gen_server:call(?MODULE, {delete, Key}, ?CACHE_OPERATION_TIMEOUT).
+
+init([]) ->
{ok, #state{cache = ets:new(?MODULE, [set, private]),
timers = ets:new(auth_cache_ets_timers, [set, private])}}.
diff --git a/deps/rabbitmq_auth_backend_cache/src/rabbit_auth_cache_ets_segmented.erl b/deps/rabbitmq_auth_backend_cache/src/rabbit_auth_cache_ets_segmented.erl
index cc7bcbfc02..c4dd827768 100644
--- a/deps/rabbitmq_auth_backend_cache/src/rabbit_auth_cache_ets_segmented.erl
+++ b/deps/rabbitmq_auth_backend_cache/src/rabbit_auth_cache_ets_segmented.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_auth_cache_ets_segmented).
@@ -16,6 +16,8 @@
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
+-include("rabbit_auth_backend_cache.hrl").
+
-record(state, {
segments = [],
gc_timer,
@@ -32,13 +34,13 @@ get(Key) ->
put(Key, Value, TTL) ->
Expiration = rabbit_auth_cache:expiration(TTL),
- Segment = gen_server:call(?MODULE, {get_write_segment, Expiration}),
+ Segment = gen_server:call(?MODULE, {get_write_segment, Expiration}, ?CACHE_OPERATION_TIMEOUT),
ets:insert(Segment, {Key, {Expiration, Value}}),
ok.
delete(Key) ->
[ets:delete(Table, Key)
- || Table <- gen_server:call(?MODULE, get_segment_tables)].
+ || Table <- gen_server:call(?MODULE, get_segment_tables, ?CACHE_OPERATION_TIMEOUT)].
gc() ->
case whereis(?MODULE) of
@@ -96,7 +98,7 @@ maybe_add_segment(Expiration, SegmentSize, OldSegments) ->
end.
get_from_segments(Key) ->
- Tables = gen_server:call(?MODULE, get_segment_tables),
+ Tables = gen_server:call(?MODULE, get_segment_tables, ?CACHE_OPERATION_TIMEOUT),
lists:flatmap(
fun(undefined) -> [];
(T) ->
diff --git a/deps/rabbitmq_auth_backend_cache/src/rabbit_auth_cache_ets_segmented_stateless.erl b/deps/rabbitmq_auth_backend_cache/src/rabbit_auth_cache_ets_segmented_stateless.erl
index fb959d2031..82a78a62ea 100644
--- a/deps/rabbitmq_auth_backend_cache/src/rabbit_auth_cache_ets_segmented_stateless.erl
+++ b/deps/rabbitmq_auth_backend_cache/src/rabbit_auth_cache_ets_segmented_stateless.erl
@@ -2,13 +2,15 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_auth_cache_ets_segmented_stateless).
-behaviour(gen_server).
-behaviour(rabbit_auth_cache).
+-include("rabbit_auth_backend_cache.hrl").
+
-export([start_link/1,
get/1, put/3, delete/1]).
-export([gc/0]).
@@ -90,7 +92,7 @@ segment(Expiration, SegmentSize) ->
End.
add_segment(Segment) ->
- gen_server:call(?MODULE, {add_segment, Segment}).
+ gen_server:call(?MODULE, {add_segment, Segment}, ?CACHE_OPERATION_TIMEOUT).
do_add_segment(Segment) ->
case ets:lookup(?SEGMENT_TABLE, Segment) of
diff --git a/deps/rabbitmq_auth_backend_cache/test/config_schema_SUITE.erl b/deps/rabbitmq_auth_backend_cache/test/config_schema_SUITE.erl
index bff0f60de0..813c01d884 100644
--- a/deps/rabbitmq_auth_backend_cache/test/config_schema_SUITE.erl
+++ b/deps/rabbitmq_auth_backend_cache/test/config_schema_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(config_schema_SUITE).
diff --git a/deps/rabbitmq_auth_backend_cache/test/rabbit_auth_backend_cache_SUITE.erl b/deps/rabbitmq_auth_backend_cache/test/rabbit_auth_backend_cache_SUITE.erl
index 64752bf70b..ea66084596 100644
--- a/deps/rabbitmq_auth_backend_cache/test/rabbit_auth_backend_cache_SUITE.erl
+++ b/deps/rabbitmq_auth_backend_cache/test/rabbit_auth_backend_cache_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_auth_backend_cache_SUITE).
diff --git a/deps/rabbitmq_auth_backend_cache/test/rabbit_auth_cache_SUITE.erl b/deps/rabbitmq_auth_backend_cache/test/rabbit_auth_cache_SUITE.erl
index 5acb5f5a13..2f5d0b89ff 100644
--- a/deps/rabbitmq_auth_backend_cache/test/rabbit_auth_cache_SUITE.erl
+++ b/deps/rabbitmq_auth_backend_cache/test/rabbit_auth_cache_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_auth_cache_SUITE).
diff --git a/deps/rabbitmq_auth_backend_http/BUILD.bazel b/deps/rabbitmq_auth_backend_http/BUILD.bazel
new file mode 100644
index 0000000000..505a4a7491
--- /dev/null
+++ b/deps/rabbitmq_auth_backend_http/BUILD.bazel
@@ -0,0 +1,95 @@
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze", "plt")
+load(
+ "//:rabbitmq.bzl",
+ "RABBITMQ_DIALYZER_OPTS",
+ "assert_suites",
+ "broker_for_integration_suites",
+ "rabbitmq_integration_suite",
+ "rabbitmq_lib",
+ "rabbitmq_suite",
+)
+
+APP_ENV = """[
+ {http_method, get},
+ {user_path, "http://localhost:8000/auth/user"},
+ {vhost_path, "http://localhost:8000/auth/vhost"},
+ {resource_path, "http://localhost:8000/auth/resource"},
+ {topic_path, "http://localhost:8000/auth/topic"}
+ ]"""
+
+APP_NAME = "rabbitmq_auth_backend_http"
+
+APP_DESCRIPTION = "RabbitMQ HTTP Authentication Backend"
+
+APP_MODULE = "rabbit_auth_backend_http_app"
+
+EXTRA_APPS = [
+ "inets",
+]
+
+DEPS = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+]
+
+RUNTIME_DEPS = [
+ "//deps/rabbit:bazel_erlang_lib",
+ "//deps/amqp_client:bazel_erlang_lib",
+]
+
+rabbitmq_lib(
+ app_description = APP_DESCRIPTION,
+ app_env = APP_ENV,
+ app_module = APP_MODULE,
+ app_name = APP_NAME,
+ extra_apps = EXTRA_APPS,
+ runtime_deps = RUNTIME_DEPS,
+ deps = DEPS,
+)
+
+xref(tags = ["xref"])
+
+plt(
+ name = "base_plt",
+ apps = EXTRA_APPS,
+ plt = "//:base_plt",
+)
+
+dialyze(
+ dialyzer_opts = RABBITMQ_DIALYZER_OPTS,
+ plt = ":base_plt",
+ tags = ["dialyze"],
+)
+
+broker_for_integration_suites()
+
+PACKAGE = "deps/rabbitmq_auth_backend_http"
+
+suites = [
+ rabbitmq_suite(
+ name = "auth_SUITE",
+ size = "small",
+ additional_srcs = [
+ "test/auth_http_mock.erl",
+ ],
+ runtime_deps = [
+ "@cowboy//:bazel_erlang_lib",
+ ],
+ deps = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "config_schema_SUITE",
+ ),
+ rabbitmq_suite(
+ name = "unit_SUITE",
+ size = "small",
+ ),
+]
+
+assert_suites(
+ suites,
+ glob(["test/**/*_SUITE.erl"]),
+)
diff --git a/deps/rabbitmq_auth_backend_http/Makefile b/deps/rabbitmq_auth_backend_http/Makefile
index 56480b4aa9..21a71d0939 100644
--- a/deps/rabbitmq_auth_backend_http/Makefile
+++ b/deps/rabbitmq_auth_backend_http/Makefile
@@ -4,7 +4,9 @@ PROJECT_MOD = rabbit_auth_backend_http_app
define PROJECT_ENV
[
- {http_method, get},
+ {http_method, get},
+ {request_timeout, 15000},
+ {connection_timeout, 15000},
{user_path, "http://localhost:8000/auth/user"},
{vhost_path, "http://localhost:8000/auth/vhost"},
{resource_path, "http://localhost:8000/auth/resource"},
@@ -29,5 +31,5 @@ DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
ERLANG_MK_COMMIT = rabbitmq-tmp
-include rabbitmq-components.mk
-include erlang.mk
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
diff --git a/deps/rabbitmq_auth_backend_http/erlang.mk b/deps/rabbitmq_auth_backend_http/erlang.mk
deleted file mode 100644
index fce4be0b0a..0000000000
--- a/deps/rabbitmq_auth_backend_http/erlang.mk
+++ /dev/null
@@ -1,7808 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
-ERLANG_MK_WITHOUT =
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
-
-# Adding erlang.mk to make Erlang scripts that call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
-
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
-
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simplify writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating Github-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = erlang library for JSON Web Token
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple, non-intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is a pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = redis erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += esh_mk
-pkg_esh_mk_name = esh_mk
-pkg_esh_mk_description = esh template engine plugin for erlang.mk
-pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
-pkg_esh_mk_fetch = git
-pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
-pkg_esh_mk_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = TOML language erlang parser
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = http://fixprotocol.org/ implementation.
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - an Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = The guards and for Erlang parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
-pkg_gun_homepage = http://ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang irc client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding and decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library for efficiently decoding and encoding JSON, written in C.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-Erlang, open-source implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = erlang driver for rethinkdb
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = library to handle mimetypes
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang Oauth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transformer for using new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between record and proplist
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = automatic Erlang node discovery via redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = pipelined erlang redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang Rest Client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy as nif for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an ID generator for messaging services.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elastic Search
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX style broken HTML parser in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = transit format for erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU based collation Erlang module
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtension for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = a simple erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler svn repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired by rebar xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based yaml loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = Zab protocol implementation in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
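-
-# Example usage: the `search` target matches the query, case-insensitively,
-# against package names and descriptions from the index above. For instance:
-#
-#     make search q=yaml    # lists yamerl, yamler and other matching packages
-#     make search           # prints every known package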
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
-
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
-
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# They both use the core_dep_plugin macro.
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
-
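-# Example: a dependency named `my_plugin_dep` (hypothetical) can ship an
-# early-plugins.mk file that gets included before the rest of this file:
-#
-#     DEP_EARLY_PLUGINS = my_plugin_dep
-#
-# An entry containing a slash, such as `my_plugin_dep/mk/early.mk`, includes
-# that specific file from the dependency instead of its early-plugins.mk.
-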
-# Query functions.
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
-
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
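-
-# Example: with a hypothetical dependency declared as
-#
-#     DEPS = my_dep
-#     dep_my_dep = git https://github.com/example/my_dep 1.0.0
-#
-# query_fetch_method resolves to `git`, query_repo to the repository URL,
-# query_version to `1.0.0`, and query_absolute_path to $(DEPS_DIR)/my_dep.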
-
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly we don't want to include it here,
-# otherwise it'll be treated both as an app and as the top-level project.
-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
-
-export NO_AUTOPATCH
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
-
-# Optimization: don't recompile deps unless truly necessary.
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create the ebin directory for all apps to make sure Erlang recognizes
-# them as proper OTP applications when using -include_lib. This is a
-# temporary fix; a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
-
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies once they have been compiled.
-# A developer working on the top-level project and some of its
-# dependencies at the same time may want to change this behavior.
-# There are two solutions (see the example below):
-#     1. Set `FULL=1` so that all dependencies are visited and
-#        recursively recompiled if necessary.
-#     2. Set `FORCE_REBUILD=` to the specific list of dependencies that
-#        should be recompiled (instead of the whole set).
-
-FORCE_REBUILD ?=
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
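-
-# Example invocations (hypothetical dependency names):
-#
-#     make FULL=1                        # revisit and recompile all dependencies
-#     make FORCE_REBUILD="dep_a dep_b"   # force only dep_a and dep_b to rebuild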
-
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
-
-# Deps related targets.
-
-# @todo rename GNUmakefile and makefile into Makefile first, if they exist
-# While the Makefile could also be named GNUmakefile or makefile,
-# in practice only Makefile has been needed so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
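-
-# Autopatching can be skipped for selected dependencies by listing them in
-# NO_AUTOPATCH, for example (hypothetical name):
-#
-#     NO_AUTOPATCH = my_dep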
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
-# if given. Do it for all 3 possible Makefile file names.
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
-
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-# Hex dependencies only carry a package version, so there is no need to
-# look them up in the Erlang.mk package index.
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
-
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility with older Erlang.mk configurations.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
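-
-# Example dependency specifications (hypothetical names and versions):
-#
-#     dep_my_json = hex 1.2.3                                  # fetched from hex.pm
-#     dep_my_lib  = hg https://example.org/hg/my_lib default   # Mercurial clone
-#     dep_local   = ln /path/to/local/checkout                 # symlink, no download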
-
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
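-
-# These defaults can be overridden from a project Makefile, for example
-# (hypothetical module names):
-#
-#     ERLC_OPTS = +debug_info +warn_missing_spec
-#     COMPILE_FIRST = my_behaviour my_parse_transform
-#     ERLC_EXCLUDE = my_generated_module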
-
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
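-
-# The generated .app file is driven by project variables, for example
-# (hypothetical values):
-#
-#     PROJECT_DESCRIPTION = My example application
-#     PROJECT_VERSION = 1.0.0
-#     PROJECT_MOD = my_app_app
-#     PROJECT_ENV = [{poll_interval, 5000}]
-#     PROJECT_APP_EXTRA_KEYS = {maintainers, ["Jane Doe"]}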
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- Output0 = [
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ],
- Output = case "é" of
- [233] -> unicode:characters_to_binary(Output0);
- _ -> Output0
- end,
- ok = file:write_file("$(1)", Output),
- halt()
-endef
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
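-
-# Regenerating $(PROJECT).d can be skipped when it already exists by setting
-# NO_MAKEDEP, for example:
-#
-#     make NO_MAKEDEP=1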
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
- echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
-
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
- @:
-
-test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
-test_erlc_verbose_2 = set -x;
-test_erlc_verbose = $(test_erlc_verbose_$(V))
-
-define compile_test_erl
- $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(1)
-endef
-
-ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
-$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
- $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want to fail due to
-# warnings when used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
-
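-# Example: running `make rebar.config` in a project with a hypothetical
-# git dependency `my_dep` renders something like:
-#
-#     {deps, [
-#     {my_dep,".*",{git,"https://github.com/example/my_dep","1.0.0"}}
-#     ]}.
-#     {erl_opts, [debug_info,warn_export_vars,warn_shadow_vars,warn_obsolete_guard]}.
-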
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
- " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
- " list-templates List available templates"
-
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
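-
-# Example: bootstrapping with 4-space indentation in the generated files
-# instead of tabs:
-#
-#     make bootstrap SP=4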
-
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
-
-list-templates:
- $(verbose) @echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
-
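As a sketch of how the bootstrap and template targets above are typically driven (the project, application and module names below are illustrative, and the Makefile body is an assumption since bs_Makefile itself is not shown here):

# Minimal project Makefile; bootstrap renders one and appends the include line:
PROJECT = my_app
PROJECT_VERSION = 0.1.0
include erlang.mk
# Render modules from the tpl_* templates above:
#   make new t=gen_statem n=my_fsm          # -> src/my_fsm.erl
#   make new t=cowboy_rest n=my_h in=web    # -> $(APPS_DIR)/web/src/my_h.erl
#   make list-templates                     # prints every available tpl_* name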
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code. The "gcc" MSYS2 package also doesn't.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
-
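To exercise the NIF templates above end to end (the module name is illustrative):

#   make new-nif n=my_nif
# renders c_src/my_nif.c and src/my_nif.erl from bs_c_nif / bs_erl_nif; a plain
# `make` then compiles c_src/ into $(C_SRC_OUTPUT) with the shared extension,
# and my_nif:hello(world) should return {hello, world} once the NIF is loaded.
# Extra compiler flags can be appended as usual:
CFLAGS += -g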
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
-	"The CI_OTP variable must list the Erlang versions to be tested." \
-	"For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
-
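The help text above shows the expected shape of the CI variables; a minimal configuration (the OTP version names are illustrative) could look like:

# Build and run the test suite against several kerl-managed OTP releases:
CI_OTP = OTP-23.1.5 OTP-24.0
# HiPE/ErLLVM variants reuse the same version names:
# CI_HIPE = OTP-23.1.5
# Then run all of them with:
#   make ci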
-# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifdef CONCUERROR_TESTS
-
-.PHONY: concuerror distclean-concuerror
-
-# Configuration
-
-CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
-CONCUERROR_OPTS ?=
-
-# Core targets.
-
-check:: concuerror
-
-ifndef KEEP_LOGS
-distclean:: distclean-concuerror
-endif
-
-# Plugin-specific targets.
-
-$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
- $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
- $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
-
-$(CONCUERROR_LOGS_DIR):
- $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
-
-define concuerror_html_report
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="utf-8">
-<title>Concuerror HTML report</title>
-</head>
-<body>
-<h1>Concuerror HTML report</h1>
-<p>Generated on $(concuerror_date)</p>
-<ul>
-$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
-</ul>
-</body>
-</html>
-endef
-
-concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
- $(eval concuerror_date := $(shell date))
- $(eval concuerror_targets := $^)
- $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
-
-define concuerror_target
-.PHONY: concuerror-$1-$2
-
-concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
- $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
- --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
- -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
- $$(CONCUERROR_OPTS) -m $1 -t $2
-endef
-
-$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
-
-distclean-concuerror:
- $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
-
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ct apps-ct distclean-ct
-
-# Configuration.
-
-CT_OPTS ?=
-
-ifneq ($(wildcard $(TEST_DIR)),)
-ifndef CT_SUITES
-CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
-endif
-endif
-CT_SUITES ?=
-CT_LOGS_DIR ?= $(CURDIR)/logs
-
-# Core targets.
-
-tests:: ct
-
-ifndef KEEP_LOGS
-distclean:: distclean-ct
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Common_test targets:" \
- " ct Run all the common_test suites for this project" \
- "" \
- "All your common_test suites have their associated targets." \
-	"A suite named http_SUITE can be run using the ct-http target."
-
-# Plugin-specific targets.
-
-CT_RUN = ct_run \
- -no_auto_compile \
- -noinput \
- -pa $(CURDIR)/ebin $(TEST_DIR) \
- -dir $(TEST_DIR) \
- -logdir $(CT_LOGS_DIR)
-
-ifeq ($(CT_SUITES),)
-ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
-else
-# We do not run tests if we are in an apps/* application with no test directory.
-ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
-ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
-endif
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-define ct_app_target
-apps-ct-$1: test-build
- $$(MAKE) -C $1 ct IS_APP=1
-endef
-
-$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
-
-apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
-endif
-
-ifdef t
-ifeq (,$(findstring :,$t))
-CT_EXTRA = -group $t
-else
-t_words = $(subst :, ,$t)
-CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
-endif
-else
-ifdef c
-CT_EXTRA = -case $c
-else
-CT_EXTRA =
-endif
-endif
-
-define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
-endef
-
-$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
-
-distclean-ct:
- $(gen_verbose) rm -rf $(CT_LOGS_DIR)
-
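Building on the t/c handling above, a few typical invocations (the suite, group and case names are illustrative):

# Explicitly list suites instead of relying on auto-discovery:
CT_SUITES = http api
# Then, per the ct_suite_target and CT_EXTRA rules above:
#   make ct-http                  # whole http_SUITE
#   make ct-http t=admin          # adds -group admin
#   make ct-http t=admin:create   # adds -group admin -case create
#   make ct-http c=create         # adds -case create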
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: plt distclean-plt dialyze
-
-# Configuration.
-
-DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
-export DIALYZER_PLT
-
-PLT_APPS ?=
-DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-DIALYZER_PLT_OPTS ?=
-
-# Core targets.
-
-check:: dialyze
-
-distclean:: distclean-plt
-
-help::
- $(verbose) printf "%s\n" "" \
- "Dialyzer targets:" \
- " plt Build a PLT file for this project" \
- " dialyze Analyze the project using Dialyzer"
-
-# Plugin-specific targets.
-
-define filter_opts.erl
- Opts = init:get_plain_arguments(),
- {Filtered, _} = lists:foldl(fun
- (O, {Os, true}) -> {[O|Os], false};
- (O = "-D", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-I", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-pa", {Os, _}) -> {[O|Os], true};
- (_, Acc) -> Acc
- end, {[], false}, Opts),
- io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
- halt().
-endef
-
-# DIALYZER_PLT is a variable understood directly by Dialyzer.
-#
-# We append the path to erts at the end of the PLT. This works
-# because the PLT file is in the external term format and the
-# function binary_to_term/1 ignores any trailing data.
-$(DIALYZER_PLT): deps app
- $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
- while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
- $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
- erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
- $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
-
-plt: $(DIALYZER_PLT)
-
-distclean-plt:
- $(gen_verbose) rm -f $(DIALYZER_PLT)
-
-ifneq ($(wildcard $(DIALYZER_PLT)),)
-dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
- $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
- grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
- rm $(DIALYZER_PLT); \
- $(MAKE) plt; \
- fi
-else
-dialyze: $(DIALYZER_PLT)
-endif
- $(verbose) dialyzer --no_native `$(ERL) \
- -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
- -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
-
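A typical Dialyzer setup on top of the defaults above (the application names are illustrative):

# Extra applications to include in the PLT besides erts/kernel/stdlib and the deps:
PLT_APPS = crypto public_key ssl
DIALYZER_OPTS = -Werror_handling -Wunmatched_returns
# `make plt` builds .$(PROJECT).plt once; `make dialyze` reuses it and, thanks to
# the erts path appended to the PLT, rebuilds it automatically when the recorded
# Erlang/OTP install no longer matches the running VM.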
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
- " escript Build an executable escript archive" \
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
- $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
-
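A sketch of customizing the generated escript (the names are illustrative):

ESCRIPT_NAME = my_tool
# Extra emulator flags end up in the %%! line written above:
ESCRIPT_EMU_ARGS = -escript main my_tool -noinput
# `make escript` writes the shebang/comment/emu-args header, appends the zip
# archive built by escript-zip, then marks the result executable.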
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: eunit apps-eunit
-
-# Configuration
-
-EUNIT_OPTS ?=
-EUNIT_ERL_OPTS ?=
-
-# Core targets.
-
-tests:: eunit
-
-help::
- $(verbose) printf "%s\n" "" \
- "EUnit targets:" \
- " eunit Run all the EUnit tests for this project"
-
-# Plugin-specific targets.
-
-define eunit.erl
- $(call cover.erl)
- CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
- ok -> ok;
- error -> halt(2)
- end,
- CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
- halt()
-endef
-
-EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
-else
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
-endif
-else
-EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
-EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
-
-EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
- $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
-
-eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
-ifneq ($(wildcard src/ $(TEST_DIR)),)
- $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-apps-eunit: test-build
- $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
- [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
- exit $$eunit_retcode
-endif
-endif
-
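Following the t= handling above, typical EUnit invocations (the module and test names are illustrative):

EUNIT_OPTS = verbose
#   make eunit                       # all ebin/ modules plus TEST_DIR modules
#   make eunit t=my_module           # eunit:test(['my_module'])
#   make eunit t=my_module:my_test   # runs fun my_module:my_test/0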
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper)
-.PHONY: proper
-
-# Targets.
-
-tests:: proper
-
-define proper_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- Module = fun(M) ->
- [true] =:= lists:usort([
- case atom_to_list(F) of
- "prop_" ++ _ ->
- io:format("Testing ~p:~p/0~n", [M, F]),
- proper:quickcheck(M:F(), nocolors);
- _ ->
- true
- end
- || {F, 0} <- M:module_info(exports)])
- end,
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]);
- module -> Module($(2));
- function -> proper:quickcheck($(2), nocolors)
- end,
- CoverExport("$(COVER_DATA_DIR)/proper.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-proper: test-build cover-data-dir
- $(verbose) $(call erlang,$(call proper_check.erl,module,$(t)))
-else
-proper: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)()))
-endif
-else
-proper: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
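The plugin above only activates when proper is among the dependencies and then scans for exported prop_* functions of arity 0; a sketch of typical usage (the module and property names are illustrative):

TEST_DEPS += proper
#   make proper                           # every prop_* export in ebin/ and TEST_DIR
#   make proper t=prop_queue              # one module
#   make proper t=prop_queue:prop_push    # one property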
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Verbosity.
-
-proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
-proto_verbose = $(proto_verbose_$(V))
-
-# Core targets.
-
-ifneq ($(wildcard src/),)
-ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
-PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
-ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
-
-ifeq ($(PROTO_FILES),)
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
- $(verbose) :
-else
-# Rebuild proto files when the Makefile changes.
-# We exclude $(PROJECT).d to avoid a circular dependency.
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(PROTO_FILES); \
- fi
- $(verbose) touch $@
-
-$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
-endif
-
-ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
-define compile_proto.erl
- [begin
- protobuffs_compile:generate_source(F, [
- {output_include_dir, "./include"},
- {output_src_dir, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-else
-define compile_proto.erl
- [begin
- gpb_compile:file(F, [
- {include_as_lib, true},
- {module_name_suffix, "_pb"},
- {o_hrl, "./include"},
- {o_erl, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-endif
-
-ifneq ($(PROTO_FILES),)
-$(PROJECT).d:: $(PROTO_FILES)
- $(verbose) mkdir -p ebin/ include/
- $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
-endif
-endif
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
-
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
-endif
-
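A sketch of the release-related knobs read by the targets above (the output directory name is illustrative):

# Skip the tarball step after `make rel`:
RELX_TAR = 0
# A leading -o overrides RELX_OUTPUT_DIR, as handled above:
RELX_OPTS = -o my_output_dir
# `make run` then boots $(RELX_OUTPUT_DIR)/<name>/bin/<name>, appending `console`
# when relx.config sets {extended_start_script, true}.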
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
-	" shell Run an Erlang shell with SHELL_OPTS or a reasonable default"
-
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
-
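A sketch of customizing the shell target above (the dependency and application names are illustrative):

SHELL_DEPS = observer_cli
SHELL_OPTS = -config config/sys -eval 'application:ensure_all_started(my_app)'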
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
-	"ReST sources and the 'conf.py' file are expected in the directory pointed to by" \
-	"SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists the formats to build (only" \
-	"the 'html' format is generated by default); the target directory can be specified by" \
- 'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
- "Additional Sphinx options can be set in SPHINX_OPTS."
-
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
-
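Expanding on the help text above, a configuration that builds two formats and redirects the HTML output (the paths are illustrative):

SPHINX_FORMATS = html man
sphinx_html_output = doc/output/html
# Extra sphinx-build flags, e.g. treat warnings as errors:
SPHINX_OPTS = -W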
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
-.PHONY: triq
-
-# Targets.
-
-tests:: triq
-
-define triq_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
- module -> triq:check($(2));
- function -> triq:check($(2))
- end,
- CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-triq: test-build cover-data-dir
- $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
-else
-triq: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
-endif
-else
-triq: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
-	' xref Run Xrefr using $$XREF_CONFIG as the config file if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
-
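To point the runner above at a project-specific configuration (the file name is illustrative):

XREF_CONFIG = $(CURDIR)/xref.config
# `make xref` downloads xrefr on first use and passes -c $(XREF_CONFIG) to it.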
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-COVER_REPORT_DIR ?= cover
-COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
-
-ifdef COVER
-COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
-COVER_DEPS ?=
-endif
-
-# Code coverage for Common Test.
-
-ifdef COVER
-ifdef CT_RUN
-ifneq ($(wildcard $(TEST_DIR)),)
-test-build:: $(TEST_DIR)/ct.cover.spec
-
-$(TEST_DIR)/ct.cover.spec: cover-data-dir
- $(gen_verbose) printf "%s\n" \
- "{incl_app, '$(PROJECT)', details}." \
- "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
- $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
- $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
-
-CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
-endif
-endif
-endif
-
-# Code coverage for other tools.
-
-ifdef COVER
-define cover.erl
- CoverSetup = fun() ->
- Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
- $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
- $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
- end
- end || Dir <- Dirs]
- end,
- CoverExport = fun(Filename) -> cover:export(Filename) end,
-endef
-else
-define cover.erl
- CoverSetup = fun() -> ok end,
- CoverExport = fun(_) -> ok end,
-endef
-endif
-
-# Core targets
-
-ifdef COVER
-ifneq ($(COVER_REPORT_DIR),)
-tests::
- $(verbose) $(MAKE) --no-print-directory cover-report
-endif
-
-cover-data-dir: | $(COVER_DATA_DIR)
-
-$(COVER_DATA_DIR):
- $(verbose) mkdir -p $(COVER_DATA_DIR)
-else
-cover-data-dir:
-endif
-
-clean:: coverdata-clean
-
-ifneq ($(COVER_REPORT_DIR),)
-distclean:: cover-report-clean
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Cover targets:" \
-	" cover-report Generate an HTML coverage report from previously collected" \
- " cover data." \
- " all.coverdata Merge all coverdata files into all.coverdata." \
- "" \
- "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
-	"target tests additionally generates an HTML coverage report from the combined" \
- "coverdata files from each of these testing tools. HTML reports can be disabled" \
- "by setting COVER_REPORT_DIR to empty."
-
-# Plugin specific targets
-
-COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
-
-.PHONY: coverdata-clean
-coverdata-clean:
- $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
-
-# Merge all coverdata files into one.
-define cover_export.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
-endef
-
-all.coverdata: $(COVERDATA) cover-data-dir
- $(gen_verbose) $(call erlang,$(cover_export.erl))
-
-# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
-# empty if you want the coverdata files but not the HTML report.
-ifneq ($(COVER_REPORT_DIR),)
-
-.PHONY: cover-report-clean cover-report
-
-cover-report-clean:
- $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
-ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
- $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
-endif
-
-ifeq ($(COVERDATA),)
-cover-report:
-else
-
-# Modules which include eunit.hrl always contain one line without coverage
-# because eunit defines test/0 which is never called. We compensate for this.
-EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
- grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
- | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
-
-define cover_report.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- Ms = cover:imported_modules(),
- [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
- ++ ".COVER.html", [html]) || M <- Ms],
- Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
- EunitHrlMods = [$(EUNIT_HRL_MODS)],
- Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
- true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
- TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
- TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
- Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
- TotalPerc = Perc(TotalY, TotalN),
- {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
- io:format(F, "<!DOCTYPE html><html>~n"
- "<head><meta charset=\"UTF-8\">~n"
- "<title>Coverage report</title></head>~n"
- "<body>~n", []),
- io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
- io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
- [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
- "<td>~p%</td></tr>~n",
- [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
- How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
- Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
- io:format(F, "</table>~n"
- "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
- "</body></html>", [How, Date]),
- halt().
-endef
-
-cover-report:
- $(verbose) mkdir -p $(COVER_REPORT_DIR)
- $(gen_verbose) $(call erlang,$(cover_report.erl))
-
-endif
-endif # ifneq ($(COVER_REPORT_DIR),)
-
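Putting the cover plumbing above together, a typical run (the dependency name is illustrative):

#   make tests COVER=1              # eunit/ct write *.coverdata; tests builds the HTML report
#   make all.coverdata COVER=1      # merge every coverdata file into all.coverdata
# Include selected dependencies in the measurement:
COVER_DEPS = cowlib
# Setting COVER_REPORT_DIR to empty keeps the coverdata files but skips the HTML report.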
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
-
-endif
-endif
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
-
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
-
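Each DEP_PLUGINS entry is either a bare dependency name, in which case its plugins.mk is loaded, or a path to a specific file inside the dependency, as the foreach above dispatches; both entries below are illustrative:

DEP_PLUGINS = cowboy rabbit_common/mk/rabbitmq-plugin.mk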
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are also included no
-# matter the type of requested dependencies.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
-
-# Allow fetch-deps to be used together with $(DEP_TYPES) to fetch multiple types of
-# dependencies with a single target.
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
-
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
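As the comments above note, fetch-deps always includes the regular dependencies; DEP_TYPES widens what else gets fetched in the same pass, for example:

#   make fetch-deps DEP_TYPES="doc test"
# which also pulls DOC_DEPS and TEST_DEPS (without building them) and records
# everything in $(ERLANG_MK_RECURSIVE_DEPS_LIST).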
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
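The QUERY variable above selects which query_* fields are printed per dependency; for example (the field choice and output shape shown are illustrative):

# Only name, repository and version, one line per recursive dependency:
#   make query-deps QUERY="name repo version"
# Each line has the form `<project>: <field values...>`, accumulated into
# $(ERLANG_MK_QUERY_DEPS_FILE) and echoed at the top level.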
diff --git a/deps/rabbitmq_auth_backend_http/examples/rabbitmq_auth_backend_spring_boot/pom.xml b/deps/rabbitmq_auth_backend_http/examples/rabbitmq_auth_backend_spring_boot/pom.xml
index 50f0f9135d..b62a946ee2 100644
--- a/deps/rabbitmq_auth_backend_http/examples/rabbitmq_auth_backend_spring_boot/pom.xml
+++ b/deps/rabbitmq_auth_backend_http/examples/rabbitmq_auth_backend_spring_boot/pom.xml
@@ -4,10 +4,32 @@
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
+ <licenses>
+ <license>
+ <name>MPL 2.0</name>
+ <url>https://www.mozilla.org/en-US/MPL/2.0/</url>
+ <distribution>repo</distribution>
+ </license>
+ </licenses>
+
+ <developers>
+ <developer>
+ <email>info@rabbitmq.com</email>
+ <name>Team RabbitMQ</name>
+ <organization>VMware, Inc. or its affiliates.</organization>
+ <organizationUrl>https://rabbitmq.com</organizationUrl>
+ </developer>
+ </developers>
+
+ <organization>
+ <name>VMware, Inc. or its affiliates.</name>
+ <url>https://www.rabbitmq.com</url>
+ </organization>
+
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
- <version>2.1.5.RELEASE</version>
+ <version>2.4.1</version>
</parent>
<properties>
@@ -30,6 +52,13 @@
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
+
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>4.13.1</version>
+ <scope>test</scope>
+ </dependency>
</dependencies>
<build>
diff --git a/deps/rabbitmq_auth_backend_http/examples/rabbitmq_auth_backend_spring_boot/src/main/java/com/rabbitmq/examples/AuthBackendHttpController.java b/deps/rabbitmq_auth_backend_http/examples/rabbitmq_auth_backend_spring_boot/src/main/java/com/rabbitmq/examples/AuthBackendHttpController.java
index 0de4f340af..1949623e63 100644
--- a/deps/rabbitmq_auth_backend_http/examples/rabbitmq_auth_backend_spring_boot/src/main/java/com/rabbitmq/examples/AuthBackendHttpController.java
+++ b/deps/rabbitmq_auth_backend_http/examples/rabbitmq_auth_backend_spring_boot/src/main/java/com/rabbitmq/examples/AuthBackendHttpController.java
@@ -1,4 +1,4 @@
-/**
+/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/.
@@ -10,7 +10,6 @@ package com.rabbitmq.examples;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.springframework.util.MultiValueMap;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
@@ -23,7 +22,7 @@ import static java.util.Arrays.asList;
import static org.springframework.util.StringUtils.collectionToDelimitedString;
/**
- *
+ * A basic controller that implements all RabbitMQ authN/authZ interface operations.
*/
@RestController
@RequestMapping(path = "/auth", method = { RequestMethod.GET, RequestMethod.POST })
@@ -39,11 +38,12 @@ public class AuthBackendHttpController {
@RequestMapping("user")
public String user(@RequestParam("username") String username,
@RequestParam("password") String password) {
- LOGGER.info("Trying to authenticate user {}", username);
User user = users.get(username);
if (user != null && user.getPassword().equals(password)) {
+ LOGGER.info("Successfully authenticated user {}", username);
return "allow " + collectionToDelimitedString(user.getTags(), " ");
} else {
+ LOGGER.info("Failed to authenticate user {}", username);
return "deny";
}
}
@@ -62,7 +62,9 @@ public class AuthBackendHttpController {
@RequestMapping("topic")
public String topic(TopicCheck check) {
- LOGGER.info("Checking topic access with {}", check);
- return check.getRouting_key().startsWith("a") ? "allow" : "deny";
+ boolean result = check.getRouting_key().startsWith("a");
+ LOGGER.info("Checking topic access with {}, result: {}", check, result);
+
+ return result ? "allow" : "deny";
}
}
diff --git a/deps/rabbitmq_auth_backend_http/examples/rabbitmq_auth_backend_spring_boot_kotlin/src/main/resources/rabbitmq.conf b/deps/rabbitmq_auth_backend_http/examples/rabbitmq_auth_backend_spring_boot_kotlin/src/main/resources/rabbitmq.conf
index 02737bd5a5..1ce8fa44e5 100644
--- a/deps/rabbitmq_auth_backend_http/examples/rabbitmq_auth_backend_spring_boot_kotlin/src/main/resources/rabbitmq.conf
+++ b/deps/rabbitmq_auth_backend_http/examples/rabbitmq_auth_backend_spring_boot_kotlin/src/main/resources/rabbitmq.conf
@@ -4,6 +4,8 @@
# http backend
auth_backends.1 = rabbit_auth_backend_http
+auth_http.timeout = 10000
+auth_http.connect_timeout = 5000
auth_http.user_path = http://<server:port>/<path>/auth/user
auth_http.vhost_path = http://<server:port>/<path>/auth/vhost
auth_http.resource_path = http://<server:port>/<path>/auth/resource
diff --git a/deps/rabbitmq_auth_backend_http/priv/schema/rabbitmq_auth_backend_http.schema b/deps/rabbitmq_auth_backend_http/priv/schema/rabbitmq_auth_backend_http.schema
index 874c55b2c4..ceffd0d773 100644
--- a/deps/rabbitmq_auth_backend_http/priv/schema/rabbitmq_auth_backend_http.schema
+++ b/deps/rabbitmq_auth_backend_http/priv/schema/rabbitmq_auth_backend_http.schema
@@ -19,3 +19,9 @@
{mapping, "auth_http.topic_path", "rabbitmq_auth_backend_http.topic_path",
[{datatype, string}, {validators, ["uri"]}]}.
+
+{mapping, "auth_http.request_timeout", "rabbitmq_auth_backend_http.request_timeout",
+ [{datatype, integer}]}.
+
+{mapping, "auth_http.connection_timeout", "rabbitmq_auth_backend_http.connection_timeout",
+ [{datatype, integer}]}.
diff --git a/deps/rabbitmq_auth_backend_http/rabbitmq-components.mk b/deps/rabbitmq_auth_backend_http/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/rabbitmq_auth_backend_http/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Set the default goal to `all` because this file defines some targets
-# before the inclusion of erlang.mk, which would otherwise make the wrong
-# target the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
-
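A worked example of the fallback chain above, for the case where RABBITMQ_VERSION and git-revisions.txt are absent (the tag names are illustrative):

# git tag v3.9.11, clean checkout         -> PROJECT_VERSION = 3.9.11
# git describe: v3.9.11-12-gabc1234       -> PROJECT_VERSION = 3.9.11+12.gabc1234
# neither git-revisions.txt nor Git clone -> PROJECT_VERSION = 0.0.0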
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to checkout branches which match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch or fallback to `stable` or `master` whichever was the
-# base of the topic branch.
-
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
-
-# Third-party dependencies version pinning.
-#
-# We do that in this file, which is copied in all projects, to ensure
-# all projects use the same versions. It avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# Erlang.mk does not rebuild dependencies by default, once they were
-# compiled once, except for those listed in the `$(FORCE_REBUILD)`
-# variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this eases
-# the work on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project
-# repository URL, if it's a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name is replaced by the
-# target's properties:
-# eg. rabbitmq-common is replaced by rabbitmq-codegen
-# eg. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fallback to RabbitMQ
-# upstream which is GitHub.
-
-# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following pattern:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
-
-# Macro to replace both the project's name (eg. "rabbit_common") and
-# repository name (eg. "rabbitmq-common") by the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespaces in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level's one.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is a coincidence that we found a
-# `rabbitmq-components.mk` up upper directories.
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
diff --git a/deps/rabbitmq_auth_backend_http/src/rabbit_auth_backend_http.erl b/deps/rabbitmq_auth_backend_http/src/rabbit_auth_backend_http.erl
index e28ac91e21..c54c757bb1 100644
--- a/deps/rabbitmq_auth_backend_http/src/rabbit_auth_backend_http.erl
+++ b/deps/rabbitmq_auth_backend_http/src/rabbit_auth_backend_http.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_auth_backend_http).
@@ -139,10 +139,28 @@ do_http_req(Path0, Query) ->
rabbit_log:debug("auth_backend_http: POST ~s", [Path0]),
{Path0, [{"Host", HostHdr}], "application/x-www-form-urlencoded", Query}
end,
- HttpOpts = case application:get_env(rabbitmq_auth_backend_http,
- ssl_options) of
- {ok, Opts} when is_list(Opts) -> [{ssl, Opts}];
- _ -> []
+ RequestTimeout =
+ case application:get_env(rabbitmq_auth_backend_http, request_timeout) of
+ {ok, Val1} -> Val1;
+ _ -> infinity
+ end,
+ ConnectionTimeout =
+ case application:get_env(rabbitmq_auth_backend_http, connection_timeout) of
+ {ok, Val2} -> Val2;
+ _ -> RequestTimeout
+ end,
+ rabbit_log:debug("auth_backend_http: request timeout: ~p, connection timeout: ~p", [RequestTimeout, ConnectionTimeout]),
+ HttpOpts = case application:get_env(rabbitmq_auth_backend_http, ssl_options) of
+ {ok, Opts} when is_list(Opts) ->
+ [
+ {ssl, Opts},
+ {timeout, RequestTimeout},
+ {connect_timeout, ConnectionTimeout}];
+ _ ->
+ [
+ {timeout, RequestTimeout},
+ {connect_timeout, ConnectionTimeout}
+ ]
end,
case httpc:request(Method, Request, HttpOpts, []) of
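
The hunk above teaches the HTTP auth backend two new application environment keys, request_timeout and connection_timeout, and forwards them to httpc as the standard timeout and connect_timeout HTTP options. When unset, the request timeout defaults to infinity and the connection timeout falls back to the request timeout. A minimal sketch of that resolution chain, assuming an illustrative module name (auth_http_timeout_example is not part of the plugin):

    -module(auth_http_timeout_example).
    -export([http_opts/0]).

    %% Mirrors the fallback chain added above: request_timeout defaults to
    %% infinity, and connection_timeout defaults to the resolved request timeout.
    http_opts() ->
        RequestTimeout =
            application:get_env(rabbitmq_auth_backend_http, request_timeout, infinity),
        ConnectionTimeout =
            application:get_env(rabbitmq_auth_backend_http, connection_timeout, RequestTimeout),
        [{timeout, RequestTimeout}, {connect_timeout, ConnectionTimeout}].

The resulting proplist is passed to httpc:request/4 as its HTTPOptions argument, so when ssl_options is configured the same pair is simply appended after the {ssl, Opts} entry, as the diff shows.
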
diff --git a/deps/rabbitmq_auth_backend_http/src/rabbit_auth_backend_http_app.erl b/deps/rabbitmq_auth_backend_http/src/rabbit_auth_backend_http_app.erl
index 139f888fd7..d6d362f69d 100644
--- a/deps/rabbitmq_auth_backend_http/src/rabbit_auth_backend_http_app.erl
+++ b/deps/rabbitmq_auth_backend_http/src/rabbit_auth_backend_http_app.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_auth_backend_http_app).
diff --git a/deps/rabbitmq_auth_backend_http/test/auth_SUITE.erl b/deps/rabbitmq_auth_backend_http/test/auth_SUITE.erl
index c8e7d37373..9074943aa9 100644
--- a/deps/rabbitmq_auth_backend_http/test/auth_SUITE.erl
+++ b/deps/rabbitmq_auth_backend_http/test/auth_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2017-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2017-2021 VMware, Inc. or its affiliates. All rights reserved.
-module(auth_SUITE).
diff --git a/deps/rabbitmq_auth_backend_http/test/config_schema_SUITE.erl b/deps/rabbitmq_auth_backend_http/test/config_schema_SUITE.erl
index f6f0d17414..28d40339b2 100644
--- a/deps/rabbitmq_auth_backend_http/test/config_schema_SUITE.erl
+++ b/deps/rabbitmq_auth_backend_http/test/config_schema_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(config_schema_SUITE).
diff --git a/deps/rabbitmq_auth_backend_http/test/config_schema_SUITE_data/rabbitmq_auth_backend_http.snippets b/deps/rabbitmq_auth_backend_http/test/config_schema_SUITE_data/rabbitmq_auth_backend_http.snippets
index e12266db45..748515344c 100644
--- a/deps/rabbitmq_auth_backend_http/test/config_schema_SUITE_data/rabbitmq_auth_backend_http.snippets
+++ b/deps/rabbitmq_auth_backend_http/test/config_schema_SUITE_data/rabbitmq_auth_backend_http.snippets
@@ -23,4 +23,21 @@
{vhost_path,"http://some-server/auth/vhost"},
{resource_path,"http://some-server/auth/resource"}]}],
[rabbitmq_auth_backend_http]}
+
+, {timeouts,
+ "auth_backends.1 = http
+ auth_http.user_path = http://some-server/auth/user
+ auth_http.vhost_path = http://some-server/auth/vhost
+ auth_http.resource_path = http://some-server/auth/resource
+ auth_http.request_timeout = 30000
+ auth_http.connection_timeout = 30000",
+ [{rabbit,[{auth_backends,[rabbit_auth_backend_http]}]},
+ {rabbitmq_auth_backend_http,
+ [
+ {connection_timeout,30000},
+ {request_timeout,30000},
+ {user_path,"http://some-server/auth/user"},
+ {vhost_path,"http://some-server/auth/vhost"},
+ {resource_path,"http://some-server/auth/resource"}]}],
+ [rabbitmq_auth_backend_http]}
].
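
The "timeouts" snippet added above documents how the auth_http.request_timeout and auth_http.connection_timeout keys in rabbitmq.conf are translated into plain entries in the plugin's application environment, in milliseconds. Assuming that configuration is applied, the translated values could be checked from an attached Erlang shell roughly as follows (an illustrative check, not part of the test suite):

    {ok, 30000} = application:get_env(rabbitmq_auth_backend_http, request_timeout).
    {ok, 30000} = application:get_env(rabbitmq_auth_backend_http, connection_timeout).
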
diff --git a/deps/rabbitmq_auth_backend_http/test/unit_SUITE.erl b/deps/rabbitmq_auth_backend_http/test/unit_SUITE.erl
index 552399a313..e081ca397a 100644
--- a/deps/rabbitmq_auth_backend_http/test/unit_SUITE.erl
+++ b/deps/rabbitmq_auth_backend_http/test/unit_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2017-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2017-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(unit_SUITE).
diff --git a/deps/rabbitmq_auth_backend_ldap/BUILD.bazel b/deps/rabbitmq_auth_backend_ldap/BUILD.bazel
new file mode 100644
index 0000000000..3e10dc3bf8
--- /dev/null
+++ b/deps/rabbitmq_auth_backend_ldap/BUILD.bazel
@@ -0,0 +1,114 @@
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze", "plt")
+load(
+ "//:rabbitmq.bzl",
+ "RABBITMQ_DIALYZER_OPTS",
+ "assert_suites",
+ "broker_for_integration_suites",
+ "rabbitmq_integration_suite",
+ "rabbitmq_lib",
+ "rabbitmq_suite",
+)
+
+APP_ENV = """[
+ {servers, undefined},
+ {user_bind_pattern, none},
+ {user_dn_pattern, "$${username}"},
+ {dn_lookup_attribute, none},
+ {dn_lookup_base, none},
+ {group_lookup_base, none},
+ {dn_lookup_bind, as_user},
+ {other_bind, as_user},
+ {anon_auth, false},
+ {vhost_access_query, {constant, true}},
+ {resource_access_query, {constant, true}},
+ {topic_access_query, {constant, true}},
+ {tag_queries, [{administrator, {constant, false}}]},
+ {use_ssl, false},
+ {use_starttls, false},
+ {ssl_options, []},
+ {port, 389},
+ {timeout, infinity},
+ {log, false},
+ {pool_size, 64},
+ {idle_timeout, 300000}
+ ]"""
+
+APP_NAME = "rabbitmq_auth_backend_ldap"
+
+APP_DESCRIPTION = "RabbitMQ LDAP Authentication Backend"
+
+APP_MODULE = "rabbit_auth_backend_ldap_app"
+
+EXTRA_APPS = [
+ "eldap",
+]
+
+DEPS = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+]
+
+RUNTIME_DEPS = [
+ "//deps/rabbit:bazel_erlang_lib",
+]
+
+rabbitmq_lib(
+ app_description = APP_DESCRIPTION,
+ app_env = APP_ENV,
+ app_module = APP_MODULE,
+ app_name = APP_NAME,
+ extra_apps = EXTRA_APPS,
+ runtime_deps = RUNTIME_DEPS,
+ deps = DEPS,
+)
+
+xref(tags = ["xref"])
+
+plt(
+ name = "base_plt",
+ apps = EXTRA_APPS,
+ plt = "//:base_plt",
+)
+
+dialyze(
+ dialyzer_opts = RABBITMQ_DIALYZER_OPTS,
+ plt = ":base_plt",
+ tags = ["dialyze"],
+)
+
+broker_for_integration_suites()
+
+PACKAGE = "deps/rabbitmq_auth_backend_ldap"
+
+suites = [
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "config_schema_SUITE",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "system_SUITE",
+ size = "medium",
+ additional_srcs = [
+ "test/rabbit_ldap_seed.erl",
+ ],
+ data = [
+ "example/global.ldif",
+ "example/memberof_init.ldif",
+ "example/refint_1.ldif",
+ "example/refint_2.ldif",
+ ],
+ tags = [
+ "ldap",
+ ],
+ ),
+ rabbitmq_suite(
+ name = "unit_SUITE",
+ size = "small",
+ ),
+]
+
+assert_suites(
+ suites,
+ glob(["test/**/*_SUITE.erl"]),
+)
diff --git a/deps/rabbitmq_auth_backend_ldap/Makefile b/deps/rabbitmq_auth_backend_ldap/Makefile
index 0ee0c7e0c9..b81a3624c9 100644
--- a/deps/rabbitmq_auth_backend_ldap/Makefile
+++ b/deps/rabbitmq_auth_backend_ldap/Makefile
@@ -48,5 +48,5 @@ DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
ERLANG_MK_COMMIT = rabbitmq-tmp
-include rabbitmq-components.mk
-include erlang.mk
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
diff --git a/deps/rabbitmq_auth_backend_ldap/erlang.mk b/deps/rabbitmq_auth_backend_ldap/erlang.mk
deleted file mode 100644
index fce4be0b0a..0000000000
--- a/deps/rabbitmq_auth_backend_ldap/erlang.mk
+++ /dev/null
@@ -1,7808 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
-ERLANG_MK_WITHOUT =
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
-
-# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
-
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
-
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simplify writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating Github-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = erlang library for JSON Web Token
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple non intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is a pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = redis erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += esh_mk
-pkg_esh_mk_name = esh_mk
-pkg_esh_mk_description = esh template engine plugin for erlang.mk
-pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
-pkg_esh_mk_fetch = git
-pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
-pkg_esh_mk_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = TOML language erlang parser
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = http://fixprotocol.org/ implementation.
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - an Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = The guards and for Erlang parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
-pkg_gun_homepage = http://ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang irc client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library for efficiently decoding and encoding JSON, written in C.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = erlang driver for rethinkdb
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = library to handle mimetypes
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang Oauth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transformer for using new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between record and proplist
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = automatic Erlang node discovery via redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = pipelined erlang redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang Rest Client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy as nif for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an ID generator for message services.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elastic Search
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX style broken HTML parser in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = transit format for erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU based collation Erlang module
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtension for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = Tiny Erlang app that works in conjunction with statsderl to generate information on the Erlang VM for Graphite logs.
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = a simple erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler svn repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired by rebar xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based yaml loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = Zab protocol implementation in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
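-
-# Example usage (illustrative query): list every package whose name or
-# description mentions "websocket":
-#
-#   make search q=websocket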
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
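-
-# OTP applications (and applications found in $(APPS_DIR)) that the project
-# depends on are listed in LOCAL_DEPS instead, e.g. (illustrative list):
-#
-#   LOCAL_DEPS = mnesia crypto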
-
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
-
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# Both early and regular plugins use the core_dep_plugin macro.
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
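-
-# For example, with a hypothetical dependency named my_dep:
-#
-#   DEP_EARLY_PLUGINS = my_dep                      # loads $(DEPS_DIR)/my_dep/early-plugins.mk
-#   DEP_EARLY_PLUGINS = my_dep/mk/early-plugins.mk  # loads that exact file under $(DEPS_DIR)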
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
-
-# Query functions.
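-#
-# For a dependency declared as, for instance (illustrative name and version):
-#
-#   dep_cowboy = git https://github.com/ninenines/cowboy 2.9.0
-#
-# query_fetch_method returns git, query_repo returns the repository URL and
-# query_version returns 2.9.0.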
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
-
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
-
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly, we don't want to include it here;
-# otherwise it would be treated both as an app and as a top-level project.
-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
-
-export NO_AUTOPATCH
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
-
-# Optimization: don't recompile deps unless truly necessary.
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create the ebin directory for all apps to make sure Erlang recognizes them
-# as proper OTP applications when using -include_lib. This is a temporary
-# fix; a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
-
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies once they have been compiled.
-# Developers working on the top-level project and on some of its
-# dependencies at the same time may want to change this behavior.
-# There are two solutions (illustrated further below):
-# 1. Set `FULL=1` so that all dependencies are visited and
-#    recursively recompiled if necessary.
-# 2. Set `FORCE_REBUILD=` to the specific list of dependencies that
-#    should be recompiled (instead of the whole set).
-
-FORCE_REBUILD ?=
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
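-
-# For example (illustrative dependency names):
-#
-#   make FULL=1
-#   make FORCE_REBUILD="cowlib ranch"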
-
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
-
-# Deps related targets.
-
-# @todo Rename GNUmakefile and makefile into Makefile first, if they exist.
-# While the Makefile could also be named GNUmakefile or makefile,
-# in practice only Makefile has been needed so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
-# when one is given via ERLANG_MK_FILENAME. Do it for all 3 possible Makefile
-# file names; an illustration follows the definitions below.
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
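-
-# In other words, a dependency Makefile line such as
-#
-#   include erlang.mk
-#
-# is rewritten to (roughly):
-#
-#   include $(if $(ERLANG_MK_FILENAME),$(ERLANG_MK_FILENAME),erlang.mk)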
-
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-# Hex dependencies only carry a package version; there is no need to look
-# them up in the Erlang.mk package index.
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
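-
-# A hex dependency is therefore declared with just a version and, optionally,
-# a package name, e.g. (illustrative package and version):
-#
-#   DEPS += cowlib
-#   dep_cowlib = hex 2.11.0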
-
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility with older Erlang.mk configurations.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
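-
-# The deprecated format above expects only a repository and an optional
-# commit, e.g. (illustrative):
-#
-#   dep_cowboy = https://github.com/ninenines/cowboy 1.0.0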
-
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
-
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- Output0 = [
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ],
- Output = case "é" of
- [233] -> unicode:characters_to_binary(Output0);
- _ -> Output0
- end,
- ok = file:write_file("$(1)", Output),
- halt()
-endef
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
- echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
-
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
- @:
-
-test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
-test_erlc_verbose_2 = set -x;
-test_erlc_verbose = $(test_erlc_verbose_$(V))
-
-define compile_test_erl
- $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(1)
-endef
-
-ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
-$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
- $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want to fail due to
-# warnings when used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
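-
-# The generated rebar.config looks roughly like this (illustrative dependency;
-# the erl_opts list is derived from ERLC_OPTS):
-#
-#   {deps, [
-#       {cowboy,".*",{git,"https://github.com/ninenines/cowboy","2.9.0"}}
-#   ]}.
-#   {erl_opts, [debug_info]}.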
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
- " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
- " list-templates List available templates"
-
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
-
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
-
-list-templates:
- $(verbose) @echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code. The "gcc" MSYS2 package also doesn't.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
- "The CI_OTP variable must be defined with the Erlang versions" \
- "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
-
-# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifdef CONCUERROR_TESTS
-
-.PHONY: concuerror distclean-concuerror
-
-# Configuration
-
-CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
-CONCUERROR_OPTS ?=
-
-# Core targets.
-
-check:: concuerror
-
-ifndef KEEP_LOGS
-distclean:: distclean-concuerror
-endif
-
-# Plugin-specific targets.
-
-$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
- $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
- $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
-
-$(CONCUERROR_LOGS_DIR):
- $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
-
-define concuerror_html_report
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="utf-8">
-<title>Concuerror HTML report</title>
-</head>
-<body>
-<h1>Concuerror HTML report</h1>
-<p>Generated on $(concuerror_date)</p>
-<ul>
-$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
-</ul>
-</body>
-</html>
-endef
-
-concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
- $(eval concuerror_date := $(shell date))
- $(eval concuerror_targets := $^)
- $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
-
-define concuerror_target
-.PHONY: concuerror-$1-$2
-
-concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
- $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
- --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
- -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
- $$(CONCUERROR_OPTS) -m $1 -t $2
-endef
-
-$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
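As an illustration of the module:test entries expected in CONCUERROR_TESTS, a minimal hypothetical entry point (declared as CONCUERROR_TESTS = race_test:ping) could look like this; the module and function names are made up:

%% test/race_test.erl: hypothetical example, not part of erlang.mk
-module(race_test).
-export([ping/0]).

ping() ->
    Self = self(),
    spawn(fun() -> Self ! pong end),
    receive pong -> ok end.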
-
-distclean-concuerror:
- $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
-
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ct apps-ct distclean-ct
-
-# Configuration.
-
-CT_OPTS ?=
-
-ifneq ($(wildcard $(TEST_DIR)),)
-ifndef CT_SUITES
-CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
-endif
-endif
-CT_SUITES ?=
-CT_LOGS_DIR ?= $(CURDIR)/logs
-
-# Core targets.
-
-tests:: ct
-
-ifndef KEEP_LOGS
-distclean:: distclean-ct
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Common_test targets:" \
- " ct Run all the common_test suites for this project" \
- "" \
- "All your common_test suites have their associated targets." \
-		"A suite named http_SUITE can be run using the ct-http target."
-
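For instance, a skeleton suite that the suite target generation below would expose as ct-http (the module body is a minimal, hypothetical example):

%% test/http_SUITE.erl: hypothetical example
-module(http_SUITE).
-include_lib("common_test/include/ct.hrl").

-export([all/0]).
-export([get_request/1]).

all() -> [get_request].

get_request(_Config) ->
    ok.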
-# Plugin-specific targets.
-
-CT_RUN = ct_run \
- -no_auto_compile \
- -noinput \
- -pa $(CURDIR)/ebin $(TEST_DIR) \
- -dir $(TEST_DIR) \
- -logdir $(CT_LOGS_DIR)
-
-ifeq ($(CT_SUITES),)
-ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
-else
-# We do not run tests if we are in an apps/* with no test directory.
-ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
-ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
-endif
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-define ct_app_target
-apps-ct-$1: test-build
- $$(MAKE) -C $1 ct IS_APP=1
-endef
-
-$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
-
-apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
-endif
-
-ifdef t
-ifeq (,$(findstring :,$t))
-CT_EXTRA = -group $t
-else
-t_words = $(subst :, ,$t)
-CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
-endif
-else
-ifdef c
-CT_EXTRA = -case $c
-else
-CT_EXTRA =
-endif
-endif
-
-define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
-endef
-
-$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
-
-distclean-ct:
- $(gen_verbose) rm -rf $(CT_LOGS_DIR)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: plt distclean-plt dialyze
-
-# Configuration.
-
-DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
-export DIALYZER_PLT
-
-PLT_APPS ?=
-DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-DIALYZER_PLT_OPTS ?=
-
-# Core targets.
-
-check:: dialyze
-
-distclean:: distclean-plt
-
-help::
- $(verbose) printf "%s\n" "" \
- "Dialyzer targets:" \
- " plt Build a PLT file for this project" \
- " dialyze Analyze the project using Dialyzer"
-
-# Plugin-specific targets.
-
-define filter_opts.erl
- Opts = init:get_plain_arguments(),
- {Filtered, _} = lists:foldl(fun
- (O, {Os, true}) -> {[O|Os], false};
- (O = "-D", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-I", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-pa", {Os, _}) -> {[O|Os], true};
- (_, Acc) -> Acc
- end, {[], false}, Opts),
- io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
- halt().
-endef
-
-# DIALYZER_PLT is a variable understood directly by Dialyzer.
-#
-# We append the path to erts at the end of the PLT. This works
-# because the PLT file is in the external term format and the
-# function binary_to_term/1 ignores any trailing data.
-$(DIALYZER_PLT): deps app
- $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
- while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
- $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
- erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
- $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
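The trailing-data behaviour the comment above relies on can be checked from an Erlang shell; the appended path here is only an illustrative example of what the recipe writes:

%% Quick sanity check (illustrative path, not a real install):
Plt = term_to_binary({plt, some_info}),
{plt, some_info} = binary_to_term(<<Plt/binary, "\n/usr/lib/erlang/lib/erts-12.2\n">>).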
-
-plt: $(DIALYZER_PLT)
-
-distclean-plt:
- $(gen_verbose) rm -f $(DIALYZER_PLT)
-
-ifneq ($(wildcard $(DIALYZER_PLT)),)
-dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
- $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
- grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
- rm $(DIALYZER_PLT); \
- $(MAKE) plt; \
- fi
-else
-dialyze: $(DIALYZER_PLT)
-endif
- $(verbose) dialyzer --no_native `$(ERL) \
- -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
- -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
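As a usage sketch: with the default DTL_PATH and DTL_SUFFIX, a template templates/greeting.dtl compiles to a module named greeting_dtl. Assuming erlydtl's generated render/1 API (template and variable names are hypothetical):

%% Render the compiled template; erlydtl returns {ok, IoList} on success.
{ok, Html} = greeting_dtl:render([{name, "RabbitMQ"}]),
io:format("~s~n", [Html]).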
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
- " escript Build an executable escript archive" \
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
- $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
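With the defaults above, the generated escript therefore starts with a header along these lines, followed by the zip archive (the project name my_app is hypothetical):

#!/usr/bin/env escript
%% This is an -*- erlang -*- file
%%! -escript main my_app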
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: eunit apps-eunit
-
-# Configuration
-
-EUNIT_OPTS ?=
-EUNIT_ERL_OPTS ?=
-
-# Core targets.
-
-tests:: eunit
-
-help::
- $(verbose) printf "%s\n" "" \
- "EUnit targets:" \
- " eunit Run all the EUnit tests for this project"
-
-# Plugin-specific targets.
-
-define eunit.erl
- $(call cover.erl)
- CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
- ok -> ok;
- error -> halt(2)
- end,
- CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
- halt()
-endef
-
-EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
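By convention, and as the %_tests filter further down assumes, EUnit tests for a module my_mod live in a my_mod_tests module under $(TEST_DIR); a minimal hypothetical example:

%% test/my_mod_tests.erl: hypothetical example
-module(my_mod_tests).
-include_lib("eunit/include/eunit.hrl").

reverse_test() ->
    ?assertEqual([3, 2, 1], lists:reverse([1, 2, 3])).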
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
-else
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
-endif
-else
-EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
-EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
-
-EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
- $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
-
-eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
-ifneq ($(wildcard src/ $(TEST_DIR)),)
- $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-apps-eunit: test-build
- $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
- [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
- exit $$eunit_retcode
-endif
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper)
-.PHONY: proper
-
-# Targets.
-
-tests:: proper
-
-define proper_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- Module = fun(M) ->
- [true] =:= lists:usort([
- case atom_to_list(F) of
- "prop_" ++ _ ->
- io:format("Testing ~p:~p/0~n", [M, F]),
- proper:quickcheck(M:F(), nocolors);
- _ ->
- true
- end
- || {F, 0} <- M:module_info(exports)])
- end,
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]);
- module -> Module($(2));
- function -> proper:quickcheck($(2), nocolors)
- end,
- CoverExport("$(COVER_DATA_DIR)/proper.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
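The check above runs every exported zero-arity prop_* function through proper:quickcheck/2, so a property module it picks up would look roughly like this (module and property names are hypothetical):

%% test/prop_lists_extra.erl: hypothetical example
-module(prop_lists_extra).
-include_lib("proper/include/proper.hrl").

-export([prop_reverse_twice/0]).

prop_reverse_twice() ->
    ?FORALL(L, list(integer()),
        lists:reverse(lists:reverse(L)) =:= L).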
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-proper: test-build cover-data-dir
- $(verbose) $(call erlang,$(call proper_check.erl,module,$(t)))
-else
-proper: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)()))
-endif
-else
-proper: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Verbosity.
-
-proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
-proto_verbose = $(proto_verbose_$(V))
-
-# Core targets.
-
-ifneq ($(wildcard src/),)
-ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
-PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
-ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
-
-ifeq ($(PROTO_FILES),)
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
- $(verbose) :
-else
-# Rebuild proto files when the Makefile changes.
-# We exclude $(PROJECT).d to avoid a circular dependency.
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(PROTO_FILES); \
- fi
- $(verbose) touch $@
-
-$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
-endif
-
-ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
-define compile_proto.erl
- [begin
- protobuffs_compile:generate_source(F, [
- {output_include_dir, "./include"},
- {output_src_dir, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-else
-define compile_proto.erl
- [begin
- gpb_compile:file(F, [
- {include_as_lib, true},
- {module_name_suffix, "_pb"},
- {o_hrl, "./include"},
- {o_erl, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-endif
-
-ifneq ($(PROTO_FILES),)
-$(PROJECT).d:: $(PROTO_FILES)
- $(verbose) mkdir -p ebin/ include/
- $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
-endif
-endif
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
-
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
-		"  shell              Run an erlang shell with SHELL_OPTS or a reasonable default"
-
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
-
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
-		"ReST sources and the 'conf.py' file are expected in the directory pointed to by" \
-		"SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists the formats to build (only" \
-		"the 'html' format is generated by default); the target directory can be specified" \
-		'by setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
-		"Additional Sphinx options can be set in SPHINX_OPTS."
-
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
-
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
-.PHONY: triq
-
-# Targets.
-
-tests:: triq
-
-define triq_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
- module -> triq:check($(2));
- function -> triq:check($(2))
- end,
- CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-triq: test-build cover-data-dir
- $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
-else
-triq: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
-endif
-else
-triq: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
- ' xref Run Xrefr using $$XREF_CONFIG as config file if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-COVER_REPORT_DIR ?= cover
-COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
-
-ifdef COVER
-COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
-COVER_DEPS ?=
-endif
-
-# Code coverage for Common Test.
-
-ifdef COVER
-ifdef CT_RUN
-ifneq ($(wildcard $(TEST_DIR)),)
-test-build:: $(TEST_DIR)/ct.cover.spec
-
-$(TEST_DIR)/ct.cover.spec: cover-data-dir
- $(gen_verbose) printf "%s\n" \
- "{incl_app, '$(PROJECT)', details}." \
- "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
- $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
- $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
-
-CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
-endif
-endif
-endif
-
-# Code coverage for other tools.
-
-ifdef COVER
-define cover.erl
- CoverSetup = fun() ->
- Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
- $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
- $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
- end
- end || Dir <- Dirs]
- end,
- CoverExport = fun(Filename) -> cover:export(Filename) end,
-endef
-else
-define cover.erl
- CoverSetup = fun() -> ok end,
- CoverExport = fun(_) -> ok end,
-endef
-endif
-
-# Core targets
-
-ifdef COVER
-ifneq ($(COVER_REPORT_DIR),)
-tests::
- $(verbose) $(MAKE) --no-print-directory cover-report
-endif
-
-cover-data-dir: | $(COVER_DATA_DIR)
-
-$(COVER_DATA_DIR):
- $(verbose) mkdir -p $(COVER_DATA_DIR)
-else
-cover-data-dir:
-endif
-
-clean:: coverdata-clean
-
-ifneq ($(COVER_REPORT_DIR),)
-distclean:: cover-report-clean
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Cover targets:" \
-		"  cover-report  Generate an HTML coverage report from previously collected" \
- " cover data." \
- " all.coverdata Merge all coverdata files into all.coverdata." \
- "" \
- "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
-		"target tests additionally generates an HTML coverage report from the combined" \
- "coverdata files from each of these testing tools. HTML reports can be disabled" \
- "by setting COVER_REPORT_DIR to empty."
-
-# Plugin specific targets
-
-COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
-
-.PHONY: coverdata-clean
-coverdata-clean:
- $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
-
-# Merge all coverdata files into one.
-define cover_export.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
-endef
-
-all.coverdata: $(COVERDATA) cover-data-dir
- $(gen_verbose) $(call erlang,$(cover_export.erl))
-
-# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
-# empty if you want the coverdata files but not the HTML report.
-ifneq ($(COVER_REPORT_DIR),)
-
-.PHONY: cover-report-clean cover-report
-
-cover-report-clean:
- $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
-ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
- $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
-endif
-
-ifeq ($(COVERDATA),)
-cover-report:
-else
-
-# Modules which include eunit.hrl always contain one line without coverage
-# because eunit defines test/0 which is never called. We compensate for this.
-EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
- grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
- | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
-
-define cover_report.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- Ms = cover:imported_modules(),
- [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
- ++ ".COVER.html", [html]) || M <- Ms],
- Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
- EunitHrlMods = [$(EUNIT_HRL_MODS)],
- Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
- true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
- TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
- TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
- Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
- TotalPerc = Perc(TotalY, TotalN),
- {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
- io:format(F, "<!DOCTYPE html><html>~n"
- "<head><meta charset=\"UTF-8\">~n"
- "<title>Coverage report</title></head>~n"
- "<body>~n", []),
- io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
- io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
- [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
- "<td>~p%</td></tr>~n",
- [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
- How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
- Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
- io:format(F, "</table>~n"
- "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
- "</body></html>", [How, Date]),
- halt().
-endef
-
-cover-report:
- $(verbose) mkdir -p $(COVER_REPORT_DIR)
- $(gen_verbose) $(call erlang,$(cover_report.erl))
-
-endif
-endif # ifneq ($(COVER_REPORT_DIR),)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
-
-endif
-endif
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
-
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
-
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are also included no
-# matter the type of requested dependencies.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
-
-# Allow using fetch-deps and $(DEP_TYPES) to fetch multiple types of
-# dependencies with a single target.
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
-
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
diff --git a/deps/rabbitmq_auth_backend_ldap/include/logging.hrl b/deps/rabbitmq_auth_backend_ldap/include/logging.hrl
new file mode 100644
index 0000000000..77b923ca7c
--- /dev/null
+++ b/deps/rabbitmq_auth_backend_ldap/include/logging.hrl
@@ -0,0 +1,3 @@
+-include_lib("rabbit_common/include/logging.hrl").
+
+-define(RMQLOG_DOMAIN_LDAP, ?DEFINE_RMQLOG_DOMAIN(ldap)).
diff --git a/deps/rabbitmq_auth_backend_ldap/priv/schema/rabbitmq_auth_backend_ldap.schema b/deps/rabbitmq_auth_backend_ldap/priv/schema/rabbitmq_auth_backend_ldap.schema
index ae247dca91..669e279125 100644
--- a/deps/rabbitmq_auth_backend_ldap/priv/schema/rabbitmq_auth_backend_ldap.schema
+++ b/deps/rabbitmq_auth_backend_ldap/priv/schema/rabbitmq_auth_backend_ldap.schema
@@ -337,3 +337,19 @@ fun(Conf) ->
Settings = cuttlefish_variable:filter_by_prefix("auth_ldap.ssl_options.versions", Conf),
[ V || {_, V} <- Settings ]
end}.
+
+{mapping, "auth_ldap.ssl_options.sni", "rabbitmq_auth_backend_ldap.ssl_options.server_name_indication",
+ [{datatype, [{enum, [none]}, string]}]}.
+
+{translation, "rabbitmq_auth_backend_ldap.ssl_options.server_name_indication",
+fun(Conf) ->
+ case cuttlefish:conf_get("auth_ldap.ssl_options.sni", Conf, undefined) of
+ undefined -> cuttlefish:unset();
+ none -> cuttlefish:unset();
+ Hostname -> Hostname
+ end
+end}.
+
+
+{mapping, "auth_ldap.ssl_options.hostname_verification", "rabbitmq_auth_backend_ldap.ssl_hostname_verification", [
+ {datatype, {enum, [wildcard, none]}}]}.
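
A minimal sketch of the application environment the two mappings above produce, assuming both new settings are present in rabbitmq.conf; the hostname is purely illustrative and the resulting structure mirrors the config_schema_SUITE snippets added later in this diff.

```erlang
%% Illustrative result, assuming in rabbitmq.conf:
%%   auth_ldap.ssl_options.sni = ldap.eng.example.local      (hypothetical host)
%%   auth_ldap.ssl_options.hostname_verification = wildcard
[{rabbitmq_auth_backend_ldap,
  [{ssl_options, [{server_name_indication, "ldap.eng.example.local"}]},
   {ssl_hostname_verification, wildcard}]}].
```
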
diff --git a/deps/rabbitmq_auth_backend_ldap/rabbitmq-components.mk b/deps/rabbitmq_auth_backend_ldap/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/rabbitmq_auth_backend_ldap/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Define default goal to `all` because this file defines some targets
-# before the inclusion of erlang.mk leading to the wrong target becoming
-# the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
-
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to checkout branches which match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch or fallback to `stable` or `master` whichever was the
-# base of the topic branch.
-
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
-
-# Third-party dependencies version pinning.
-#
-# We do that in this file, which is copied in all projects, to ensure
-# all projects use the same versions. It avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# Erlang.mk does not rebuild dependencies by default, once they were
-# compiled once, except for those listed in the `$(FORCE_REBUILD)`
-# variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this eases
-# the work on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project
-# repository URL, if it's a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name is replaced by the
-# target's properties:
-# eg. rabbitmq-common is replaced by rabbitmq-codegen
-# eg. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fallback to RabbitMQ
-# upstream which is GitHub.
-
-# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following pattern:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
-
-# Macro to replace both the project's name (eg. "rabbit_common") and
-# repository name (eg. "rabbitmq-common") by the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespaces in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level's one.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is a coincidence that we found a
-# `rabbitmq-components.mk` up upper directories.
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
diff --git a/deps/rabbitmq_auth_backend_ldap/src/rabbit_auth_backend_ldap.erl b/deps/rabbitmq_auth_backend_ldap/src/rabbit_auth_backend_ldap.erl
index 5664cbf35e..f1ea90b475 100644
--- a/deps/rabbitmq_auth_backend_ldap/src/rabbit_auth_backend_ldap.erl
+++ b/deps/rabbitmq_auth_backend_ldap/src/rabbit_auth_backend_ldap.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_auth_backend_ldap).
@@ -483,7 +483,7 @@ with_ldap({ok, Creds}, Fun, Servers) ->
network ->
Pre = " LDAP network traffic: ",
rabbit_log_ldap:info(
- " LDAP connecting to servers: ~p~n", [Servers]),
+ " LDAP connecting to servers: ~p", [Servers]),
[{log, fun(1, S, A) -> rabbit_log_ldap:warning(Pre ++ S, A);
(2, S, A) ->
rabbit_log_ldap:info(Pre ++ S, scrub_creds(A, []))
@@ -491,7 +491,7 @@ with_ldap({ok, Creds}, Fun, Servers) ->
network_unsafe ->
Pre = " LDAP network traffic: ",
rabbit_log_ldap:info(
- " LDAP connecting to servers: ~p~n", [Servers]),
+ " LDAP connecting to servers: ~p", [Servers]),
[{log, fun(1, S, A) -> rabbit_log_ldap:warning(Pre ++ S, A);
(2, S, A) -> rabbit_log_ldap:info( Pre ++ S, A)
end} | Opts0];
@@ -706,18 +706,27 @@ eldap_open(Servers, Opts) ->
ssl_conf() ->
%% We must make sure not to add SSL options unless a) we have at least R16A
%% b) we have SSL turned on (or it breaks StartTLS...)
- case env(use_ssl) of
+ case env(use_ssl, false) of
false -> [{ssl, false}];
true -> %% Only the unfixed version can be []
- case {env(ssl_options), at_least("5.10")} of %% R16A
- {_, true} -> [{ssl, true}, {sslopts, ssl_options()}];
- {[], _} -> [{ssl, true}];
- {_, false} -> exit({ssl_options_requires_min_r16a})
+ case env(ssl_options) of
+ [] -> [{ssl, true}];
+ undefined -> [{ssl, true}];
+ _ -> [{ssl, true}, {sslopts, ssl_options()}]
end
end.
ssl_options() ->
- rabbit_networking:fix_ssl_options(env(ssl_options)).
+ Opts0 = rabbit_networking:fix_ssl_options(env(ssl_options)),
+ case env(ssl_hostname_verification, undefined) of
+ wildcard ->
+ rabbit_log_ldap:debug("Enabling wildcard-aware hostname verification for LDAP client connections"),
+ %% Needed for non-HTTPS connections that connect to servers that use wildcard certificates.
+ %% See https://erlang.org/doc/man/public_key.html#pkix_verify_hostname_match_fun-1.
+ [{customize_hostname_check, [{match_fun, public_key:pkix_verify_hostname_match_fun(https)}]} | Opts0];
+ _ ->
+ Opts0
+ end.
at_least(Ver) ->
rabbit_misc:version_compare(erlang:system_info(version), Ver) =/= lt.
@@ -728,7 +737,7 @@ get_expected_env_str(Key, Default) ->
V = case env(Key) of
Default ->
rabbit_log_ldap:warning("rabbitmq_auth_backend_ldap configuration key '~p' is set to "
- "the default value of '~p', expected to get a non-default value~n",
+ "the default value of '~p', expected to get a non-default value",
[Key, Default]),
Default;
V0 ->
@@ -736,9 +745,14 @@ get_expected_env_str(Key, Default) ->
end,
rabbit_data_coercion:to_list(V).
-env(F) ->
- {ok, V} = application:get_env(rabbitmq_auth_backend_ldap, F),
- V.
+env(Key) ->
+ case application:get_env(rabbitmq_auth_backend_ldap, Key) of
+ {ok, V} -> V;
+ undefined -> undefined
+ end.
+
+env(Key, Default) ->
+ application:get_env(rabbitmq_auth_backend_ldap, Key, Default).
login_fun(User, UserDN, Password, AuthProps) ->
fun(L) -> case pget(vhost, AuthProps) of
@@ -819,7 +833,7 @@ dn_lookup(Username, LDAP) ->
?L1("DN lookup: ~s -> ~s", [Username, DN]),
DN;
{ok, #eldap_search_result{entries = Entries}} ->
- rabbit_log_ldap:warning("Searching for DN for ~s, got back ~p~n",
+ rabbit_log_ldap:warning("Searching for DN for ~s, got back ~p",
[Filled, Entries]),
Filled;
{error, _} = E ->
@@ -889,20 +903,15 @@ scrub_rdn([DN|Rem], Acc) ->
scrub_rdn(Rem, [string:join(DN0, "=")|Acc]).
is_dn(S) when is_list(S) ->
- case catch string:tokens(to_list(S), "=") of
+ case catch string:tokens(rabbit_data_coercion:to_list(S), "=") of
L when length(L) > 1 -> true;
_ -> false
end;
is_dn(_S) -> false.
-to_list(S) when is_list(S) -> S;
-to_list(S) when is_binary(S) -> binary_to_list(S);
-to_list(S) when is_atom(S) -> atom_to_list(S);
-to_list(S) -> {error, {badarg, S}}.
-
log(Fmt, Args) -> case env(log) of
false -> ok;
- _ -> rabbit_log_ldap:info(Fmt ++ "~n", Args)
+ _ -> rabbit_log_ldap:info(Fmt ++ "", Args)
end.
fill(Fmt, Args) ->
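
A behavioural sketch of the helpers reworked above, assuming nothing is configured: env/1 now returns `undefined` instead of crashing on an unset key, env/2 returns the supplied default, and ssl_options/0 prepends a wildcard-aware hostname check only when `ssl_hostname_verification` is set to `wildcard`.

```erlang
%% Sketch only; the keys are the real ones, the bound values assume an
%% unset configuration.
undefined = env(ssl_options),        %% env/1: unset key -> undefined, no crash
false     = env(use_ssl, false),     %% env/2: the explicit default is returned
%% With ssl_hostname_verification = wildcard, ssl_options/0 prepends:
%%   {customize_hostname_check,
%%    [{match_fun, public_key:pkix_verify_hostname_match_fun(https)}]}
```
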
diff --git a/deps/rabbitmq_auth_backend_ldap/src/rabbit_auth_backend_ldap_app.erl b/deps/rabbitmq_auth_backend_ldap/src/rabbit_auth_backend_ldap_app.erl
index 74c5a51598..48468620bb 100644
--- a/deps/rabbitmq_auth_backend_ldap/src/rabbit_auth_backend_ldap_app.erl
+++ b/deps/rabbitmq_auth_backend_ldap/src/rabbit_auth_backend_ldap_app.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_auth_backend_ldap_app).
@@ -10,7 +10,8 @@
-behaviour(application).
-export([start/2, stop/1]).
-%% Dummy supervisor to get this application behaviour working
+%% Dummy supervisor - see Ulf Wiger's comment at
+%% http://erlang.org/pipermail/erlang-questions/2010-April/050508.html
-behaviour(supervisor).
-export([create_ldap_pool/0, init/1]).
diff --git a/deps/rabbitmq_auth_backend_ldap/src/rabbit_auth_backend_ldap_util.erl b/deps/rabbitmq_auth_backend_ldap/src/rabbit_auth_backend_ldap_util.erl
index 1609255cc6..21e808b982 100644
--- a/deps/rabbitmq_auth_backend_ldap/src/rabbit_auth_backend_ldap_util.erl
+++ b/deps/rabbitmq_auth_backend_ldap/src/rabbit_auth_backend_ldap_util.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_auth_backend_ldap_util).
diff --git a/deps/rabbitmq_auth_backend_ldap/src/rabbit_log_ldap.erl b/deps/rabbitmq_auth_backend_ldap/src/rabbit_log_ldap.erl
new file mode 100644
index 0000000000..632989ce8e
--- /dev/null
+++ b/deps/rabbitmq_auth_backend_ldap/src/rabbit_log_ldap.erl
@@ -0,0 +1,107 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+%% @doc Compatibility module for the old Lager-based logging API.
+-module(rabbit_log_ldap).
+
+-export([debug/1, debug/2, debug/3,
+ info/1, info/2, info/3,
+ notice/1, notice/2, notice/3,
+ warning/1, warning/2, warning/3,
+ error/1, error/2, error/3,
+ critical/1, critical/2, critical/3,
+ alert/1, alert/2, alert/3,
+ emergency/1, emergency/2, emergency/3,
+ none/1, none/2, none/3]).
+
+-include("logging.hrl").
+
+-compile({no_auto_import, [error/2, error/3]}).
+
+%%----------------------------------------------------------------------------
+
+-spec debug(string()) -> 'ok'.
+-spec debug(string(), [any()]) -> 'ok'.
+-spec debug(pid() | [tuple()], string(), [any()]) -> 'ok'.
+-spec info(string()) -> 'ok'.
+-spec info(string(), [any()]) -> 'ok'.
+-spec info(pid() | [tuple()], string(), [any()]) -> 'ok'.
+-spec notice(string()) -> 'ok'.
+-spec notice(string(), [any()]) -> 'ok'.
+-spec notice(pid() | [tuple()], string(), [any()]) -> 'ok'.
+-spec warning(string()) -> 'ok'.
+-spec warning(string(), [any()]) -> 'ok'.
+-spec warning(pid() | [tuple()], string(), [any()]) -> 'ok'.
+-spec error(string()) -> 'ok'.
+-spec error(string(), [any()]) -> 'ok'.
+-spec error(pid() | [tuple()], string(), [any()]) -> 'ok'.
+-spec critical(string()) -> 'ok'.
+-spec critical(string(), [any()]) -> 'ok'.
+-spec critical(pid() | [tuple()], string(), [any()]) -> 'ok'.
+-spec alert(string()) -> 'ok'.
+-spec alert(string(), [any()]) -> 'ok'.
+-spec alert(pid() | [tuple()], string(), [any()]) -> 'ok'.
+-spec emergency(string()) -> 'ok'.
+-spec emergency(string(), [any()]) -> 'ok'.
+-spec emergency(pid() | [tuple()], string(), [any()]) -> 'ok'.
+-spec none(string()) -> 'ok'.
+-spec none(string(), [any()]) -> 'ok'.
+-spec none(pid() | [tuple()], string(), [any()]) -> 'ok'.
+
+%%----------------------------------------------------------------------------
+
+debug(Format) -> debug(Format, []).
+debug(Format, Args) -> debug(self(), Format, Args).
+debug(Pid, Format, Args) ->
+ logger:debug(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_LDAP}).
+
+info(Format) -> info(Format, []).
+info(Format, Args) -> info(self(), Format, Args).
+info(Pid, Format, Args) ->
+ logger:info(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_LDAP}).
+
+notice(Format) -> notice(Format, []).
+notice(Format, Args) -> notice(self(), Format, Args).
+notice(Pid, Format, Args) ->
+ logger:notice(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_LDAP}).
+
+warning(Format) -> warning(Format, []).
+warning(Format, Args) -> warning(self(), Format, Args).
+warning(Pid, Format, Args) ->
+ logger:warning(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_LDAP}).
+
+error(Format) -> error(Format, []).
+error(Format, Args) -> error(self(), Format, Args).
+error(Pid, Format, Args) ->
+ logger:error(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_LDAP}).
+
+critical(Format) -> critical(Format, []).
+critical(Format, Args) -> critical(self(), Format, Args).
+critical(Pid, Format, Args) ->
+ logger:critical(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_LDAP}).
+
+alert(Format) -> alert(Format, []).
+alert(Format, Args) -> alert(self(), Format, Args).
+alert(Pid, Format, Args) ->
+ logger:alert(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_LDAP}).
+
+emergency(Format) -> emergency(Format, []).
+emergency(Format, Args) -> emergency(self(), Format, Args).
+emergency(Pid, Format, Args) ->
+ logger:emergency(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_LDAP}).
+
+none(_Format) -> ok.
+none(_Format, _Args) -> ok.
+none(_Pid, _Format, _Args) -> ok.
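
A usage sketch: each wrapper in the new module forwards to the corresponding `logger` function with the LDAP log domain from `logging.hrl` attached as metadata, so the two calls below are equivalent (assuming `Servers` is bound, as at the call sites elsewhere in this diff).

```erlang
%% The first form is what the plugin code calls; the second is its expansion.
rabbit_log_ldap:info(" LDAP connecting to servers: ~p", [Servers]),
logger:info(" LDAP connecting to servers: ~p", [Servers],
            #{pid => self(), domain => ?RMQLOG_DOMAIN_LDAP}).
```
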
diff --git a/deps/rabbitmq_auth_backend_ldap/test/config_schema_SUITE.erl b/deps/rabbitmq_auth_backend_ldap/test/config_schema_SUITE.erl
index 1bc6136178..7a138f55af 100644
--- a/deps/rabbitmq_auth_backend_ldap/test/config_schema_SUITE.erl
+++ b/deps/rabbitmq_auth_backend_ldap/test/config_schema_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(config_schema_SUITE).
diff --git a/deps/rabbitmq_auth_backend_ldap/test/config_schema_SUITE_data/rabbitmq_auth_backend_ldap.snippets b/deps/rabbitmq_auth_backend_ldap/test/config_schema_SUITE_data/rabbitmq_auth_backend_ldap.snippets
index 7e4ba70cec..c07e8aa378 100644
--- a/deps/rabbitmq_auth_backend_ldap/test/config_schema_SUITE_data/rabbitmq_auth_backend_ldap.snippets
+++ b/deps/rabbitmq_auth_backend_ldap/test/config_schema_SUITE_data/rabbitmq_auth_backend_ldap.snippets
@@ -271,6 +271,68 @@
{verify,verify_peer},
{fail_if_no_peer_cert, false},
{honor_ecc_order, true}]}]}],
- []}
+ []},
+
+ {ssl_options_sni_disabled,
+ "auth_ldap.use_ssl = true
+ auth_ldap.ssl_options.cacertfile = test/config_schema_SUITE_data/certs/cacert.pem
+ auth_ldap.ssl_options.certfile = test/config_schema_SUITE_data/certs/cert.pem
+ auth_ldap.ssl_options.keyfile = test/config_schema_SUITE_data/certs/key.pem
+ auth_ldap.ssl_options.versions.tls1_2 = tlsv1.2
+ auth_ldap.ssl_options.versions.tls1_1 = tlsv1.1
+
+ auth_ldap.ssl_options.sni = none",
+ [],
+ [{rabbitmq_auth_backend_ldap,
+ [{ssl_options,
+ [{cacertfile,"test/config_schema_SUITE_data/certs/cacert.pem"},
+ {certfile,"test/config_schema_SUITE_data/certs/cert.pem"},
+ {keyfile,"test/config_schema_SUITE_data/certs/key.pem"},
+ {versions,['tlsv1.2','tlsv1.1']}]
+ },
+ {use_ssl, true}]}],
+ []},
+
+ {ssl_options_sni_hostname,
+ "auth_ldap.use_ssl = true
+ auth_ldap.ssl_options.cacertfile = test/config_schema_SUITE_data/certs/cacert.pem
+ auth_ldap.ssl_options.certfile = test/config_schema_SUITE_data/certs/cert.pem
+ auth_ldap.ssl_options.keyfile = test/config_schema_SUITE_data/certs/key.pem
+ auth_ldap.ssl_options.versions.tls1_2 = tlsv1.2
+ auth_ldap.ssl_options.versions.tls1_1 = tlsv1.1
+ auth_ldap.ssl_options.sni = hostname.dev",
+ [],
+ [{rabbitmq_auth_backend_ldap,
+ [{ssl_options,
+ [{cacertfile,"test/config_schema_SUITE_data/certs/cacert.pem"},
+ {certfile,"test/config_schema_SUITE_data/certs/cert.pem"},
+ {keyfile,"test/config_schema_SUITE_data/certs/key.pem"},
+ {versions,['tlsv1.2','tlsv1.1']},
+ {server_name_indication, "hostname.dev"}
+ ]},
+ {use_ssl, true}]}],
+ []},
+
+ {ssl_options_hostname_verification_wildcard,
+ "auth_ldap.use_ssl = true
+ auth_ldap.ssl_options.cacertfile = test/config_schema_SUITE_data/certs/cacert.pem
+ auth_ldap.ssl_options.certfile = test/config_schema_SUITE_data/certs/cert.pem
+ auth_ldap.ssl_options.keyfile = test/config_schema_SUITE_data/certs/key.pem
+ auth_ldap.ssl_options.versions.tls1_2 = tlsv1.2
+ auth_ldap.ssl_options.versions.tls1_1 = tlsv1.1
+
+ auth_ldap.ssl_options.hostname_verification = wildcard",
+ [],
+ [{rabbitmq_auth_backend_ldap,
+ [
+ {ssl_hostname_verification, wildcard},
+ {ssl_options,
+ [{cacertfile,"test/config_schema_SUITE_data/certs/cacert.pem"},
+ {certfile,"test/config_schema_SUITE_data/certs/cert.pem"},
+ {keyfile,"test/config_schema_SUITE_data/certs/key.pem"},
+ {versions,['tlsv1.2','tlsv1.1']}
+ ]},
+ {use_ssl, true}]}],
+ []}
].
diff --git a/deps/rabbitmq_auth_backend_ldap/test/rabbit_ldap_seed.erl b/deps/rabbitmq_auth_backend_ldap/test/rabbit_ldap_seed.erl
index d881250040..1eace23997 100644
--- a/deps/rabbitmq_auth_backend_ldap/test/rabbit_ldap_seed.erl
+++ b/deps/rabbitmq_auth_backend_ldap/test/rabbit_ldap_seed.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_ldap_seed).
diff --git a/deps/rabbitmq_auth_backend_ldap/test/system_SUITE.erl b/deps/rabbitmq_auth_backend_ldap/test/system_SUITE.erl
index 34d692ab45..936d77aa6c 100644
--- a/deps/rabbitmq_auth_backend_ldap/test/system_SUITE.erl
+++ b/deps/rabbitmq_auth_backend_ldap/test/system_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(system_SUITE).
diff --git a/deps/rabbitmq_auth_backend_ldap/test/unit_SUITE.erl b/deps/rabbitmq_auth_backend_ldap/test/unit_SUITE.erl
index af318615f8..36b9f3034c 100644
--- a/deps/rabbitmq_auth_backend_ldap/test/unit_SUITE.erl
+++ b/deps/rabbitmq_auth_backend_ldap/test/unit_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(unit_SUITE).
diff --git a/deps/rabbitmq_auth_backend_oauth2/BUILD.bazel b/deps/rabbitmq_auth_backend_oauth2/BUILD.bazel
new file mode 100644
index 0000000000..314aad2563
--- /dev/null
+++ b/deps/rabbitmq_auth_backend_oauth2/BUILD.bazel
@@ -0,0 +1,121 @@
+load("@bazel-erlang//:bazel_erlang_lib.bzl", "erlc")
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze")
+load(
+ "//:rabbitmq.bzl",
+ "RABBITMQ_DIALYZER_OPTS",
+ "RABBITMQ_TEST_ERLC_OPTS",
+ "assert_suites",
+ "broker_for_integration_suites",
+ "rabbitmq_integration_suite",
+ "rabbitmq_lib",
+ "rabbitmq_suite",
+)
+
+APP_NAME = "rabbitmq_auth_backend_oauth2"
+
+APP_DESCRIPTION = "OAuth 2 and JWT-based AuthN and AuthZ backend"
+
+BUILD_DEPS = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+ "//deps/rabbitmq_cli:rabbitmqctl",
+]
+
+DEPS = [
+ "@jose//:bazel_erlang_lib",
+]
+
+RUNTIME_DEPS = [
+ "//deps/rabbit:bazel_erlang_lib",
+ "@base64url//:bazel_erlang_lib",
+ "@cowlib//:bazel_erlang_lib",
+]
+
+rabbitmq_lib(
+ app_description = APP_DESCRIPTION,
+ app_name = APP_NAME,
+ build_deps = BUILD_DEPS,
+ runtime_deps = RUNTIME_DEPS,
+ deps = DEPS,
+)
+
+xref(tags = ["xref"])
+
+dialyze(
+ dialyzer_opts = RABBITMQ_DIALYZER_OPTS,
+ plt = "//:base_plt",
+ tags = ["dialyze"],
+)
+
+broker_for_integration_suites()
+
+erlc(
+ name = "rabbit_auth_backend_oauth2_test_util",
+ testonly = True,
+ srcs = [
+ "test/rabbit_auth_backend_oauth2_test_util.erl",
+ ],
+ dest = "test",
+ erlc_opts = RABBITMQ_TEST_ERLC_OPTS,
+)
+
+PACKAGE = "deps/rabbitmq_auth_backend_oauth2"
+
+suites = [
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "add_uaa_key_command_SUITE",
+ size = "small",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "config_schema_SUITE",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "jwks_SUITE",
+ additional_beam = [
+ ":rabbit_auth_backend_oauth2_test_util",
+ ],
+ additional_srcs = [
+ "test/jwks_http_app.erl",
+ "test/jwks_http_handler.erl",
+ "test/jwks_http_sup.erl",
+ ],
+ runtime_deps = [
+ "@cowboy//:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_suite(
+ name = "scope_SUITE",
+ size = "medium",
+ deps = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "system_SUITE",
+ size = "medium",
+ additional_beam = [
+ ":rabbit_auth_backend_oauth2_test_util",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "unit_SUITE",
+ size = "medium",
+ additional_beam = [
+ ":rabbit_auth_backend_oauth2_test_util",
+ ],
+ ),
+ rabbitmq_suite(
+ name = "wildcard_match_SUITE",
+ size = "small",
+ ),
+]
+
+assert_suites(
+ suites,
+ glob(["test/**/*_SUITE.erl"]),
+)
diff --git a/deps/rabbitmq_auth_backend_oauth2/Makefile b/deps/rabbitmq_auth_backend_oauth2/Makefile
index a80e608da2..7824e3d657 100644
--- a/deps/rabbitmq_auth_backend_oauth2/Makefile
+++ b/deps/rabbitmq_auth_backend_oauth2/Makefile
@@ -2,13 +2,14 @@ PROJECT = rabbitmq_auth_backend_oauth2
PROJECT_DESCRIPTION = OAuth 2 and JWT-based AuthN and AuthZ backend
BUILD_DEPS = rabbit_common
-DEPS = rabbit cowlib jose
+DEPS = rabbit cowlib jose base64url
TEST_DEPS = cowboy rabbitmq_web_dispatch rabbitmq_ct_helpers rabbitmq_ct_client_helpers amqp_client
DEP_EARLY_PLUGINS = rabbit_common/mk/rabbitmq-early-plugin.mk
DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
-dep_jose = hex 1.10.1
+dep_jose = git https://github.com/potatosalad/erlang-jose 2b1d66b
+dep_base64url = hex 1.0.1
# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be
# reviewed and merged.
@@ -16,5 +17,5 @@ dep_jose = hex 1.10.1
ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
ERLANG_MK_COMMIT = rabbitmq-tmp
-include rabbitmq-components.mk
-include erlang.mk
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
diff --git a/deps/rabbitmq_auth_backend_oauth2/README.md b/deps/rabbitmq_auth_backend_oauth2/README.md
index d98d259779..b923a47fb6 100644
--- a/deps/rabbitmq_auth_backend_oauth2/README.md
+++ b/deps/rabbitmq_auth_backend_oauth2/README.md
@@ -38,11 +38,12 @@ To use this plugin
1. UAA should be configured to produce encrypted JWT tokens containing a set of RabbitMQ permission scopes
2. All RabbitMQ nodes must be [configured to use the `rabbit_auth_backend_oauth2` backend](https://www.rabbitmq.com/access-control.html)
3. All RabbitMQ nodes must be configure with a resource service ID (`resource_server_id`) that matches the scope prefix (e.g. `rabbitmq` in `rabbitmq.read:*/*`).
+4. The token **must** have an `aud` value that matches the `resource_server_id` value.
### Authorization Flow
1. Client authorize with OAuth 2.0 provider, requesting an `access_token` (using any grant type desired)
-2. Token scope returned by OAuth 2.0 provider must include RabbitMQ resource scopes that follow a convention used by this plugin: `configure:%2F/foo` means "configure permissions for 'foo' in vhost '/'")
+2. The token scope returned by the OAuth 2.0 provider must include RabbitMQ resource scopes that follow a convention used by this plugin: `configure:%2F/foo` means "configure permissions for 'foo' in vhost '/'" (the `scope` field can be changed using the `extra_scopes_source` setting in the **advanced.config** file).
3. Client passes the token as password when connecting to a RabbitMQ node. **The username field is ignored**.
4. The translated permissions are stored as part of the authenticated connection state and used the same
way permissions from RabbitMQ's internal database would be used.
@@ -122,6 +123,62 @@ If a symmetric key is used, the configuration will look like this:
].
```
+The key set can also be retrieved dynamically from a URL serving a [JWK Set](https://tools.ietf.org/html/rfc7517#section-5).
+In that case, the configuration will look like this:
+
+```erlang
+[
+ {rabbitmq_auth_backend_oauth2, [
+ {resource_server_id, <<"my_rabbit_server">>},
+ {key_config, [
+ {jwks_url, <<"https://my-jwt-issuer/jwks.json">>}
+ ]}
+ ]}
+].
+```
+
+NOTE: `jwks_url` takes precedence over `signing_keys` if both are provided.
+
+### Variables Configurable in rabbitmq.conf
+
+| Key | Documentation
+|------------------------------------------|-----------
+| `auth_oauth2.resource_server_id` | [The Resource Server ID](#resource-server-id-and-scope-prefixes)
+| `auth_oauth2.additional_scopes_key` | Configure the plugin to also look in other fields (maps to `additional_rabbitmq_scopes` in the old format).
+| `auth_oauth2.default_key` | ID of the default signing key.
+| `auth_oauth2.signing_keys` | Paths to signing key files.
+| `auth_oauth2.jwks_url` | The URL of the key server. According to the [JWT Specification](https://datatracker.ietf.org/doc/html/rfc7515#section-4.1.2), the key server URL must use HTTPS.
+| `auth_oauth2.https.cacertfile` | Path to a file containing PEM-encoded CA certificates. The CA certificates are used during key server [peer verification](https://rabbitmq.com/ssl.html#peer-verification).
+| `auth_oauth2.https.depth` | The maximum number of non-self-issued intermediate certificates that may follow the peer certificate in a valid [certification path](https://rabbitmq.com/ssl.html#peer-verification-depth). Default is 10.
+| `auth_oauth2.https.peer_verification` | Whether [peer verification](https://rabbitmq.com/ssl.html#peer-verification) should be enabled. Available values: `verify_none`, `verify_peer`. Default is `verify_none`. `verify_peer` is recommended: it requires some additional setup but is more secure.
+| `auth_oauth2.https.fail_if_no_peer_cert` | Used together with `auth_oauth2.https.peer_verification = verify_peer`. When set to `true`, the TLS connection is rejected if the client fails to provide a certificate. Default is `false`.
+| `auth_oauth2.https.hostname_verification`| Enable wildcard-aware hostname verification for key server. Available values: `wildcard`, `none`. Default is `none`.
+| `auth_oauth2.algorithms` | Restrict [the usable algorithms](https://github.com/potatosalad/erlang-jose#algorithm-support).
+
+For example:
+
+Configure with key files
+```
+auth_oauth2.resource_server_id = new_resource_server_id
+auth_oauth2.additional_scopes_key = my_custom_scope_key
+auth_oauth2.default_key = id1
+auth_oauth2.signing_keys.id1 = test/config_schema_SUITE_data/certs/key.pem
+auth_oauth2.signing_keys.id2 = test/config_schema_SUITE_data/certs/cert.pem
+auth_oauth2.algorithms.1 = HS256
+auth_oauth2.algorithms.2 = RS256
+```
+Configure with key server
+```
+auth_oauth2.resource_server_id = new_resource_server_id
+auth_oauth2.jwks_url = https://my-jwt-issuer/jwks.json
+auth_oauth2.https.cacertfile = test/config_schema_SUITE_data/certs/cacert.pem
+auth_oauth2.https.peer_verification = verify_peer
+auth_oauth2.https.depth = 5
+auth_oauth2.https.fail_if_no_peer_cert = true
+auth_oauth2.https.hostname_verification = wildcard
+auth_oauth2.algorithms.1 = HS256
+auth_oauth2.algorithms.2 = RS256
+```
### Resource Server ID and Scope Prefixes
OAuth 2.0 (and thus UAA-provided) tokens use scopes to communicate what set of permissions particular
@@ -175,18 +232,31 @@ be `my_rabbit.read:*/*`.
### Using a different token field for the Scope
-By default the plugin will look for the `scope` key in the token, you can configure the plugin to also look in other fields using the `additional_rabbitmq_scopes` setting.
+By default the plugin looks for the `scope` key in the token. You can configure the plugin to also look in other fields using the `extra_scopes_source` setting. The accepted value formats for that field are a scope **string** or a **list** of scopes.
+
```erlang
[
{rabbitmq_auth_backend_oauth2, [
{resource_server_id, <<"my_rabbit_server">>},
- {additional_rabbitmq_scopes, <<"my_custom_scope_key">>},
+ {extra_scopes_source, <<"my_custom_scope_key">>},
...
]}
]},
].
```
+Token sample:
+```
+{
+ "exp": 1618592626,
+ "iat": 1618578226,
+ "aud" : ["my_id"],
+ ...
+ "scope_as_string": "my_id.configure:*/* my_id.read:*/* my_id.write:*/*",
+ "scope_as_list": ["my_id.configure:*/*", "my_id.read:*/*", my_id.write:*/*"],
+ ...
+ }
+```
### Using Tokens with Clients
@@ -207,7 +277,7 @@ the `monitoring` tag will be `my_rabbit.tag:monitoring`.
## Examples
-The [demo](/demo) directory contains example configuration files which can be used to set up
+The [demo](/deps/rabbitmq_auth_backend_oauth2/demo) directory contains example configuration files which can be used to set up
a development UAA server and issue tokens, which can be used to access RabbitMQ
resources.
diff --git a/deps/rabbitmq_auth_backend_oauth2/demo/rsa_docker/rabbitmq/rabbitmq.conf b/deps/rabbitmq_auth_backend_oauth2/demo/rsa_docker/rabbitmq/rabbitmq.conf
new file mode 100644
index 0000000000..660fb29bf0
--- /dev/null
+++ b/deps/rabbitmq_auth_backend_oauth2/demo/rsa_docker/rabbitmq/rabbitmq.conf
@@ -0,0 +1,6 @@
+auth_backends.1 = oauth2
+auth_oauth2.resource_server_id = new_resource_server_id
+auth_oauth2.additional_scopes_key = my_custom_scope_key
+auth_oauth2.default_key = id1
+auth_oauth2.signing_keys.id1 = test/config_schema_SUITE_data/certs/key.pem
+auth_oauth2.signing_keys.id2 = test/config_schema_SUITE_data/certs/cert.pem
diff --git a/deps/rabbitmq_auth_backend_oauth2/erlang.mk b/deps/rabbitmq_auth_backend_oauth2/erlang.mk
deleted file mode 100644
index fce4be0b0a..0000000000
--- a/deps/rabbitmq_auth_backend_oauth2/erlang.mk
+++ /dev/null
@@ -1,7808 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
-ERLANG_MK_WITHOUT =
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
-
-# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
-
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
-
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / OpenStack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simplify writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating Github-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = Erlang library for JSON Web Tokens
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple, non-intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is a pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's SpiderMonkey JavaScript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = redis erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += esh_mk
-pkg_esh_mk_name = esh_mk
-pkg_esh_mk_description = esh template engine plugin for erlang.mk
-pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
-pkg_esh_mk_fetch = git
-pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
-pkg_esh_mk_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = TOML language erlang parser
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = http://fixprotocol.org/ implementation.
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - an Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = The guards and for Erlang parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
-pkg_gun_homepage = http://ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang irc client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding and decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library for efficiently decoding and encoding JSON, written in C.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = erlang driver for rethinkdb
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = library to handle mimetypes
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang Oauth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transformer for use new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between record and proplist
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = automatic Erlang node discovery via redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = pipelined erlang redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang Rest Client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy as a NIF for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an ID generator for message services.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elastic Search
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX style broken HTML parser in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = transit format for erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU based collation Erlang module
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtension for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = a simple erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler svn repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired by rebar's xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based yaml loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = ZAB protocol implemented in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang.
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
-
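A minimal usage sketch for the search target above, assuming a project that includes this erlang.mk (the query string "http" is illustrative, not from the source):

    # Case-insensitively match package names and descriptions:
    make search q=http
    # With q undefined, the else branch prints every entry in the index:
    make search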
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
-
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
-
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# They both use the core_dep_plugin macro.
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
-
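A hedged configuration sketch for DEP_EARLY_PLUGINS, set in the top-level project Makefile (the plugin and file names are illustrative):

    # Loads $(DEPS_DIR)/mk_plugins/early-plugins.mk before the rest of the build:
    DEP_EARLY_PLUGINS = mk_plugins
    # A path form names an explicit file inside that dependency instead:
    DEP_EARLY_PLUGINS += mk_plugins/mk/early.mk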
-# Query functions.
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
-
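A hedged sketch of how these query functions resolve a declared dependency (the cowlib declaration, URL and version are illustrative):

    # Given: dep_cowlib = git https://github.com/ninenines/cowlib 2.12.1
    # $(call query_fetch_method,cowlib) -> git
    # $(call query_repo,cowlib)         -> https://github.com/ninenines/cowlib
    # $(call query_version,cowlib)      -> 2.12.1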
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
-
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly we don't want to include it here
-# otherwise it'll be treated both as an apps and a top-level project.
-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
-
-export NO_AUTOPATCH
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
-
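A hedged note on the V switch used above: V=0 (typically the default) prints the terse " DEP" lines, V=2 enables shell tracing via set -x, and any other value (e.g. V=1) leaves recipes echoed normally because dep_verbose_1 is undefined:

    make V=2 deps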
-# Optimization: don't recompile deps unless truly necessary.
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create ebin directory for all apps to make sure Erlang recognizes them
-# as proper OTP applications when using -include_lib. This is a temporary
-# fix, a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
-
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies after they were compiled
-# once. If a developer is working on the top-level project and some
-# dependencies at the same time, he may want to change this behavior.
-# There are two solutions:
-# 1. Set `FULL=1` so that all dependencies are visited and
-# recursively recompiled if necessary.
-# 2. Set `FORCE_REBUILD=` to the specific list of dependencies that
-# should be recompiled (instead of the whole set).
-
-FORCE_REBUILD ?=
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
-
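A hedged usage sketch for the two rebuild switches described in the comment above (the dependency names are illustrative):

    # Revisit every dependency and recompile the ones that need it:
    make FULL=1
    # Recompile only the listed dependencies:
    make FORCE_REBUILD="cowlib ranch"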
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
-
-# Deps related targets.
-
-# @todo rename GNUmakefile and makefile into Makefile first, if they exist
-# While Makefile file could be GNUmakefile or makefile,
-# in practice only Makefile is needed so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
-# if given. Do it for all 3 possible Makefile file names.
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
-
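A hedged usage note: the guard above compares NO_AUTOPATCH_ERLANG_MK with the empty string, so assigning it any non-empty value in the project Makefile selects the no-op variant and leaves each dependency's "include erlang.mk" line untouched:

    NO_AUTOPATCH_ERLANG_MK = 1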
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-# Hex only has a package version. No need to look in the Erlang.mk packages.
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
-
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility purposes with older Erlang.mk configuration.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
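# Editor's note: a minimal sketch of the dependency declarations that the
# dep_fetch_* helpers above operate on; the names, URL and versions are
# hypothetical and shown for illustration only.
#
#   DEPS = cowboy jsx
#   dep_cowboy = git https://github.com/ninenines/cowboy 2.9.0
#   dep_jsx = hex 3.1.0
#
# The first word selects the fetch method (git, hg, svn, cp, ln, hex, ...);
# the remaining words are read back by dep_repo/dep_commit or, for hex, as
# the package version and optional package name.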
-
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
-
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- Output0 = [
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ],
- Output = case "é" of
- [233] -> unicode:characters_to_binary(Output0);
- _ -> Output0
- end,
- ok = file:write_file("$(1)", Output),
- halt()
-endef
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
- echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
-
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
- @:
-
-test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
-test_erlc_verbose_2 = set -x;
-test_erlc_verbose = $(test_erlc_verbose_$(V))
-
-define compile_test_erl
- $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(1)
-endef
-
-ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
-$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
- $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want to fail due to
-# warnings when used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
- " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
- " list-templates List available templates"
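# Editor's note: illustrative invocations of the bootstrap targets listed
# above, using hypothetical application and module names:
#
#   make bootstrap                     # OTP application skeleton in the current directory
#   make new-lib in=my_utils           # library application under $(APPS_DIR)/my_utils
#   make new t=gen_server n=my_server  # renders tpl_gen_server into src/my_server.erl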
-
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
-
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
-
-list-templates:
- $(verbose) @echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
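# Editor's note: with the defaults above, a project with a c_src/ directory
# builds a shared object at priv/$(PROJECT)$(C_SRC_OUTPUT_SHARED_EXTENSION).
# A hypothetical project shipping a standalone port program instead could
# override, for example:
#
#   C_SRC_TYPE = executable
#   C_SRC_OUTPUT = $(CURDIR)/priv/my_port_program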
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code. The "gcc" MSYS2 package also doesn't.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
- "The CI_OTP variable must be defined with the Erlang versions" \
- "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
-
-# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifdef CONCUERROR_TESTS
-
-.PHONY: concuerror distclean-concuerror
-
-# Configuration
-
-CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
-CONCUERROR_OPTS ?=
-
-# Core targets.
-
-check:: concuerror
-
-ifndef KEEP_LOGS
-distclean:: distclean-concuerror
-endif
-
-# Plugin-specific targets.
-
-$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
- $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
- $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
-
-$(CONCUERROR_LOGS_DIR):
- $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
-
-define concuerror_html_report
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="utf-8">
-<title>Concuerror HTML report</title>
-</head>
-<body>
-<h1>Concuerror HTML report</h1>
-<p>Generated on $(concuerror_date)</p>
-<ul>
-$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
-</ul>
-</body>
-</html>
-endef
-
-concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
- $(eval concuerror_date := $(shell date))
- $(eval concuerror_targets := $^)
- $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
-
-define concuerror_target
-.PHONY: concuerror-$1-$2
-
-concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
- $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
- --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
- -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
- $$(CONCUERROR_OPTS) -m $1 -t $2
-endef
-
-$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
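# Editor's note: CONCUERROR_TESTS feeds the concuerror_target macro above as
# module:test pairs; the names below are hypothetical:
#
#   CONCUERROR_TESTS = my_module:my_test other_module:another_test
#
# Each pair becomes a concuerror-<module>-<test> target whose output lands in
# $(CONCUERROR_LOGS_DIR)/concuerror-<module>-<test>.txt and is linked from the
# generated HTML report.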
-
-distclean-concuerror:
- $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
-
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ct apps-ct distclean-ct
-
-# Configuration.
-
-CT_OPTS ?=
-
-ifneq ($(wildcard $(TEST_DIR)),)
-ifndef CT_SUITES
-CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
-endif
-endif
-CT_SUITES ?=
-CT_LOGS_DIR ?= $(CURDIR)/logs
-
-# Core targets.
-
-tests:: ct
-
-ifndef KEEP_LOGS
-distclean:: distclean-ct
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Common_test targets:" \
- " ct Run all the common_test suites for this project" \
- "" \
- "All your common_test suites have their associated targets." \
-	"A suite named http_SUITE can be run using the ct-http target."
-
-# Plugin-specific targets.
-
-CT_RUN = ct_run \
- -no_auto_compile \
- -noinput \
- -pa $(CURDIR)/ebin $(TEST_DIR) \
- -dir $(TEST_DIR) \
- -logdir $(CT_LOGS_DIR)
-
-ifeq ($(CT_SUITES),)
-ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
-else
-# We do not run tests if we are in an apps/* with no test directory.
-ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
-ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
-endif
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-define ct_app_target
-apps-ct-$1: test-build
- $$(MAKE) -C $1 ct IS_APP=1
-endef
-
-$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
-
-apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
-endif
-
-ifdef t
-ifeq (,$(findstring :,$t))
-CT_EXTRA = -group $t
-else
-t_words = $(subst :, ,$t)
-CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
-endif
-else
-ifdef c
-CT_EXTRA = -case $c
-else
-CT_EXTRA =
-endif
-endif
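# Editor's note: illustrative use of the t= and c= shortcuts handled above,
# combined with the per-suite targets generated below; group and case names
# are hypothetical:
#
#   make ct-http t=slow_tests                 # adds -group slow_tests
#   make ct-http t=slow_tests:large_response  # adds -group slow_tests -case large_response
#   make ct-http c=large_response             # adds -case large_response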
-
-define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
-endef
-
-$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
-
-distclean-ct:
- $(gen_verbose) rm -rf $(CT_LOGS_DIR)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: plt distclean-plt dialyze
-
-# Configuration.
-
-DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
-export DIALYZER_PLT
-
-PLT_APPS ?=
-DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-DIALYZER_PLT_OPTS ?=
-
-# Core targets.
-
-check:: dialyze
-
-distclean:: distclean-plt
-
-help::
- $(verbose) printf "%s\n" "" \
- "Dialyzer targets:" \
- " plt Build a PLT file for this project" \
- " dialyze Analyze the project using Dialyzer"
-
-# Plugin-specific targets.
-
-define filter_opts.erl
- Opts = init:get_plain_arguments(),
- {Filtered, _} = lists:foldl(fun
- (O, {Os, true}) -> {[O|Os], false};
- (O = "-D", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-I", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-pa", {Os, _}) -> {[O|Os], true};
- (_, Acc) -> Acc
- end, {[], false}, Opts),
- io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
- halt().
-endef
-
-# DIALYZER_PLT is a variable understood directly by Dialyzer.
-#
-# We append the path to erts at the end of the PLT. This works
-# because the PLT file is in the external term format and the
-# function binary_to_term/1 ignores any trailing data.
-$(DIALYZER_PLT): deps app
- $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
- while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
- $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
- erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
- $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
-
-plt: $(DIALYZER_PLT)
-
-distclean-plt:
- $(gen_verbose) rm -f $(DIALYZER_PLT)
-
-ifneq ($(wildcard $(DIALYZER_PLT)),)
-dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
- $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
- grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
- rm $(DIALYZER_PLT); \
- $(MAKE) plt; \
- fi
-else
-dialyze: $(DIALYZER_PLT)
-endif
- $(verbose) dialyzer --no_native `$(ERL) \
- -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
- -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
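# Editor's note: a minimal sketch of how the Dialyzer plugin above is usually
# driven from a project Makefile; the extra PLT applications are hypothetical:
#
#   PLT_APPS = crypto public_key ssl
#
# "make plt" builds $(DIALYZER_PLT) once; "make dialyze" reuses it and forces a
# rebuild only when the erts path recorded at the end of the PLT no longer
# matches the current Erlang installation.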
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
- " escript Build an executable escript archive" \
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
- $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: eunit apps-eunit
-
-# Configuration
-
-EUNIT_OPTS ?=
-EUNIT_ERL_OPTS ?=
-
-# Core targets.
-
-tests:: eunit
-
-help::
- $(verbose) printf "%s\n" "" \
- "EUnit targets:" \
- " eunit Run all the EUnit tests for this project"
-
-# Plugin-specific targets.
-
-define eunit.erl
- $(call cover.erl)
- CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
- ok -> ok;
- error -> halt(2)
- end,
- CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
- halt()
-endef
-
-EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
-else
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
-endif
-else
-EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
-EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
-
-EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
- $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
-
-eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
-ifneq ($(wildcard src/ $(TEST_DIR)),)
- $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-apps-eunit: test-build
- $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
- [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
- exit $$eunit_retcode
-endif
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper)
-.PHONY: proper
-
-# Targets.
-
-tests:: proper
-
-define proper_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- Module = fun(M) ->
- [true] =:= lists:usort([
- case atom_to_list(F) of
- "prop_" ++ _ ->
- io:format("Testing ~p:~p/0~n", [M, F]),
- proper:quickcheck(M:F(), nocolors);
- _ ->
- true
- end
- || {F, 0} <- M:module_info(exports)])
- end,
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]);
- module -> Module($(2));
- function -> proper:quickcheck($(2), nocolors)
- end,
- CoverExport("$(COVER_DATA_DIR)/proper.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-proper: test-build cover-data-dir
- $(verbose) $(call erlang,$(call proper_check.erl,module,$(t)))
-else
-proper: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)()))
-endif
-else
-proper: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Verbosity.
-
-proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
-proto_verbose = $(proto_verbose_$(V))
-
-# Core targets.
-
-ifneq ($(wildcard src/),)
-ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
-PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
-ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
-
-ifeq ($(PROTO_FILES),)
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
- $(verbose) :
-else
-# Rebuild proto files when the Makefile changes.
-# We exclude $(PROJECT).d to avoid a circular dependency.
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(PROTO_FILES); \
- fi
- $(verbose) touch $@
-
-$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
-endif
-
-ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
-define compile_proto.erl
- [begin
- protobuffs_compile:generate_source(F, [
- {output_include_dir, "./include"},
- {output_src_dir, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-else
-define compile_proto.erl
- [begin
- gpb_compile:file(F, [
- {include_as_lib, true},
- {module_name_suffix, "_pb"},
- {o_hrl, "./include"},
- {o_erl, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-endif
-
-ifneq ($(PROTO_FILES),)
-$(PROJECT).d:: $(PROTO_FILES)
- $(verbose) mkdir -p ebin/ include/
- $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
-endif
-endif
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
-
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
- " shell Run an erlang shell with SHELL_OPTS or reasonable default"
-
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
-
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
- "ReST sources and 'conf.py' file are expected in directory pointed by" \
- "SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists formats to build (only" \
- "'html' format is generated by default); target directory can be specified by" \
- 'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
- "Additional Sphinx options can be set in SPHINX_OPTS."
-
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
-
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
-.PHONY: triq
-
-# Targets.
-
-tests:: triq
-
-define triq_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
- module -> triq:check($(2));
- function -> triq:check($(2))
- end,
- CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-triq: test-build cover-data-dir
- $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
-else
-triq: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
-endif
-else
-triq: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
- ' xref Run Xrefr using $$XREF_CONFIG as config file if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-COVER_REPORT_DIR ?= cover
-COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
-
-ifdef COVER
-COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
-COVER_DEPS ?=
-endif
-
-# Code coverage for Common Test.
-
-ifdef COVER
-ifdef CT_RUN
-ifneq ($(wildcard $(TEST_DIR)),)
-test-build:: $(TEST_DIR)/ct.cover.spec
-
-$(TEST_DIR)/ct.cover.spec: cover-data-dir
- $(gen_verbose) printf "%s\n" \
- "{incl_app, '$(PROJECT)', details}." \
- "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
- $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
- $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
-
-CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
-endif
-endif
-endif
-
-# Code coverage for other tools.
-
-ifdef COVER
-define cover.erl
- CoverSetup = fun() ->
- Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
- $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
- $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
- end
- end || Dir <- Dirs]
- end,
- CoverExport = fun(Filename) -> cover:export(Filename) end,
-endef
-else
-define cover.erl
- CoverSetup = fun() -> ok end,
- CoverExport = fun(_) -> ok end,
-endef
-endif
-
-# Core targets
-
-ifdef COVER
-ifneq ($(COVER_REPORT_DIR),)
-tests::
- $(verbose) $(MAKE) --no-print-directory cover-report
-endif
-
-cover-data-dir: | $(COVER_DATA_DIR)
-
-$(COVER_DATA_DIR):
- $(verbose) mkdir -p $(COVER_DATA_DIR)
-else
-cover-data-dir:
-endif
-
-clean:: coverdata-clean
-
-ifneq ($(COVER_REPORT_DIR),)
-distclean:: cover-report-clean
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Cover targets:" \
- " cover-report Generate a HTML coverage report from previously collected" \
- " cover data." \
- " all.coverdata Merge all coverdata files into all.coverdata." \
- "" \
- "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
- "target tests additionally generates a HTML coverage report from the combined" \
- "coverdata files from each of these testing tools. HTML reports can be disabled" \
- "by setting COVER_REPORT_DIR to empty."
-
-# Plugin specific targets
-
-COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
-
-.PHONY: coverdata-clean
-coverdata-clean:
- $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
-
-# Merge all coverdata files into one.
-define cover_export.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
-endef
-
-all.coverdata: $(COVERDATA) cover-data-dir
- $(gen_verbose) $(call erlang,$(cover_export.erl))
-
-# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
-# empty if you want the coverdata files but not the HTML report.
-ifneq ($(COVER_REPORT_DIR),)
-
-.PHONY: cover-report-clean cover-report
-
-cover-report-clean:
- $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
-ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
- $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
-endif
-
-ifeq ($(COVERDATA),)
-cover-report:
-else
-
-# Modules which include eunit.hrl always contain one line without coverage
-# because eunit defines test/0 which is never called. We compensate for this.
-EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
- grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
- | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
-
-define cover_report.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- Ms = cover:imported_modules(),
- [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
- ++ ".COVER.html", [html]) || M <- Ms],
- Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
- EunitHrlMods = [$(EUNIT_HRL_MODS)],
- Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
- true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
- TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
- TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
- Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
- TotalPerc = Perc(TotalY, TotalN),
- {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
- io:format(F, "<!DOCTYPE html><html>~n"
- "<head><meta charset=\"UTF-8\">~n"
- "<title>Coverage report</title></head>~n"
- "<body>~n", []),
- io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
- io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
- [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
- "<td>~p%</td></tr>~n",
- [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
- How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
- Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
- io:format(F, "</table>~n"
- "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
- "</body></html>", [How, Date]),
- halt().
-endef
-
-cover-report:
- $(verbose) mkdir -p $(COVER_REPORT_DIR)
- $(gen_verbose) $(call erlang,$(cover_report.erl))
-
-endif
-endif # ifneq ($(COVER_REPORT_DIR),)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
-
-endif
-endif
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
-
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
-
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are also included no
-# matter the type of requested dependencies.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
-
-# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of
-# dependencies with a single target.
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
-
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
diff --git a/deps/rabbitmq_auth_backend_oauth2/priv/schema/rabbitmq_auth_backend_oauth2.schema b/deps/rabbitmq_auth_backend_oauth2/priv/schema/rabbitmq_auth_backend_oauth2.schema
new file mode 100644
index 0000000000..1c8593e434
--- /dev/null
+++ b/deps/rabbitmq_auth_backend_oauth2/priv/schema/rabbitmq_auth_backend_oauth2.schema
@@ -0,0 +1,128 @@
+%% ----------------------------------------------------------------------------
+%% RabbitMQ OAuth2 Plugin
+%%
+%% See https://github.com/rabbitmq/rabbitmq-server/blob/master/deps/rabbitmq_auth_backend_oauth2/ for details.
+%%
+%% ----------------------------------------------------------------------------
+
+%% A prefix used for scopes in UAA to avoid scope collisions (or unintended overlap). It is an empty string by default.
+%%
+%% {resource_server_id, <<"my_rabbit_server">>},
+
+{mapping,
+ "auth_oauth2.resource_server_id",
+ "rabbitmq_auth_backend_oauth2.resource_server_id",
+ [{datatype, string}]}.
+
+{translation,
+ "rabbitmq_auth_backend_oauth2.resource_server_id",
+ fun(Conf) -> list_to_binary(cuttlefish:conf_get("auth_oauth2.resource_server_id", Conf))
+ end}.
+
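+%% For illustration, the mapping above corresponds to a new-style
+%% rabbitmq.conf line roughly like the following (the value is just a
+%% placeholder, not a default):
+%%
+%%   auth_oauth2.resource_server_id = my_rabbit_server
+%%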
+%% Configure the plugin to also look in other fields using additional_scopes_key (maps to additional_rabbitmq_scopes in the old format)
+%%
+%% {additional_rabbitmq_scopes, <<"my_custom_scope_key">>},
+
+{mapping,
+ "auth_oauth2.additional_scopes_key",
+ "rabbitmq_auth_backend_oauth2.additional_rabbitmq_scopes",
+ [{datatype, string}]}.
+
+{translation,
+ "rabbitmq_auth_backend_oauth2.additional_rabbitmq_scopes",
+ fun(Conf) ->
+ list_to_binary(cuttlefish:conf_get("auth_oauth2.additional_scopes_key", Conf))
+ end}.
+
+%% ID of the default signing key
+%%
+%% {default_key, <<"key-1">>},
+
+{mapping,
+ "auth_oauth2.default_key",
+ "rabbitmq_auth_backend_oauth2.key_config.default_key",
+ [{datatype, string}]}.
+
+{translation,
+ "rabbitmq_auth_backend_oauth2.key_config.default_key",
+ fun(Conf) -> list_to_binary(cuttlefish:conf_get("auth_oauth2.default_key", Conf)) end}.
+
+%% A map of signing keys
+%%
+%% {signing_keys, #{<<"id1">> => {pem, <<"value1">>}, <<"id2">> => {pem, <<"value2">>}}}
+%% Note: a validator does not work for this mapping.
+
+{mapping,
+ "auth_oauth2.signing_keys.$name",
+ "rabbitmq_auth_backend_oauth2.key_config.signing_keys",
+ [{datatype, file}, {validators, ["file_accessible"]}]}.
+
+{translation,
+ "rabbitmq_auth_backend_oauth2.key_config.signing_keys",
+ fun(Conf) ->
+ Settings = cuttlefish_variable:filter_by_prefix("auth_oauth2.signing_keys", Conf),
+ TryReadingFileFun =
+ fun(Path) ->
+ case file:read_file(Path) of
+ {ok, Bin} ->
+ string:trim(Bin, trailing, "\n");
+ _ ->
+          %% this throws and makes Cuttlefish treat the key as invalid
+ cuttlefish:invalid("file does not exist or cannot be read by the node")
+ end
+ end,
+ SigningKeys =
+ lists:map(fun({Id, Path}) ->
+ {list_to_binary(lists:last(Id)), {pem, TryReadingFileFun(Path)}}
+ end, Settings),
+ maps:from_list(SigningKeys)
+ end}.
+
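+%% For illustration, with the mapping above each signing key is referenced
+%% by a readable file path in rabbitmq.conf; the key id and path below are
+%% placeholders:
+%%
+%%   auth_oauth2.signing_keys.id1 = /path/to/signing-key-1.pem
+%%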
+{mapping,
+ "auth_oauth2.jwks_url",
+ "rabbitmq_auth_backend_oauth2.key_config.jwks_url",
+ [{datatype, string}, {validators, ["uri", "https_uri"]}]}.
+
+{mapping,
+ "auth_oauth2.https.peer_verification",
+ "rabbitmq_auth_backend_oauth2.key_config.peer_verification",
+ [{datatype, {enum, [verify_peer, verify_none]}}]}.
+
+{mapping,
+ "auth_oauth2.https.cacertfile",
+ "rabbitmq_auth_backend_oauth2.key_config.cacertfile",
+ [{datatype, file}, {validators, ["file_accessible"]}]}.
+
+{mapping,
+ "auth_oauth2.https.depth",
+ "rabbitmq_auth_backend_oauth2.key_config.depth",
+ [{datatype, integer}]}.
+
+{mapping,
+ "auth_oauth2.https.hostname_verification",
+ "rabbitmq_auth_backend_oauth2.key_config.hostname_verification",
+ [{datatype, {enum, [wildcard, none]}}]}.
+
+{mapping,
+ "auth_oauth2.https.crl_check",
+ "rabbitmq_auth_backend_oauth2.key_config.crl_check",
+ [{datatype, {enum, [true, false, peer, best_effort]}}]}.
+
+{mapping,
+ "auth_oauth2.https.fail_if_no_peer_cert",
+ "rabbitmq_auth_backend_oauth2.key_config.fail_if_no_peer_cert",
+ [{datatype, {enum, [true, false]}}]}.
+
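+%% For illustration, a JWKS endpoint with TLS peer verification could be
+%% configured roughly as follows (the URL and CA file path are placeholders):
+%%
+%%   auth_oauth2.jwks_url = https://my-uaa.example/token_keys
+%%   auth_oauth2.https.cacertfile = /path/to/ca_certificate.pem
+%%   auth_oauth2.https.peer_verification = verify_peer
+%%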
+{validator, "https_uri", "According to the JWT Specification, Key Server URL must be https.",
+ fun(Uri) -> string:nth_lexeme(Uri, 1, "://") == "https" end}.
+
+{mapping,
+ "auth_oauth2.algorithms.$algorithm",
+ "rabbitmq_auth_backend_oauth2.key_config.algorithms",
+ [{datatype, string}]}.
+
+{translation, "rabbitmq_auth_backend_oauth2.key_config.algorithms",
+ fun(Conf) ->
+ Settings = cuttlefish_variable:filter_by_prefix("auth_oauth2.algorithms", Conf),
+ [list_to_binary(V) || {_, V} <- Settings]
+ end}.
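+%%
+%% For illustration, the accepted signature algorithms can be restricted
+%% with entries roughly like the following (algorithm names are examples):
+%%
+%%   auth_oauth2.algorithms.1 = HS256
+%%   auth_oauth2.algorithms.2 = RS256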
diff --git a/deps/rabbitmq_auth_backend_oauth2/rabbitmq-components.mk b/deps/rabbitmq_auth_backend_oauth2/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/rabbitmq_auth_backend_oauth2/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Define default goal to `all` because this file defines some targets
-# before the inclusion of erlang.mk leading to the wrong target becoming
-# the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
-
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to checkout branches which match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch or fallback to `stable` or `master` whichever was the
-# base of the topic branch.
-
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
-
-# Third-party dependencies version pinning.
-#
-# We do that in this file, which is copied in all projects, to ensure
-# all projects use the same versions. It avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# Erlang.mk does not rebuild dependencies by default, once they were
-# compiled once, except for those listed in the `$(FORCE_REBUILD)`
-# variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this eases
-# the work on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project
-# repository URL, if it's a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name is replaced by the
-# target's properties:
-# eg. rabbitmq-common is replaced by rabbitmq-codegen
-# eg. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fallback to RabbitMQ
-# upstream which is GitHub.
-
-# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following pattern:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
-
-# Macro to replace both the project's name (eg. "rabbit_common") and
-# repository name (eg. "rabbitmq-common") by the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespaces in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level's one.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is a coincidence that we found a
-# `rabbitmq-components.mk` in upper directories.
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
diff --git a/deps/rabbitmq_auth_backend_oauth2/src/Elixir.RabbitMQ.CLI.Ctl.Commands.AddUaaKeyCommand.erl b/deps/rabbitmq_auth_backend_oauth2/src/Elixir.RabbitMQ.CLI.Ctl.Commands.AddUaaKeyCommand.erl
index 6571ba9c59..6b78f02340 100644
--- a/deps/rabbitmq_auth_backend_oauth2/src/Elixir.RabbitMQ.CLI.Ctl.Commands.AddUaaKeyCommand.erl
+++ b/deps/rabbitmq_auth_backend_oauth2/src/Elixir.RabbitMQ.CLI.Ctl.Commands.AddUaaKeyCommand.erl
@@ -2,12 +2,16 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module('Elixir.RabbitMQ.CLI.Ctl.Commands.AddUaaKeyCommand').
-behaviour('Elixir.RabbitMQ.CLI.CommandBehaviour').
+-ignore_xref([
+ {'Elixir.RabbitMQ.CLI.DefaultOutput', output, 1}
+]).
+
-export([
usage/0,
validate/2,
diff --git a/deps/rabbitmq_auth_backend_oauth2/src/rabbit_auth_backend_oauth2.erl b/deps/rabbitmq_auth_backend_oauth2/src/rabbit_auth_backend_oauth2.erl
index e1a99ab7ea..306c7f2cc8 100644
--- a/deps/rabbitmq_auth_backend_oauth2/src/rabbit_auth_backend_oauth2.erl
+++ b/deps/rabbitmq_auth_backend_oauth2/src/rabbit_auth_backend_oauth2.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_auth_backend_oauth2).
@@ -41,7 +41,7 @@ description() ->
user_login_authentication(Username, AuthProps) ->
case authenticate(Username, AuthProps) of
{refused, Msg, Args} = AuthResult ->
- rabbit_log:debug(Msg ++ "~n", Args),
+ rabbit_log:debug(Msg, Args),
AuthResult;
_ = AuthResult ->
AuthResult
diff --git a/deps/rabbitmq_auth_backend_oauth2/src/rabbit_auth_backend_oauth2_app.erl b/deps/rabbitmq_auth_backend_oauth2/src/rabbit_auth_backend_oauth2_app.erl
index e2b1d23131..dd4c88c800 100644
--- a/deps/rabbitmq_auth_backend_oauth2/src/rabbit_auth_backend_oauth2_app.erl
+++ b/deps/rabbitmq_auth_backend_oauth2/src/rabbit_auth_backend_oauth2_app.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_auth_backend_oauth2_app).
diff --git a/deps/rabbitmq_auth_backend_oauth2/src/rabbit_oauth2_scope.erl b/deps/rabbitmq_auth_backend_oauth2/src/rabbit_oauth2_scope.erl
index 2ebf6c3c52..ec2c45290f 100644
--- a/deps/rabbitmq_auth_backend_oauth2/src/rabbit_oauth2_scope.erl
+++ b/deps/rabbitmq_auth_backend_oauth2/src/rabbit_oauth2_scope.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_oauth2_scope).
diff --git a/deps/rabbitmq_auth_backend_oauth2/src/uaa_jwks.erl b/deps/rabbitmq_auth_backend_oauth2/src/uaa_jwks.erl
new file mode 100644
index 0000000000..d34d9d5d99
--- /dev/null
+++ b/deps/rabbitmq_auth_backend_oauth2/src/uaa_jwks.erl
@@ -0,0 +1,27 @@
+-module(uaa_jwks).
+-export([get/1]).
+
+-spec get(string() | binary()) -> {ok, term()} | {error, term()}.
+get(JwksUrl) ->
+ httpc:request(get, {JwksUrl, []}, [{ssl, ssl_options()}, {timeout, 60000}], []).
+
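+%% Build the ssl options for the JWKS HTTP request from the plugin's
+%% key_config: peer verification mode, CA certificate file, verification
+%% depth, CRL checking and, optionally, wildcard hostname verification.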
+-spec ssl_options() -> list().
+ssl_options() ->
+ UaaEnv = application:get_env(rabbitmq_auth_backend_oauth2, key_config, []),
+ PeerVerification = proplists:get_value(peer_verification, UaaEnv, verify_none),
+ CaCertFile = proplists:get_value(cacertfile, UaaEnv),
+ Depth = proplists:get_value(depth, UaaEnv, 10),
+ FailIfNoPeerCert = proplists:get_value(fail_if_no_peer_cert, UaaEnv, false),
+ CrlCheck = proplists:get_value(crl_check, UaaEnv, false),
+ SslOpts0 = [{verify, PeerVerification},
+ {cacertfile, CaCertFile},
+ {depth, Depth},
+ {fail_if_no_peer_cert, FailIfNoPeerCert},
+ {crl_check, CrlCheck},
+ {crl_cache, {ssl_crl_cache, {internal, [{http, 10000}]}}}],
+ case proplists:get_value(hostname_verification, UaaEnv, none) of
+ wildcard ->
+ [{customize_hostname_check, [{match_fun, public_key:pkix_verify_hostname_match_fun(https)}]} | SslOpts0];
+ none ->
+ SslOpts0
+ end. \ No newline at end of file
diff --git a/deps/rabbitmq_auth_backend_oauth2/src/uaa_jwt.erl b/deps/rabbitmq_auth_backend_oauth2/src/uaa_jwt.erl
index e8d59f5670..6721bbf5e7 100644
--- a/deps/rabbitmq_auth_backend_oauth2/src/uaa_jwt.erl
+++ b/deps/rabbitmq_auth_backend_oauth2/src/uaa_jwt.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(uaa_jwt).
@@ -51,6 +51,23 @@ update_uaa_jwt_signing_keys(UaaEnv0, SigningKeys) ->
UaaEnv2 = [{signing_keys, SigningKeys} | UaaEnv1],
application:set_env(?APP, key_config, UaaEnv2).
+-spec update_jwks_signing_keys() -> ok | {error, term()}.
+update_jwks_signing_keys() ->
+ UaaEnv = application:get_env(?APP, key_config, []),
+ case proplists:get_value(jwks_url, UaaEnv) of
+ undefined ->
+ {error, no_jwks_url};
+ JwksUrl ->
+ case uaa_jwks:get(JwksUrl) of
+ {ok, {_, _, JwksBody}} ->
+ KeyList = maps:get(<<"keys">>, jose:decode(erlang:iolist_to_binary(JwksBody)), []),
+ Keys = maps:from_list(lists:map(fun(Key) -> {maps:get(<<"kid">>, Key, undefined), {json, Key}} end, KeyList)),
+ update_uaa_jwt_signing_keys(UaaEnv, Keys);
+ {error, _} = Err ->
+ Err
+ end
+ end.
+
-spec decode_and_verify(binary()) -> {boolean(), map()} | {error, term()}.
decode_and_verify(Token) ->
case uaa_jwt_jwt:get_key_id(Token) of
@@ -67,21 +84,33 @@ decode_and_verify(Token) ->
-spec get_jwk(binary()) -> {ok, map()} | {error, term()}.
get_jwk(KeyId) ->
- case signing_keys() of
- undefined -> {error, signing_keys_not_configured};
- Keys ->
- case maps:get(KeyId, Keys, undefined) of
- undefined ->
- {error, key_not_found};
- {Type, Value} ->
- case Type of
- json -> uaa_jwt_jwk:make_jwk(Value);
- pem -> uaa_jwt_jwk:from_pem(Value);
- pem_file -> uaa_jwt_jwk:from_pem_file(Value);
- map -> uaa_jwt_jwk:make_jwk(Value);
- _ -> {error, unknown_signing_key_type}
- end
- end
+ get_jwk(KeyId, true).
+
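+%% The boolean flag allows at most one JWKS refresh per lookup: on a cache
+%% miss the signing keys are re-fetched and the lookup is retried once with
+%% the flag cleared, so an unknown key id cannot cause an endless loop.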
+get_jwk(KeyId, AllowUpdateJwks) ->
+ Keys = signing_keys(),
+ case maps:get(KeyId, Keys, undefined) of
+ undefined ->
+ if
+ AllowUpdateJwks ->
+ case update_jwks_signing_keys() of
+ ok ->
+ get_jwk(KeyId, false);
+ {error, no_jwks_url} ->
+ {error, key_not_found};
+ {error, _} = Err ->
+ Err
+ end;
+ true ->
+ {error, key_not_found}
+ end;
+ {Type, Value} ->
+ case Type of
+ json -> uaa_jwt_jwk:make_jwk(Value);
+ pem -> uaa_jwt_jwk:from_pem(Value);
+ pem_file -> uaa_jwt_jwk:from_pem_file(Value);
+ map -> uaa_jwt_jwk:make_jwk(Value);
+ _ -> {error, unknown_signing_key_type}
+ end
end.
verify_signing_key(Type, Value) ->
@@ -103,7 +132,7 @@ verify_signing_key(Type, Value) ->
signing_keys() ->
UaaEnv = application:get_env(?APP, key_config, []),
- proplists:get_value(signing_keys, UaaEnv).
+ proplists:get_value(signing_keys, UaaEnv, #{}).
-spec client_id(map()) -> binary() | undefined.
client_id(DecodedToken) ->
diff --git a/deps/rabbitmq_auth_backend_oauth2/src/uaa_jwt_jwk.erl b/deps/rabbitmq_auth_backend_oauth2/src/uaa_jwt_jwk.erl
index 11d2819fb5..a258ee0051 100644
--- a/deps/rabbitmq_auth_backend_oauth2/src/uaa_jwt_jwk.erl
+++ b/deps/rabbitmq_auth_backend_oauth2/src/uaa_jwt_jwk.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(uaa_jwt_jwk).
diff --git a/deps/rabbitmq_auth_backend_oauth2/src/uaa_jwt_jwt.erl b/deps/rabbitmq_auth_backend_oauth2/src/uaa_jwt_jwt.erl
index c2e41c5d52..aa1cdd8241 100644
--- a/deps/rabbitmq_auth_backend_oauth2/src/uaa_jwt_jwt.erl
+++ b/deps/rabbitmq_auth_backend_oauth2/src/uaa_jwt_jwt.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(uaa_jwt_jwt).
@@ -24,7 +24,15 @@ decode(Token) ->
end.
decode_and_verify(Jwk, Token) ->
- case jose_jwt:verify(Jwk, Token) of
+ UaaEnv = application:get_env(rabbitmq_auth_backend_oauth2, key_config, []),
+ Verify =
+ case proplists:get_value(algorithms, UaaEnv) of
+ undefined ->
+ jose_jwt:verify(Jwk, Token);
+ Algs ->
+ jose_jwt:verify_strict(Jwk, Algs, Token)
+ end,
+ case Verify of
{true, #jose_jwt{fields = Fields}, _} -> {true, Fields};
{false, #jose_jwt{fields = Fields}, _} -> {false, Fields}
end.
diff --git a/deps/rabbitmq_auth_backend_oauth2/src/wildcard.erl b/deps/rabbitmq_auth_backend_oauth2/src/wildcard.erl
index 01212901c6..5c25cbf272 100644
--- a/deps/rabbitmq_auth_backend_oauth2/src/wildcard.erl
+++ b/deps/rabbitmq_auth_backend_oauth2/src/wildcard.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(wildcard).
@@ -52,7 +52,7 @@ parse_pattern(Pattern) ->
Parts = binary:split(Pattern, <<"*">>, [global]),
try lists:map(fun(Part) -> cow_qs:urldecode(Part) end, Parts)
catch Type:Error ->
- rabbit_log:warning("Invalid pattern ~p : ~p~n",
+ rabbit_log:warning("Invalid pattern ~p : ~p",
[Pattern, {Type, Error}]),
invalid
end.
diff --git a/deps/rabbitmq_auth_backend_oauth2/test/add_uaa_key_command_SUITE.erl b/deps/rabbitmq_auth_backend_oauth2/test/add_uaa_key_command_SUITE.erl
index ba46715db1..86dccca5ce 100644
--- a/deps/rabbitmq_auth_backend_oauth2/test/add_uaa_key_command_SUITE.erl
+++ b/deps/rabbitmq_auth_backend_oauth2/test/add_uaa_key_command_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(add_uaa_key_command_SUITE).
diff --git a/deps/rabbitmq_auth_backend_oauth2/test/config_schema_SUITE.erl b/deps/rabbitmq_auth_backend_oauth2/test/config_schema_SUITE.erl
new file mode 100644
index 0000000000..9a7ada0ece
--- /dev/null
+++ b/deps/rabbitmq_auth_backend_oauth2/test/config_schema_SUITE.erl
@@ -0,0 +1,49 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(config_schema_SUITE).
+
+-compile(export_all).
+
+all() ->
+ [run_snippets].
+
+%% -------------------------------------------------------------------
+%% Testsuite setup/teardown.
+%% -------------------------------------------------------------------
+
+init_per_suite(Config) ->
+ rabbit_ct_helpers:log_environment(),
+ Config1 = rabbit_ct_helpers:run_setup_steps(Config),
+ rabbit_ct_config_schema:init_schemas(rabbitmq_auth_backend_oauth2, Config1).
+
+end_per_suite(Config) ->
+ rabbit_ct_helpers:run_teardown_steps(Config).
+
+init_per_testcase(Testcase, Config) ->
+ rabbit_ct_helpers:testcase_started(Config, Testcase),
+ Config1 = rabbit_ct_helpers:set_config(Config, [{rmq_nodename_suffix, Testcase}]),
+ rabbit_ct_helpers:run_steps(Config1,
+ rabbit_ct_broker_helpers:setup_steps()
+ ++ rabbit_ct_client_helpers:setup_steps()).
+
+end_per_testcase(Testcase, Config) ->
+ Config1 =
+ rabbit_ct_helpers:run_steps(Config,
+ rabbit_ct_client_helpers:teardown_steps()
+ ++ rabbit_ct_broker_helpers:teardown_steps()),
+ rabbit_ct_helpers:testcase_finished(Config1, Testcase).
+
+%% -------------------------------------------------------------------
+%% Testcases.
+%% -------------------------------------------------------------------
+
+run_snippets(Config) ->
+ ok = rabbit_ct_broker_helpers:rpc(Config, 0, ?MODULE, run_snippets1, [Config]).
+
+run_snippets1(Config) ->
+ rabbit_ct_config_schema:run_snippets(Config).
diff --git a/deps/rabbitmq_auth_backend_oauth2/test/config_schema_SUITE_data/certs/cacert.pem b/deps/rabbitmq_auth_backend_oauth2/test/config_schema_SUITE_data/certs/cacert.pem
new file mode 100644
index 0000000000..eaf6b67806
--- /dev/null
+++ b/deps/rabbitmq_auth_backend_oauth2/test/config_schema_SUITE_data/certs/cacert.pem
@@ -0,0 +1 @@
+I'm not a certificate
diff --git a/deps/rabbitmq_auth_backend_oauth2/test/config_schema_SUITE_data/certs/cert.pem b/deps/rabbitmq_auth_backend_oauth2/test/config_schema_SUITE_data/certs/cert.pem
new file mode 100644
index 0000000000..eaf6b67806
--- /dev/null
+++ b/deps/rabbitmq_auth_backend_oauth2/test/config_schema_SUITE_data/certs/cert.pem
@@ -0,0 +1 @@
+I'm not a certificate
diff --git a/deps/rabbitmq_auth_backend_oauth2/test/config_schema_SUITE_data/certs/key.pem b/deps/rabbitmq_auth_backend_oauth2/test/config_schema_SUITE_data/certs/key.pem
new file mode 100644
index 0000000000..eaf6b67806
--- /dev/null
+++ b/deps/rabbitmq_auth_backend_oauth2/test/config_schema_SUITE_data/certs/key.pem
@@ -0,0 +1 @@
+I'm not a certificate
diff --git a/deps/rabbitmq_auth_backend_oauth2/test/config_schema_SUITE_data/rabbitmq_auth_backend_oauth2.snippets b/deps/rabbitmq_auth_backend_oauth2/test/config_schema_SUITE_data/rabbitmq_auth_backend_oauth2.snippets
new file mode 100644
index 0000000000..27976d3abc
--- /dev/null
+++ b/deps/rabbitmq_auth_backend_oauth2/test/config_schema_SUITE_data/rabbitmq_auth_backend_oauth2.snippets
@@ -0,0 +1,42 @@
+[
+ {oauth2_pem_config2,
+ "auth_oauth2.resource_server_id = new_resource_server_id
+ auth_oauth2.additional_scopes_key = my_custom_scope_key
+ auth_oauth2.default_key = id1
+ auth_oauth2.signing_keys.id1 = test/config_schema_SUITE_data/certs/key.pem
+ auth_oauth2.signing_keys.id2 = test/config_schema_SUITE_data/certs/cert.pem
+ auth_oauth2.jwks_url = https://my-jwt-issuer/jwks.json
+ auth_oauth2.https.cacertfile = test/config_schema_SUITE_data/certs/cacert.pem
+ auth_oauth2.https.peer_verification = verify_none
+ auth_oauth2.https.depth = 5
+ auth_oauth2.https.fail_if_no_peer_cert = false
+ auth_oauth2.https.hostname_verification = wildcard
+ auth_oauth2.https.crl_check = true
+ auth_oauth2.algorithms.1 = HS256
+ auth_oauth2.algorithms.2 = RS256",
+ [
+ {rabbitmq_auth_backend_oauth2, [
+ {resource_server_id,<<"new_resource_server_id">>},
+ {additional_rabbitmq_scopes, <<"my_custom_scope_key">>},
+ {key_config, [
+ {default_key, <<"id1">>},
+ {signing_keys,
+ #{
+ <<"id1">> => {pem, <<"I'm not a certificate">>},
+ <<"id2">> => {pem, <<"I'm not a certificate">>}
+ }
+ },
+ {jwks_url, "https://my-jwt-issuer/jwks.json"},
+ {cacertfile, "test/config_schema_SUITE_data/certs/cacert.pem"},
+ {peer_verification, verify_none},
+ {depth, 5},
+ {fail_if_no_peer_cert, false},
+ {hostname_verification, wildcard},
+ {crl_check, true},
+ {algorithms, [<<"HS256">>, <<"RS256">>]}
+ ]
+ }
+ ]}
+ ],[]
+ }
+]. \ No newline at end of file
diff --git a/deps/rabbitmq_auth_backend_oauth2/test/jwks_SUITE.erl b/deps/rabbitmq_auth_backend_oauth2/test/jwks_SUITE.erl
new file mode 100644
index 0000000000..2ae29a2761
--- /dev/null
+++ b/deps/rabbitmq_auth_backend_oauth2/test/jwks_SUITE.erl
@@ -0,0 +1,464 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(jwks_SUITE).
+
+-compile(export_all).
+
+-include_lib("common_test/include/ct.hrl").
+-include_lib("amqp_client/include/amqp_client.hrl").
+-include_lib("eunit/include/eunit.hrl").
+
+-import(rabbit_ct_client_helpers, [close_connection/1, close_channel/1,
+ open_unmanaged_connection/4, open_unmanaged_connection/5,
+ close_connection_and_channel/2]).
+-import(rabbit_mgmt_test_util, [amqp_port/1]).
+
+all() ->
+ [
+ {group, happy_path},
+ {group, unhappy_path},
+ {group, unvalidated_jwks_server},
+ {group, no_peer_verification}
+ ].
+
+groups() ->
+ [
+ {happy_path, [], [
+ test_successful_connection_with_a_full_permission_token_and_all_defaults,
+ test_successful_connection_with_a_full_permission_token_and_explicitly_configured_vhost,
+ test_successful_connection_with_simple_strings_for_aud_and_scope,
+ test_successful_connection_with_complex_claim_as_a_map,
+ test_successful_connection_with_complex_claim_as_a_list,
+ test_successful_connection_with_complex_claim_as_a_binary,
+ test_successful_connection_with_keycloak_token,
+ test_successful_connection_with_algorithm_restriction,
+ test_successful_token_refresh
+ ]},
+ {unhappy_path, [], [
+ test_failed_connection_with_expired_token,
+ test_failed_connection_with_a_non_token,
+ test_failed_connection_with_a_token_with_insufficient_vhost_permission,
+ test_failed_connection_with_a_token_with_insufficient_resource_permission,
+ test_failed_connection_with_algorithm_restriction,
+ test_failed_token_refresh_case1,
+ test_failed_token_refresh_case2
+ ]},
+ {unvalidated_jwks_server, [], [test_failed_connection_with_unvalidated_jwks_server]},
+ {no_peer_verification, [], [{group, happy_path}, {group, unhappy_path}]}
+ ].
+
+%%
+%% Setup and Teardown
+%%
+
+-define(UTIL_MOD, rabbit_auth_backend_oauth2_test_util).
+-define(RESOURCE_SERVER_ID, <<"rabbitmq">>).
+-define(EXTRA_SCOPES_SOURCE, <<"additional_rabbitmq_scopes">>).
+
+init_per_suite(Config) ->
+ rabbit_ct_helpers:log_environment(),
+ rabbit_ct_helpers:run_setup_steps(Config,
+ rabbit_ct_broker_helpers:setup_steps() ++ [
+ fun preconfigure_node/1,
+ fun start_jwks_server/1,
+ fun preconfigure_token/1
+ ]).
+
+end_per_suite(Config) ->
+ rabbit_ct_helpers:run_teardown_steps(Config,
+ [
+ fun stop_jwks_server/1
+ ] ++ rabbit_ct_broker_helpers:teardown_steps()).
+
+init_per_group(no_peer_verification, Config) ->
+ add_vhosts(Config),
+ KeyConfig = rabbit_ct_helpers:set_config(?config(key_config, Config), [{jwks_url, ?config(non_strict_jwks_url, Config)}, {peer_verification, verify_none}]),
+ ok = rabbit_ct_broker_helpers:rpc(Config, 0, application, set_env, [rabbitmq_auth_backend_oauth2, key_config, KeyConfig]),
+ rabbit_ct_helpers:set_config(Config, {key_config, KeyConfig});
+
+init_per_group(_Group, Config) ->
+ add_vhosts(Config),
+ Config.
+
+end_per_group(no_peer_verification, Config) ->
+ delete_vhosts(Config),
+ KeyConfig = rabbit_ct_helpers:set_config(?config(key_config, Config), [{jwks_url, ?config(strict_jwks_url, Config)}, {peer_verification, verify_peer}]),
+ ok = rabbit_ct_broker_helpers:rpc(Config, 0, application, set_env, [rabbitmq_auth_backend_oauth2, key_config, KeyConfig]),
+ rabbit_ct_helpers:set_config(Config, {key_config, KeyConfig});
+
+end_per_group(_Group, Config) ->
+ delete_vhosts(Config),
+ Config.
+
+add_vhosts(Config) ->
+ %% The broker is managed by {init,end}_per_testcase().
+ lists:foreach(fun(Value) -> rabbit_ct_broker_helpers:add_vhost(Config, Value) end,
+ [<<"vhost1">>, <<"vhost2">>, <<"vhost3">>, <<"vhost4">>]).
+
+delete_vhosts(Config) ->
+ %% The broker is managed by {init,end}_per_testcase().
+ lists:foreach(fun(Value) -> rabbit_ct_broker_helpers:delete_vhost(Config, Value) end,
+ [<<"vhost1">>, <<"vhost2">>, <<"vhost3">>, <<"vhost4">>]).
+
+init_per_testcase(Testcase, Config) when Testcase =:= test_successful_connection_with_a_full_permission_token_and_explicitly_configured_vhost orelse
+ Testcase =:= test_successful_token_refresh ->
+ rabbit_ct_broker_helpers:add_vhost(Config, <<"vhost1">>),
+ rabbit_ct_helpers:testcase_started(Config, Testcase),
+ Config;
+
+init_per_testcase(Testcase, Config) when Testcase =:= test_failed_token_refresh_case1 orelse
+ Testcase =:= test_failed_token_refresh_case2 ->
+ rabbit_ct_broker_helpers:add_vhost(Config, <<"vhost4">>),
+ rabbit_ct_helpers:testcase_started(Config, Testcase),
+ Config;
+
+init_per_testcase(Testcase, Config) when Testcase =:= test_successful_connection_with_complex_claim_as_a_map orelse
+ Testcase =:= test_successful_connection_with_complex_claim_as_a_list orelse
+ Testcase =:= test_successful_connection_with_complex_claim_as_a_binary ->
+ ok = rabbit_ct_broker_helpers:rpc(Config, 0, application, set_env,
+ [rabbitmq_auth_backend_oauth2, extra_scopes_source, ?EXTRA_SCOPES_SOURCE]),
+ rabbit_ct_helpers:testcase_started(Config, Testcase),
+ Config;
+
+init_per_testcase(Testcase, Config) when Testcase =:= test_successful_connection_with_algorithm_restriction ->
+ KeyConfig = ?config(key_config, Config),
+ ok = rabbit_ct_broker_helpers:rpc(Config, 0, application, set_env, [rabbitmq_auth_backend_oauth2, key_config, [{algorithms, [<<"HS256">>]} | KeyConfig]]),
+ rabbit_ct_helpers:testcase_started(Config, Testcase),
+ Config;
+
+init_per_testcase(Testcase, Config) when Testcase =:= test_failed_connection_with_algorithm_restriction ->
+ KeyConfig = ?config(key_config, Config),
+ ok = rabbit_ct_broker_helpers:rpc(Config, 0, application, set_env, [rabbitmq_auth_backend_oauth2, key_config, [{algorithms, [<<"RS256">>]} | KeyConfig]]),
+ rabbit_ct_helpers:testcase_started(Config, Testcase),
+ Config;
+
+init_per_testcase(Testcase, Config) when Testcase =:= test_failed_connection_with_unvalidated_jwks_server ->
+ KeyConfig = rabbit_ct_helpers:set_config(?config(key_config, Config), {jwks_url, ?config(non_strict_jwks_url, Config)}),
+ ok = rabbit_ct_broker_helpers:rpc(Config, 0, application, set_env, [rabbitmq_auth_backend_oauth2, key_config, KeyConfig]),
+ rabbit_ct_helpers:testcase_started(Config, Testcase),
+ Config;
+
+init_per_testcase(Testcase, Config) ->
+ rabbit_ct_helpers:testcase_started(Config, Testcase),
+ Config.
+
+end_per_testcase(Testcase, Config) when Testcase =:= test_failed_token_refresh_case1 orelse
+ Testcase =:= test_failed_token_refresh_case2 ->
+ rabbit_ct_broker_helpers:delete_vhost(Config, <<"vhost4">>),
+ rabbit_ct_helpers:testcase_finished(Config, Testcase),
+ Config;
+
+end_per_testcase(Testcase, Config) when Testcase =:= test_successful_connection_with_complex_claim_as_a_map orelse
+ Testcase =:= test_successful_connection_with_complex_claim_as_a_list orelse
+ Testcase =:= test_successful_connection_with_complex_claim_as_a_binary ->
+ rabbit_ct_broker_helpers:delete_vhost(Config, <<"vhost1">>),
+ ok = rabbit_ct_broker_helpers:rpc(Config, 0, application, set_env,
+ [rabbitmq_auth_backend_oauth2, extra_scopes_source, undefined]),
+ rabbit_ct_helpers:testcase_finished(Config, Testcase),
+ Config;
+
+end_per_testcase(Testcase, Config) when Testcase =:= test_successful_connection_with_algorithm_restriction orelse
+ Testcase =:= test_failed_connection_with_algorithm_restriction orelse
+ Testcase =:= test_failed_connection_with_unvalidated_jwks_server ->
+ rabbit_ct_broker_helpers:delete_vhost(Config, <<"vhost1">>),
+ ok = rabbit_ct_broker_helpers:rpc(Config, 0, application, set_env, [rabbitmq_auth_backend_oauth2, key_config, ?config(key_config, Config)]),
+ rabbit_ct_helpers:testcase_finished(Config, Testcase),
+ Config;
+
+end_per_testcase(Testcase, Config) ->
+ rabbit_ct_broker_helpers:delete_vhost(Config, <<"vhost1">>),
+ rabbit_ct_helpers:testcase_finished(Config, Testcase),
+ Config.
+
+preconfigure_node(Config) ->
+ ok = rabbit_ct_broker_helpers:rpc(Config, 0, application, set_env,
+ [rabbit, auth_backends, [rabbit_auth_backend_oauth2]]),
+ ok = rabbit_ct_broker_helpers:rpc(Config, 0, application, set_env,
+ [rabbitmq_auth_backend_oauth2, resource_server_id, ?RESOURCE_SERVER_ID]),
+ Config.
+
+start_jwks_server(Config) ->
+ Jwk = ?UTIL_MOD:fixture_jwk(),
+ %% Assume we don't have more than 100 ports allocated for tests
+ PortBase = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_ports_base),
+ JwksServerPort = PortBase + 100,
+
+ %% Both URLs point to the same JWKS server; the server certificate's identity
+ %% can be verified for StrictJwksUrl (localhost) but not for NonStrictJwksUrl (127.0.0.1)
+ NonStrictJwksUrl = "https://127.0.0.1:" ++ integer_to_list(JwksServerPort) ++ "/jwks",
+ StrictJwksUrl = "https://localhost:" ++ integer_to_list(JwksServerPort) ++ "/jwks",
+
+ ok = application:set_env(jwks_http, keys, [Jwk]),
+ {ok, _} = application:ensure_all_started(ssl),
+ {ok, _} = application:ensure_all_started(cowboy),
+ CertsDir = ?config(rmq_certsdir, Config),
+ ok = jwks_http_app:start(JwksServerPort, CertsDir),
+ KeyConfig = [{jwks_url, StrictJwksUrl},
+ {peer_verification, verify_peer},
+ {cacertfile, filename:join([CertsDir, "testca", "cacert.pem"])}],
+ ok = rabbit_ct_broker_helpers:rpc(Config, 0, application, set_env,
+ [rabbitmq_auth_backend_oauth2, key_config, KeyConfig]),
+ rabbit_ct_helpers:set_config(Config,
+ [{non_strict_jwks_url, NonStrictJwksUrl},
+ {strict_jwks_url, StrictJwksUrl},
+ {key_config, KeyConfig},
+ {fixture_jwk, Jwk}]).
+
+stop_jwks_server(Config) ->
+ ok = jwks_http_app:stop(),
+ Config.
+
+generate_valid_token(Config) ->
+ generate_valid_token(Config, ?UTIL_MOD:full_permission_scopes()).
+
+generate_valid_token(Config, Scopes) ->
+ generate_valid_token(Config, Scopes, undefined).
+
+generate_valid_token(Config, Scopes, Audience) ->
+ Jwk = case rabbit_ct_helpers:get_config(Config, fixture_jwk) of
+ undefined -> ?UTIL_MOD:fixture_jwk();
+ Value -> Value
+ end,
+ Token = case Audience of
+ undefined -> ?UTIL_MOD:fixture_token_with_scopes(Scopes);
+ DefinedAudience -> maps:put(<<"aud">>, DefinedAudience, ?UTIL_MOD:fixture_token_with_scopes(Scopes))
+ end,
+ ?UTIL_MOD:sign_token_hs(Token, Jwk).
+
+generate_valid_token_with_extra_fields(Config, ExtraFields) ->
+ Jwk = case rabbit_ct_helpers:get_config(Config, fixture_jwk) of
+ undefined -> ?UTIL_MOD:fixture_jwk();
+ Value -> Value
+ end,
+ Token = maps:merge(?UTIL_MOD:fixture_token_with_scopes([]), ExtraFields),
+ ?UTIL_MOD:sign_token_hs(Token, Jwk).
+
+generate_expired_token(Config) ->
+ generate_expired_token(Config, ?UTIL_MOD:full_permission_scopes()).
+
+generate_expired_token(Config, Scopes) ->
+ Jwk = case rabbit_ct_helpers:get_config(Config, fixture_jwk) of
+ undefined -> ?UTIL_MOD:fixture_jwk();
+ Value -> Value
+ end,
+ ?UTIL_MOD:sign_token_hs(?UTIL_MOD:expired_token_with_scopes(Scopes), Jwk).
+
+generate_expirable_token(Config, Seconds) ->
+ generate_expirable_token(Config, ?UTIL_MOD:full_permission_scopes(), Seconds).
+
+generate_expirable_token(Config, Scopes, Seconds) ->
+ Jwk = case rabbit_ct_helpers:get_config(Config, fixture_jwk) of
+ undefined -> ?UTIL_MOD:fixture_jwk();
+ Value -> Value
+ end,
+ Expiration = os:system_time(seconds) + Seconds,
+ ?UTIL_MOD:sign_token_hs(?UTIL_MOD:token_with_scopes_and_expiration(Scopes, Expiration), Jwk).
+
+preconfigure_token(Config) ->
+ Token = generate_valid_token(Config),
+ rabbit_ct_helpers:set_config(Config, {fixture_jwt, Token}).
+
+%%
+%% Test Cases
+%%
+
+test_successful_connection_with_a_full_permission_token_and_all_defaults(Config) ->
+ {_Algo, Token} = rabbit_ct_helpers:get_config(Config, fixture_jwt),
+ Conn = open_unmanaged_connection(Config, 0, <<"username">>, Token),
+ {ok, Ch} = amqp_connection:open_channel(Conn),
+ #'queue.declare_ok'{queue = _} =
+ amqp_channel:call(Ch, #'queue.declare'{exclusive = true}),
+ close_connection_and_channel(Conn, Ch).
+
+test_successful_connection_with_a_full_permission_token_and_explicitly_configured_vhost(Config) ->
+ {_Algo, Token} = generate_valid_token(Config, [<<"rabbitmq.configure:vhost1/*">>,
+ <<"rabbitmq.write:vhost1/*">>,
+ <<"rabbitmq.read:vhost1/*">>]),
+ Conn = open_unmanaged_connection(Config, 0, <<"vhost1">>, <<"username">>, Token),
+ {ok, Ch} = amqp_connection:open_channel(Conn),
+ #'queue.declare_ok'{queue = _} =
+ amqp_channel:call(Ch, #'queue.declare'{exclusive = true}),
+ close_connection_and_channel(Conn, Ch).
+
+test_successful_connection_with_simple_strings_for_aud_and_scope(Config) ->
+ {_Algo, Token} = generate_valid_token(
+ Config,
+ <<"rabbitmq.configure:*/* rabbitmq.write:*/* rabbitmq.read:*/*">>,
+ <<"hare rabbitmq">>
+ ),
+ Conn = open_unmanaged_connection(Config, 0, <<"username">>, Token),
+ {ok, Ch} = amqp_connection:open_channel(Conn),
+ #'queue.declare_ok'{queue = _} =
+ amqp_channel:call(Ch, #'queue.declare'{exclusive = true}),
+ close_connection_and_channel(Conn, Ch).
+
+test_successful_connection_with_complex_claim_as_a_map(Config) ->
+ {_Algo, Token} = generate_valid_token_with_extra_fields(
+ Config,
+ #{<<"additional_rabbitmq_scopes">> => #{<<"rabbitmq">> => [<<"configure:*/*">>, <<"read:*/*">>, <<"write:*/*">>]}}
+ ),
+ Conn = open_unmanaged_connection(Config, 0, <<"username">>, Token),
+ {ok, Ch} = amqp_connection:open_channel(Conn),
+ #'queue.declare_ok'{queue = _} =
+ amqp_channel:call(Ch, #'queue.declare'{exclusive = true}),
+ close_connection_and_channel(Conn, Ch).
+
+test_successful_connection_with_complex_claim_as_a_list(Config) ->
+ {_Algo, Token} = generate_valid_token_with_extra_fields(
+ Config,
+ #{<<"additional_rabbitmq_scopes">> => [<<"rabbitmq.configure:*/*">>, <<"rabbitmq.read:*/*">>, <<"rabbitmq.write:*/*">>]}
+ ),
+ Conn = open_unmanaged_connection(Config, 0, <<"username">>, Token),
+ {ok, Ch} = amqp_connection:open_channel(Conn),
+ #'queue.declare_ok'{queue = _} =
+ amqp_channel:call(Ch, #'queue.declare'{exclusive = true}),
+ close_connection_and_channel(Conn, Ch).
+
+test_successful_connection_with_complex_claim_as_a_binary(Config) ->
+ {_Algo, Token} = generate_valid_token_with_extra_fields(
+ Config,
+ #{<<"additional_rabbitmq_scopes">> => <<"rabbitmq.configure:*/* rabbitmq.read:*/*" "rabbitmq.write:*/*">>}
+ ),
+ Conn = open_unmanaged_connection(Config, 0, <<"username">>, Token),
+ {ok, Ch} = amqp_connection:open_channel(Conn),
+ #'queue.declare_ok'{queue = _} =
+ amqp_channel:call(Ch, #'queue.declare'{exclusive = true}),
+ close_connection_and_channel(Conn, Ch).
+
+test_successful_connection_with_keycloak_token(Config) ->
+ {_Algo, Token} = generate_valid_token_with_extra_fields(
+ Config,
+ #{<<"authorization">> => #{<<"permissions">> =>
+ [#{<<"rsid">> => <<"2c390fe4-02ad-41c7-98a2-cebb8c60ccf1">>,
+ <<"rsname">> => <<"allvhost">>,
+ <<"scopes">> => [<<"rabbitmq.configure:*/*">>]},
+ #{<<"rsid">> => <<"e7f12e94-4c34-43d8-b2b1-c516af644cee">>,
+ <<"rsname">> => <<"vhost1">>,
+ <<"scopes">> => [<<"rabbitmq.write:*/*">>]},
+ #{<<"rsid">> => <<"12ac3d1c-28c2-4521-8e33-0952eff10bd9">>,
+ <<"rsname">> => <<"Default Resource">>,
+ <<"scopes">> => [<<"rabbitmq.read:*/*">>]},
+ %% this one won't be used because of the resource id
+ #{<<"rsid">> => <<"bee8fac6-c3ec-11e9-aa8c-2a2ae2dbcce4">>,
+ <<"rsname">> => <<"Default Resource">>,
+ <<"scopes">> => [<<"rabbitmq-resource-read">>]}]}}
+ ),
+ Conn = open_unmanaged_connection(Config, 0, <<"username">>, Token),
+ {ok, Ch} = amqp_connection:open_channel(Conn),
+ #'queue.declare_ok'{queue = _} =
+ amqp_channel:call(Ch, #'queue.declare'{exclusive = true}),
+ close_connection_and_channel(Conn, Ch).
+
+test_successful_token_refresh(Config) ->
+ Duration = 5,
+ {_Algo, Token} = generate_expirable_token(Config, [<<"rabbitmq.configure:vhost1/*">>,
+ <<"rabbitmq.write:vhost1/*">>,
+ <<"rabbitmq.read:vhost1/*">>],
+ Duration),
+ Conn = open_unmanaged_connection(Config, 0, <<"vhost1">>, <<"username">>, Token),
+ {ok, Ch} = amqp_connection:open_channel(Conn),
+
+ {_Algo2, Token2} = generate_valid_token(Config, [<<"rabbitmq.configure:vhost1/*">>,
+ <<"rabbitmq.write:vhost1/*">>,
+ <<"rabbitmq.read:vhost1/*">>]),
+ ?UTIL_MOD:wait_for_token_to_expire(timer:seconds(Duration)),
+ ?assertEqual(ok, amqp_connection:update_secret(Conn, Token2, <<"token refresh">>)),
+
+ {ok, Ch2} = amqp_connection:open_channel(Conn),
+
+ #'queue.declare_ok'{queue = _} =
+ amqp_channel:call(Ch, #'queue.declare'{exclusive = true}),
+ #'queue.declare_ok'{queue = _} =
+ amqp_channel:call(Ch2, #'queue.declare'{exclusive = true}),
+
+ amqp_channel:close(Ch2),
+ close_connection_and_channel(Conn, Ch).
+
+test_successful_connection_with_algorithm_restriction(Config) ->
+ {_Algo, Token} = rabbit_ct_helpers:get_config(Config, fixture_jwt),
+ Conn = open_unmanaged_connection(Config, 0, <<"username">>, Token),
+ {ok, Ch} = amqp_connection:open_channel(Conn),
+ #'queue.declare_ok'{queue = _} =
+ amqp_channel:call(Ch, #'queue.declare'{exclusive = true}),
+ close_connection_and_channel(Conn, Ch).
+
+test_failed_connection_with_expired_token(Config) ->
+ {_Algo, Token} = generate_expired_token(Config, [<<"rabbitmq.configure:vhost1/*">>,
+ <<"rabbitmq.write:vhost1/*">>,
+ <<"rabbitmq.read:vhost1/*">>]),
+ ?assertMatch({error, {auth_failure, _}},
+ open_unmanaged_connection(Config, 0, <<"vhost1">>, <<"username">>, Token)).
+
+test_failed_connection_with_a_non_token(Config) ->
+ ?assertMatch({error, {auth_failure, _}},
+ open_unmanaged_connection(Config, 0, <<"vhost1">>, <<"username">>, <<"a-non-token-value">>)).
+
+test_failed_connection_with_a_token_with_insufficient_vhost_permission(Config) ->
+ {_Algo, Token} = generate_valid_token(Config, [<<"rabbitmq.configure:alt-vhost/*">>,
+ <<"rabbitmq.write:alt-vhost/*">>,
+ <<"rabbitmq.read:alt-vhost/*">>]),
+ ?assertEqual({error, not_allowed},
+ open_unmanaged_connection(Config, 0, <<"off-limits-vhost">>, <<"username">>, Token)).
+
+test_failed_connection_with_a_token_with_insufficient_resource_permission(Config) ->
+ {_Algo, Token} = generate_valid_token(Config, [<<"rabbitmq.configure:vhost2/jwt*">>,
+ <<"rabbitmq.write:vhost2/jwt*">>,
+ <<"rabbitmq.read:vhost2/jwt*">>]),
+ Conn = open_unmanaged_connection(Config, 0, <<"vhost2">>, <<"username">>, Token),
+ {ok, Ch} = amqp_connection:open_channel(Conn),
+ ?assertExit({{shutdown, {server_initiated_close, 403, _}}, _},
+ amqp_channel:call(Ch, #'queue.declare'{queue = <<"alt-prefix.eq.1">>, exclusive = true})),
+ close_connection(Conn).
+
+test_failed_token_refresh_case1(Config) ->
+ {_Algo, Token} = generate_valid_token(Config, [<<"rabbitmq.configure:vhost4/*">>,
+ <<"rabbitmq.write:vhost4/*">>,
+ <<"rabbitmq.read:vhost4/*">>]),
+ Conn = open_unmanaged_connection(Config, 0, <<"vhost4">>, <<"username">>, Token),
+ {ok, Ch} = amqp_connection:open_channel(Conn),
+ #'queue.declare_ok'{queue = _} =
+ amqp_channel:call(Ch, #'queue.declare'{exclusive = true}),
+
+ {_Algo2, Token2} = generate_expired_token(Config, [<<"rabbitmq.configure:vhost4/*">>,
+ <<"rabbitmq.write:vhost4/*">>,
+ <<"rabbitmq.read:vhost4/*">>]),
+ %% the error is communicated asynchronously via a connection-level error
+ ?assertEqual(ok, amqp_connection:update_secret(Conn, Token2, <<"token refresh">>)),
+
+ {ok, Ch2} = amqp_connection:open_channel(Conn),
+ ?assertExit({{shutdown, {server_initiated_close, 403, _}}, _},
+ amqp_channel:call(Ch2, #'queue.declare'{queue = <<"a.q">>, exclusive = true})),
+
+ close_connection(Conn).
+
+test_failed_token_refresh_case2(Config) ->
+ {_Algo, Token} = generate_valid_token(Config, [<<"rabbitmq.configure:vhost4/*">>,
+ <<"rabbitmq.write:vhost4/*">>,
+ <<"rabbitmq.read:vhost4/*">>]),
+ Conn = open_unmanaged_connection(Config, 0, <<"vhost4">>, <<"username">>, Token),
+ {ok, Ch} = amqp_connection:open_channel(Conn),
+ #'queue.declare_ok'{queue = _} =
+ amqp_channel:call(Ch, #'queue.declare'{exclusive = true}),
+
+ %% the error is communicated asynchronously via a connection-level error
+ ?assertEqual(ok, amqp_connection:update_secret(Conn, <<"not-a-token-^^^^5%">>, <<"token refresh">>)),
+
+ ?assertExit({{shutdown, {connection_closing, {server_initiated_close, 530, _}}}, _},
+ amqp_connection:open_channel(Conn)),
+
+ close_connection(Conn).
+
+test_failed_connection_with_algorithm_restriction(Config) ->
+ {_Algo, Token} = rabbit_ct_helpers:get_config(Config, fixture_jwt),
+ ?assertMatch({error, {auth_failure, _}},
+ open_unmanaged_connection(Config, 0, <<"username">>, Token)).
+
+test_failed_connection_with_unvalidated_jwks_server(Config) ->
+ {_Algo, Token} = rabbit_ct_helpers:get_config(Config, fixture_jwt),
+ ?assertMatch({error, {auth_failure, _}},
+ open_unmanaged_connection(Config, 0, <<"username">>, Token)).
diff --git a/deps/rabbitmq_auth_backend_oauth2/test/jwks_http_app.erl b/deps/rabbitmq_auth_backend_oauth2/test/jwks_http_app.erl
new file mode 100644
index 0000000000..c745e436f6
--- /dev/null
+++ b/deps/rabbitmq_auth_backend_oauth2/test/jwks_http_app.erl
@@ -0,0 +1,22 @@
+-module(jwks_http_app).
+
+-export([start/2, stop/0]).
+
+start(Port, CertsDir) ->
+ Dispatch =
+ cowboy_router:compile(
+ [
+ {'_', [
+ {"/jwks", jwks_http_handler, []}
+ ]}
+ ]
+ ),
+ {ok, _} = cowboy:start_tls(jwks_http_listener,
+ [{port, Port},
+ {certfile, filename:join([CertsDir, "server", "cert.pem"])},
+ {keyfile, filename:join([CertsDir, "server", "key.pem"])}],
+ #{env => #{dispatch => Dispatch}}),
+ ok.
+
+stop() ->
+ ok = cowboy:stop_listener(jwks_http_listener).
diff --git a/deps/rabbitmq_auth_backend_oauth2/test/jwks_http_handler.erl b/deps/rabbitmq_auth_backend_oauth2/test/jwks_http_handler.erl
new file mode 100644
index 0000000000..b4baad1c45
--- /dev/null
+++ b/deps/rabbitmq_auth_backend_oauth2/test/jwks_http_handler.erl
@@ -0,0 +1,14 @@
+-module(jwks_http_handler).
+-behavior(cowboy_handler).
+
+-export([init/2, terminate/3]).
+
+init(Req, State) ->
+ {ok, Keys} = application:get_env(jwks_http, keys),
+ Body = jsx:encode(#{keys => Keys}),
+ Headers = #{<<"content-type">> => <<"application/json">>},
+ Req2 = cowboy_req:reply(200, Headers, Body, Req),
+ {ok, Req2, State}.
+
+terminate(_Reason, _Req, _State) ->
+ ok.
diff --git a/deps/rabbitmq_auth_backend_oauth2/test/jwks_http_sup.erl b/deps/rabbitmq_auth_backend_oauth2/test/jwks_http_sup.erl
new file mode 100644
index 0000000000..c0130cd083
--- /dev/null
+++ b/deps/rabbitmq_auth_backend_oauth2/test/jwks_http_sup.erl
@@ -0,0 +1,11 @@
+-module(jwks_http_sup).
+-behaviour(supervisor).
+
+-export([start_link/0, init/1]).
+
+start_link() ->
+ supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+init([]) ->
+ Procs = [],
+ {ok, {{one_for_one, 1, 5}, Procs}}.
diff --git a/deps/rabbitmq_auth_backend_oauth2/test/rabbit_auth_backend_oauth2_test_util.erl b/deps/rabbitmq_auth_backend_oauth2/test/rabbit_auth_backend_oauth2_test_util.erl
index 5b8ed5f837..80b6648352 100644
--- a/deps/rabbitmq_auth_backend_oauth2/test/rabbit_auth_backend_oauth2_test_util.erl
+++ b/deps/rabbitmq_auth_backend_oauth2/test/rabbit_auth_backend_oauth2_test_util.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_auth_backend_oauth2_test_util).
@@ -73,7 +73,7 @@ expired_token_with_scopes(Scopes) ->
token_with_scopes_and_expiration(Scopes, os:system_time(seconds) - 10).
fixture_token_with_scopes(Scopes) ->
- token_with_scopes_and_expiration(Scopes, os:system_time(seconds) + 10).
+ token_with_scopes_and_expiration(Scopes, os:system_time(seconds) + 30).
token_with_scopes_and_expiration(Scopes, Expiration) ->
%% expiration is a timestamp with precision in seconds
diff --git a/deps/rabbitmq_auth_backend_oauth2/test/scope_SUITE.erl b/deps/rabbitmq_auth_backend_oauth2/test/scope_SUITE.erl
index 1338f28f50..771e5b6786 100644
--- a/deps/rabbitmq_auth_backend_oauth2/test/scope_SUITE.erl
+++ b/deps/rabbitmq_auth_backend_oauth2/test/scope_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(scope_SUITE).
diff --git a/deps/rabbitmq_auth_backend_oauth2/test/system_SUITE.erl b/deps/rabbitmq_auth_backend_oauth2/test/system_SUITE.erl
index bb98b469a3..fa63eef3e0 100644
--- a/deps/rabbitmq_auth_backend_oauth2/test/system_SUITE.erl
+++ b/deps/rabbitmq_auth_backend_oauth2/test/system_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(system_SUITE).
@@ -282,16 +282,16 @@ test_successful_connection_with_keycloak_token(Config) ->
test_successful_token_refresh(Config) ->
Duration = 5,
- {_Algo, Token} = generate_expirable_token(Config, [<<"rabbitmq.configure:vhost1/*">>,
- <<"rabbitmq.write:vhost1/*">>,
- <<"rabbitmq.read:vhost1/*">>],
- Duration),
+ {_, Token} = generate_expirable_token(Config, [<<"rabbitmq.configure:vhost1/*">>,
+ <<"rabbitmq.write:vhost1/*">>,
+ <<"rabbitmq.read:vhost1/*">>],
+ Duration),
Conn = open_unmanaged_connection(Config, 0, <<"vhost1">>, <<"username">>, Token),
{ok, Ch} = amqp_connection:open_channel(Conn),
- {_Algo, Token2} = generate_valid_token(Config, [<<"rabbitmq.configure:vhost1/*">>,
- <<"rabbitmq.write:vhost1/*">>,
- <<"rabbitmq.read:vhost1/*">>]),
+ {_, Token2} = generate_valid_token(Config, [<<"rabbitmq.configure:vhost1/*">>,
+ <<"rabbitmq.write:vhost1/*">>,
+ <<"rabbitmq.read:vhost1/*">>]),
?UTIL_MOD:wait_for_token_to_expire(timer:seconds(Duration)),
?assertEqual(ok, amqp_connection:update_secret(Conn, Token2, <<"token refresh">>)),
@@ -335,17 +335,17 @@ test_failed_connection_with_a_token_with_insufficient_resource_permission(Config
close_connection(Conn).
test_failed_token_refresh_case1(Config) ->
- {_Algo, Token} = generate_valid_token(Config, [<<"rabbitmq.configure:vhost4/*">>,
- <<"rabbitmq.write:vhost4/*">>,
- <<"rabbitmq.read:vhost4/*">>]),
+ {_, Token} = generate_valid_token(Config, [<<"rabbitmq.configure:vhost4/*">>,
+ <<"rabbitmq.write:vhost4/*">>,
+ <<"rabbitmq.read:vhost4/*">>]),
Conn = open_unmanaged_connection(Config, 0, <<"vhost4">>, <<"username">>, Token),
{ok, Ch} = amqp_connection:open_channel(Conn),
#'queue.declare_ok'{queue = _} =
amqp_channel:call(Ch, #'queue.declare'{exclusive = true}),
- {_Algo, Token2} = generate_expired_token(Config, [<<"rabbitmq.configure:vhost4/*">>,
- <<"rabbitmq.write:vhost4/*">>,
- <<"rabbitmq.read:vhost4/*">>]),
+ {_, Token2} = generate_expired_token(Config, [<<"rabbitmq.configure:vhost4/*">>,
+ <<"rabbitmq.write:vhost4/*">>,
+ <<"rabbitmq.read:vhost4/*">>]),
%% the error is communicated asynchronously via a connection-level error
?assertEqual(ok, amqp_connection:update_secret(Conn, Token2, <<"token refresh">>)),
diff --git a/deps/rabbitmq_auth_backend_oauth2/test/unit_SUITE.erl b/deps/rabbitmq_auth_backend_oauth2/test/unit_SUITE.erl
index f1ed34fabf..5441944089 100644
--- a/deps/rabbitmq_auth_backend_oauth2/test/unit_SUITE.erl
+++ b/deps/rabbitmq_auth_backend_oauth2/test/unit_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(unit_SUITE).
diff --git a/deps/rabbitmq_auth_backend_oauth2/test/wildcard_match_SUITE.erl b/deps/rabbitmq_auth_backend_oauth2/test/wildcard_match_SUITE.erl
index 6996a95f80..cd7059979f 100644
--- a/deps/rabbitmq_auth_backend_oauth2/test/wildcard_match_SUITE.erl
+++ b/deps/rabbitmq_auth_backend_oauth2/test/wildcard_match_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(wildcard_match_SUITE).
diff --git a/deps/rabbitmq_auth_mechanism_ssl/BUILD.bazel b/deps/rabbitmq_auth_mechanism_ssl/BUILD.bazel
new file mode 100644
index 0000000000..8270050b7b
--- /dev/null
+++ b/deps/rabbitmq_auth_mechanism_ssl/BUILD.bazel
@@ -0,0 +1,42 @@
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze")
+load(
+ "//:rabbitmq.bzl",
+ "RABBITMQ_DIALYZER_OPTS",
+ "rabbitmq_lib",
+)
+
+APP_NAME = "rabbitmq_auth_mechanism_ssl"
+
+APP_DESCRIPTION = "RabbitMQ SSL authentication (SASL EXTERNAL)"
+
+APP_MODULE = "rabbit_auth_mechanism_ssl_app"
+
+APP_ENV = """[
+ {name_from, distinguished_name}
+ ]"""
+
+DEPS = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+]
+
+RUNTIME_DEPS = [
+ "//deps/rabbit:bazel_erlang_lib",
+]
+
+rabbitmq_lib(
+ app_description = APP_DESCRIPTION,
+ app_env = APP_ENV,
+ app_module = APP_MODULE,
+ app_name = APP_NAME,
+ runtime_deps = RUNTIME_DEPS,
+ deps = DEPS,
+)
+
+xref(tags = ["xref"])
+
+dialyze(
+ dialyzer_opts = RABBITMQ_DIALYZER_OPTS,
+ plt = "//:base_plt",
+ tags = ["dialyze"],
+)
diff --git a/deps/rabbitmq_auth_mechanism_ssl/Makefile b/deps/rabbitmq_auth_mechanism_ssl/Makefile
index 11a44a8d7f..583668789e 100644
--- a/deps/rabbitmq_auth_mechanism_ssl/Makefile
+++ b/deps/rabbitmq_auth_mechanism_ssl/Makefile
@@ -23,5 +23,5 @@ DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
ERLANG_MK_COMMIT = rabbitmq-tmp
-include rabbitmq-components.mk
-include erlang.mk
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
diff --git a/deps/rabbitmq_auth_mechanism_ssl/README.md b/deps/rabbitmq_auth_mechanism_ssl/README.md
index 47f855ba56..d1bad3ec94 100644
--- a/deps/rabbitmq_auth_mechanism_ssl/README.md
+++ b/deps/rabbitmq_auth_mechanism_ssl/README.md
@@ -63,6 +63,33 @@ or from an existing amqps connection with commands like:
rabbitmqctl list_connections peer_cert_subject
```
+#### Subject Alternative Name
+
+To extract the username from a Subject Alternative Name (SAN) field, a few
+settings need to be configured. Because a certificate can carry more than
+one SAN field, and those fields can represent identities of different types,
+both the type and the index of the field to use must be provided.
+
+For example, to use the first SAN value of type DNS:
+
+``` ini
+auth_mechanisms.1 = EXTERNAL
+
+ssl_cert_login_from = subject_alternative_name
+ssl_cert_login_san_type = dns
+ssl_cert_login_san_index = 0
+```
+
+Or of type email:
+
+``` ini
+auth_mechanisms.1 = EXTERNAL
+
+ssl_cert_login_from = subject_alternative_name
+ssl_cert_login_san_type = email
+ssl_cert_login_san_index = 0
+```
+
#### Common Name
To use the Common Name instead, set `rabbit.ssl_cert_login_from` to `common_name`:
@@ -79,8 +106,28 @@ the internal node database by default but could include other
backends if so configured.
+## Usage for MQTT Clients
+
+To use this plugin with MQTT clients, set `mqtt.ssl_cert_login` to `true`:
+
+``` ini
+# It makes no sense to allow or expect anonymous client connections
+# with certificate-based authentication
+mqtt.allow_anonymous = false
+
+# require the peer to provide a certificate, enforce certificate exchange
+ssl_options.verify = verify_peer
+ssl_options.fail_if_no_peer_cert = true
+
+# allow MQTT connections to derive their username from the client certificate's CN
+# (used here for simplicity; note that CN has long been deprecated in favor of SAN)
+mqtt.ssl_cert_login = true
+ssl_cert_login_from = common_name
+```
+
+
## Copyright & License
-(c) 2007-2020 VMware, Inc. or its affiliates.
+(c) 2007-2021 VMware, Inc. or its affiliates.
Released under the same license as RabbitMQ.
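
For deployments that still use the classic configuration format, a hedged sketch of roughly equivalent `advanced.config` terms for the SAN settings documented above; the `rabbit` application keys mirror the ini keys and, apart from `ssl_cert_login_from` (mentioned in the README), are assumptions rather than values confirmed by this diff:

``` erlang
%% Assumed advanced.config equivalent of the SAN-based username extraction
%% settings shown in the README above; key names are inferred, not verified.
[
  {rabbit, [
    {ssl_cert_login_from,      subject_alternative_name},
    {ssl_cert_login_san_type,  dns},
    {ssl_cert_login_san_index, 0}
  ]}
].
```
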
diff --git a/deps/rabbitmq_auth_mechanism_ssl/erlang.mk b/deps/rabbitmq_auth_mechanism_ssl/erlang.mk
deleted file mode 100644
index fce4be0b0a..0000000000
--- a/deps/rabbitmq_auth_mechanism_ssl/erlang.mk
+++ /dev/null
@@ -1,7808 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
-ERLANG_MK_WITHOUT =
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
-
-# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
-
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
-
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of statc BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another a key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / OpenStack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simplify writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating Github-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = erlang library for JSON Web Token
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple non-intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is a pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elasticsearch's REST interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = redis erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record-based Erlang driver for MongoDB with GridFS support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += esh_mk
-pkg_esh_mk_name = esh_mk
-pkg_esh_mk_description = esh template engine plugin for erlang.mk
-pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
-pkg_esh_mk_fetch = git
-pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
-pkg_esh_mk_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = TOML language erlang parser
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = ZeroMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = http://fixprotocol.org/ implementation.
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - an Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = The guards and for Erlang parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
-pkg_gun_homepage = http://ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang irc client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding and decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library for efficiently decoding and encoding JSON, written in C.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-Erlang, open-source implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = erlang driver for rethinkdb
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = library to handle mimetypes
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang Oauth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transformer for using new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between record and proplist
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = automatic Erlang node discovery via redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = pipelined erlang redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang Rest Client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy as nif for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an ID generator for messaging services.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elastic Search
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX style broken HTML parser in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = transit format for erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU based collation Erlang module
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtension for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = a simple erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler svn repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based yaml loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = ZAB protocol implementation in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang.
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
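The package index above feeds the `search` target, which lower-cases the query with core_lc, matches it against each package's name and description, and prints the matching records with pkg_print. A minimal usage sketch, assuming an erlang.mk-based checkout (the query string is illustrative):

    # Print every package whose name or description mentions "cowboy".
    make search q=cowboy

    # Without q=, the target prints the whole package index.
    make search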
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
-
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
-
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# Both early and regular plugins use the core_dep_plugin macro.
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
-
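As a sketch of how the core_dep_plugin macro above is driven: a project Makefile can list an early plugin either as a bare dependency name (which loads that dependency's early-plugins.mk) or as an explicit path inside a dependency. The names below are hypothetical:

    # Load deps/my_dep/early-plugins.mk before the rest of erlang.mk.
    DEP_EARLY_PLUGINS = my_dep

    # Or point at a specific file within a dependency.
    DEP_EARLY_PLUGINS = my_dep/mk/early.mk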
-# Query functions.
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
-
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
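For illustration, given a hypothetical dependency declared as below, query_fetch_method resolves to git, query_name to my_dep, query_repo to the HTTPS URL, and query_version (like the legacy dep_commit wrapper) to the requested ref; when no dep_* line exists, the pkg_* index above is consulted instead:

    DEPS = my_dep
    dep_my_dep = git https://github.com/example/my_dep 1.2.3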
-
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly we don't want to include it here,
-# otherwise it'll be treated both as an app and as the top-level project.
-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
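Concretely, for a project located at /path/to/myapp that uses the default APPS_DIR and DEPS_DIR (the path is illustrative), the exported value becomes:

    ERL_LIBS=/path/to/myapp/apps:/path/to/myapp/deps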
-
-export NO_AUTOPATCH
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
-
-# Optimization: don't recompile deps unless truly necessary.
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create an ebin directory for all apps to make sure Erlang recognizes them
-# as proper OTP applications when using -include_lib. This is a temporary
-# fix; a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
-
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies once they have been compiled.
-# A developer working on the top-level project and on some of its
-# dependencies at the same time may want to change this behavior.
-# There are two solutions (see the usage sketch below):
-# 1. Set `FULL=1` so that all dependencies are visited and
-#    recursively recompiled if necessary.
-# 2. Set `FORCE_REBUILD=` to the specific list of dependencies that
-#    should be recompiled (instead of the whole set).
-
-FORCE_REBUILD ?=
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
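A usage sketch of the two options described in the comment above (the dependency names are hypothetical):

    # Visit every dependency and recompile it if necessary.
    make FULL=1

    # Only force a rebuild of the listed dependencies.
    make FORCE_REBUILD="my_dep other_dep"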
-
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
-
-# Deps related targets.
-
-# @todo rename GNUmakefile and makefile into Makefile first, if they exist
-# While the Makefile could also be named GNUmakefile or makefile,
-# in practice only Makefile is needed so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
-# if given. Do it for all 3 possible Makefile file names.
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
-
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-# Hex only has a package version. No need to look in the Erlang.mk packages.
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
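A dependency fetched through dep_fetch_hex is declared with the hex method followed by a version and, when the Hex package name differs from the Erlang application name, an optional third word carrying that package name. The example below is hypothetical:

    DEPS = my_dep
    dep_my_dep = hex 1.2.3
    # Or, with a distinct Hex package name:
    dep_my_dep = hex 1.2.3 my_dep_pkg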
-
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility purposes with older Erlang.mk configuration.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
-
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
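These variables are meant to be set or extended from the project Makefile before erlang.mk is included; a minimal sketch (the flags and module names are illustrative):

    ERLC_OPTS += +warn_missing_spec
    COMPILE_FIRST = my_behaviour
    ERLC_EXCLUDE = my_generated_module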
-
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
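-
-# As a rough illustration only: assuming PROJECT = my_app, PROJECT_VERSION = 0.1.0
-# and the usual my_app_app/my_app_sup modules, the app_file template above renders
-# an ebin/my_app.app resource approximately like:
-#
-#   {application, 'my_app', [
-#       {description, "My application"},
-#       {vsn, "0.1.0"},
-#       {modules, ['my_app_app','my_app_sup']},
-#       {registered, [my_app_sup]},
-#       {applications, [kernel,stdlib]},
-#       {mod, {my_app_app, []}},
-#       {env, []}
-#   ]}.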
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- Output0 = [
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ],
- Output = case "é" of
- [233] -> unicode:characters_to_binary(Output0);
- _ -> Output0
- end,
- ok = file:write_file("$(1)", Output),
- halt()
-endef
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
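-
-# As a rough illustration only: assuming a hypothetical src/my_mod.erl that
-# includes include/my_hdr.hrl, the generated $(PROJECT).d would contain a rule
-# approximately like
-#
-#   src/my_mod.erl:: include/my_hdr.hrl; @touch $@
-#
-# plus a COMPILE_FIRST += line listing modules that others depend on
-# (behaviours and parse transforms), in dependency order.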
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
- echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
-
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
- @:
-
-test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
-test_erlc_verbose_2 = set -x;
-test_erlc_verbose = $(test_erlc_verbose_$(V))
-
-define compile_test_erl
- $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(1)
-endef
-
-ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
-$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
- $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want to fail due to
-# warnings when used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
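-
-# As a rough illustration only: with the default ERLC_OPTS and a single
-# hypothetical dependency DEPS = cowlib fetched from git, the generated
-# rebar.config would look approximately like:
-#
-#   {deps, [
-#       {cowlib,".*",{git,"https://github.com/ninenines/cowlib","2.11.0"}}
-#   ]}.
-#   {erl_opts, [debug_info,warn_export_vars,warn_shadow_vars,warn_obsolete_guard]}.
-#
-# (The URL and tag above are placeholder values for the example.)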
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
- " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
- " list-templates List available templates"
-
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
-
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
-
-list-templates:
- $(verbose) @echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code. The "gcc" MSYS2 package also doesn't.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
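-
-# As a rough illustration only: on a hypothetical OTP installation rooted at
-# /usr/lib/erlang with ERTS 12.2 and erl_interface 5.2.2, the generated env.mk
-# would read approximately:
-#
-#   ERTS_INCLUDE_DIR ?= /usr/lib/erlang/erts-12.2/include/
-#   ERL_INTERFACE_INCLUDE_DIR ?= /usr/lib/erlang/lib/erl_interface-5.2.2/include
-#   ERL_INTERFACE_LIB_DIR ?= /usr/lib/erlang/lib/erl_interface-5.2.2/lib
-#   ERTS_DIR ?= /usr/lib/erlang/erts-12.2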
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
- "The CI_OTP variable must be defined with the Erlang versions" \
- "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
-
-# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifdef CONCUERROR_TESTS
-
-.PHONY: concuerror distclean-concuerror
-
-# Configuration
-
-CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
-CONCUERROR_OPTS ?=
-
-# Core targets.
-
-check:: concuerror
-
-ifndef KEEP_LOGS
-distclean:: distclean-concuerror
-endif
-
-# Plugin-specific targets.
-
-$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
- $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
- $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
-
-$(CONCUERROR_LOGS_DIR):
- $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
-
-define concuerror_html_report
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="utf-8">
-<title>Concuerror HTML report</title>
-</head>
-<body>
-<h1>Concuerror HTML report</h1>
-<p>Generated on $(concuerror_date)</p>
-<ul>
-$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
-</ul>
-</body>
-</html>
-endef
-
-concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
- $(eval concuerror_date := $(shell date))
- $(eval concuerror_targets := $^)
- $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
-
-define concuerror_target
-.PHONY: concuerror-$1-$2
-
-concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
- $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
- --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
- -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
- $$(CONCUERROR_OPTS) -m $1 -t $2
-endef
-
-$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
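-
-# As a rough illustration only: assuming a hypothetical test module and function,
-#
-#   CONCUERROR_TESTS = my_tests:race_test
-#
-# creates a concuerror-my_tests-race_test target whose output is written to
-# $(CONCUERROR_LOGS_DIR)/concuerror-my_tests-race_test.txt and linked from the
-# HTML report above.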
-
-distclean-concuerror:
- $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
-
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ct apps-ct distclean-ct
-
-# Configuration.
-
-CT_OPTS ?=
-
-ifneq ($(wildcard $(TEST_DIR)),)
-ifndef CT_SUITES
-CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
-endif
-endif
-CT_SUITES ?=
-CT_LOGS_DIR ?= $(CURDIR)/logs
-
-# Core targets.
-
-tests:: ct
-
-ifndef KEEP_LOGS
-distclean:: distclean-ct
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Common_test targets:" \
- " ct Run all the common_test suites for this project" \
- "" \
- "All your common_test suites have their associated targets." \
- "A suite named http_SUITE can be ran using the ct-http target."
-
-# Plugin-specific targets.
-
-CT_RUN = ct_run \
- -no_auto_compile \
- -noinput \
- -pa $(CURDIR)/ebin $(TEST_DIR) \
- -dir $(TEST_DIR) \
- -logdir $(CT_LOGS_DIR)
-
-ifeq ($(CT_SUITES),)
-ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
-else
-# We do not run tests if we are in an apps/* with no test directory.
-ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
-ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
-endif
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-define ct_app_target
-apps-ct-$1: test-build
- $$(MAKE) -C $1 ct IS_APP=1
-endef
-
-$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
-
-apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
-endif
-
-ifdef t
-ifeq (,$(findstring :,$t))
-CT_EXTRA = -group $t
-else
-t_words = $(subst :, ,$t)
-CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
-endif
-else
-ifdef c
-CT_EXTRA = -case $c
-else
-CT_EXTRA =
-endif
-endif
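-
-# As a rough illustration only: assuming a hypothetical http_SUITE with a group
-# named 'chunked' and a case named 'te_chunked', the t and c variables above
-# can be used as:
-#
-#   make ct-http t=chunked               # run a single group
-#   make ct-http t=chunked:te_chunked    # run a single case within a group
-#   make ct-http c=te_chunked            # run a single case directly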
-
-define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
-endef
-
-$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
-
-distclean-ct:
- $(gen_verbose) rm -rf $(CT_LOGS_DIR)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: plt distclean-plt dialyze
-
-# Configuration.
-
-DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
-export DIALYZER_PLT
-
-PLT_APPS ?=
-DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-DIALYZER_PLT_OPTS ?=
-
-# Core targets.
-
-check:: dialyze
-
-distclean:: distclean-plt
-
-help::
- $(verbose) printf "%s\n" "" \
- "Dialyzer targets:" \
- " plt Build a PLT file for this project" \
- " dialyze Analyze the project using Dialyzer"
-
-# Plugin-specific targets.
-
-define filter_opts.erl
- Opts = init:get_plain_arguments(),
- {Filtered, _} = lists:foldl(fun
- (O, {Os, true}) -> {[O|Os], false};
- (O = "-D", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-I", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-pa", {Os, _}) -> {[O|Os], true};
- (_, Acc) -> Acc
- end, {[], false}, Opts),
- io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
- halt().
-endef
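-
-# As a rough illustration only: given plain arguments such as
-#
-#   -Werror +debug_info -DTEST=1 -I include -pa ebin
-#
-# the script above keeps only the -D, -I and -pa options (and their values)
-# and prints "-DTEST=1 -I include -pa ebin" for Dialyzer to consume.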
-
-# DIALYZER_PLT is a variable understood directly by Dialyzer.
-#
-# We append the path to erts at the end of the PLT. This works
-# because the PLT file is in the external term format and the
-# function binary_to_term/1 ignores any trailing data.
-$(DIALYZER_PLT): deps app
- $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
- while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
- $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
- erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
- $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
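-
-# The erts path appended above is what the dialyze target below reads back with
-# `tail -n1` to detect that the PLT was built against a different Erlang
-# installation, in which case the PLT is removed and rebuilt.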
-
-plt: $(DIALYZER_PLT)
-
-distclean-plt:
- $(gen_verbose) rm -f $(DIALYZER_PLT)
-
-ifneq ($(wildcard $(DIALYZER_PLT)),)
-dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
- $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
- grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
- rm $(DIALYZER_PLT); \
- $(MAKE) plt; \
- fi
-else
-dialyze: $(DIALYZER_PLT)
-endif
- $(verbose) dialyzer --no_native `$(ERL) \
- -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
- -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
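-
-# As a rough illustration only: a hypothetical template templates/users/show.dtl
-# compiles to the module show_dtl with the default settings, or to users_show_dtl
-# when DTL_FULL_PATH is set; DTL_PREFIX and DTL_SUFFIX wrap the resulting name.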
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
- " escript Build an executable escript archive" \
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
- $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
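-
-# With the defaults above, the first three lines of the generated escript are
-# approximately:
-#
-#   #!/usr/bin/env escript
-#   %% This is an -*- erlang -*- file
-#   %%! -escript main $(PROJECT)
-#
-# followed by the zip archive produced by escript-zip.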
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: eunit apps-eunit
-
-# Configuration
-
-EUNIT_OPTS ?=
-EUNIT_ERL_OPTS ?=
-
-# Core targets.
-
-tests:: eunit
-
-help::
- $(verbose) printf "%s\n" "" \
- "EUnit targets:" \
- " eunit Run all the EUnit tests for this project"
-
-# Plugin-specific targets.
-
-define eunit.erl
- $(call cover.erl)
- CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
- ok -> ok;
- error -> halt(2)
- end,
- CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
- halt()
-endef
-
-EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
-else
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
-endif
-else
-EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
-EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
-
-EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
- $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
-
-eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
-ifneq ($(wildcard src/ $(TEST_DIR)),)
- $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-apps-eunit: test-build
- $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
- [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
- exit $$eunit_retcode
-endif
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper)
-.PHONY: proper
-
-# Targets.
-
-tests:: proper
-
-define proper_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- Module = fun(M) ->
- [true] =:= lists:usort([
- case atom_to_list(F) of
- "prop_" ++ _ ->
- io:format("Testing ~p:~p/0~n", [M, F]),
- proper:quickcheck(M:F(), nocolors);
- _ ->
- true
- end
- || {F, 0} <- M:module_info(exports)])
- end,
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]);
- module -> Module($(2));
- function -> proper:quickcheck($(2), nocolors)
- end,
- CoverExport("$(COVER_DATA_DIR)/proper.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-proper: test-build cover-data-dir
- $(verbose) $(call erlang,$(call proper_check.erl,module,$(t)))
-else
-proper: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)()))
-endif
-else
-proper: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Verbosity.
-
-proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
-proto_verbose = $(proto_verbose_$(V))
-
-# Core targets.
-
-ifneq ($(wildcard src/),)
-ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
-PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
-ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
-
-ifeq ($(PROTO_FILES),)
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
- $(verbose) :
-else
-# Rebuild proto files when the Makefile changes.
-# We exclude $(PROJECT).d to avoid a circular dependency.
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(PROTO_FILES); \
- fi
- $(verbose) touch $@
-
-$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
-endif
-
-ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
-define compile_proto.erl
- [begin
- protobuffs_compile:generate_source(F, [
- {output_include_dir, "./include"},
- {output_src_dir, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-else
-define compile_proto.erl
- [begin
- gpb_compile:file(F, [
- {include_as_lib, true},
- {module_name_suffix, "_pb"},
- {o_hrl, "./include"},
- {o_erl, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-endif
-
-ifneq ($(PROTO_FILES),)
-$(PROJECT).d:: $(PROTO_FILES)
- $(verbose) mkdir -p ebin/ include/
- $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
-endif
-endif
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
-
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
- " shell Run an erlang shell with SHELL_OPTS or reasonable default"
-
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
-
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
- "ReST sources and 'conf.py' file are expected in directory pointed by" \
- "SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists formats to build (only" \
- "'html' format is generated by default); target directory can be specified by" \
- 'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
- "Additional Sphinx options can be set in SPHINX_OPTS."
-
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
-
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
-.PHONY: triq
-
-# Targets.
-
-tests:: triq
-
-define triq_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
- module -> triq:check($(2));
- function -> triq:check($(2))
- end,
- CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-triq: test-build cover-data-dir
- $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
-else
-triq: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
-endif
-else
-triq: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
- ' xref Run Xrefr using $$XREF_CONFIG as the config file if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-COVER_REPORT_DIR ?= cover
-COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
-
-ifdef COVER
-COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
-COVER_DEPS ?=
-endif
-
-# Code coverage for Common Test.
-
-ifdef COVER
-ifdef CT_RUN
-ifneq ($(wildcard $(TEST_DIR)),)
-test-build:: $(TEST_DIR)/ct.cover.spec
-
-$(TEST_DIR)/ct.cover.spec: cover-data-dir
- $(gen_verbose) printf "%s\n" \
- "{incl_app, '$(PROJECT)', details}." \
- "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
- $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
- $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
-
-CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
-endif
-endif
-endif
-
-# Code coverage for other tools.
-
-ifdef COVER
-define cover.erl
- CoverSetup = fun() ->
- Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
- $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
- $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
- end
- end || Dir <- Dirs]
- end,
- CoverExport = fun(Filename) -> cover:export(Filename) end,
-endef
-else
-define cover.erl
- CoverSetup = fun() -> ok end,
- CoverExport = fun(_) -> ok end,
-endef
-endif
-
-# Core targets
-
-ifdef COVER
-ifneq ($(COVER_REPORT_DIR),)
-tests::
- $(verbose) $(MAKE) --no-print-directory cover-report
-endif
-
-cover-data-dir: | $(COVER_DATA_DIR)
-
-$(COVER_DATA_DIR):
- $(verbose) mkdir -p $(COVER_DATA_DIR)
-else
-cover-data-dir:
-endif
-
-clean:: coverdata-clean
-
-ifneq ($(COVER_REPORT_DIR),)
-distclean:: cover-report-clean
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Cover targets:" \
- " cover-report Generate a HTML coverage report from previously collected" \
- " cover data." \
- " all.coverdata Merge all coverdata files into all.coverdata." \
- "" \
- "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
- "target tests additionally generates a HTML coverage report from the combined" \
- "coverdata files from each of these testing tools. HTML reports can be disabled" \
- "by setting COVER_REPORT_DIR to empty."
-
-# Plugin specific targets
-
-COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
-
-.PHONY: coverdata-clean
-coverdata-clean:
- $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
-
-# Merge all coverdata files into one.
-define cover_export.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
-endef
-
-all.coverdata: $(COVERDATA) cover-data-dir
- $(gen_verbose) $(call erlang,$(cover_export.erl))
-
-# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
-# empty if you want the coverdata files but not the HTML report.
-ifneq ($(COVER_REPORT_DIR),)
-
-.PHONY: cover-report-clean cover-report
-
-cover-report-clean:
- $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
-ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
- $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
-endif
-
-ifeq ($(COVERDATA),)
-cover-report:
-else
-
-# Modules which include eunit.hrl always contain one line without coverage
-# because eunit defines test/0 which is never called. We compensate for this.
-EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
- grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
- | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
-
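A sketch of where that extra uncovered line comes from, using a hypothetical module (not part of erlang.mk): including `eunit.hrl` makes eunit generate and export a `test/0` for the module, and a plain cover run never calls it, so one generated line per such module is reported as uncovered; the `EUNIT_HRL_MODS` adjustment in `cover_report.erl` below subtracts it from the totals.

```erlang
%% Hypothetical example module, for illustration only. Including eunit.hrl
%% causes eunit to generate and auto-export test/0 for this module; when
%% cover data is collected without invoking that generated function, its
%% single line shows up as uncovered.
-module(sample_with_eunit).
-include_lib("eunit/include/eunit.hrl").

double(X) -> 2 * X.

double_test() ->
    ?assertEqual(4, double(2)).
```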
-define cover_report.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- Ms = cover:imported_modules(),
- [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
- ++ ".COVER.html", [html]) || M <- Ms],
- Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
- EunitHrlMods = [$(EUNIT_HRL_MODS)],
- Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
- true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
- TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
- TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
- Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
- TotalPerc = Perc(TotalY, TotalN),
- {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
- io:format(F, "<!DOCTYPE html><html>~n"
- "<head><meta charset=\"UTF-8\">~n"
- "<title>Coverage report</title></head>~n"
- "<body>~n", []),
- io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
- io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
- [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
- "<td>~p%</td></tr>~n",
- [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
- How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
- Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
- io:format(F, "</table>~n"
- "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
- "</body></html>", [How, Date]),
- halt().
-endef
-
-cover-report:
- $(verbose) mkdir -p $(COVER_REPORT_DIR)
- $(gen_verbose) $(call erlang,$(cover_report.erl))
-
-endif
-endif # ifneq ($(COVER_REPORT_DIR),)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
-
-endif
-endif
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
-
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
-
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are also included no
-# matter which type of dependencies is requested.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
-
-# Allow using fetch-deps and $(DEP_TYPES) to fetch multiple types of
-# dependencies with a single target.
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
-
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
diff --git a/deps/rabbitmq_auth_mechanism_ssl/rabbitmq-components.mk b/deps/rabbitmq_auth_mechanism_ssl/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/rabbitmq_auth_mechanism_ssl/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Set the default goal to `all` because this file defines some targets
-# before the inclusion of erlang.mk, leading to the wrong target becoming
-# the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
-
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to check out branches which match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch or fall back to `stable` or `master`, whichever was the
-# base of the topic branch.
-
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
-
-# Third-party dependencies version pinning.
-#
-# We do that in this file, which is copied in all projects, to ensure
-# all projects use the same versions. It avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# By default, Erlang.mk does not rebuild dependencies once they have
-# been compiled, except for those listed in the `$(FORCE_REBUILD)`
-# variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this eases
-# the work on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project
-# repository URL, if it's a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name are replaced by the
-# target's properties:
-# eg. rabbitmq-common is replaced by rabbitmq-codegen
-# eg. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fall back to the
-# RabbitMQ upstream repositories on GitHub.
-
-# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following pattern:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
-
-# Macro to replace both the project's name (eg. "rabbit_common") and
-# repository name (eg. "rabbitmq-common") by the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespace in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level's one.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is a coincidence that we found a
-# `rabbitmq-components.mk` further up the directory tree.
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
diff --git a/deps/rabbitmq_auth_mechanism_ssl/src/rabbit_auth_mechanism_ssl.erl b/deps/rabbitmq_auth_mechanism_ssl/src/rabbit_auth_mechanism_ssl.erl
index 335dbbc2c5..c8c346467a 100644
--- a/deps/rabbitmq_auth_mechanism_ssl/src/rabbit_auth_mechanism_ssl.erl
+++ b/deps/rabbitmq_auth_mechanism_ssl/src/rabbit_auth_mechanism_ssl.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
@@ -49,14 +49,16 @@ init(Sock) ->
case rabbit_ssl:peer_cert_auth_name(C) of
unsafe -> {refused, none, "TLS configuration is unsafe", []};
not_found -> {refused, none, "no name found", []};
- Name -> rabbit_data_coercion:to_binary(Name)
+ Name ->
+ Val = rabbit_data_coercion:to_binary(Name),
+ rabbit_log:debug("auth mechanism TLS extracted username '~s' from peer certificate", [Val]),
+ Val
end;
{error, no_peercert} ->
{refused, none, "connection peer presented no TLS (x.509) certificate", []};
nossl ->
{refused, none, "not a TLS-enabled connection", []}
end,
- rabbit_log:debug("auth mechanism TLS extracted username '~s' from peer certificate", [Username]),
#state{username = Username}.
handle_response(_Response, #state{username = Username}) ->
diff --git a/deps/rabbitmq_auth_mechanism_ssl/src/rabbit_auth_mechanism_ssl_app.erl b/deps/rabbitmq_auth_mechanism_ssl/src/rabbit_auth_mechanism_ssl_app.erl
index 6b3af6acc0..5e2eac48a8 100644
--- a/deps/rabbitmq_auth_mechanism_ssl/src/rabbit_auth_mechanism_ssl_app.erl
+++ b/deps/rabbitmq_auth_mechanism_ssl/src/rabbit_auth_mechanism_ssl_app.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_auth_mechanism_ssl_app).
@@ -11,7 +11,7 @@
-export([start/2, stop/1]).
%% Dummy supervisor - see Ulf Wiger's comment at
-%% http://erlang.2086793.n4.nabble.com/initializing-library-applications-without-processes-td2094473.html
+%% http://erlang.org/pipermail/erlang-questions/2010-April/050508.html
-behaviour(supervisor).
-export([init/1]).
diff --git a/deps/rabbitmq_aws/BUILD.bazel b/deps/rabbitmq_aws/BUILD.bazel
new file mode 100644
index 0000000000..aba4985632
--- /dev/null
+++ b/deps/rabbitmq_aws/BUILD.bazel
@@ -0,0 +1,67 @@
+load("@bazel-erlang//:eunit.bzl", "eunit")
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze", "plt")
+load("//:rabbitmq.bzl", "RABBITMQ_TEST_ERLC_OPTS", "rabbitmq_lib")
+
+APP_NAME = "rabbitmq_aws"
+
+APP_DESCRIPTION = "A minimalistic AWS API interface used by rabbitmq-autocluster (3.6.x) and other RabbitMQ plugins"
+
+APP_MODULE = "rabbitmq_aws_app"
+
+APP_REGISTERED = [
+ "rabbitmq_aws",
+]
+
+EXTRA_APPS = [
+ "crypto",
+ "inets",
+ "ssl",
+ "xmerl",
+]
+
+BUILD_DEPS = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+]
+
+rabbitmq_lib(
+ app_description = APP_DESCRIPTION,
+ app_module = APP_MODULE,
+ app_name = APP_NAME,
+ app_registered = APP_REGISTERED,
+ build_deps = BUILD_DEPS,
+ extra_apps = EXTRA_APPS,
+)
+
+xref(
+ additional_libs = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+ "@credentials_obfuscation//:bazel_erlang_lib",
+ "@jsx//:bazel_erlang_lib",
+ "@ranch//:bazel_erlang_lib",
+ "@recon//:bazel_erlang_lib",
+ ],
+ tags = ["xref"],
+)
+
+plt(
+ name = "base_plt",
+ apps = EXTRA_APPS,
+ plt = "//:base_plt",
+)
+
+dialyze(
+ plt = ":base_plt",
+ tags = ["dialyze"],
+)
+
+eunit(
+ erlc_opts = RABBITMQ_TEST_ERLC_OPTS + [
+ "-I deps/rabbitmq_aws",
+ "+nowarn_export_all",
+ ],
+ runtime_deps = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+ "@meck//:bazel_erlang_lib",
+ ],
+)
diff --git a/deps/rabbitmq_aws/Makefile b/deps/rabbitmq_aws/Makefile
index 8da412b4f7..85c1fc9d11 100644
--- a/deps/rabbitmq_aws/Makefile
+++ b/deps/rabbitmq_aws/Makefile
@@ -18,5 +18,5 @@ TEST_DEPS = meck
ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
ERLANG_MK_COMMIT = rabbitmq-tmp
-include rabbitmq-components.mk
-include erlang.mk
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
diff --git a/deps/rabbitmq_aws/README.md b/deps/rabbitmq_aws/README.md
index 9d87ab6bcd..b6ff709657 100644
--- a/deps/rabbitmq_aws/README.md
+++ b/deps/rabbitmq_aws/README.md
@@ -34,7 +34,16 @@ The credentials values have the following precedence:
- Environment variables
- Credentials file
- EC2 Instance Metadata Service
-
+
+### EC2 Instance Metadata Service Versions
+
+There are two versions of the EC2 Instance Metadata Service (IMDS) available by default on EC2 instances: IMDSv1 and IMDSv2. IMDSv2 is protected by session authentication
+and [adds defenses against additional vulnerabilities](https://aws.amazon.com/blogs/security/defense-in-depth-open-firewalls-reverse-proxies-ssrf-vulnerabilities-ec2-instance-metadata-service/).
+AWS recommends adopting IMDSv2 and disabling IMDSv1 [by configuring the Instance Metadata Service on the EC2 instances](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/configuring-instance-metadata-service.html).
+
+By default, *rabbitmq-aws* attempts to use IMDSv2 first and falls back to IMDSv1 if calls to IMDSv2 fail. This behavior can be overridden
+by setting ``aws.prefer_imdsv2`` to ``false``.
+
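A minimal sketch of exercising this from an Erlang shell, assuming the ini-style `rabbitmq.conf` spelling of the setting above; return values are intentionally not matched, and both functions used here are listed in the API table later in this README:

```erlang
%% Sketch only: force an IMDSv2 session token check before refreshing
%% credentials. To disable the IMDSv2-first behaviour, the assumed
%% rabbitmq.conf line would be:
%%   aws.prefer_imdsv2 = false
application:start(rabbitmq_aws),
rabbitmq_aws:ensure_imdsv2_token_valid(),
rabbitmq_aws:refresh_credentials().
```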
### Environment Variables
As with the AWS CLI, the following environment variables can be used to provide
@@ -53,7 +62,9 @@ configuration or to impact configuration behavior:
---------------------------------------|--------------------------------------------------------------------------------------------
``rabbitmq_aws:set_region/1`` | Manually specify the AWS region to make requests to.
``rabbitmq_aws:set_credentials/2`` | Manually specify the request credentials to use.
- ``rabbitmq_aws:refresh_credentials/0`` | Refresh the credentials from the environment, filesystem, or EC2 Instance Metadata service.
+ ``rabbitmq_aws:refresh_credentials/0`` | Refresh the credentials from the environment, filesystem, or EC2 Instance Metadata Service.
+ ``rabbitmq_aws:ensure_imdsv2_token_valid/0`` | Make sure the EC2 IMDSv2 token is active and valid.
+ ``rabbitmq_aws:api_get_request/2`` | Perform an AWS service API request.
``rabbitmq_aws:get/2`` | Perform a GET request to the API specifying the service and request path.
``rabbitmq_aws:get/3`` | Perform a GET request specifying the service, path, and headers.
``rabbitmq_aws:post/4`` | Perform a POST request specifying the service, path, headers, and body.
@@ -65,7 +76,7 @@ configuration or to impact configuration behavior:
## Example Usage
The following example assumes that you either have locally configured credentials or that
-you're using the AWS Instance Metadata service for credentials:
+you're using the EC2 Instance Metadata Service for credentials:
```erlang
application:start(rabbitmq_aws).
diff --git a/deps/rabbitmq_aws/erlang.mk b/deps/rabbitmq_aws/erlang.mk
deleted file mode 100644
index fce4be0b0a..0000000000
--- a/deps/rabbitmq_aws/erlang.mk
+++ /dev/null
@@ -1,7808 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
-ERLANG_MK_WITHOUT =
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
-
-# Adding erlang.mk to make Erlang scripts that call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
-
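A small sketch of what the trailing `-- erlang.mk` argument buys: everything after `--` is returned by `init:get_plain_arguments/0`, so such scripts always see at least one plain argument.

```erlang
%% Illustration only: when the VM is started through the `erlang` macro
%% above, `-- erlang.mk` is appended, so init:get_plain_arguments/0
%% returns ["erlang.mk"] rather than [].
case init:get_plain_arguments() of
    []   -> io:format("no plain arguments~n");
    Args -> io:format("plain arguments: ~p~n", [Args])
end.
```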
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_HIPE)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
-
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / OpenStack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simplify writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating GitHub-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = erlang library for JSON Web Token
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple, non-intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is a pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's SpiderMonkey JavaScript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elasticsearch's REST interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = Redis Erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += esh_mk
-pkg_esh_mk_name = esh_mk
-pkg_esh_mk_description = esh template engine plugin for erlang.mk
-pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
-pkg_esh_mk_fetch = git
-pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
-pkg_esh_mk_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = Erlang parser for the TOML language
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = http://fixprotocol.org/ implementation.
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - an Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = Guard expression helpers for Erlang, implemented as a parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
-pkg_gun_homepage = http://ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang irc client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding and decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library for efficient JSON decoding and encoding, written in C.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-Erlang, open-source implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = erlang driver for rethinkdb
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = library to handle mimetypes
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang OAuth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transformer for using new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between record and proplist
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = automatic Erlang node discovery via redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = pipelined erlang redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang Rest Client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy as nif for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an ID generator for messaging services.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elastic Search
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX-style parser for broken HTML, in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = transit format for erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU based collation Erlang module
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtension for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = a simple erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler svn repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired by rebar xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based yaml loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = ZAB protocol implementation in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang.
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
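-
-# Usage sketch: the search target above matches q (case-insensitively)
-# against each package's name and description, e.g.
-#
-#     make search q=cowboy
-#
-# while a bare `make search` prints every package in the index.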
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
-
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
-
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# Both early and regular plugins are loaded through the core_dep_plugin macro.
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
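-
-# Illustration (hypothetical names): a bare dependency name loads that
-# dependency's early-plugins.mk, while a name containing a slash loads the
-# given file from the dependency it starts with:
-#
-#     DEP_EARLY_PLUGINS = my_dep              # $(DEPS_DIR)/my_dep/early-plugins.mk
-#     DEP_EARLY_PLUGINS = my_dep/mk/early.mk  # $(DEPS_DIR)/my_dep/mk/early.mk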
-
-# Query functions.
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
-
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
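-
-# Worked example (hypothetical dependency): given a declaration such as
-#
-#     dep_my_dep = git https://github.com/example/my_dep 1.2.3
-#
-# word 1 picks the fetch method, so query_fetch_method returns git,
-# query_repo returns the URL and query_version returns 1.2.3. Without a
-# dep_* line, the pkg_*_fetch/_repo/_commit entries from the index above
-# are used instead.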
-
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly we don't want to include it here,
-# otherwise it would be treated both as an app and as the top-level project.
-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
-
-export NO_AUTOPATCH
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
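-
-# For example, `make V=2 deps` traces every shell command via set -x,
-# while the default V=0 only prints the terse " DEP <name>" lines.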
-
-# Optimization: don't recompile deps unless truly necessary.
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create ebin directory for all apps to make sure Erlang recognizes them
-# as proper OTP applications when using -include_lib. This is a temporary
-# fix; a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
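-
-# Illustration (hypothetical app names): with
-#
-#     LOCAL_DEPS = my_app_a my_app_b
-#
-# only apps/my_app_a and apps/my_app_b are built at the top level; when
-# LOCAL_DEPS lists no local apps, every directory under $(APPS_DIR) is built.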
-
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies after they have been compiled
-# once. A developer working on the top-level project and on some
-# dependencies at the same time may want to change this behavior.
-# There are two ways to do so:
-# 1. Set `FULL=1` so that all dependencies are visited and
-# recursively recompiled if necessary.
-# 2. Set `FORCE_REBUILD=` to the specific list of dependencies that
-# should be recompiled (instead of the whole set).
-
-FORCE_REBUILD ?=
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
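-
-# Usage sketch (hypothetical dependency name):
-#
-#     make FULL=1                    # visit all dependencies again
-#     make FORCE_REBUILD="my_dep"    # force only the listed dependencies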
-
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
-
-# Deps related targets.
-
-# @todo rename GNUmakefile and makefile into Makefile first, if they exist
-# While the makefile could also be named GNUmakefile or makefile,
-# in practice only Makefile has been needed so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
-# if given. Do it for all 3 possible Makefile file names.
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
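-
-# Net effect: inside a fetched dependency, a line reading "include erlang.mk"
-# becomes "include $(if $(ERLANG_MK_FILENAME),$(ERLANG_MK_FILENAME),erlang.mk)",
-# so the parent project's copy of Erlang.mk is reused when its path is known.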
-
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-# Hex only has a package version. No need to look in the Erlang.mk packages.
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
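-
-# Illustration (hypothetical package and version): a hex dependency such as
-#
-#     DEPS += my_dep
-#     dep_my_dep = hex 1.2.3
-#
-# downloads my_dep-1.2.3.tar from repo.hex.pm; an optional third word
-# overrides the package name used in that URL.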
-
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility purposes with older Erlang.mk configuration.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
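-
-# Legacy form illustration (hypothetical dependency): the deprecated two-word
-# format omits the fetch method and always clones with git:
-#
-#     dep_my_dep = https://github.com/example/my_dep 1.2.3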
-
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
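-
-# Net effect: for every name in BUILD_DEPS and DEPS, the eval above creates a
-# $(DEPS_DIR)/<name> target that fetches the dependency with the matching
-# dep_fetch_* recipe, runs autoreconf/configure when a configure script is
-# present or can be generated, and finally runs autopatch-<name> unless the
-# dependency is listed in NO_AUTOPATCH.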
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
-
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
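For reference, the variables consumed by these app_file templates are normally set in the project's own Makefile; the second template (the one with {mod, ...}) is selected when src/$(PROJECT_MOD).erl exists. A minimal sketch, with all names and values illustrative:

    PROJECT = my_app
    PROJECT_DESCRIPTION = Example application
    PROJECT_VERSION = 1.0.0
    PROJECT_MOD = my_app_app
    PROJECT_REGISTERED = my_app_server
    PROJECT_ENV = [{listen_port, 8080}]
    LOCAL_DEPS = crypto

    include erlang.mk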
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- Output0 = [
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ],
- Output = case "é" of
- [233] -> unicode:characters_to_binary(Output0);
- _ -> Output0
- end,
- ok = file:write_file("$(1)", Output),
- halt()
-endef
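The script above writes plain make rules into $(PROJECT).d. Assuming src/my_worker.erl includes include/my_records.hrl and implements a project-local behaviour my_behaviour (file names illustrative), the generated file would look roughly like this:

    # Generated by Erlang.mk. Edit at your own risk!

    src/my_worker.erl:: include/my_records.hrl src/my_behaviour.erl; @touch $@

    COMPILE_FIRST += my_behaviour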
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
- echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
-
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
- @:
-
-test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
-test_erlc_verbose_2 = set -x;
-test_erlc_verbose = $(test_erlc_verbose_$(V))
-
-define compile_test_erl
- $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(1)
-endef
-
-ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
-$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
- $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want to fail due to
-# warnings when used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
- " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
- " list-templates List available templates"
-
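Typical invocations of the targets listed above, assuming erlang.mk has been copied into an empty directory (project and module names illustrative):

    $ make -f erlang.mk bootstrap        # Makefile, src/<project>_app.erl, src/<project>_sup.erl
    $ make bootstrap-rel                 # relx.config, config/sys.config, config/vm.args
    $ make new t=gen_server n=my_server  # src/my_server.erl from the gen_server template
    $ make new-app in=webui              # $(APPS_DIR)/webui/ skeleton with its own Makefile
    $ make list-templates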
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
-
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
-
-list-templates:
- $(verbose) @echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code. The "gcc" MSYS2 package also doesn't.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
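A usage sketch for the NIF templates above (the name hello is illustrative):

    $ make new-nif n=hello    # renders c_src/hello.c and src/hello.erl from the templates above
    $ make                    # compiles c_src/ and links $(C_SRC_OUTPUT_FILE), priv/$(PROJECT).so by default on Linux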
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
- "The CI_OTP variable must be defined with the Erlang versions" \
- "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
-
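A configuration sketch for the CI targets above; the OTP versions come from the help text and are built with kerl into $(KERL_INSTALL_DIR):

    CI_OTP = OTP-17.3.4 OTP-17.5.3

    $ make ci-prepare    # build the listed Erlang/OTP versions
    $ make ci            # run 'make tests' once per version, labelling each CT run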
-# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifdef CONCUERROR_TESTS
-
-.PHONY: concuerror distclean-concuerror
-
-# Configuration
-
-CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
-CONCUERROR_OPTS ?=
-
-# Core targets.
-
-check:: concuerror
-
-ifndef KEEP_LOGS
-distclean:: distclean-concuerror
-endif
-
-# Plugin-specific targets.
-
-$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
- $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
- $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
-
-$(CONCUERROR_LOGS_DIR):
- $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
-
-define concuerror_html_report
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="utf-8">
-<title>Concuerror HTML report</title>
-</head>
-<body>
-<h1>Concuerror HTML report</h1>
-<p>Generated on $(concuerror_date)</p>
-<ul>
-$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
-</ul>
-</body>
-</html>
-endef
-
-concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
- $(eval concuerror_date := $(shell date))
- $(eval concuerror_targets := $^)
- $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
-
-define concuerror_target
-.PHONY: concuerror-$1-$2
-
-concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
- $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
- --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
- -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
- $$(CONCUERROR_OPTS) -m $1 -t $2
-endef
-
-$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
-
-distclean-concuerror:
- $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
-
-endif
-
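Concuerror tests are declared as module:function pairs; a sketch with illustrative names:

    CONCUERROR_TESTS = my_srv:concurrent_start my_queue:push_pop

    $ make concuerror
    # one concuerror-<module>-<function> run per pair; the text logs plus concuerror.html
    # end up in $(CONCUERROR_LOGS_DIR), $(CURDIR)/logs by default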
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ct apps-ct distclean-ct
-
-# Configuration.
-
-CT_OPTS ?=
-
-ifneq ($(wildcard $(TEST_DIR)),)
-ifndef CT_SUITES
-CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
-endif
-endif
-CT_SUITES ?=
-CT_LOGS_DIR ?= $(CURDIR)/logs
-
-# Core targets.
-
-tests:: ct
-
-ifndef KEEP_LOGS
-distclean:: distclean-ct
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Common_test targets:" \
- " ct Run all the common_test suites for this project" \
- "" \
- "All your common_test suites have their associated targets." \
- "A suite named http_SUITE can be ran using the ct-http target."
-
-# Plugin-specific targets.
-
-CT_RUN = ct_run \
- -no_auto_compile \
- -noinput \
- -pa $(CURDIR)/ebin $(TEST_DIR) \
- -dir $(TEST_DIR) \
- -logdir $(CT_LOGS_DIR)
-
-ifeq ($(CT_SUITES),)
-ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
-else
-# We do not run tests if we are in an apps/* with no test directory.
-ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
-ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
-endif
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-define ct_app_target
-apps-ct-$1: test-build
- $$(MAKE) -C $1 ct IS_APP=1
-endef
-
-$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
-
-apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
-endif
-
-ifdef t
-ifeq (,$(findstring :,$t))
-CT_EXTRA = -group $t
-else
-t_words = $(subst :, ,$t)
-CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
-endif
-else
-ifdef c
-CT_EXTRA = -case $c
-else
-CT_EXTRA =
-endif
-endif
-
-define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
-endef
-
-$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
-
-distclean-ct:
- $(gen_verbose) rm -rf $(CT_LOGS_DIR)
-
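Usage sketches for the Common Test targets above (group and case names illustrative):

    $ make ct                          # every *_SUITE in $(TEST_DIR)
    $ make ct-http                     # http_SUITE only
    $ make ct-http t=admin             # adds -group admin
    $ make ct-http t=admin:list_users  # adds -group admin -case list_users
    $ make ct-http c=list_users        # adds -case list_users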
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: plt distclean-plt dialyze
-
-# Configuration.
-
-DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
-export DIALYZER_PLT
-
-PLT_APPS ?=
-DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-DIALYZER_PLT_OPTS ?=
-
-# Core targets.
-
-check:: dialyze
-
-distclean:: distclean-plt
-
-help::
- $(verbose) printf "%s\n" "" \
- "Dialyzer targets:" \
- " plt Build a PLT file for this project" \
- " dialyze Analyze the project using Dialyzer"
-
-# Plugin-specific targets.
-
-define filter_opts.erl
- Opts = init:get_plain_arguments(),
- {Filtered, _} = lists:foldl(fun
- (O, {Os, true}) -> {[O|Os], false};
- (O = "-D", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-I", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-pa", {Os, _}) -> {[O|Os], true};
- (_, Acc) -> Acc
- end, {[], false}, Opts),
- io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
- halt().
-endef
-
-# DIALYZER_PLT is a variable understood directly by Dialyzer.
-#
-# We append the path to erts at the end of the PLT. This works
-# because the PLT file is in the external term format and the
-# function binary_to_term/1 ignores any trailing data.
-$(DIALYZER_PLT): deps app
- $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
- while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
- $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
- erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
- $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
-
-plt: $(DIALYZER_PLT)
-
-distclean-plt:
- $(gen_verbose) rm -f $(DIALYZER_PLT)
-
-ifneq ($(wildcard $(DIALYZER_PLT)),)
-dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
- $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
- grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
- rm $(DIALYZER_PLT); \
- $(MAKE) plt; \
- fi
-else
-dialyze: $(DIALYZER_PLT)
-endif
- $(verbose) dialyzer --no_native `$(ERL) \
- -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
- -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
-
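A configuration sketch for the Dialyzer targets above; extra applications are added to the PLT through PLT_APPS (values illustrative):

    PLT_APPS = crypto public_key ssl
    DIALYZER_OPTS = -Werror_handling -Wunmatched_returns

    $ make plt        # builds .$(PROJECT).plt and appends the erts path used for the staleness check
    $ make dialyze    # rebuilds the PLT automatically if that erts path no longer matches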
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
- " escript Build an executable escript archive" \
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
- $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: eunit apps-eunit
-
-# Configuration
-
-EUNIT_OPTS ?=
-EUNIT_ERL_OPTS ?=
-
-# Core targets.
-
-tests:: eunit
-
-help::
- $(verbose) printf "%s\n" "" \
- "EUnit targets:" \
- " eunit Run all the EUnit tests for this project"
-
-# Plugin-specific targets.
-
-define eunit.erl
- $(call cover.erl)
- CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
- ok -> ok;
- error -> halt(2)
- end,
- CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
- halt()
-endef
-
-EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
-else
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
-endif
-else
-EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
-EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
-
-EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
- $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
-
-eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
-ifneq ($(wildcard src/ $(TEST_DIR)),)
- $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-apps-eunit: test-build
- $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
- [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
- exit $$eunit_retcode
-endif
-endif
-
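Usage sketches for the EUnit targets above (module and test names illustrative):

    $ make eunit                            # every module in ebin/ and $(TEST_DIR)
    $ make eunit t=my_module                # a single module
    $ make eunit t=my_module:timeout_test_  # a single test, run as fun my_module:timeout_test_/0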
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper)
-.PHONY: proper
-
-# Targets.
-
-tests:: proper
-
-define proper_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- Module = fun(M) ->
- [true] =:= lists:usort([
- case atom_to_list(F) of
- "prop_" ++ _ ->
- io:format("Testing ~p:~p/0~n", [M, F]),
- proper:quickcheck(M:F(), nocolors);
- _ ->
- true
- end
- || {F, 0} <- M:module_info(exports)])
- end,
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]);
- module -> Module($(2));
- function -> proper:quickcheck($(2), nocolors)
- end,
- CoverExport("$(COVER_DATA_DIR)/proper.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-proper: test-build cover-data-dir
- $(verbose) $(call erlang,$(call proper_check.erl,module,$(t)))
-else
-proper: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)()))
-endif
-else
-proper: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
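Usage sketches for the PropEr target above (module and property names illustrative):

    $ make proper                         # every exported prop_* of the compiled modules
    $ make proper t=prop_queue            # all properties of one module
    $ make proper t=prop_queue:prop_fifo  # a single property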
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Verbosity.
-
-proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
-proto_verbose = $(proto_verbose_$(V))
-
-# Core targets.
-
-ifneq ($(wildcard src/),)
-ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
-PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
-ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
-
-ifeq ($(PROTO_FILES),)
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
- $(verbose) :
-else
-# Rebuild proto files when the Makefile changes.
-# We exclude $(PROJECT).d to avoid a circular dependency.
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(PROTO_FILES); \
- fi
- $(verbose) touch $@
-
-$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
-endif
-
-ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
-define compile_proto.erl
- [begin
- protobuffs_compile:generate_source(F, [
- {output_include_dir, "./include"},
- {output_src_dir, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-else
-define compile_proto.erl
- [begin
- gpb_compile:file(F, [
- {include_as_lib, true},
- {module_name_suffix, "_pb"},
- {o_hrl, "./include"},
- {o_erl, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-endif
-
-ifneq ($(PROTO_FILES),)
-$(PROJECT).d:: $(PROTO_FILES)
- $(verbose) mkdir -p ebin/ include/
- $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
-endif
-endif
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
-
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
- " shell Run an erlang shell with SHELL_OPTS or reasonable default"
-
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
-
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
- "ReST sources and 'conf.py' file are expected in directory pointed by" \
- "SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists formats to build (only" \
- "'html' format is generated by default); target directory can be specified by" \
- 'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
- "Additional Sphinx options can be set in SPHINX_OPTS."
-
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
-
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
-.PHONY: triq
-
-# Targets.
-
-tests:: triq
-
-define triq_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
- module -> triq:check($(2));
- function -> triq:check($(2))
- end,
- CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-triq: test-build cover-data-dir
- $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
-else
-triq: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
-endif
-else
-triq: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
- ' xref Run Xrefr using $$XREF_CONFIG as config file if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-COVER_REPORT_DIR ?= cover
-COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
-
-ifdef COVER
-COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
-COVER_DEPS ?=
-endif
-
-# Code coverage for Common Test.
-
-ifdef COVER
-ifdef CT_RUN
-ifneq ($(wildcard $(TEST_DIR)),)
-test-build:: $(TEST_DIR)/ct.cover.spec
-
-$(TEST_DIR)/ct.cover.spec: cover-data-dir
- $(gen_verbose) printf "%s\n" \
- "{incl_app, '$(PROJECT)', details}." \
- "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
- $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
- $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
-
-CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
-endif
-endif
-endif
-
-# Code coverage for other tools.
-
-ifdef COVER
-define cover.erl
- CoverSetup = fun() ->
- Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
- $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
- $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
- end
- end || Dir <- Dirs]
- end,
- CoverExport = fun(Filename) -> cover:export(Filename) end,
-endef
-else
-define cover.erl
- CoverSetup = fun() -> ok end,
- CoverExport = fun(_) -> ok end,
-endef
-endif
-
-# Core targets
-
-ifdef COVER
-ifneq ($(COVER_REPORT_DIR),)
-tests::
- $(verbose) $(MAKE) --no-print-directory cover-report
-endif
-
-cover-data-dir: | $(COVER_DATA_DIR)
-
-$(COVER_DATA_DIR):
- $(verbose) mkdir -p $(COVER_DATA_DIR)
-else
-cover-data-dir:
-endif
-
-clean:: coverdata-clean
-
-ifneq ($(COVER_REPORT_DIR),)
-distclean:: cover-report-clean
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Cover targets:" \
- " cover-report Generate a HTML coverage report from previously collected" \
- " cover data." \
- " all.coverdata Merge all coverdata files into all.coverdata." \
- "" \
- "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
- "target tests additionally generates a HTML coverage report from the combined" \
- "coverdata files from each of these testing tools. HTML reports can be disabled" \
- "by setting COVER_REPORT_DIR to empty."
-
-# Plugin specific targets
-
-COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
-
-.PHONY: coverdata-clean
-coverdata-clean:
- $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
-
-# Merge all coverdata files into one.
-define cover_export.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
-endef
-
-all.coverdata: $(COVERDATA) cover-data-dir
- $(gen_verbose) $(call erlang,$(cover_export.erl))
-
-# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
-# empty if you want the coverdata files but not the HTML report.
-ifneq ($(COVER_REPORT_DIR),)
-
-.PHONY: cover-report-clean cover-report
-
-cover-report-clean:
- $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
-ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
- $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
-endif
-
-ifeq ($(COVERDATA),)
-cover-report:
-else
-
-# Modules which include eunit.hrl always contain one line without coverage
-# because eunit defines test/0 which is never called. We compensate for this.
-EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
- grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
- | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
-
-define cover_report.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- Ms = cover:imported_modules(),
- [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
- ++ ".COVER.html", [html]) || M <- Ms],
- Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
- EunitHrlMods = [$(EUNIT_HRL_MODS)],
- Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
- true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
- TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
- TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
- Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
- TotalPerc = Perc(TotalY, TotalN),
- {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
- io:format(F, "<!DOCTYPE html><html>~n"
- "<head><meta charset=\"UTF-8\">~n"
- "<title>Coverage report</title></head>~n"
- "<body>~n", []),
- io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
- io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
- [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
- "<td>~p%</td></tr>~n",
- [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
- How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
- Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
- io:format(F, "</table>~n"
- "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
- "</body></html>", [How, Date]),
- halt().
-endef
-
-cover-report:
- $(verbose) mkdir -p $(COVER_REPORT_DIR)
- $(gen_verbose) $(call erlang,$(cover_report.erl))
-
-endif
-endif # ifneq ($(COVER_REPORT_DIR),)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
-
-endif
-endif
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
-
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
-
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are also included no
-# matter the type of requested dependencies.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
-
-# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of
-# dependencies with a single target.
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
-
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
diff --git a/deps/rabbitmq_aws/include/rabbitmq_aws.hrl b/deps/rabbitmq_aws/include/rabbitmq_aws.hrl
index 5c5a478a59..6b104c10bc 100644
--- a/deps/rabbitmq_aws/include/rabbitmq_aws.hrl
+++ b/deps/rabbitmq_aws/include/rabbitmq_aws.hrl
@@ -29,6 +29,22 @@
-define(INSTANCE_CREDENTIALS, "iam/security-credentials").
-define(INSTANCE_METADATA_BASE, "latest/meta-data").
+-define(INSTANCE_ID, "instance-id").
+
+-define(TOKEN_URL, "latest/api/token").
+
+-define(METADATA_TOKEN_TTL_HEADER, "X-aws-ec2-metadata-token-ttl-seconds").
+
+% EC2 Instance Metadata service version 2 (IMDSv2) uses session-oriented authentication.
+% Instance metadata service requests are only needed for loading/refreshing credentials.
+% Long-lived EC2 IMDSv2 tokens are unnecessary. The token only needs to be valid long enough
+% to successfully load/refresh the credentials. 60 seconds is more than enough time to accomplish this.
+-define(METADATA_TOKEN_TTL_SECONDS, 60).
+
+-define(METADATA_TOKEN, "X-aws-ec2-metadata-token").
+
+-define(LINEAR_BACK_OFF_MILLIS, 500).
+-define(MAX_RETRIES, 5).
-type access_key() :: nonempty_string().
-type secret_access_key() :: nonempty_string().
@@ -41,11 +57,17 @@
-type sc_error() :: {error, Reason :: atom()}.
-type security_credentials() :: sc_ok() | sc_error().
+-record(imdsv2token, { token :: security_token() | undefined,
+ expiration :: expiration() | undefined}).
+
+-type imdsv2token() :: #imdsv2token{}.
+
-record(state, {access_key :: access_key() | undefined,
secret_access_key :: secret_access_key() | undefined,
expiration :: expiration() | undefined,
security_token :: security_token() | undefined,
region :: region() | undefined,
+ imdsv2_token:: imdsv2token() | undefined,
error :: atom() | string() | undefined}).
-type state() :: #state{}.
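
The new #imdsv2token{} record and the ?METADATA_TOKEN_TTL_SECONDS constant work together: a freshly obtained token is stamped with an absolute expiration (Gregorian seconds) and is treated as stale once that time passes. A minimal sketch of that bookkeeping, using calendar:local_time/0 in place of the module's own local_time/0 helper (module and function names below are illustrative, not part of the patch):

    -module(imdsv2_token_example).
    -export([new_token/1, is_stale/1]).
    -include("rabbitmq_aws.hrl").

    %% Stamp a token with "now + 60s" so it only lives long enough to
    %% load or refresh credentials.
    new_token(TokenValue) ->
        Now = calendar:datetime_to_gregorian_seconds(calendar:local_time()),
        #imdsv2token{token      = TokenValue,
                     expiration = Now + ?METADATA_TOKEN_TTL_SECONDS}.

    %% A token without an expiration, or past its expiration, must be re-obtained.
    is_stale(#imdsv2token{expiration = undefined}) -> true;
    is_stale(#imdsv2token{expiration = Expiration}) ->
        calendar:datetime_to_gregorian_seconds(calendar:local_time()) >= Expiration.
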
diff --git a/deps/rabbitmq_aws/priv/schema/rabbitmq_aws.schema b/deps/rabbitmq_aws/priv/schema/rabbitmq_aws.schema
new file mode 100644
index 0000000000..8803783359
--- /dev/null
+++ b/deps/rabbitmq_aws/priv/schema/rabbitmq_aws.schema
@@ -0,0 +1,19 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+% ===============================
+% AWS section
+% ===============================
+
+%% @doc Whether or not to prefer EC2 IMDSv2 when querying the instance metadata service.
+%% The setting defaults to true. When true, instance metadata requests will first be attempted using EC2 IMDSv2
+%% and will fall back to EC2 IMDSv1 upon failure.
+%% When false, only EC2 IMDSv1 will be used and no attempt will be made to use EC2 IMDSv2.
+%% See https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/configuring-instance-metadata-service.html.
+
+{mapping, "aws.prefer_imdsv2", "rabbit.aws_prefer_imdsv2",
+ [{datatype, {enum, [true, false]}}]}.
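
For operators this amounts to a single boolean. A hedged usage sketch (the conf line and the shell check below are an assumption about typical use, not part of the patch): setting "aws.prefer_imdsv2 = false" in rabbitmq.conf is translated by the mapping above into the 'rabbit' application environment, which the AWS client code reads.

    %% From an Erlang shell on the node, after setting aws.prefer_imdsv2 = false:
    application:get_env(rabbit, aws_prefer_imdsv2).
    %% returns {ok, false}; when the key is unset, the default applies and
    %% EC2 IMDSv2 is preferred.
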
diff --git a/deps/rabbitmq_aws/rabbitmq-components.mk b/deps/rabbitmq_aws/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/rabbitmq_aws/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Define default goal to `all` because this file defines some targets
-# before the inclusion of erlang.mk leading to the wrong target becoming
-# the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
-
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to checkout branches which match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch or fallback to `stable` or `master` whichever was the
-# base of the topic branch.
-
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
-
-# Third-party dependencies version pinning.
-#
-# We do that in this file, which is copied in all projects, to ensure
-# all projects use the same versions. It avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# Erlang.mk does not rebuild dependencies by default, once they were
-# compiled once, except for those listed in the `$(FORCE_REBUILD)`
-# variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this eases
-# the work on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project
-# repository URL, if it's a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name is replaced by the
-# target's properties:
-# eg. rabbitmq-common is replaced by rabbitmq-codegen
-# eg. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fallback to RabbitMQ
-# upstream which is GitHub.
-
-# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following pattern:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
-
-# Macro to replace both the project's name (eg. "rabbit_common") and
-# repository name (eg. "rabbitmq-common") by the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespaces in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level's one.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is a coincidence that we found a
-# `rabbitmq-components.mk` up upper directories.
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
diff --git a/deps/rabbitmq_aws/src/rabbitmq_aws.erl b/deps/rabbitmq_aws/src/rabbitmq_aws.erl
index 4a152f3b21..476539b441 100644
--- a/deps/rabbitmq_aws/src/rabbitmq_aws.erl
+++ b/deps/rabbitmq_aws/src/rabbitmq_aws.erl
@@ -15,7 +15,9 @@
request/5, request/6, request/7,
set_credentials/2,
has_credentials/0,
- set_region/1]).
+ set_region/1,
+ ensure_imdsv2_token_valid/0,
+ api_get_request/2]).
%% gen-server exports
-export([start_link/0,
@@ -71,13 +73,22 @@ post(Service, Path, Body, Headers) ->
-spec refresh_credentials() -> ok | error.
-%% @doc Manually refresh the credentials from the environment, filesystem or EC2
-%% Instance metadata service.
+%% @doc Manually refresh the credentials from the environment, filesystem or EC2 Instance Metadata Service.
%% @end
refresh_credentials() ->
gen_server:call(rabbitmq_aws, refresh_credentials).
+-spec refresh_credentials(state()) -> ok | error.
+%% @doc Manually refresh the credentials from the environment, filesystem or EC2 Instance Metadata Service.
+%% @end
+refresh_credentials(State) ->
+ rabbit_log:debug("Refreshing AWS credentials..."),
+ {_, NewState} = load_credentials(State),
+ rabbit_log:debug("AWS credentials have been refreshed"),
+ set_credentials(NewState).
+
+
-spec request(Service :: string(),
Method :: method(),
Path :: path(),
@@ -120,6 +131,9 @@ request(Service, Method, Path, Body, Headers, HTTPOptions) ->
request(Service, Method, Path, Body, Headers, HTTPOptions, Endpoint) ->
gen_server:call(rabbitmq_aws, {request, Service, Method, Headers, Path, Body, HTTPOptions, Endpoint}).
+-spec set_credentials(state()) -> ok.
+set_credentials(NewState) ->
+ gen_server:call(rabbitmq_aws, {set_credentials, NewState}).
-spec set_credentials(access_key(), secret_access_key()) -> ok.
%% @doc Manually set the access credentials for requests. This should
@@ -137,6 +151,20 @@ set_credentials(AccessKey, SecretAccessKey) ->
set_region(Region) ->
gen_server:call(rabbitmq_aws, {set_region, Region}).
+-spec set_imdsv2_token(imdsv2token()) -> ok.
+%% @doc Manually set the Imdsv2Token used to perform instance metadata service requests.
+%% @end
+set_imdsv2_token(Imdsv2Token) ->
+ gen_server:call(rabbitmq_aws, {set_imdsv2_token, Imdsv2Token}).
+
+
+-spec get_imdsv2_token() -> imdsv2token().
+%% @doc Return the current Imdsv2Token used to perform instance metadata service requests.
+%% @end
+get_imdsv2_token() ->
+ {ok, Imdsv2Token} = gen_server:call(rabbitmq_aws, get_imdsv2_token),
+ Imdsv2Token.
+
%%====================================================================
%% gen_server functions
@@ -158,15 +186,8 @@ terminate(_, _) ->
code_change(_, _, State) ->
{ok, State}.
-
-handle_call(Msg, _From, #state{region = undefined}) ->
- %% Delay initialisation until a RabbitMQ plugin require the AWS backend
- {ok, Region} = rabbitmq_aws_config:region(),
- {_, State} = load_credentials(#state{region = Region}),
- handle_msg(Msg, State);
handle_call(Msg, _From, State) ->
- handle_msg(Msg, State).
-
+ handle_msg(Msg, State).
handle_cast(_Request, State) ->
{noreply, State}.
@@ -196,15 +217,29 @@ handle_msg({set_credentials, AccessKey, SecretAccessKey}, State) ->
expiration = undefined,
error = undefined}};
+handle_msg({set_credentials, NewState}, State) ->
+ {reply, ok, State#state{access_key = NewState#state.access_key,
+ secret_access_key = NewState#state.secret_access_key,
+ security_token = NewState#state.security_token,
+ expiration = NewState#state.expiration,
+ error = NewState#state.error}};
+
handle_msg({set_region, Region}, State) ->
{reply, ok, State#state{region = Region}};
+handle_msg({set_imdsv2_token, Imdsv2Token}, State) ->
+ {reply, ok, State#state{imdsv2_token = Imdsv2Token}};
+
handle_msg(has_credentials, State) ->
{reply, has_credentials(State), State};
+handle_msg(get_imdsv2_token, State) ->
+ {reply, {ok, State#state.imdsv2_token}, State};
+
handle_msg(_Request, State) ->
{noreply, State}.
+
-spec endpoint(State :: state(), Host :: string(),
Service :: string(), Path :: string()) -> string().
%% @doc Return the endpoint URL, either by constructing it with the service
@@ -296,15 +331,17 @@ load_credentials(#state{region = Region}) ->
access_key = AccessKey,
secret_access_key = SecretAccessKey,
expiration = Expiration,
- security_token = SecurityToken}};
+ security_token = SecurityToken,
+ imdsv2_token = undefined}};
{error, Reason} ->
- error_logger:error_msg("Could not load AWS credentials from environment variables, AWS_CONFIG_FILE, AWS_SHARED_CREDENTIALS_FILE or EC2 metadata endpoint: ~p. Will depend on config settings to be set.~n.", [Reason]),
+ error_logger:error_msg("Could not load AWS credentials from environment variables, AWS_CONFIG_FILE, AWS_SHARED_CREDENTIALS_FILE or EC2 metadata endpoint: ~p. Will depend on config settings to be set~n", [Reason]),
{error, #state{region = Region,
error = Reason,
access_key = undefined,
secret_access_key = undefined,
expiration = undefined,
- security_token = undefined}}
+ security_token = undefined,
+ imdsv2_token = undefined}}
end.
@@ -377,23 +414,8 @@ perform_request_has_creds(false, State, _, _, _, _, _, _, _) ->
%% @end
perform_request_creds_expired(false, State, Service, Method, Headers, Path, Body, Options, Host) ->
perform_request_with_creds(State, Service, Method, Headers, Path, Body, Options, Host);
-perform_request_creds_expired(true, State, Service, Method, Headers, Path, Body, Options, Host) ->
- perform_request_creds_refreshed(load_credentials(State), Service, Method, Headers, Path, Body, Options, Host).
-
-
--spec perform_request_creds_refreshed({ok, State :: state()} | {error, State :: state()},
- Service :: string(), Method :: method(),
- Headers :: headers(), Path :: path(), Body :: body(),
- Options :: http_options(), Host :: string() | undefined)
- -> {Result :: result(), NewState :: state()}.
-%% @doc If it's been determined that there are credentials but they have expired,
-%% check to see if the credentials could be loaded and either make the request
-%% or return an error.
-%% @end
-perform_request_creds_refreshed({ok, State}, Service, Method, Headers, Path, Body, Options, Host) ->
- perform_request_with_creds(State, Service, Method, Headers, Path, Body, Options, Host);
-perform_request_creds_refreshed({error, State}, _, _, _, _, _, _, _) ->
- perform_request_creds_error(State).
+perform_request_creds_expired(true, State, _, _, _, _, _, _, _) ->
+ perform_request_creds_error(State#state{error = "Credentials expired!"}).
-spec perform_request_with_creds(State :: state(), Service :: string(), Method :: method(),
@@ -470,3 +492,78 @@ sign_headers(#state{access_key = AccessKey,
uri = URI,
headers = Headers,
body = Body}).
+
+-spec expired_imdsv2_token(imdsv2token()) -> boolean().
+%% @doc Determine whether or not an Imdsv2Token has expired.
+%% @end
+expired_imdsv2_token(undefined) ->
+ rabbit_log:debug("EC2 IMDSv2 token has not yet been obtained"),
+ true;
+expired_imdsv2_token({_, _, undefined}) ->
+ rabbit_log:debug("EC2 IMDSv2 token is not available"),
+ true;
+expired_imdsv2_token({_, _, Expiration}) ->
+ Now = calendar:datetime_to_gregorian_seconds(local_time()),
+ HasExpired = Now >= Expiration,
+ rabbit_log:debug("EC2 IMDSv2 token has expired: ~p", [HasExpired]),
+ HasExpired.
+
+
+-spec ensure_imdsv2_token_valid() -> imdsv2token().
+ensure_imdsv2_token_valid() ->
+ Imdsv2Token = get_imdsv2_token(),
+ case expired_imdsv2_token(Imdsv2Token) of
+ true -> Value = rabbitmq_aws_config:load_imdsv2_token(),
+ Expiration = calendar:datetime_to_gregorian_seconds(local_time()) + ?METADATA_TOKEN_TTL_SECONDS,
+ set_imdsv2_token(#imdsv2token{token = Value,
+ expiration = Expiration}),
+ Value;
+ _ -> Imdsv2Token#imdsv2token.token
+ end.
+
+-spec ensure_credentials_valid() -> ok.
+%% @doc Invoked before each AWS service API request to check that the current credentials are available and have not expired.
+%% If the credentials are available and are still current, then move on and perform the request.
+%% If the credentials are not available or have expired, then refresh them before performing the request.
+%% @end
+ensure_credentials_valid() ->
+ rabbit_log:debug("Making sure AWS credentials are available and still valid"),
+ {ok, State} = gen_server:call(rabbitmq_aws, get_state),
+ case has_credentials(State) of
+ true -> case expired_credentials(State#state.expiration) of
+ true -> refresh_credentials(State);
+ _ -> ok
+ end;
+ _ -> refresh_credentials(State)
+ end.
+
+
+-spec api_get_request(string(), path()) -> result().
+%% @doc Invoke an API call to an AWS service.
+%% @end
+api_get_request(Service, Path) ->
+ rabbit_log:debug("Invoking AWS request {Service: ~p; Path: ~p}...", [Service, Path]),
+ api_get_request_with_retries(Service, Path, ?MAX_RETRIES, ?LINEAR_BACK_OFF_MILLIS).
+
+
+-spec api_get_request_with_retries(string(), path(), integer(), integer()) -> result().
+%% @doc Invoke an API call to an AWS service with retries.
+%% @end
+api_get_request_with_retries(_, _, 0, _) ->
+ rabbit_log:warning("Request to AWS service has failed after ~b retries", [?MAX_RETRIES]),
+ {error, "AWS service is unavailable"};
+api_get_request_with_retries(Service, Path, Retries, WaitTimeBetweenRetries) ->
+ ensure_credentials_valid(),
+ case get(Service, Path) of
+ {ok, {_Headers, Payload}} -> rabbit_log:debug("AWS request: ~s~nResponse: ~p", [Path, Payload]),
+ {ok, Payload};
+ {error, {credentials, _}} -> {error, credentials};
+ {error, Message, Response} -> rabbit_log:warning("Error occurred: ~s", [Message]),
+ case Response of
+ {_, Payload} -> rabbit_log:warning("Failed AWS request: ~s~nResponse: ~p", [Path, Payload]);
+ _ -> ok
+ end,
+ rabbit_log:warning("Will retry AWS request, remaining retries: ~b", [Retries]),
+ timer:sleep(WaitTimeBetweenRetries),
+ api_get_request_with_retries(Service, Path, Retries - 1, WaitTimeBetweenRetries)
+ end.
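
The new api_get_request/2 entry point combines credential validation with a bounded, linear-back-off retry loop (?MAX_RETRIES attempts, ?LINEAR_BACK_OFF_MILLIS between them). A hypothetical caller is sketched below; the module name, service name and request path are placeholders, not taken from the patch.

    -module(aws_api_caller_example).
    -export([fetch/0]).

    fetch() ->
        %% Illustrative service/path only.
        case rabbitmq_aws:api_get_request("ec2", "/?Action=DescribeInstances&Version=2016-11-15") of
            {ok, Payload} ->
                {ok, Payload};                %% succeeded, possibly after retries
            {error, credentials} ->
                {error, no_aws_credentials};  %% credentials could not be loaded or refreshed
            {error, Reason} ->
                {error, Reason}               %% gave up after all retries
        end.
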
diff --git a/deps/rabbitmq_aws/src/rabbitmq_aws_config.erl b/deps/rabbitmq_aws/src/rabbitmq_aws_config.erl
index 09b7606799..b4a24eb68d 100644
--- a/deps/rabbitmq_aws/src/rabbitmq_aws_config.erl
+++ b/deps/rabbitmq_aws/src/rabbitmq_aws_config.erl
@@ -1,7 +1,7 @@
%% ====================================================================
%% @author Gavin M. Roy <gavinmroy@gmail.com>
%% @copyright 2016, Gavin M. Roy
-%% @copyright 2016-2020 VMware, Inc. or its affiliates.
+%% @copyright 2016-2021 VMware, Inc. or its affiliates.
%% @private
%% @doc rabbitmq_aws configuration functionality
%% @end
@@ -17,6 +17,10 @@
instance_credentials_url/1,
instance_availability_zone_url/0,
instance_role_url/0,
+ instance_id_url/0,
+ instance_id/0,
+ load_imdsv2_token/0,
+ instance_metadata_request_headers/0,
region/0,
region/1]).
@@ -158,6 +162,14 @@ region(Profile) ->
end.
+-spec instance_id() -> string() | error.
+%% @doc Return the instance ID from the EC2 metadata service.
+%% @end
+instance_id() ->
+ URL = instance_id_url(),
+ parse_body_response(perform_http_get_instance_metadata(URL)).
+
+
-spec value(Profile :: string(), Key :: atom())
-> Value :: any() | {error, Reason :: atom()}.
%% @doc Return the configuration data for the specified profile or an error
@@ -413,6 +425,18 @@ instance_metadata_url(Path) ->
instance_role_url() ->
instance_metadata_url(string:join([?INSTANCE_METADATA_BASE, ?INSTANCE_CREDENTIALS], "/")).
+-spec imdsv2_token_url() -> string().
+%% @doc Return the URL for obtaining an EC2 IMDSv2 token from the Instance Metadata Service.
+%% @end
+imdsv2_token_url() ->
+ instance_metadata_url(?TOKEN_URL).
+
+-spec instance_id_url() -> string().
+%% @doc Return the URL for querying the ID of the current instance from the Instance Metadata Service.
+%% @end
+instance_id_url() ->
+ instance_metadata_url(string:join([?INSTANCE_METADATA_BASE, ?INSTANCE_ID], "/")).
+
-spec lookup_credentials(Profile :: string(),
AccessKey :: string() | false,
@@ -564,7 +588,7 @@ maybe_get_credentials_from_instance_metadata({error, undefined}) ->
{error, undefined};
maybe_get_credentials_from_instance_metadata({ok, Role}) ->
URL = instance_credentials_url(Role),
- parse_credentials_response(perform_http_get(URL)).
+ parse_credentials_response(perform_http_get_instance_metadata(URL)).
-spec maybe_get_region_from_instance_metadata()
@@ -573,7 +597,7 @@ maybe_get_credentials_from_instance_metadata({ok, Role}) ->
%% @end
maybe_get_region_from_instance_metadata() ->
URL = instance_availability_zone_url(),
- parse_az_response(perform_http_get(URL)).
+ parse_az_response(perform_http_get_instance_metadata(URL)).
%% @doc Try to query the EC2 local instance metadata service to get the role
@@ -581,7 +605,7 @@ maybe_get_region_from_instance_metadata() ->
%% @end
maybe_get_role_from_instance_metadata() ->
URL = instance_role_url(),
- parse_body_response(perform_http_get(URL)).
+ parse_body_response(perform_http_get_instance_metadata(URL)).
-spec parse_az_response(httpc_result())
@@ -597,11 +621,17 @@ parse_az_response({ok, {{_, _, _}, _, _}}) -> {error, undefined}.
-spec parse_body_response(httpc_result())
-> {ok, Value :: string()} | {error, Reason :: atom()}.
-%% @doc Parse the return response from the Instance Metadata service where the
+%% @doc Parse the return response from the Instance Metadata Service where the
%% body value is the string to process.
%% end.
parse_body_response({error, _}) -> {error, undefined};
parse_body_response({ok, {{_, 200, _}, _, Body}}) -> {ok, Body};
+parse_body_response({ok, {{_, 401, _}, _, _}}) ->
+ rabbit_log:error(get_instruction_on_instance_metadata_error("Unauthorized instance metadata service request.")),
+ {error, undefined};
+parse_body_response({ok, {{_, 403, _}, _, _}}) ->
+ rabbit_log:error(get_instruction_on_instance_metadata_error("The request is not allowed or the instance metadata service is turned off.")),
+ {error, undefined};
parse_body_response({ok, {{_, _, _}, _, _}}) -> {error, undefined}.
@@ -620,12 +650,21 @@ parse_credentials_response({ok, {{_, 200, _}, _, Body}}) ->
proplists:get_value("Token", Parsed)}.
--spec perform_http_get(string()) -> httpc_result().
-%% @doc Wrap httpc:get/4 to simplify Instance Metadata service requests
+-spec perform_http_get_instance_metadata(string()) -> httpc_result().
+%% @doc Wrap httpc:request/4 to simplify Instance Metadata Service requests
%% @end
-perform_http_get(URL) ->
- httpc:request(get, {URL, []},
- [{timeout, ?DEFAULT_HTTP_TIMEOUT}], []).
+perform_http_get_instance_metadata(URL) ->
+ rabbit_log:debug("Querying instance metadata service: ~p", [URL]),
+ httpc:request(get, {URL, instance_metadata_request_headers()},
+ [{timeout, ?DEFAULT_HTTP_TIMEOUT}], []).
+
+-spec get_instruction_on_instance_metadata_error(string()) -> string().
+%% @doc Return an error message for failures related to the EC2 Instance Metadata Service, with a reference to the AWS documentation.
+%% @end
+get_instruction_on_instance_metadata_error(ErrorMessage) ->
+ ErrorMessage ++
+ " Please refer to the AWS documentation for details on how to configure the instance metadata service: "
+ "https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/configuring-instance-metadata-service.html.".
-spec parse_iso8601_timestamp(Timestamp :: string() | binary()) -> calendar:datetime().
@@ -692,3 +731,46 @@ read_file(Fd, Lines) ->
%% @end
region_from_availability_zone(Value) ->
string:sub_string(Value, 1, length(Value) - 1).
+
+
+-spec load_imdsv2_token() -> security_token().
+%% @doc Attempt to obtain an EC2 IMDSv2 token.
+%% @end
+load_imdsv2_token() ->
+ TokenUrl = imdsv2_token_url(),
+ rabbit_log:info("Attempting to obtain EC2 IMDSv2 token from ~p ...", [TokenUrl]),
+ case httpc:request(put, {TokenUrl, [{?METADATA_TOKEN_TTL_HEADER, integer_to_list(?METADATA_TOKEN_TTL_SECONDS)}]},
+ [{timeout, ?DEFAULT_HTTP_TIMEOUT}], []) of
+ {ok, {{_, 200, _}, _, Value}} ->
+ rabbit_log:debug("Successfully obtained EC2 IMDSv2 token."),
+ Value;
+ {error, {{_, 400, _}, _, _}} ->
+ rabbit_log:warning("Failed to obtain EC2 IMDSv2 token: Missing or Invalid Parameters – The PUT request is not valid."),
+ undefined;
+ Other ->
+ rabbit_log:warning(
+ get_instruction_on_instance_metadata_error("Failed to obtain EC2 IMDSv2 token: ~p. "
+ "Falling back to EC2 IMDSv1 for now. It is recommended to use EC2 IMDSv2."), [Other]),
+ undefined
+ end.
+
+
+-spec instance_metadata_request_headers() -> headers().
+%% @doc Return headers used for instance metadata service requests.
+%% @end
+instance_metadata_request_headers() ->
+ case application:get_env(rabbit, aws_prefer_imdsv2) of
+ {ok, false} -> [];
+ _ -> %% undefined or {ok, true}
+ rabbit_log:debug("EC2 Instance Metadata Service v2 (IMDSv2) is preferred."),
+ maybe_imdsv2_token_headers()
+ end.
+
+-spec maybe_imdsv2_token_headers() -> headers().
+%% @doc Construct HTTP request headers from the Imdsv2Token to use with GET requests submitted to the EC2 Instance Metadata Service.
+%% @end
+maybe_imdsv2_token_headers() ->
+ case rabbitmq_aws:ensure_imdsv2_token_valid() of
+ undefined -> [];
+ Value -> [{?METADATA_TOKEN, Value}]
+ end.
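
The preference flag and the token helpers together decide which headers metadata requests carry: a valid IMDSv2 token is sent as the X-aws-ec2-metadata-token header, while IMDSv1 requests send no extra headers. Below is a test-style sketch of the first case, mirroring the meck usage in the test suite further down (the module and function names are illustrative; it assumes meck is available, as it is for the test suites):

    -module(imdsv2_headers_example).
    -export([imdsv2_header_sketch/0]).

    imdsv2_header_sketch() ->
        meck:new(rabbitmq_aws),
        meck:expect(rabbitmq_aws, ensure_imdsv2_token_valid, 0, "secret_imdsv2_token"),
        %% With aws.prefer_imdsv2 unset (the default), the token becomes a request header.
        [{"X-aws-ec2-metadata-token", "secret_imdsv2_token"}] =
            rabbitmq_aws_config:instance_metadata_request_headers(),
        meck:unload(rabbitmq_aws).
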
diff --git a/deps/rabbitmq_aws/src/rabbitmq_aws_json.erl b/deps/rabbitmq_aws/src/rabbitmq_aws_json.erl
index 5b4e3b2f45..0eaf3183df 100644
--- a/deps/rabbitmq_aws/src/rabbitmq_aws_json.erl
+++ b/deps/rabbitmq_aws/src/rabbitmq_aws_json.erl
@@ -15,16 +15,8 @@
decode(Value) when is_list(Value) ->
decode(list_to_binary(Value));
decode(Value) when is_binary(Value) ->
- % We set an empty list of options because we don't want the default
- % options set in rabbit_json:cecode/1. And we can't override
- % 'return_maps' with '{return_maps, false}' because of a bug in jsx's
- % options handler.
- % See https://github.com/talentdeficit/jsx/pull/115
- Decoded0 = rabbit_json:decode(Value, []),
- Decoded = if
- is_map(Decoded0) -> maps:to_list(Decoded0);
- is_list(Decoded0) -> Decoded0
- end,
+ Decoded0 = rabbit_json:decode(Value, [{return_maps, false}]),
+ Decoded = rabbit_data_coercion:to_proplist(Decoded0),
convert_binary_values(Decoded, []).
diff --git a/deps/rabbitmq_aws/src/rabbitmq_aws_sign.erl b/deps/rabbitmq_aws/src/rabbitmq_aws_sign.erl
index b238f81cee..86298d28ca 100644
--- a/deps/rabbitmq_aws/src/rabbitmq_aws_sign.erl
+++ b/deps/rabbitmq_aws/src/rabbitmq_aws_sign.erl
@@ -10,17 +10,11 @@
%% API
-export([headers/1, request_hash/5]).
-%% Transitional step until we can require Erlang/OTP 22 and
-%% use crypto:mac/4 instead of crypto:hmac/3.
--compile(nowarn_deprecated_function).
-
%% Export all for unit tests
-ifdef(TEST).
-compile(export_all).
-endif.
--ignore_xref([{crypto, hmac, 3}]).
-
-include("rabbitmq_aws.hrl").
-define(ALGORITHM, "AWS4-HMAC-SHA256").
@@ -162,7 +156,7 @@ header_value(Key, Headers, Default) ->
%% @doc Return the SHA-256 hash for the specified value.
%% @end
hmac_sign(Key, Message) ->
- SignedValue = crypto:hmac(sha256, Key, Message),
+ SignedValue = crypto:mac(hmac, sha256, Key, Message),
binary_to_list(SignedValue).
@@ -248,7 +242,7 @@ signed_headers([{Key,_}|T], SignedHeaders) ->
%% @doc Create the request signature.
%% @end
signature(StringToSign, SigningKey) ->
- SignedValue = crypto:hmac(sha256, SigningKey, StringToSign),
+ SignedValue = crypto:mac(hmac, sha256, SigningKey, StringToSign),
lists:flatten(io_lib:format("~64.16.0b", [binary:decode_unsigned(SignedValue)])).
diff --git a/deps/rabbitmq_aws/test/src/rabbitmq_aws_config_tests.erl b/deps/rabbitmq_aws/test/src/rabbitmq_aws_config_tests.erl
index ac1d65f59f..b297fac826 100644
--- a/deps/rabbitmq_aws/test/src/rabbitmq_aws_config_tests.erl
+++ b/deps/rabbitmq_aws/test/src/rabbitmq_aws_config_tests.erl
@@ -70,6 +70,10 @@ instance_metadata_test_() ->
?assertEqual("http://169.254.169.254/latest/meta-data/placement/availability-zone",
rabbitmq_aws_config:instance_availability_zone_url())
end},
+ {"instance id URL", fun() ->
+ ?assertEqual("http://169.254.169.254/latest/meta-data/instance-id",
+ rabbitmq_aws_config:instance_id_url())
+ end},
{"arbitrary paths", fun () ->
?assertEqual("http://169.254.169.254/a/b/c", rabbitmq_aws_config:instance_metadata_url("a/b/c")),
?assertEqual("http://169.254.169.254/a/b/c", rabbitmq_aws_config:instance_metadata_url("/a/b/c"))
@@ -96,8 +100,9 @@ credentials_test_() ->
foreach,
fun () ->
meck:new(httpc),
+ meck:new(rabbitmq_aws),
reset_environment(),
- [httpc]
+ [httpc, rabbitmq_aws]
end,
fun meck:unload/1,
[
@@ -114,6 +119,7 @@ credentials_test_() ->
end},
{"with missing environment variable", fun() ->
os:putenv("AWS_ACCESS_KEY_ID", "Sésame"),
+ meck:sequence(rabbitmq_aws, ensure_imdsv2_token_valid, 0, "secret_imdsv2_token"),
?assertEqual({error, undefined},
rabbitmq_aws_config:credentials())
end},
@@ -129,11 +135,13 @@ credentials_test_() ->
end},
{"from config file with bad profile", fun() ->
setup_test_config_env_var(),
+ meck:expect(rabbitmq_aws, ensure_imdsv2_token_valid, 0, undefined),
?assertEqual({error, undefined},
rabbitmq_aws_config:credentials("bad-profile-name"))
end},
{"from credentials file with default profile", fun() ->
setup_test_credentials_env_var(),
+
?assertEqual({ok, "foo1", "bar1", undefined, undefined},
rabbitmq_aws_config:credentials())
end},
@@ -144,21 +152,25 @@ credentials_test_() ->
end},
{"from credentials file with bad profile", fun() ->
setup_test_credentials_env_var(),
+ meck:expect(rabbitmq_aws, ensure_imdsv2_token_valid, 0, undefined),
?assertEqual({error, undefined},
rabbitmq_aws_config:credentials("bad-profile-name"))
end},
{"from credentials file with only the key in profile", fun() ->
setup_test_credentials_env_var(),
+ meck:expect(rabbitmq_aws, ensure_imdsv2_token_valid, 0, undefined),
?assertEqual({error, undefined},
rabbitmq_aws_config:credentials("only-key"))
end},
{"from credentials file with only the value in profile", fun() ->
setup_test_credentials_env_var(),
+ meck:expect(rabbitmq_aws, ensure_imdsv2_token_valid, 0, undefined),
?assertEqual({error, undefined},
rabbitmq_aws_config:credentials("only-value"))
end},
{"from credentials file with missing keys in profile", fun() ->
setup_test_credentials_env_var(),
+ meck:expect(rabbitmq_aws, ensure_imdsv2_token_valid, 0, undefined),
?assertEqual({error, undefined},
rabbitmq_aws_config:credentials("bad-entry"))
end},
@@ -167,23 +179,27 @@ credentials_test_() ->
meck:sequence(httpc, request, 4,
[{ok, {{protocol, 200, message}, headers, "Bob"}},
{ok, {{protocol, 200, message}, headers, CredsBody}}]),
+ meck:expect(rabbitmq_aws, ensure_imdsv2_token_valid, 0, undefined),
Expectation = {ok, "ASIAIMAFAKEACCESSKEY", "2+t64tZZVaz0yp0x1G23ZRYn+FAKEyVALUEs/4qh",
{{2016,4,1},{4,13,28}}, "FAKE//////////wEAK/TOKEN/VALUE="},
?assertEqual(Expectation, rabbitmq_aws_config:credentials())
end
},
{"with instance metadata service role error", fun() ->
+ meck:expect(rabbitmq_aws, ensure_imdsv2_token_valid, 0, undefined),
meck:expect(httpc, request, 4, {error, timeout}),
?assertEqual({error, undefined}, rabbitmq_aws_config:credentials())
end
},
{"with instance metadata service role http error", fun() ->
+ meck:expect(rabbitmq_aws, ensure_imdsv2_token_valid, 0, undefined),
meck:expect(httpc, request, 4,
{ok, {{protocol, 500, message}, headers, "Internal Server Error"}}),
?assertEqual({error, undefined}, rabbitmq_aws_config:credentials())
end
},
{"with instance metadata service credentials error", fun() ->
+ meck:expect(rabbitmq_aws, ensure_imdsv2_token_valid, 0, undefined),
meck:sequence(httpc, request, 4,
[{ok, {{protocol, 200, message}, headers, "Bob"}},
{error, timeout}]),
@@ -191,6 +207,7 @@ credentials_test_() ->
end
},
{"with instance metadata service credentials not found", fun() ->
+ meck:expect(rabbitmq_aws, ensure_imdsv2_token_valid, 0, undefined),
meck:sequence(httpc, request, 4,
[{ok, {{protocol, 200, message}, headers, "Bob"}},
{ok, {{protocol, 404, message}, headers, "File Not Found"}}]),
@@ -285,8 +302,9 @@ region_test_() ->
foreach,
fun () ->
meck:new(httpc),
+ meck:new(rabbitmq_aws),
reset_environment(),
- [httpc]
+ [httpc, rabbitmq_aws]
end,
fun meck:unload/1,
[
@@ -304,17 +322,21 @@ region_test_() ->
end},
{"missing profile in config", fun() ->
setup_test_config_env_var(),
+ meck:expect(rabbitmq_aws, ensure_imdsv2_token_valid, 0, undefined),
?assertEqual({ok, ?DEFAULT_REGION}, rabbitmq_aws_config:region("no-region"))
end},
{"from instance metadata service", fun() ->
+ meck:expect(rabbitmq_aws, ensure_imdsv2_token_valid, 0, undefined),
meck:expect(httpc, request, 4,
{ok, {{protocol, 200, message}, headers, "us-west-1a"}}),
?assertEqual({ok, "us-west-1"}, rabbitmq_aws_config:region())
end},
{"full lookup failure", fun() ->
+ meck:expect(rabbitmq_aws, ensure_imdsv2_token_valid, 0, undefined),
?assertEqual({ok, ?DEFAULT_REGION}, rabbitmq_aws_config:region())
end},
{"http error failure", fun() ->
+ meck:expect(rabbitmq_aws, ensure_imdsv2_token_valid, 0, undefined),
meck:expect(httpc, request, 4,
{ok, {{protocol, 500, message}, headers, "Internal Server Error"}}),
?assertEqual({ok, ?DEFAULT_REGION}, rabbitmq_aws_config:region())
@@ -322,6 +344,93 @@ region_test_() ->
]}.
+instance_id_test_() ->
+ {
+ foreach,
+ fun () ->
+ meck:new(httpc),
+ meck:new(rabbitmq_aws),
+ reset_environment(),
+ [httpc, rabbitmq_aws]
+ end,
+ fun meck:unload/1,
+ [
+ {"get instance id successfully",
+ fun() ->
+ meck:expect(rabbitmq_aws, ensure_imdsv2_token_valid, 0, undefined),
+ meck:expect(httpc, request, 4, {ok, {{protocol, 200, message}, headers, "instance-id"}}),
+ ?assertEqual({ok, "instance-id"}, rabbitmq_aws_config:instance_id())
+ end
+ },
+ {"getting instance id is rejected with invalid token error",
+ fun() ->
+ meck:expect(rabbitmq_aws, ensure_imdsv2_token_valid, 0, "invalid"),
+ meck:expect(httpc, request, 4, {error, {{protocol, 401, message}, headers, "Invalid token"}}),
+ ?assertEqual({error, undefined}, rabbitmq_aws_config:instance_id())
+ end
+ },
+ {"getting instance id is rejected with access denied error",
+ fun() ->
+ meck:expect(rabbitmq_aws, ensure_imdsv2_token_valid, 0, "expired token"),
+ meck:expect(httpc, request, 4, {error, {{protocol, 403, message}, headers, "access denied"}}),
+ ?assertEqual({error, undefined}, rabbitmq_aws_config:instance_id())
+ end
+ }
+ ]
+ }.
+
+load_imdsv2_token_test_() ->
+ {
+ foreach,
+ fun () ->
+ meck:new(httpc),
+ [httpc]
+ end,
+ fun meck:unload/1,
+ [
+ {"fail to get imdsv2 token - timeout",
+ fun() ->
+ meck:expect(httpc, request, 4, {error, timeout}),
+ ?assertEqual(undefined, rabbitmq_aws_config:load_imdsv2_token())
+ end},
+ {"fail to get imdsv2 token - PUT request is not valid",
+ fun() ->
+ meck:expect(httpc, request, 4, {error, {{protocol, 400, message}, headers, "Missing or Invalid Parameters – The PUT request is not valid."}}),
+ ?assertEqual(undefined, rabbitmq_aws_config:load_imdsv2_token())
+ end},
+ {"successfully get imdsv2 token from instance metadata service",
+ fun() ->
+ IMDSv2Token = "super_secret_token_value",
+ meck:sequence(httpc, request, 4,
+ [{ok, {{protocol, 200, message}, headers, IMDSv2Token}}]),
+ ?assertEqual(IMDSv2Token, rabbitmq_aws_config:load_imdsv2_token())
+ end}
+ ]
+ }.
+
+
+maybe_imdsv2_token_headers_test_() ->
+ {
+ foreach,
+ fun () ->
+ meck:new(rabbitmq_aws),
+ [rabbitmq_aws]
+ end,
+ fun meck:unload/1,
+ [
+ {"imdsv2 token is not available", fun() ->
+ meck:expect(rabbitmq_aws, ensure_imdsv2_token_valid, 0, undefined),
+ ?assertEqual([], rabbitmq_aws_config:maybe_imdsv2_token_headers())
+ end}
+ ,
+ {"imdsv2 is available", fun() ->
+ IMDSv2Token = "super_secret_token_value ;)",
+ meck:expect(rabbitmq_aws, ensure_imdsv2_token_valid, 0, IMDSv2Token),
+ ?assertEqual([{"X-aws-ec2-metadata-token", IMDSv2Token}], rabbitmq_aws_config:maybe_imdsv2_token_headers())
+ end}
+ ]
+ }.
+
reset_environment() ->
os:unsetenv("AWS_ACCESS_KEY_ID"),
os:unsetenv("AWS_DEFAULT_REGION"),
diff --git a/deps/rabbitmq_aws/test/src/rabbitmq_aws_tests.erl b/deps/rabbitmq_aws/test/src/rabbitmq_aws_tests.erl
index 48c06e2628..a6bf8b8c52 100644
--- a/deps/rabbitmq_aws/test/src/rabbitmq_aws_tests.erl
+++ b/deps/rabbitmq_aws/test/src/rabbitmq_aws_tests.erl
@@ -19,19 +19,23 @@ init_test_() ->
os:putenv("AWS_ACCESS_KEY_ID", "Sésame"),
os:putenv("AWS_SECRET_ACCESS_KEY", "ouvre-toi"),
{ok, Pid} = rabbitmq_aws:start_link(),
+ rabbitmq_aws:set_region("us-west-3"),
+ rabbitmq_aws:refresh_credentials(),
{ok, State} = gen_server:call(Pid, get_state),
ok = gen_server:stop(Pid),
os:unsetenv("AWS_ACCESS_KEY_ID"),
os:unsetenv("AWS_SECRET_ACCESS_KEY"),
- Expectation = {state,"Sésame","ouvre-toi",undefined,undefined,"us-west-3", undefined},
+ Expectation = {state,"Sésame","ouvre-toi",undefined,undefined,"us-west-3", undefined,undefined},
?assertEqual(Expectation, State)
end},
{"error", fun() ->
meck:expect(rabbitmq_aws_config, credentials, fun() -> {error, test_result} end),
{ok, Pid} = rabbitmq_aws:start_link(),
+ rabbitmq_aws:set_region("us-west-3"),
+ rabbitmq_aws:refresh_credentials(),
{ok, State} = gen_server:call(Pid, get_state),
ok = gen_server:stop(Pid),
- Expectation = {state,undefined,undefined,undefined,undefined,"us-west-3",test_result},
+ Expectation = {state,undefined,undefined,undefined,undefined,"us-west-3",undefined,test_result},
?assertEqual(Expectation, State),
meck:validate(rabbitmq_aws_config)
end}
@@ -214,7 +218,7 @@ gen_server_call_test_() ->
?assertEqual({reply, ok, State},
rabbitmq_aws:handle_call({set_credentials,
State#state.access_key,
- State#state.secret_access_key}, eunit, #state{}))
+ State#state.secret_access_key}, eunit, #state{region = "us-west-3"}))
end
},
{
@@ -224,7 +228,8 @@ gen_server_call_test_() ->
secret_access_key = "ouvre-toi",
region = "us-east-5"},
?assertEqual({reply, ok, State},
- rabbitmq_aws:handle_call({set_region, "us-east-5"}, eunit, #state{}))
+ rabbitmq_aws:handle_call({set_region, "us-east-5"}, eunit, #state{access_key = "Sésame",
+ secret_access_key = "ouvre-toi"}))
end
}
]
@@ -390,53 +395,13 @@ perform_request_test_() ->
Options = [],
Host = undefined,
meck:expect(rabbitmq_aws_config, credentials, fun() -> {error, unit_test} end),
- Expectation = {{error, {credentials, unit_test}}, #state{region = State#state.region, error = unit_test}},
+ Expectation = {{error, {credentials, "Credentials expired!"}}, State#state{error = "Credentials expired!"}},
Result = rabbitmq_aws:perform_request(State, Service, Method, Headers, Path, Body, Options, Host),
?assertEqual(Expectation, Result),
meck:validate(rabbitmq_aws_config)
end
},
{
- "refresh expired creds",
- fun() ->
- State = #state{access_key = "AKIDEXAMPLE",
- secret_access_key = "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY",
- region = "us-east-1",
- security_token = "AQoEXAMPLEH4aoAH0gNCAPyJxz4BlCFFxWNE1OPTgk5TthT+FvwqnKwRcOIfrRh3c/L",
- expiration = {{1973, 1, 1}, {10, 20, 30}}},
- Service = "ec2",
- Method = post,
- Headers = [{"Content-Type", "text/xml"}],
- Path = "/?Action=DescribeTags&Version=2015-10-01",
- Body = "<value>true</value>",
- Options = [],
- Host = undefined,
- meck:expect(httpc, request,
- fun(post, {_URI, _Headers, "text/xml", "<value>true</value>"}, _Options, []) ->
- {ok, {{"HTTP/1.0", 200, "OK"}, [{"content-type", "application/json"}], "{\"pass\": true}"}}
- end),
-
- State2 = #state{access_key = "AKIDEXAMPLE2",
- secret_access_key = "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY2",
- region = "us-east-1",
- security_token = "AQoEXAMPLEH4aoAH0gNCAPyJxz4BlCFFxWNE1OPTgk5TthT+FvwqnKwRcOIfrRh3c/L2",
- expiration = calendar:local_time()},
- meck:expect(rabbitmq_aws_config, credentials,
- fun() ->
- {ok,
- State2#state.access_key,
- State2#state.secret_access_key,
- State2#state.expiration,
- State2#state.security_token}
- end),
-
- Expectation = {{ok, {[{"content-type", "application/json"}], [{"pass", true}]}}, State2},
- Result = rabbitmq_aws:perform_request(State, Service, Method, Headers, Path, Body, Options, Host),
- ?assertEqual(Expectation, Result),
- meck:validate(httpc),
- meck:validate(rabbitmq_aws_config)
- end},
- {
"creds_error",
fun() ->
State = #state{error=unit_test},
@@ -477,3 +442,189 @@ sign_headers_test_() ->
end}
]
}.
+
+api_get_request_test_() ->
+ {
+ foreach,
+ fun () ->
+ meck:new(httpc, []),
+ meck:new(rabbitmq_aws_config, []),
+ [httpc, rabbitmq_aws_config]
+ end,
+ fun meck:unload/1,
+ [
+ {"AWS service API request succeeded",
+ fun() ->
+ State = #state{access_key = "ExpiredKey",
+ secret_access_key = "ExpiredAccessKey",
+ region = "us-east-1",
+ expiration = {{3016, 4, 1}, {12, 0, 0}}},
+ meck:expect(httpc, request, 4, {ok, {{"HTTP/1.0", 200, "OK"}, [{"content-type", "application/json"}], "{\"data\": \"value\"}"}}),
+ {ok, Pid} = rabbitmq_aws:start_link(),
+ rabbitmq_aws:set_region("us-east-1"),
+ rabbitmq_aws:set_credentials(State),
+ Result = rabbitmq_aws:api_get_request("AWS", "API"),
+ ok = gen_server:stop(Pid),
+ ?assertEqual({ok, [{"data","value"}]}, Result),
+ meck:validate(httpc)
+ end
+ },
+ {"AWS service API request failed - credentials",
+ fun() ->
+ meck:expect(rabbitmq_aws_config, credentials, 0, {error, undefined}),
+ {ok, Pid} = rabbitmq_aws:start_link(),
+ rabbitmq_aws:set_region("us-east-1"),
+ Result = rabbitmq_aws:api_get_request("AWS", "API"),
+ ok = gen_server:stop(Pid),
+ ?assertEqual({error, credentials}, Result)
+ end
+ },
+ {"AWS service API request failed - API error with persistent failure",
+ fun() ->
+ State = #state{access_key = "ExpiredKey",
+ secret_access_key = "ExpiredAccessKey",
+ region = "us-east-1",
+ expiration = {{3016, 4, 1}, {12, 0, 0}}},
+ meck:expect(httpc, request, 4, {error, "network error"}),
+ {ok, Pid} = rabbitmq_aws:start_link(),
+ rabbitmq_aws:set_region("us-east-1"),
+ rabbitmq_aws:set_credentials(State),
+ Result = rabbitmq_aws:api_get_request_with_retries("AWS", "API", 3, 1),
+ ok = gen_server:stop(Pid),
+ ?assertEqual({error, "AWS service is unavailable"}, Result),
+ meck:validate(httpc)
+ end
+ },
+ {"AWS service API request succeeded after a transient error",
+ fun() ->
+ State = #state{access_key = "ExpiredKey",
+ secret_access_key = "ExpiredAccessKey",
+ region = "us-east-1",
+ expiration = {{3016, 4, 1}, {12, 0, 0}}},
+ meck:expect(httpc, request, 4, meck:seq([
+ {error, "network error"},
+ {ok, {{"HTTP/1.0", 500, "OK"}, [{"content-type", "application/json"}], "{\"error\": \"server error\"}"}},
+ {ok, {{"HTTP/1.0", 200, "OK"}, [{"content-type", "application/json"}], "{\"data\": \"value\"}"}}
+ ])),
+ {ok, Pid} = rabbitmq_aws:start_link(),
+ rabbitmq_aws:set_region("us-east-1"),
+ rabbitmq_aws:set_credentials(State),
+ Result = rabbitmq_aws:api_get_request_with_retries("AWS", "API", 3, 1),
+ ok = gen_server:stop(Pid),
+ ?assertEqual({ok, [{"data","value"}]}, Result),
+ meck:validate(httpc)
+ end
+ }
+ ]
+ }.
+
+ensure_credentials_valid_test_() ->
+ {
+ foreach,
+ fun () ->
+ meck:new(rabbitmq_aws_config, []),
+ [rabbitmq_aws_config]
+ end,
+ fun meck:unload/1,
+ [
+ {"expired credentials are refreshed",
+ fun() ->
+ State = #state{access_key = "ExpiredKey",
+ secret_access_key = "ExpiredAccessKey",
+ region = "us-east-1",
+ expiration = {{2016, 4, 1}, {12, 0, 0}}},
+ State2 = #state{access_key = "NewKey",
+ secret_access_key = "NewAccessKey",
+ region = "us-east-1",
+ expiration = {{3016, 4, 1}, {12, 0, 0}}},
+
+ meck:expect(rabbitmq_aws_config, credentials,
+ fun() ->
+ {ok,
+ State2#state.access_key,
+ State2#state.secret_access_key,
+ State2#state.expiration,
+ State2#state.security_token}
+ end),
+ {ok, Pid} = rabbitmq_aws:start_link(),
+ rabbitmq_aws:set_region("us-east-1"),
+ rabbitmq_aws:set_credentials(State),
+ Result = rabbitmq_aws:ensure_credentials_valid(),
+ Credentials = gen_server:call(Pid, get_state),
+ ok = gen_server:stop(Pid),
+ ?assertEqual(ok, Result),
+ ?assertEqual(Credentials, {ok, State2}),
+ meck:validate(rabbitmq_aws_config)
+ end},
+ {"valid credentials are returned",
+ fun() ->
+ State = #state{access_key = "GoodKey",
+ secret_access_key = "GoodAccessKey",
+ region = "us-east-1",
+ expiration = {{3016, 4, 1}, {12, 0, 0}}},
+ {ok, Pid} = rabbitmq_aws:start_link(),
+ rabbitmq_aws:set_region("us-east-1"),
+ rabbitmq_aws:set_credentials(State),
+ Result = rabbitmq_aws:ensure_credentials_valid(),
+ Credentials = gen_server:call(Pid, get_state),
+ ok = gen_server:stop(Pid),
+ ?assertEqual(ok, Result),
+ ?assertEqual(Credentials, {ok, State}),
+ meck:validate(rabbitmq_aws_config)
+ end},
+ {"load credentials if missing",
+ fun() ->
+ State = #state{access_key = "GoodKey",
+ secret_access_key = "GoodAccessKey",
+ region = "us-east-1",
+ expiration = {{3016, 4, 1}, {12, 0, 0}}},
+ meck:expect(rabbitmq_aws_config, credentials,
+ fun() ->
+ {ok,
+ State#state.access_key,
+ State#state.secret_access_key,
+ State#state.expiration,
+ State#state.security_token}
+ end),
+ {ok, Pid} = rabbitmq_aws:start_link(),
+ rabbitmq_aws:set_region("us-east-1"),
+ Result = rabbitmq_aws:ensure_credentials_valid(),
+ Credentials = gen_server:call(Pid, get_state),
+ ok = gen_server:stop(Pid),
+ ?assertEqual(ok, Result),
+ ?assertEqual(Credentials, {ok, State}),
+ meck:validate(rabbitmq_aws_config)
+ end}
+ ]
+ }.
+
+expired_imdsv2_token_test_() ->
+ [
+ {"imdsv2 token is valid",
+ fun() ->
+ [Value] = calendar:local_time_to_universal_time_dst(calendar:local_time()),
+ Now = calendar:datetime_to_gregorian_seconds(Value),
+ Imdsv2Token = #imdsv2token{token = "value", expiration = Now + 100},
+ ?assertEqual(false, rabbitmq_aws:expired_imdsv2_token(Imdsv2Token))
+ end
+ },
+ {"imdsv2 token is expired",
+ fun() ->
+ [Value] = calendar:local_time_to_universal_time_dst(calendar:local_time()),
+ Now = calendar:datetime_to_gregorian_seconds(Value),
+ Imdsv2Token = #imdsv2token{token = "value", expiration = Now - 100},
+ ?assertEqual(true, rabbitmq_aws:expired_imdsv2_token(Imdsv2Token))
+ end
+ },
+ {"imdsv2 token is not yet initialized",
+ fun() ->
+ ?assertEqual(true, rabbitmq_aws:expired_imdsv2_token(undefined))
+ end
+ },
+ {"imdsv2 token is undefined",
+ fun() ->
+ Imdsv2Token = #imdsv2token{token = undefined, expiration = undefined},
+ ?assertEqual(true, rabbitmq_aws:expired_imdsv2_token(Imdsv2Token))
+ end
+ }
+ ].
diff --git a/deps/rabbitmq_cli/.gitignore b/deps/rabbitmq_cli/.gitignore
index 0ade5483bf..316e39505d 100644
--- a/deps/rabbitmq_cli/.gitignore
+++ b/deps/rabbitmq_cli/.gitignore
@@ -5,6 +5,7 @@
/log
/.erlang.mk/
/ebin
+/sbin
erl_crash.dump
mix.lock
*.ez
diff --git a/deps/rabbitmq_cli/BUILD.bazel b/deps/rabbitmq_cli/BUILD.bazel
new file mode 100644
index 0000000000..c58860b0fc
--- /dev/null
+++ b/deps/rabbitmq_cli/BUILD.bazel
@@ -0,0 +1,42 @@
+load(":rabbitmqctl.bzl", "rabbitmqctl")
+load(":rabbitmqctl_test.bzl", "rabbitmqctl_test")
+load(":elixir.bzl", "elixir")
+
+# This rule simply exposes Elixir as a runtime library for tests written in Erlang
+elixir(
+ name = "elixir_as_bazel_erlang_lib",
+ visibility = ["//visibility:public"],
+)
+
+# Note: All the various rabbitmq-* scripts are just copies of rabbitmqctl
+rabbitmqctl(
+ name = "rabbitmqctl",
+ srcs = glob([
+ "mix.exs",
+ "config/config.exs",
+ "lib/**/*.ex",
+ ]),
+ visibility = ["//visibility:public"],
+ deps = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+ ],
+)
+
+rabbitmqctl_test(
+ name = "rabbitmqctl_tests",
+ size = "large",
+ srcs = glob([
+ "mix.exs",
+ "config/config.exs",
+ "lib/**/*.ex",
+ "test/**/*.exs",
+ ]),
+ data = glob(["test/fixtures/**/*"]),
+ flaky = True,
+ rabbitmq_run = "//:rabbitmq-for-cli-tests-run",
+ deps = [
+ "//deps/amqp_client:bazel_erlang_lib",
+ "//deps/rabbit:bazel_erlang_lib",
+ "//deps/rabbit_common:bazel_erlang_lib",
+ ],
+)
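+# A hypothetical invocation of these targets from the repository root could be,
+# for example:
+#   bazel build //deps/rabbitmq_cli:rabbitmqctl
+#   bazel test //deps/rabbitmq_cli:rabbitmqctl_tests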
diff --git a/deps/rabbitmq_cli/Makefile b/deps/rabbitmq_cli/Makefile
index 856f39e605..dc87ec8814 100644
--- a/deps/rabbitmq_cli/Makefile
+++ b/deps/rabbitmq_cli/Makefile
@@ -1,7 +1,5 @@
PROJECT = rabbitmq_cli
-dep_observer_cli = git https://github.com/zhongwencool/observer_cli 1.4.4
-
BUILD_DEPS = rabbit_common
DEPS = observer_cli
TEST_DEPS = amqp_client rabbit
@@ -13,7 +11,7 @@ VERBOSE_TEST ?= true
MAX_CASES ?= 1
MIX_TEST_OPTS ?= ""
-MIX_TEST = mix test --max-cases=$(MAX_CASES)
+MIX_TEST = ERL_COMPILER_OPTIONS=deterministic mix test --max-cases=$(MAX_CASES)
ifneq ("",$(MIX_TEST_OPTS))
MIX_TEST := $(MIX_TEST) $(MIX_TEST_OPTS)
@@ -36,8 +34,8 @@ WITHOUT = plugins/cover \
plugins/proper \
plugins/triq
-include rabbitmq-components.mk
-include erlang.mk
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
# rabbitmq-mix.mk is generated during the creation of the RabbitMQ
# source archive. It sets some environment variables to allow
@@ -49,7 +47,8 @@ LINKED_ESCRIPTS = escript/rabbitmq-plugins \
escript/rabbitmq-diagnostics \
escript/rabbitmq-queues \
escript/rabbitmq-streams \
- escript/rabbitmq-upgrade
+ escript/rabbitmq-tanzu \
+ escript/rabbitmq-upgrade
ESCRIPTS = $(ACTUAL_ESCRIPTS) $(LINKED_ESCRIPTS)
# Record the build and link dependency: the target files are linked to
@@ -58,8 +57,9 @@ rabbitmq-plugins = escript/rabbitmqctl
rabbitmq-diagnostics = escript/rabbitmqctl
rabbitmq-queues = escript/rabbitmqctl
rabbitmq-streams = escript/rabbitmqctl
+rabbitmq-tanzu = escript/rabbitmqctl
rabbitmq-upgrade = escript/rabbitmqctl
-escript/rabbitmq-plugins escript/rabbitmq-diagnostics escript/rabbitmq-queues escript/rabbitmq-streams escript/rabbitmq-upgrade: escript/rabbitmqctl
+escript/rabbitmq-plugins escript/rabbitmq-diagnostics escript/rabbitmq-queues escript/rabbitmq-streams escript/rabbitmq-tanzu escript/rabbitmq-upgrade: escript/rabbitmqctl
# We use hardlinks or symlinks in the `escript` directory and
# install's PREFIX when a single escript can have several names (eg.
@@ -106,9 +106,9 @@ rabbitmqctl_srcs := mix.exs \
# ones).
$(ACTUAL_ESCRIPTS): $(rabbitmqctl_srcs)
$(gen_verbose) if test -d ../.hex; then \
- echo y | mix make_all_in_src_archive; \
+ echo y | ERL_COMPILER_OPTIONS=deterministic mix make_all_in_src_archive; \
else \
- echo y | mix make_all; \
+ echo y | ERL_COMPILER_OPTIONS=deterministic mix make_all; \
fi
$(LINKED_ESCRIPTS):
diff --git a/deps/rabbitmq_cli/elixir.bzl b/deps/rabbitmq_cli/elixir.bzl
new file mode 100644
index 0000000000..e9d738ef5f
--- /dev/null
+++ b/deps/rabbitmq_cli/elixir.bzl
@@ -0,0 +1,45 @@
+load("@bazel-erlang//:erlang_home.bzl", "ErlangHomeProvider", "ErlangVersionProvider")
+load("@bazel-erlang//:bazel_erlang_lib.bzl", "ErlangLibInfo", "path_join")
+load("//:elixir_home.bzl", "ElixirHomeProvider")
+
+def _impl(ctx):
+ erlang_version = ctx.attr._erlang_version[ErlangVersionProvider].version
+ erlang_home = ctx.attr._erlang_home[ErlangHomeProvider].path
+ elixir_home = ctx.attr._elixir_home[ElixirHomeProvider].path
+
+ ebin = ctx.actions.declare_directory(path_join(ctx.attr.name, "ebin"))
+
+ ctx.actions.run(
+ inputs = [],
+ outputs = [ebin],
+ executable = "cp",
+ arguments = [
+ "-R",
+ "{}/lib/elixir/ebin".format(elixir_home),
+ ebin.dirname,
+ ],
+ )
+
+ return [
+ DefaultInfo(
+ files = depset([ebin]),
+ runfiles = ctx.runfiles([ebin]),
+ ),
+ ErlangLibInfo(
+ lib_name = ctx.attr.name,
+ erlang_version = erlang_version,
+ include = [],
+ beam = [ebin],
+ priv = [],
+ deps = [],
+ ),
+ ]
+
+elixir = rule(
+ implementation = _impl,
+ attrs = {
+ "_erlang_version": attr.label(default = "@bazel-erlang//:erlang_version"),
+ "_erlang_home": attr.label(default = "@bazel-erlang//:erlang_home"),
+ "_elixir_home": attr.label(default = "//:elixir_home"),
+ },
+)
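+# Example usage (as in deps/rabbitmq_cli/BUILD.bazel elsewhere in this change):
+#   load(":elixir.bzl", "elixir")
+#   elixir(
+#       name = "elixir_as_bazel_erlang_lib",
+#       visibility = ["//visibility:public"],
+#   )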
diff --git a/deps/rabbitmq_cli/erlang.mk b/deps/rabbitmq_cli/erlang.mk
deleted file mode 100644
index 77933a9acf..0000000000
--- a/deps/rabbitmq_cli/erlang.mk
+++ /dev/null
@@ -1,7296 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
-ERLANG_MK_WITHOUT = plugins/cover plugins/ct plugins/dialyzer plugins/eunit plugins/proper plugins/triq
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
-
-# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
-
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
-
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of statc BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another a key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstact time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simplify writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating Github-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = erlang library for JSON Web Token
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple non intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = redis erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent of any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += esh_mk
-pkg_esh_mk_name = esh_mk
-pkg_esh_mk_description = esh template engine plugin for erlang.mk
-pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
-pkg_esh_mk_fetch = git
-pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
-pkg_esh_mk_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = TOML language erlang parser
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = http://fixprotocol.org/ implementation.
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - an Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = The guards and for Erlang parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
-pkg_gun_homepage = http://ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang irc client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library for efficient JSON decoding and encoding, written in C.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = erlang driver for rethinkdb
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = library to handle mimetypes
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang Oauth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transformer for using new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between record and proplist
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = automatic Erlang node discovery via redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = pipelined erlang redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang Rest Client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy as nif for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an ID generator for message services.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elastic Search
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX-style parser for broken HTML, written in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = transit format for erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU-based collation module for Erlang
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtension for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = Tiny Erlang app that works in conjunction with statsderl to generate Erlang VM information for Graphite logs.
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = a simple erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler svn repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired by rebar xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based yaml loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = ZAB protocol implementation in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang.
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
-
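# A brief usage sketch for the search target above: q is lowercased and
# matched against each package's name and description; without q, every
# entry is printed.
#
#   make search q=websocket
#   make search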
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
-
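# A minimal sketch of the replacement, assuming a project that needs the ssl
# and crypto OTP applications at runtime:
#
#   LOCAL_DEPS = ssl crypto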
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
-
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# Both early and regular plugins use the core_dep_plugin macro.
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
-
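# The two accepted forms, illustrated with a hypothetical dependency name
# (for a dependency other than the project itself):
#
#   DEP_EARLY_PLUGINS = my_dep              # includes $(DEPS_DIR)/my_dep/early-plugins.mk
#   DEP_EARLY_PLUGINS = my_dep/mk/early.mk  # includes $(DEPS_DIR)/my_dep/mk/early.mk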
-# Query functions.
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
-
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
-
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly we don't want to include it here,
-# otherwise it'll be treated both as an app and as a top-level project.
-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
-
-export NO_AUTOPATCH
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
-
-# Optimization: don't recompile deps unless truly necessary.
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create the ebin directory for all apps to make sure Erlang recognizes them
-# as proper OTP applications when using -include_lib. This is a temporary
-# fix; a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
-
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies after they have been compiled
-# once. Developers working on the top-level project and on some of its
-# dependencies at the same time may want to change this behavior.
-# There are two solutions:
-# 1. Set `FULL=1` so that all dependencies are visited and
-# recursively recompiled if necessary.
-# 2. Set `FORCE_REBUILD=` to the specific list of dependencies that
-# should be recompiled (instead of the whole set).
-
-FORCE_REBUILD ?=
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
-
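# For example, the two options described above can be used as follows
# (dependency names are illustrative):
#
#   make FULL=1                          # visit and rebuild all deps as needed
#   make FORCE_REBUILD="cowlib ranch"    # force only these deps to be rebuilt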
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
-
-# Deps related targets.
-
-# @todo rename GNUmakefile and makefile into Makefile first, if they exist
-# While the makefile could also be named GNUmakefile or makefile,
-# in practice only the name Makefile has been needed so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
-# if given. Do it for all 3 possible Makefile file names.
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
-
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-# Hex only has a package version. No need to look in the Erlang.mk packages.
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
-
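# For reference, a hex dependency is declared with the version as the second
# word and an optional Hex package name as the third; the example below is
# illustrative:
#
#   DEPS += jsx
#   dep_jsx = hex 3.1.0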
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility with the older Erlang.mk dependency configuration format.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
-
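# In other words, the deprecated form stores the repository and commit directly
# in dep_<name> without a fetch method; the repository URL below is illustrative:
#
#   dep_foo = https://github.com/example/foo master      # deprecated format
#   dep_foo = git https://github.com/example/foo master  # current equivalent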
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
-
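# These defaults can be overridden or extended from a project Makefile, for
# example (module name is illustrative):
#
#   ERLC_OPTS += +bin_opt_info
#   ERLC_EXCLUDE = my_legacy_module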
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- Output0 = [
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ],
- Output = case "é" of
- [233] -> unicode:characters_to_binary(Output0);
- _ -> Output0
- end,
- ok = file:write_file("$(1)", Output),
- halt()
-endef
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
- echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
-
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
- @:
-
-test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
-test_erlc_verbose_2 = set -x;
-test_erlc_verbose = $(test_erlc_verbose_$(V))
-
-define compile_test_erl
- $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(1)
-endef
-
-ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
-$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
- $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want to fail due to
-# warnings when used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
- " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
- " list-templates List available templates"
-
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
-
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
-
-list-templates:
- $(verbose) @echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code. The "gcc" MSYS2 package also doesn't.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
- "The CI_OTP variable must be defined with the Erlang versions" \
- "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
-
-# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifdef CONCUERROR_TESTS
-
-.PHONY: concuerror distclean-concuerror
-
-# Configuration
-
-CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
-CONCUERROR_OPTS ?=
-
-# Core targets.
-
-check:: concuerror
-
-ifndef KEEP_LOGS
-distclean:: distclean-concuerror
-endif
-
-# Plugin-specific targets.
-
-$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
- $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
- $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
-
-$(CONCUERROR_LOGS_DIR):
- $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
-
-define concuerror_html_report
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="utf-8">
-<title>Concuerror HTML report</title>
-</head>
-<body>
-<h1>Concuerror HTML report</h1>
-<p>Generated on $(concuerror_date)</p>
-<ul>
-$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
-</ul>
-</body>
-</html>
-endef
-
-concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
- $(eval concuerror_date := $(shell date))
- $(eval concuerror_targets := $^)
- $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
-
-define concuerror_target
-.PHONY: concuerror-$1-$2
-
-concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
- $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
- --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
- -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
- $$(CONCUERROR_OPTS) -m $1 -t $2
-endef
-
-$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
-
-distclean-concuerror:
- $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
-
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
- " escript Build an executable escript archive" \
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
- $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Verbosity.
-
-proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
-proto_verbose = $(proto_verbose_$(V))
-
-# Core targets.
-
-ifneq ($(wildcard src/),)
-ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
-PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
-ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
-
-ifeq ($(PROTO_FILES),)
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
- $(verbose) :
-else
-# Rebuild proto files when the Makefile changes.
-# We exclude $(PROJECT).d to avoid a circular dependency.
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(PROTO_FILES); \
- fi
- $(verbose) touch $@
-
-$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
-endif
-
-ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
-define compile_proto.erl
- [begin
- protobuffs_compile:generate_source(F, [
- {output_include_dir, "./include"},
- {output_src_dir, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-else
-define compile_proto.erl
- [begin
- gpb_compile:file(F, [
- {include_as_lib, true},
- {module_name_suffix, "_pb"},
- {o_hrl, "./include"},
- {o_erl, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-endif
-
-ifneq ($(PROTO_FILES),)
-$(PROJECT).d:: $(PROTO_FILES)
- $(verbose) mkdir -p ebin/ include/
- $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
-endif
-endif
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
-
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
- " shell Run an erlang shell with SHELL_OPTS or reasonable default"
-
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
-
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
- "ReST sources and 'conf.py' file are expected in directory pointed by" \
- "SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists formats to build (only" \
- "'html' format is generated by default); target directory can be specified by" \
- 'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
- "Additional Sphinx options can be set in SPHINX_OPTS."
-
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
-
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
- ' xref Run Xrefr using $$XREF_CONFIG as config file if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
-
-endif
-endif
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
-
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
-
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are also included no
-# matter the type of requested dependencies.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
-
-# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of
-# dependencies with a single target.
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
-
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
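The deleted block above is the tail of the bundled erlang.mk plugin code (doc/rel/test dependency handling, bootstrap templates, c_src, CI, Concuerror, EDoc, ErlyDTL, escript, protobuf, relx, shell, Sphinx, xref, SFX and recursive dependency listing). For orientation, below is a minimal sketch of the kind of project Makefile that drove these plugins, using only variables and targets documented in the deleted help text; the project and module names (my_app, my_server) are placeholders, not taken from this repository.

# Minimal sketch of a Makefile driving the plugins removed above.
PROJECT = my_app
PROJECT_DESCRIPTION = New project
PROJECT_VERSION = 0.1.0

# Erlang/OTP versions exercised by the 'ci' target (example values from the deleted help text).
CI_OTP = OTP-17.3.4 OTP-17.5.3

# Documentation formats built by the 'sphinx' target ('html' is the default).
SPHINX_FORMATS = html

include erlang.mk

# Typical invocations documented by the deleted plugins:
#   make bootstrap                        # generate an OTP application skeleton
#   make new t=gen_server n=my_server     # render the gen_server template into src/
#   make list-templates                   # list available templates
#   make ci                               # run 'make tests' on every CI_OTP version
#   make escript                          # build an executable escript archive
#   make run                              # build the relx release and run it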
diff --git a/deps/rabbitmq_cli/lib/rabbit_common/records.ex b/deps/rabbitmq_cli/lib/rabbit_common/records.ex
index dc1503caf7..cfb6b1a79b 100644
--- a/deps/rabbitmq_cli/lib/rabbit_common/records.ex
+++ b/deps/rabbitmq_cli/lib/rabbit_common/records.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitCommon.Records do
require Record
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/auto_complete.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/auto_complete.ex
index 1f907b528d..b8c97a8e18 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/auto_complete.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/auto_complete.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.AutoComplete do
alias RabbitMQ.CLI.Core.{CommandModules, Parser}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/command_behaviour.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/command_behaviour.ex
index 800d4d3227..525b6958bf 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/command_behaviour.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/command_behaviour.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.CommandBehaviour do
alias RabbitMQ.CLI.Core.Helpers
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/accepts_default_switches_and_timeout.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/accepts_default_switches_and_timeout.ex
index 3a78974b0c..3c1b517fe1 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/accepts_default_switches_and_timeout.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/accepts_default_switches_and_timeout.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
# Should be used by commands that require rabbit app to be stopped
# but need no other execution environment validators.
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/accepts_no_positional_arguments.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/accepts_no_positional_arguments.ex
index 166c4f22f7..ef5de9c91e 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/accepts_no_positional_arguments.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/accepts_no_positional_arguments.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
# Should be used by commands that require rabbit app to be stopped
# but need no other execution environment validators.
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/accepts_one_positional_argument.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/accepts_one_positional_argument.ex
index 3731775ed3..402593bb04 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/accepts_one_positional_argument.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/accepts_one_positional_argument.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
# Should be used by commands that require rabbit app to be stopped
# but need no other execution environment validators.
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/accepts_one_positive_integer_argument.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/accepts_one_positive_integer_argument.ex
index 6b6fd28dfe..0825dc7ed9 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/accepts_one_positive_integer_argument.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/accepts_one_positive_integer_argument.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
# Should be used by commands that require rabbit app to be stopped
# but need no other execution environment validators.
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/accepts_two_positional_arguments.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/accepts_two_positional_arguments.ex
index 18b2740b42..701dbc3699 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/accepts_two_positional_arguments.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/accepts_two_positional_arguments.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
# Should be used by commands that require rabbit app to be stopped
# but need no other execution environment validators.
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/alarms.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/alarms.ex
index 15143f7f69..38fe53ca99 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/alarms.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/alarms.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Core.Alarms do
def alarm_lines(alarms, node_name) do
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/ansi.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/ansi.ex
index e541a632ff..4d18461be8 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/ansi.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/ansi.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Core.ANSI do
def bright(string) do
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/code_path.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/code_path.ex
index 32636fac6f..b9eb9a2701 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/code_path.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/code_path.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Core.CodePath do
alias RabbitMQ.CLI.Core.{Config, Paths, Platform}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/command_modules.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/command_modules.ex
index a1b7fc9237..006119cc7a 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/command_modules.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/command_modules.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Core.CommandModules do
alias RabbitMQ.CLI.Core.Config
@@ -100,7 +100,7 @@ defmodule RabbitMQ.CLI.Core.CommandModules do
end)
end
- defp make_module_map(modules, scope) do
+ defp make_module_map(modules, scope) when modules != nil do
commands_ns = Regex.recompile!(@commands_ns)
modules
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/config.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/config.ex
index 251f9e582f..b403b18c87 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/config.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/config.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Core.Config do
alias RabbitMQ.CLI.{
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/data_coercion.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/data_coercion.ex
index 9c3d3e7344..94def6536c 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/data_coercion.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/data_coercion.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defprotocol RabbitMQ.CLI.Core.DataCoercion do
def to_atom(data)
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/distribution.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/distribution.ex
index 403c9dd970..4b2423d52c 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/distribution.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/distribution.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Core.Distribution do
alias RabbitMQ.CLI.Core.{ANSI, Config, Helpers}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/doc_guide.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/doc_guide.ex
index c75dcb0d7c..8992588f9d 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/doc_guide.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/doc_guide.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Core.DocGuide.Macros do
@moduledoc """
@@ -57,8 +57,8 @@ defmodule RabbitMQ.CLI.Core.DocGuide do
Macros.defguide("publishers")
Macros.defguide("plugins")
Macros.defguide("queues")
- Macros.defguide("quorum_queues", domain: "next.rabbitmq.com")
- Macros.defguide("stream_queues", domain: "next.rabbitmq.com")
+ Macros.defguide("quorum_queues")
+ Macros.defguide("streams")
Macros.defguide("runtime_tuning", path_segment: "runtime")
Macros.defguide("tls", path_segment: "ssl")
Macros.defguide("troubleshooting")
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/erl_eval.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/erl_eval.ex
index 8d2e3aff9e..9b35e4a0df 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/erl_eval.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/erl_eval.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Core.ErlEval do
def parse_expr(expr) do
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/exit_codes.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/exit_codes.ex
index 9e416d7153..d087bdd7bf 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/exit_codes.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/exit_codes.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
# Lists predefined error exit codes used by RabbitMQ CLI tools.
# The codes are adopted from [1], which (according to our team's research)
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/feature_flags.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/feature_flags.ex
index 4b4b8c8d5d..dd8148f354 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/feature_flags.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/feature_flags.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Core.FeatureFlags do
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/helpers.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/helpers.ex
index 97bd7c7bd9..d9834f6cc5 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/helpers.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/helpers.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Core.Helpers do
alias RabbitMQ.CLI.Core.{Config, NodeName}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/input.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/input.ex
index 5e1328be29..4e1fccf4e1 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/input.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/input.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Core.Input do
alias RabbitMQ.CLI.Core.Config
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/listeners.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/listeners.ex
index 0bc162186c..e3bd61b4a0 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/listeners.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/listeners.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Core.Listeners do
import Record, only: [defrecord: 3, extract: 2]
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/log_files.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/log_files.ex
index b6d104bff0..19c4f316f5 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/log_files.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/log_files.ex
@@ -2,13 +2,13 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2019-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2019-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Core.LogFiles do
@spec get_log_locations(atom, integer | :infinity) :: [String.t] | {:badrpc, term}
def get_log_locations(node_name, timeout) do
case :rabbit_misc.rpc_call(node_name,
- :rabbit_lager, :log_locations, [],
+ :rabbit, :log_locations, [],
timeout) do
{:badrpc, _} = error -> error;
list -> Enum.map(list, &to_string/1)
@@ -28,7 +28,6 @@ defmodule RabbitMQ.CLI.Core.LogFiles do
location ->
case Enum.member?(log_locations, location) do
true -> {:ok, to_string(location)};
- ## Configured location was not propagated to lager?
false -> {:ok, first_log}
end
end
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/memory.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/memory.ex
index 92db5b5502..d388cf1ec8 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/memory.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/memory.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Core.Memory do
alias RabbitMQ.CLI.InformationUnit, as: IU
@@ -56,6 +56,12 @@ defmodule RabbitMQ.CLI.Core.Memory do
def formatted_watermark(val) when is_float(val) do
%{relative: val}
end
+ def formatted_watermark({:relative, val}) when is_float(val) do
+ %{relative: val}
+ end
+ def formatted_watermark(:infinity) do
+ %{relative: 1.0}
+ end
def formatted_watermark({:absolute, val}) do
%{absolute: parse_watermark(val)}
end
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/merges_default_virtual_host.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/merges_default_virtual_host.ex
index 94b1b768b6..996dba08c8 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/merges_default_virtual_host.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/merges_default_virtual_host.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
# Should be used by commands that require rabbit app to be stopped
# but need no other execution environment validators.
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/merges_no_defaults.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/merges_no_defaults.ex
index 0ee6f3f05a..34175bd19e 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/merges_no_defaults.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/merges_no_defaults.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
# Should be used by commands that require rabbit app to be stopped
# but need no other execution environment validators.
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/networking.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/networking.ex
index 12d99df7c1..b07c7a2d4f 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/networking.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/networking.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Core.Networking do
@type address_family() :: :inet | :inet6
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/node_name.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/node_name.ex
index c39b215ca7..cd43404491 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/node_name.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/node_name.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Core.NodeName do
alias RabbitMQ.CLI.Core.Config
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/os_pid.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/os_pid.ex
index 0b53d59748..1632b290a6 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/os_pid.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/os_pid.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Core.OsPid do
@external_process_check_interval 1000
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/output.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/output.ex
index 1b2436cba4..936049a90d 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/output.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/output.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Core.Output do
def format_output(:ok, _, _) do
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/parser.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/parser.ex
index 28c4df2aa4..bd1aa45815 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/parser.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/parser.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Core.Parser do
alias RabbitMQ.CLI.{CommandBehaviour, FormatterBehaviour}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/paths.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/paths.ex
index 0e90834771..caec7b0b89 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/paths.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/paths.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Core.Paths do
alias RabbitMQ.CLI.Core.Config
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/platform.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/platform.ex
index 561b2adb58..ae9410dc2d 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/platform.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/platform.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Core.Platform do
def path_separator() do
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/requires_rabbit_app_running.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/requires_rabbit_app_running.ex
index 7f5337a6e7..de3a15c331 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/requires_rabbit_app_running.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/requires_rabbit_app_running.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
# Should be used by commands that require rabbit app to be running
# but need no other execution environment validators.
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/requires_rabbit_app_stopped.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/requires_rabbit_app_stopped.ex
index 48b2b6dcd0..50e8d3aa97 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/requires_rabbit_app_stopped.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/requires_rabbit_app_stopped.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
# Should be used by commands that require rabbit app to be stopped
# but need no other execution environment validators.
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/validators.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/validators.ex
index 666d7af065..f87de91064 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/validators.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/validators.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
# Provides common validation functions.
defmodule RabbitMQ.CLI.Core.Validators do
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/version.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/version.ex
index bd5a24f9a0..fe599fc8a0 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/core/version.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/core/version.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Core.Version do
@default_timeout 30_000
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/add_user_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/add_user_command.ex
index 514922cac9..ab9c49c7bd 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/add_user_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/add_user_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.AddUserCommand do
alias RabbitMQ.CLI.Core.{DocGuide, ExitCodes, Helpers, Input}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/add_vhost_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/add_vhost_command.ex
index 04c1e61106..70217c307b 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/add_vhost_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/add_vhost_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.AddVhostCommand do
alias RabbitMQ.CLI.Core.{DocGuide, Helpers}
@@ -22,6 +22,9 @@ defmodule RabbitMQ.CLI.Ctl.Commands.AddVhostCommand do
def run([vhost], %{node: node_name, description: desc, tags: tags}) do
:rabbit_misc.rpc_call(node_name, :rabbit_vhost, :add, [vhost, desc, parse_tags(tags), Helpers.cli_acting_user()])
end
+ def run([vhost], %{node: node_name, tags: tags}) do
+ :rabbit_misc.rpc_call(node_name, :rabbit_vhost, :add, [vhost, "", parse_tags(tags), Helpers.cli_acting_user()])
+ end
def run([vhost], %{node: node_name}) do
:rabbit_misc.rpc_call(node_name, :rabbit_vhost, :add, [vhost, Helpers.cli_acting_user()])
end
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/authenticate_user_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/authenticate_user_command.ex
index 9913633b84..7ae78f94ba 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/authenticate_user_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/authenticate_user_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.AuthenticateUserCommand do
alias RabbitMQ.CLI.Core.{DocGuide, ExitCodes, Input}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/autocomplete_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/autocomplete_command.ex
index 19deb74f79..835aefcb1d 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/autocomplete_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/autocomplete_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.AutocompleteCommand do
@behaviour RabbitMQ.CLI.CommandBehaviour
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/await_online_nodes_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/await_online_nodes_command.ex
index f0d1df6a02..81463161e4 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/await_online_nodes_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/await_online_nodes_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.AwaitOnlineNodesCommand do
@behaviour RabbitMQ.CLI.CommandBehaviour
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/await_startup_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/await_startup_command.ex
index 9a898224ce..933b4c0da0 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/await_startup_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/await_startup_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.AwaitStartupCommand do
@moduledoc """
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/cancel_sync_queue_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/cancel_sync_queue_command.ex
index 2858040039..16e422f29d 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/cancel_sync_queue_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/cancel_sync_queue_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.CancelSyncQueueCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/change_cluster_node_type_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/change_cluster_node_type_command.ex
index 93fc9c7da0..999407eb0e 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/change_cluster_node_type_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/change_cluster_node_type_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ChangeClusterNodeTypeCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/change_password_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/change_password_command.ex
index b0dec0a824..15c59f5201 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/change_password_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/change_password_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ChangePasswordCommand do
alias RabbitMQ.CLI.Core.{DocGuide, ExitCodes, Helpers, Input}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_global_parameter_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_global_parameter_command.ex
index c5cedeb96a..1909ff982e 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_global_parameter_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_global_parameter_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ClearGlobalParameterCommand do
alias RabbitMQ.CLI.Core.{DocGuide, Helpers}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_operator_policy_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_operator_policy_command.ex
index 4b77d4cb38..d13817d9ee 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_operator_policy_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_operator_policy_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ClearOperatorPolicyCommand do
alias RabbitMQ.CLI.Core.{DocGuide, Helpers}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_parameter_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_parameter_command.ex
index 3997b1b61f..8dbd310ae5 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_parameter_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_parameter_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ClearParameterCommand do
alias RabbitMQ.CLI.Core.{DocGuide, Helpers}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_password_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_password_command.ex
index 398af4813b..bdf2e728e2 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_password_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_password_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ClearPasswordCommand do
alias RabbitMQ.CLI.Core.{DocGuide, Helpers}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_permissions_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_permissions_command.ex
index 2fd129fffa..9e3c045c74 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_permissions_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_permissions_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ClearPermissionsCommand do
alias RabbitMQ.CLI.Core.{DocGuide, ExitCodes, Helpers}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_policy_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_policy_command.ex
index 057c2e8c24..6cd58734df 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_policy_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_policy_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ClearPolicyCommand do
alias RabbitMQ.CLI.Core.{Helpers, DocGuide}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_topic_permissions_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_topic_permissions_command.ex
index 5d0b249db6..1969bed638 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_topic_permissions_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_topic_permissions_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ClearTopicPermissionsCommand do
alias RabbitMQ.CLI.Core.{DocGuide, ExitCodes, Helpers}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_vhost_limits_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_vhost_limits_command.ex
index a73f0ff670..68d9b78c6c 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_vhost_limits_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/clear_vhost_limits_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ClearVhostLimitsCommand do
alias RabbitMQ.CLI.Core.{DocGuide, Helpers}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/close_all_connections_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/close_all_connections_command.ex
index d4c5b5f17a..4399dc14ea 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/close_all_connections_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/close_all_connections_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.CloseAllConnectionsCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/close_all_user_connections_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/close_all_user_connections_command.ex
new file mode 100644
index 0000000000..184e1df8a7
--- /dev/null
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/close_all_user_connections_command.ex
@@ -0,0 +1,50 @@
+## This Source Code Form is subject to the terms of the Mozilla Public
+## License, v. 2.0. If a copy of the MPL was not distributed with this
+## file, You can obtain one at https://mozilla.org/MPL/2.0/.
+##
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
+
+defmodule RabbitMQ.CLI.Ctl.Commands.CloseAllUserConnectionsCommand do
+ alias RabbitMQ.CLI.Core.DocGuide
+
+ @behaviour RabbitMQ.CLI.CommandBehaviour
+
+ use RabbitMQ.CLI.Core.MergesNoDefaults
+ use RabbitMQ.CLI.Core.AcceptsTwoPositionalArguments
+
+ use RabbitMQ.CLI.Core.RequiresRabbitAppRunning
+
+ def run([username, explanation], %{node: node_name}) do
+ :rabbit_misc.rpc_call(
+ node_name,
+ :rabbit_networking,
+ :close_all_user_connections,
+ [username, explanation]
+ )
+ end
+
+ use RabbitMQ.CLI.DefaultOutput
+
+ def usage, do: "close_all_user_connections <username> <explanation>"
+
+ def usage_additional do
+ [
+ ["<username>", "Self-explanatory"],
+ ["<explanation>", "reason for connection closure, will be logged and provided to clients"]
+ ]
+ end
+
+ def usage_doc_guides() do
+ [
+ DocGuide.connections()
+ ]
+ end
+
+ def help_section(), do: :operations
+
+ def description(),
+ do: "Instructs the broker to close all connections of the specified user"
+
+ def banner([username, explanation], _),
+ do: "Closing connections of user #{username}, reason: #{explanation}..."
+end
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/close_connection_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/close_connection_command.ex
index 371c582b19..2715725a2d 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/close_connection_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/close_connection_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.CloseConnectionCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/cluster_status_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/cluster_status_command.ex
index 20f96e8075..cf700577fb 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/cluster_status_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/cluster_status_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ClusterStatusCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/decode_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/decode_command.ex
index 015617b102..7c8da87d26 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/decode_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/decode_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
alias RabbitMQ.CLI.Core.Helpers
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/delete_queue_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/delete_queue_command.ex
index 5e5fe9b9c0..5c5aebe3eb 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/delete_queue_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/delete_queue_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.DeleteQueueCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/delete_user_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/delete_user_command.ex
index 84f00a96f4..8af546e424 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/delete_user_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/delete_user_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.DeleteUserCommand do
alias RabbitMQ.CLI.Core.{DocGuide, ExitCodes, Helpers}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/delete_vhost_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/delete_vhost_command.ex
index 8ff6e1f047..4207379e37 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/delete_vhost_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/delete_vhost_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.DeleteVhostCommand do
alias RabbitMQ.CLI.Core.{DocGuide, Helpers}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/enable_feature_flag_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/enable_feature_flag_command.ex
index 6af5a79e49..c08f59fea3 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/enable_feature_flag_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/enable_feature_flag_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2018-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2018-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.EnableFeatureFlagCommand do
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/encode_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/encode_command.ex
index c625b4a5f5..d18f948fce 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/encode_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/encode_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.EncodeCommand do
alias RabbitMQ.CLI.Core.{DocGuide, Helpers}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/environment_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/environment_command.ex
index ac807512a9..73f874ed03 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/environment_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/environment_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.EnvironmentCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/eval_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/eval_command.ex
index 35fe0a8803..8547bb7b13 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/eval_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/eval_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.EvalCommand do
alias RabbitMQ.CLI.Core.{DocGuide, ErlEval, ExitCodes, Input}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/eval_file_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/eval_file_command.ex
index 6f46abbf17..70019df2cd 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/eval_file_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/eval_file_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.EvalFileCommand do
alias RabbitMQ.CLI.Core.{DocGuide, ErlEval, ExitCodes}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/exec_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/exec_command.ex
index 469047c1af..1739bcf33f 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/exec_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/exec_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ExecCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/export_definitions_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/export_definitions_command.ex
index e4b026f160..a9cd0b3029 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/export_definitions_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/export_definitions_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ExportDefinitionsCommand do
alias RabbitMQ.CLI.Core.{DocGuide, ExitCodes, Helpers}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/force_boot_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/force_boot_command.ex
index 261f86c6c1..ea26459893 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/force_boot_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/force_boot_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ForceBootCommand do
alias RabbitMQ.CLI.Core.{Config, DocGuide}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/force_gc_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/force_gc_command.ex
index 975154be50..5863877904 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/force_gc_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/force_gc_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ForceGcCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/force_reset_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/force_reset_command.ex
index 5f202f9d08..4c8e6464ae 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/force_reset_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/force_reset_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ForceResetCommand do
alias RabbitMQ.CLI.Core.{DocGuide, ExitCodes}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/forget_cluster_node_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/forget_cluster_node_command.ex
index cdf5ae7fbe..43d982e72f 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/forget_cluster_node_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/forget_cluster_node_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ForgetClusterNodeCommand do
alias RabbitMQ.CLI.Core.{DocGuide, Distribution, Validators}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/help_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/help_command.ex
index 8f459cc83f..1148c95cae 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/help_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/help_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
alias RabbitMQ.CLI.CommandBehaviour
@@ -14,7 +14,7 @@ defmodule RabbitMQ.CLI.Ctl.Commands.HelpCommand do
@behaviour RabbitMQ.CLI.CommandBehaviour
- def scopes(), do: [:ctl, :diagnostics, :plugins, :queues, :upgrade]
+ def scopes(), do: [:ctl, :diagnostics, :plugins, :queues, :tanzu, :upgrade]
def switches(), do: [list_commands: :boolean]
def distribution(_), do: :none
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/hipe_compile_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/hipe_compile_command.ex
index 13f3468cb6..8af20dc746 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/hipe_compile_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/hipe_compile_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.HipeCompileCommand do
@moduledoc """
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/import_definitions_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/import_definitions_command.ex
index 45ca0074f3..680c80de97 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/import_definitions_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/import_definitions_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ImportDefinitionsCommand do
alias RabbitMQ.CLI.Core.{Config, DocGuide, ExitCodes, Helpers}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/join_cluster_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/join_cluster_command.ex
index 765fbd43f1..c92965c8c9 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/join_cluster_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/join_cluster_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.JoinClusterCommand do
alias RabbitMQ.CLI.Core.{Config, DocGuide, Helpers}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_bindings_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_bindings_command.ex
index 19e8844089..65cf84f304 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_bindings_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_bindings_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ListBindingsCommand do
alias RabbitMQ.CLI.Ctl.{InfoKeys, RpcStream}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_channels_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_channels_command.ex
index 5ae7450da1..bbd9ac1faf 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_channels_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_channels_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
##
defmodule RabbitMQ.CLI.Ctl.Commands.ListChannelsCommand do
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_ciphers_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_ciphers_command.ex
index eb7075d261..9a0fe8cfff 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_ciphers_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_ciphers_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ListCiphersCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_connections_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_connections_command.ex
index 0e28272ea8..7cd3520d3f 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_connections_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_connections_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ListConnectionsCommand do
alias RabbitMQ.CLI.Core.{DocGuide, Helpers}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_consumers_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_consumers_command.ex
index 90c587cbe8..fa21c36f5d 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_consumers_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_consumers_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ListConsumersCommand do
alias RabbitMQ.CLI.Core.{DocGuide, Helpers}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_exchanges_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_exchanges_command.ex
index a3b8b3521b..9c18a48826 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_exchanges_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_exchanges_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ListExchangesCommand do
alias RabbitMQ.CLI.Ctl.{InfoKeys, RpcStream}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_feature_flags_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_feature_flags_command.ex
index 46b4bc82c2..ec5b590164 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_feature_flags_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_feature_flags_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2018-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2018-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ListFeatureFlagsCommand do
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_global_parameters_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_global_parameters_command.ex
index 8d3f2d795a..f273eadfa3 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_global_parameters_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_global_parameters_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ListGlobalParametersCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_hashes_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_hashes_command.ex
index 9e0f25e6dd..78c084ffda 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_hashes_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_hashes_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ListHashesCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_operator_policies_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_operator_policies_command.ex
index dd2c54dfc0..1cc156cdb3 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_operator_policies_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_operator_policies_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ListOperatorPoliciesCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_parameters_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_parameters_command.ex
index 2d51f08527..4674d78d05 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_parameters_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_parameters_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ListParametersCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_permissions_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_permissions_command.ex
index feaf917cfa..0f667c9658 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_permissions_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_permissions_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ListPermissionsCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_policies_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_policies_command.ex
index 9fe8e37dc1..c75b5fa6dd 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_policies_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_policies_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ListPoliciesCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_queues_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_queues_command.ex
index 50cd5aa7d0..400f0b1686 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_queues_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_queues_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ListQueuesCommand do
require RabbitMQ.CLI.Ctl.InfoKeys
@@ -21,7 +21,9 @@ defmodule RabbitMQ.CLI.Ctl.Commands.ListQueuesCommand do
messages_persistent message_bytes message_bytes_ready
message_bytes_unacknowledged message_bytes_ram message_bytes_persistent
head_message_timestamp disk_reads disk_writes consumers
- consumer_utilisation memory slave_pids synchronised_slave_pids state type
+ # these are aliases
+ consumer_utilisation consumer_capacity
+ memory slave_pids synchronised_slave_pids state type
leader members online confirm_on)a
def description(), do: "Lists queues and their properties"
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_topic_permissions_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_topic_permissions_command.ex
index 1a22b3b26d..6848ce313d 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_topic_permissions_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_topic_permissions_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ListTopicPermissionsCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_unresponsive_queues_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_unresponsive_queues_command.ex
index 91d6d624f5..5da18f8697 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_unresponsive_queues_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_unresponsive_queues_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ListUnresponsiveQueuesCommand do
require RabbitMQ.CLI.Ctl.InfoKeys
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_user_permissions_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_user_permissions_command.ex
index bd302eefd0..82f35ac865 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_user_permissions_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_user_permissions_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ListUserPermissionsCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_user_topic_permissions_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_user_topic_permissions_command.ex
index 48b7fee5e2..8ca23970d8 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_user_topic_permissions_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_user_topic_permissions_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ListUserTopicPermissionsCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_users_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_users_command.ex
index e87ea386d0..86e20cf9f7 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_users_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_users_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ListUsersCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_vhost_limits_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_vhost_limits_command.ex
index 67b138f1e0..0f7cce3798 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_vhost_limits_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_vhost_limits_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ListVhostLimitsCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_vhosts_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_vhosts_command.ex
index b570aa7486..5f5e1e67e0 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_vhosts_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/list_vhosts_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ListVhostsCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/node_health_check_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/node_health_check_command.ex
index 31ea748d9f..a0888779a1 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/node_health_check_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/node_health_check_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.NodeHealthCheckCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/ping_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/ping_command.ex
index 7efb3b39f3..72b9a07db3 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/ping_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/ping_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.PingCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/purge_queue_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/purge_queue_command.ex
index 1be25beb7d..8c14c49112 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/purge_queue_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/purge_queue_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.PurgeQueueCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/rename_cluster_node_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/rename_cluster_node_command.ex
index 7faa30d00b..b32d5d6ff4 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/rename_cluster_node_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/rename_cluster_node_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.RenameClusterNodeCommand do
require Integer
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/report_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/report_command.ex
index c06497a7e6..7dfb10a375 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/report_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/report_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ReportCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/reset_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/reset_command.ex
index 575ef2491d..44b77f6bef 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/reset_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/reset_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ResetCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/restart_vhost_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/restart_vhost_command.ex
index 36a7f702bc..c3157cd16a 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/restart_vhost_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/restart_vhost_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
alias RabbitMQ.CLI.Core.ExitCodes
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/resume_listeners_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/resume_listeners_command.ex
index 1f13660e0d..39dc5b1630 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/resume_listeners_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/resume_listeners_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ResumeListenersCommand do
@moduledoc """
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/rotate_logs_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/rotate_logs_command.ex
index f3de3671fc..a7b8756e38 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/rotate_logs_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/rotate_logs_command.ex
@@ -2,10 +2,10 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.RotateLogsCommand do
- alias RabbitMQ.CLI.Core.DocGuide
+ alias RabbitMQ.CLI.Core.{DocGuide, ExitCodes}
@behaviour RabbitMQ.CLI.CommandBehaviour
@@ -13,8 +13,12 @@ defmodule RabbitMQ.CLI.Ctl.Commands.RotateLogsCommand do
use RabbitMQ.CLI.Core.AcceptsNoPositionalArguments
use RabbitMQ.CLI.Core.RequiresRabbitAppRunning
- def run([], %{node: node_name}) do
- :rabbit_misc.rpc_call(node_name, :rabbit, :rotate_logs, [])
+ def run([], _) do
+ {
+ :error,
+ ExitCodes.exit_unavailable(),
+ "This command does not rotate logs anymore [deprecated]"
+ }
end
use RabbitMQ.CLI.DefaultOutput
@@ -28,7 +32,7 @@ defmodule RabbitMQ.CLI.Ctl.Commands.RotateLogsCommand do
def help_section(), do: :node_management
- def description(), do: "Instructs the RabbitMQ node to perform internal log rotation"
+ def description(), do: "Does nothing [deprecated]"
- def banner(_, %{node: node_name}), do: "Rotating logs for node #{node_name} ..."
+ def banner(_, _), do: nil
end
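After this change rotate_logs no longer issues an RPC to the node: run/2 returns an {:error, ExitCodes.exit_unavailable(), message} tuple, so the command only reports its own deprecation. A brief usage sketch (exact output formatting may vary; the message comes from the code above):

    rabbitmqctl rotate_logs
    # fails with "This command does not rotate logs anymore [deprecated]" and a non-zero exit code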
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_cluster_name_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_cluster_name_command.ex
index f919cb2ae6..a970e5f03a 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_cluster_name_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_cluster_name_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.SetClusterNameCommand do
alias RabbitMQ.CLI.Core.{DocGuide, Helpers}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_disk_free_limit_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_disk_free_limit_command.ex
index cf97c4655e..b7beeb13e7 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_disk_free_limit_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_disk_free_limit_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.SetDiskFreeLimitCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_global_parameter_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_global_parameter_command.ex
index 8c46e9d592..81f7fc7c47 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_global_parameter_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_global_parameter_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.SetGlobalParameterCommand do
alias RabbitMQ.CLI.Core.{DocGuide, Helpers}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_log_level_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_log_level_command.ex
index f5a8eacbfc..877707bf9e 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_log_level_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_log_level_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.SetLogLevelCommand do
alias RabbitMQ.CLI.Core.DocGuide
@@ -42,7 +42,7 @@ defmodule RabbitMQ.CLI.Ctl.Commands.SetLogLevelCommand do
def run([log_level], %{node: node_name}) do
arg = String.to_atom(log_level)
- :rabbit_misc.rpc_call(node_name, :rabbit_lager, :set_log_level, [arg])
+ :rabbit_misc.rpc_call(node_name, :rabbit, :set_log_level, [arg])
end
def usage, do: "set_log_level <log_level>"
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_operator_policy_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_operator_policy_command.ex
index 3118c125cb..4bb00d1735 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_operator_policy_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_operator_policy_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.SetOperatorPolicyCommand do
alias RabbitMQ.CLI.Core.{DocGuide, Helpers}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_parameter_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_parameter_command.ex
index 910cc6ef73..b1fe1e1cba 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_parameter_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_parameter_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.SetParameterCommand do
alias RabbitMQ.CLI.Core.{DocGuide, Helpers}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_permissions_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_permissions_command.ex
index c87969121c..868da26ebb 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_permissions_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_permissions_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.SetPermissionsCommand do
alias RabbitMQ.CLI.Core.{DocGuide, ExitCodes, Helpers}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_policy_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_policy_command.ex
index af34f3c659..3f68ebd023 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_policy_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_policy_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.SetPolicyCommand do
alias RabbitMQ.CLI.Core.{Helpers, DocGuide}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_topic_permissions_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_topic_permissions_command.ex
index c57dc1659b..0852bfd4a2 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_topic_permissions_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_topic_permissions_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.SetTopicPermissionsCommand do
alias RabbitMQ.CLI.Core.{DocGuide, ExitCodes, Helpers}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_user_tags_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_user_tags_command.ex
index eba8ed6123..5e448b027b 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_user_tags_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_user_tags_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.SetUserTagsCommand do
alias RabbitMQ.CLI.Core.{DocGuide, ExitCodes, Helpers}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_vhost_limits_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_vhost_limits_command.ex
index f25f1c7bc4..3c6d725ea7 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_vhost_limits_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_vhost_limits_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.SetVhostLimitsCommand do
alias RabbitMQ.CLI.Core.{DocGuide, Helpers}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_vhost_tags_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_vhost_tags_command.ex
new file mode 100644
index 0000000000..1a262d2a26
--- /dev/null
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_vhost_tags_command.ex
@@ -0,0 +1,60 @@
+## This Source Code Form is subject to the terms of the Mozilla Public
+## License, v. 2.0. If a copy of the MPL was not distributed with this
+## file, You can obtain one at https://mozilla.org/MPL/2.0/.
+##
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
+
+defmodule RabbitMQ.CLI.Ctl.Commands.SetVhostTagsCommand do
+ alias RabbitMQ.CLI.Core.{DocGuide, ExitCodes, Helpers}
+
+ @behaviour RabbitMQ.CLI.CommandBehaviour
+
+ def merge_defaults(args, opts), do: {args, opts}
+
+ def validate([], _) do
+ {:validation_failure, :not_enough_args}
+ end
+ def validate(_, _), do: :ok
+
+ use RabbitMQ.CLI.Core.RequiresRabbitAppRunning
+
+ def run([vhost | tags], %{node: node_name}) do
+ case :rabbit_misc.rpc_call(
+ node_name, :rabbit_vhost, :update_tags, [vhost, tags, Helpers.cli_acting_user()]) do
+ {:error, _} = err -> err
+ {:badrpc, _} = err -> err
+ _ -> :ok
+ end
+ end
+
+ def output({:error, {:no_such_vhost, vhost}}, %{node: node_name, formatter: "json"}) do
+ {:error, %{"result" => "error", "node" => node_name, "message" => "Virtual host \"#{vhost}\" does not exists"}}
+ end
+ def output({:error, {:no_such_vhost, vhost}}, _) do
+ {:error, ExitCodes.exit_dataerr(), "Virtual host \"#{vhost}\" does not exist"}
+ end
+ use RabbitMQ.CLI.DefaultOutput
+
+ def usage, do: "set_vhost_tags <vhost> <tag> [...]"
+
+ def usage_additional() do
+ [
+ ["<vhost>", "Self-explanatory"],
+ ["<tags>", "Space separated list of tags"]
+ ]
+ end
+
+ def usage_doc_guides() do
+ [
+ DocGuide.virtual_hosts()
+ ]
+ end
+
+ def help_section(), do: :virtual_hosts
+
+ def description(), do: "Sets virtual host tags"
+
+ def banner([vhost | tags], _) do
+ "Setting tags for virtual host \"#{vhost}\" to [#{tags |> Enum.join(", ")}] ..."
+ end
+end
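The new command delegates to rabbit_vhost:update_tags/3 on the target node, setting the virtual host's tags to the list given on the command line. A brief usage sketch (vhost and tag names are illustrative):

    rabbitmqctl set_vhost_tags qa1 production team-a
    # banner: Setting tags for virtual host "qa1" to [production, team-a] ...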
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_vm_memory_high_watermark_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_vm_memory_high_watermark_command.ex
index a4e4527f8f..b0c58b38cc 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_vm_memory_high_watermark_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/set_vm_memory_high_watermark_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.SetVmMemoryHighWatermarkCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/shutdown_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/shutdown_command.ex
index 10700bf309..612bb2da99 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/shutdown_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/shutdown_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.ShutdownCommand do
@behaviour RabbitMQ.CLI.CommandBehaviour
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/start_app_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/start_app_command.ex
index 900bd762fa..9fafed8d2b 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/start_app_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/start_app_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.StartAppCommand do
@behaviour RabbitMQ.CLI.CommandBehaviour
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/status_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/status_command.ex
index 582f514f27..f4261ddfab 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/status_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/status_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.StatusCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/stop_app_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/stop_app_command.ex
index c3cbe3b1fd..a538f752fe 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/stop_app_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/stop_app_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.StopAppCommand do
@behaviour RabbitMQ.CLI.CommandBehaviour
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/stop_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/stop_command.ex
index becb75a0b5..336bfd2055 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/stop_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/stop_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.StopCommand do
@behaviour RabbitMQ.CLI.CommandBehaviour
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/suspend_listeners_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/suspend_listeners_command.ex
index 31fcf738b9..ced30a65aa 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/suspend_listeners_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/suspend_listeners_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.SuspendListenersCommand do
@moduledoc """
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/sync_queue_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/sync_queue_command.ex
index 4b7112af57..201bcefe2d 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/sync_queue_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/sync_queue_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.SyncQueueCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/trace_off_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/trace_off_command.ex
index f2b6cc217f..d73919d208 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/trace_off_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/trace_off_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.TraceOffCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/trace_on_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/trace_on_command.ex
index 33bb5a06d6..c522b480b8 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/trace_on_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/trace_on_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.TraceOnCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/update_cluster_nodes_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/update_cluster_nodes_command.ex
index 94b218e2c9..e970977df7 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/update_cluster_nodes_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/update_cluster_nodes_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.UpdateClusterNodesCommand do
alias RabbitMQ.CLI.Core.{Config, DocGuide, Helpers}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/version_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/version_command.ex
index 8028054932..b65c7b70ca 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/version_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/version_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.VersionCommand do
alias RabbitMQ.CLI.Core.{Validators, Version}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/wait_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/wait_command.ex
index 0699203de6..327665c22e 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/wait_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/commands/wait_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.Commands.WaitCommand do
alias RabbitMQ.CLI.Core.{Helpers, Validators}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/info_keys.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/info_keys.ex
index 26f86ae51e..784690caa2 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/info_keys.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/info_keys.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.InfoKeys do
import RabbitCommon.Records
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/rpc_stream.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/rpc_stream.ex
index 4b672a6d88..3582f55873 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/rpc_stream.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/ctl/rpc_stream.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Ctl.RpcStream do
alias RabbitMQ.CLI.Ctl.InfoKeys
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/default_output.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/default_output.ex
index d5e3f94a15..f70b67e419 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/default_output.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/default_output.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
alias RabbitMQ.CLI.Formatters.FormatterHelpers
defmodule RabbitMQ.CLI.DefaultOutput do
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/alarms_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/alarms_command.ex
index 7669a523eb..14c85f935b 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/alarms_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/alarms_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Diagnostics.Commands.AlarmsCommand do
@moduledoc """
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/certificates_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/certificates_command.ex
index 33320d8e37..4e79a272ab 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/certificates_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/certificates_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Diagnostics.Commands.CertificatesCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_alarms_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_alarms_command.ex
index 04bb70317a..d13467803a 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_alarms_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_alarms_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Diagnostics.Commands.CheckAlarmsCommand do
@moduledoc """
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_certificate_expiration_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_certificate_expiration_command.ex
index d14ade59f6..7bde96001e 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_certificate_expiration_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_certificate_expiration_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Diagnostics.Commands.CheckCertificateExpirationCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_local_alarms_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_local_alarms_command.ex
index 1b11537793..22b57a15ee 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_local_alarms_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_local_alarms_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Diagnostics.Commands.CheckLocalAlarmsCommand do
@moduledoc """
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_port_connectivity_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_port_connectivity_command.ex
index 1c3d86ed83..021995a986 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_port_connectivity_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_port_connectivity_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Diagnostics.Commands.CheckPortConnectivityCommand do
@moduledoc """
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_port_listener_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_port_listener_command.ex
index f321d444db..f1294d2a34 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_port_listener_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_port_listener_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Diagnostics.Commands.CheckPortListenerCommand do
@moduledoc """
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_protocol_listener_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_protocol_listener_command.ex
index 10c81c971e..87cd8e9d89 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_protocol_listener_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_protocol_listener_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Diagnostics.Commands.CheckProtocolListenerCommand do
@moduledoc """
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_running_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_running_command.ex
index 690f17e1e7..d7e1b09631 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_running_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_running_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Diagnostics.Commands.CheckRunningCommand do
@moduledoc """
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_virtual_hosts_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_virtual_hosts_command.ex
index b3169b522d..862401b01b 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_virtual_hosts_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/check_virtual_hosts_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Diagnostics.Commands.CheckVirtualHostsCommand do
@moduledoc """
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/cipher_suites_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/cipher_suites_command.ex
index 86e8eee3a4..8ee671bd90 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/cipher_suites_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/cipher_suites_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Diagnostics.Commands.CipherSuitesCommand do
alias RabbitMQ.CLI.Core.Helpers
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/command_line_arguments_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/command_line_arguments_command.ex
index adbf14cfc3..51a292eee8 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/command_line_arguments_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/command_line_arguments_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Diagnostics.Commands.CommandLineArgumentsCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/consume_event_stream_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/consume_event_stream_command.ex
index e7ad171d11..4a3bd8770a 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/consume_event_stream_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/consume_event_stream_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2019-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2019-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Diagnostics.Commands.ConsumeEventStreamCommand do
@moduledoc """
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/disable_auth_attempt_source_tracking_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/disable_auth_attempt_source_tracking_command.ex
index df182a0c97..7159541bfc 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/disable_auth_attempt_source_tracking_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/disable_auth_attempt_source_tracking_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Diagnostics.Commands.DisableAuthAttemptSourceTrackingCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/discover_peers_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/discover_peers_command.ex
index b23a13e370..b9d66be625 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/discover_peers_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/discover_peers_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Diagnostics.Commands.DiscoverPeersCommand do
@behaviour RabbitMQ.CLI.CommandBehaviour
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/enable_auth_attempt_source_tracking_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/enable_auth_attempt_source_tracking_command.ex
index 832891094b..db52c4be45 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/enable_auth_attempt_source_tracking_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/enable_auth_attempt_source_tracking_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Diagnostics.Commands.EnableAuthAttemptSourceTrackingCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/erlang_cookie_hash_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/erlang_cookie_hash_command.ex
index b6e3186c94..c5601475ff 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/erlang_cookie_hash_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/erlang_cookie_hash_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Diagnostics.Commands.ErlangCookieHashCommand do
@behaviour RabbitMQ.CLI.CommandBehaviour
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/erlang_cookie_sources_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/erlang_cookie_sources_command.ex
index 578ba31c73..18c9ccbed9 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/erlang_cookie_sources_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/erlang_cookie_sources_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Diagnostics.Commands.ErlangCookieSourcesCommand do
@behaviour RabbitMQ.CLI.CommandBehaviour
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/erlang_version_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/erlang_version_command.ex
index 053e0d142e..489ed14091 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/erlang_version_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/erlang_version_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Diagnostics.Commands.ErlangVersionCommand do
@behaviour RabbitMQ.CLI.CommandBehaviour
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/is_booting_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/is_booting_command.ex
index 56b2253c90..3f69bde9fd 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/is_booting_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/is_booting_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Diagnostics.Commands.IsBootingCommand do
@behaviour RabbitMQ.CLI.CommandBehaviour
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/is_running_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/is_running_command.ex
index ecf5ce9368..54e67e069c 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/is_running_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/is_running_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Diagnostics.Commands.IsRunningCommand do
@behaviour RabbitMQ.CLI.CommandBehaviour
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/list_network_interfaces_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/list_network_interfaces_command.ex
index d41409b8c4..b51d8ab5de 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/list_network_interfaces_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/list_network_interfaces_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Diagnostics.Commands.ListNetworkInterfacesCommand do
@moduledoc """
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/list_node_auth_attempt_stats_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/list_node_auth_attempt_stats_command.ex
index 4793cf6c46..a0817152c2 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/list_node_auth_attempt_stats_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/list_node_auth_attempt_stats_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Diagnostics.Commands.ListNodeAuthAttemptStatsCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/listeners_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/listeners_command.ex
index f54ce3775e..6dd7090984 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/listeners_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/listeners_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Diagnostics.Commands.ListenersCommand do
@moduledoc """
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/log_location_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/log_location_command.ex
index 36ff562b41..efe483bf22 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/log_location_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/log_location_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2019-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2019-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Diagnostics.Commands.LogLocationCommand do
@moduledoc """
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/log_tail_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/log_tail_command.ex
index 9717908f60..20dd315b38 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/log_tail_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/log_tail_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2019-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2019-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Diagnostics.Commands.LogTailCommand do
@moduledoc """
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/log_tail_stream_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/log_tail_stream_command.ex
index 5080fd0d1d..dd26bb04b1 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/log_tail_stream_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/log_tail_stream_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2019-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2019-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Diagnostics.Commands.LogTailStreamCommand do
@moduledoc """
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/maybe_stuck_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/maybe_stuck_command.ex
index c241780f62..042f071f7e 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/maybe_stuck_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/maybe_stuck_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Diagnostics.Commands.MaybeStuckCommand do
@behaviour RabbitMQ.CLI.CommandBehaviour
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/memory_breakdown_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/memory_breakdown_command.ex
index 356358b7d7..92f287c04e 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/memory_breakdown_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/memory_breakdown_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Diagnostics.Commands.MemoryBreakdownCommand do
alias RabbitMQ.CLI.InformationUnit, as: IU
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/observer_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/observer_command.ex
index 717e23e6b5..aab0a23be4 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/observer_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/observer_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Diagnostics.Commands.ObserverCommand do
@behaviour RabbitMQ.CLI.CommandBehaviour
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/os_env_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/os_env_command.ex
index 63e8c18beb..5ee85826e4 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/os_env_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/os_env_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Diagnostics.Commands.OsEnvCommand do
@moduledoc """
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/remote_shell_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/remote_shell_command.ex
new file mode 100644
index 0000000000..23dc2777fc
--- /dev/null
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/remote_shell_command.ex
@@ -0,0 +1,35 @@
+## This Source Code Form is subject to the terms of the Mozilla Public
+## License, v. 2.0. If a copy of the MPL was not distributed with this
+## file, You can obtain one at https://mozilla.org/MPL/2.0/.
+##
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
+
+defmodule RabbitMQ.CLI.Diagnostics.Commands.RemoteShellCommand do
+ @behaviour RabbitMQ.CLI.CommandBehaviour
+
+ use RabbitMQ.CLI.Core.MergesNoDefaults
+ use RabbitMQ.CLI.Core.AcceptsNoPositionalArguments
+
+ def run([], %{node: node_name}) do
+ _ = Supervisor.terminate_child(:kernel_sup, :user)
+ Process.flag(:trap_exit, true)
+ user_drv = :user_drv.start(['tty_sl -c -e', {node_name, :shell, :start, []}])
+ Process.link(user_drv)
+ receive do
+      {:EXIT, _user_drv, _} ->
+ {:ok, "Disconnected from #{node_name}."}
+ end
+ end
+
+ use RabbitMQ.CLI.DefaultOutput
+
+ def help_section(), do: :observability_and_health_checks
+
+ def description(), do: "Starts an interactive Erlang shell on the target node"
+
+ def usage, do: "remote_shell"
+
+ def banner(_, %{node: node_name}) do
+ "Starting an interactive Erlang shell on node #{node_name}... Press 'Ctrl+G' then 'q' to exit."
+ end
+end
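A minimal sketch of exercising the command module added above from IEx; the node name is a placeholder, and in normal use the CLI dispatcher builds the options map:

    # roughly: rabbitmq-diagnostics remote_shell -n rabbit@target-host
    RabbitMQ.CLI.Diagnostics.Commands.RemoteShellCommand.run(
      [],
      %{node: :"rabbit@target-host"}
    )

The call blocks until the user_drv process started above exits, then returns {:ok, "Disconnected from ..."}.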
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/reset_node_auth_attempt_metrics_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/reset_node_auth_attempt_metrics_command.ex
index e3b08c2ac8..3914df1ec7 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/reset_node_auth_attempt_metrics_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/reset_node_auth_attempt_metrics_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Diagnostics.Commands.ResetNodeAuthAttemptMetricsCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/resolve_hostname_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/resolve_hostname_command.ex
index 349dbee513..5721f17344 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/resolve_hostname_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/resolve_hostname_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Diagnostics.Commands.ResolveHostnameCommand do
@moduledoc """
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/resolver_info_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/resolver_info_command.ex
index a4f3d8d7d3..c80479b290 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/resolver_info_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/resolver_info_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Diagnostics.Commands.ResolverInfoCommand do
@moduledoc """
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/runtime_thread_stats_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/runtime_thread_stats_command.ex
index ee5bb56566..13f9c3f8c1 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/runtime_thread_stats_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/runtime_thread_stats_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Diagnostics.Commands.RuntimeThreadStatsCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/schema_info_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/schema_info_command.ex
index 50b750c772..a9aac7dff7 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/schema_info_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/schema_info_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Diagnostics.Commands.SchemaInfoCommand do
@moduledoc """
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/server_version_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/server_version_command.ex
index 9f4068e459..68dc65a682 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/server_version_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/server_version_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Diagnostics.Commands.ServerVersionCommand do
@behaviour RabbitMQ.CLI.CommandBehaviour
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/tls_versions_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/tls_versions_command.ex
index 2f81bad889..5af79c64dc 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/tls_versions_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/commands/tls_versions_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Diagnostics.Commands.TlsVersionsCommand do
@behaviour RabbitMQ.CLI.CommandBehaviour
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/diagnostics_helpers.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/diagnostics_helpers.ex
index 601cc842cb..6536da3e4a 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/diagnostics_helpers.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/diagnostics/diagnostics_helpers.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Diagnostics.Helpers do
def test_connection(hostname, port, timeout) do
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/formatter_behaviour.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/formatter_behaviour.ex
index 498ba114b9..87d6836bb1 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/formatter_behaviour.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/formatter_behaviour.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
# Formats returned values e.g. to human-readable text or JSON.
defmodule RabbitMQ.CLI.FormatterBehaviour do
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/csv.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/csv.ex
index ab9acd613f..aa5672b8a7 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/csv.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/csv.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
alias RabbitMQ.CLI.Formatters.FormatterHelpers
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/erlang.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/erlang.ex
index 0a8a78249f..7e8377a8c9 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/erlang.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/erlang.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Formatters.Erlang do
@behaviour RabbitMQ.CLI.FormatterBehaviour
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/formatter_helpers.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/formatter_helpers.ex
index 2ec4edc3d9..e3c91fb0bc 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/formatter_helpers.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/formatter_helpers.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Formatters.FormatterHelpers do
import RabbitCommon.Records
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/inspect.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/inspect.ex
index 5939007cfe..c285bc9ae3 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/inspect.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/inspect.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
alias RabbitMQ.CLI.Formatters.FormatterHelpers
defmodule RabbitMQ.CLI.Formatters.Inspect do
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/json.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/json.ex
index eb55038715..fb998c6333 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/json.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/json.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
# Basic JSON formatter. Supports 1-level of
# collection using start/finish_collection.
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/json_stream.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/json_stream.ex
index a1bea3fc11..2d34be5590 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/json_stream.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/json_stream.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
# Basic JSON formatter. Supports 1-level of
# collection using start/finish_collection.
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/msacc.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/msacc.ex
index 992475a2d0..53ef4aae03 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/msacc.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/msacc.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Formatters.Msacc do
@behaviour RabbitMQ.CLI.FormatterBehaviour
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/plugins.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/plugins.ex
index 54881cc32f..e61de99560 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/plugins.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/plugins.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
alias RabbitMQ.CLI.Formatters.FormatterHelpers
defmodule RabbitMQ.CLI.Formatters.Plugins do
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/pretty_table.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/pretty_table.ex
index 6b9b7ed9fd..be97dde2b4 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/pretty_table.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/pretty_table.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Formatters.PrettyTable do
@behaviour RabbitMQ.CLI.FormatterBehaviour
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/report.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/report.ex
index 4db89d611f..958ad0429a 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/report.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/report.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2017-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2017-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Formatters.Report do
alias RabbitMQ.CLI.Formatters.FormatterHelpers
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/string.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/string.ex
index 6fd7f2e0e3..416a4e5d77 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/string.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/string.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
## Prints values from a command as strings(if possible)
defmodule RabbitMQ.CLI.Formatters.String do
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/string_per_line.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/string_per_line.ex
index 4761b9a555..31e4eea5c4 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/string_per_line.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/string_per_line.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Formatters.StringPerLine do
@doc """
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/table.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/table.ex
index 72d1682202..902a395662 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/table.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/formatters/table.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
alias RabbitMQ.CLI.Formatters.FormatterHelpers
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/information_unit.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/information_unit.ex
index ebef8de0ba..d4e0c09ba1 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/information_unit.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/information_unit.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.InformationUnit do
require MapSet
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/commands/directories_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/commands/directories_command.ex
index c3b6aecc0d..fe7db99b5c 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/commands/directories_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/commands/directories_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Plugins.Commands.DirectoriesCommand do
alias RabbitMQ.CLI.Plugins.Helpers, as: PluginHelpers
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/commands/disable_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/commands/disable_command.ex
index 4fea2ad34e..af9185207b 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/commands/disable_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/commands/disable_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Plugins.Commands.DisableCommand do
alias RabbitMQ.CLI.Plugins.Helpers, as: PluginHelpers
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/commands/enable_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/commands/enable_command.ex
index 530a2cbb6a..32e6b6bd22 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/commands/enable_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/commands/enable_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Plugins.Commands.EnableCommand do
alias RabbitMQ.CLI.Plugins.Helpers, as: PluginHelpers
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/commands/is_enabled.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/commands/is_enabled.ex
index fa54b1eee3..9436798d32 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/commands/is_enabled.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/commands/is_enabled.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Plugins.Commands.IsEnabledCommand do
alias RabbitMQ.CLI.Plugins.Helpers, as: PluginHelpers
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/commands/list_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/commands/list_command.ex
index a4e943a149..23582e8918 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/commands/list_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/commands/list_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Plugins.Commands.ListCommand do
import RabbitCommon.Records
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/commands/set_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/commands/set_command.ex
index 68b442a547..e23d20e726 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/commands/set_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/commands/set_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Plugins.Commands.SetCommand do
alias RabbitMQ.CLI.Plugins.Helpers, as: PluginHelpers
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/error_output.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/error_output.ex
index 51c75ed99a..623d351290 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/error_output.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/error_output.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
# Default output implementation for plugin commands
defmodule RabbitMQ.CLI.Plugins.ErrorOutput do
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/plugins_helpers.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/plugins_helpers.ex
index bf8b4f772b..cd2184d11f 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/plugins_helpers.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/plugins/plugins_helpers.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Plugins.Helpers do
import RabbitMQ.CLI.Core.DataCoercion
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/printer_behaviour.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/printer_behaviour.ex
index b2bedfdaad..b74f67ae62 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/printer_behaviour.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/printer_behaviour.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.PrinterBehaviour do
@callback init(options :: map()) :: {:ok, printer_state :: any} | {:error, error :: any}
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/printers/file.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/printers/file.ex
index ba0daaeebb..51f025da17 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/printers/file.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/printers/file.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Printers.File do
@behaviour RabbitMQ.CLI.PrinterBehaviour
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/printers/std_io.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/printers/std_io.ex
index 206feff56d..208c372ee0 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/printers/std_io.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/printers/std_io.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Printers.StdIO do
@behaviour RabbitMQ.CLI.PrinterBehaviour
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/printers/std_io_raw.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/printers/std_io_raw.ex
index 16846907b4..edbeb1a9f4 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/printers/std_io_raw.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/printers/std_io_raw.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Printers.StdIORaw do
@behaviour RabbitMQ.CLI.PrinterBehaviour
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/add_member_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/add_member_command.ex
index e789d00343..e01c332dd0 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/add_member_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/add_member_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Queues.Commands.AddMemberCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/check_if_node_is_mirror_sync_critical_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/check_if_node_is_mirror_sync_critical_command.ex
index c31b83d29c..2184778576 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/check_if_node_is_mirror_sync_critical_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/check_if_node_is_mirror_sync_critical_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Queues.Commands.CheckIfNodeIsMirrorSyncCriticalCommand do
@moduledoc """
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/check_if_node_is_quorum_critical_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/check_if_node_is_quorum_critical_command.ex
index d8f4a34c1c..688d0499b3 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/check_if_node_is_quorum_critical_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/check_if_node_is_quorum_critical_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Queues.Commands.CheckIfNodeIsQuorumCriticalCommand do
@moduledoc """
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/delete_member_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/delete_member_command.ex
index 1579bf6809..c6ff515009 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/delete_member_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/delete_member_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Queues.Commands.DeleteMemberCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/grow_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/grow_command.ex
index 4e0ce903fe..aacebe2b7e 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/grow_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/grow_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Queues.Commands.GrowCommand do
alias RabbitMQ.CLI.Core.DocGuide
@@ -78,7 +78,7 @@ defmodule RabbitMQ.CLI.Queues.Commands.GrowCommand do
def usage_additional do
[
["<node>", "node name to place replicas on"],
- ["<all | even>", "how many matching quorum queues should have a replica added on this node: all or half (evenly numbered)?"],
+ ["<all | even>", "add a member for all matching queues or just those whose membership count is an even number"],
["--queue-pattern <pattern>", "regular expression to match queue names"],
["--vhost-pattern <pattern>", "regular expression to match virtual host names"],
["--errors-only", "only list queues which reported an error"]
@@ -93,7 +93,7 @@ defmodule RabbitMQ.CLI.Queues.Commands.GrowCommand do
def help_section, do: :cluster_management
- def description, do: "Grows quorum queue clusters by adding a member (replica) to all or half of matching quorum queues on the given node."
+ def description, do: "Grows quorum queue clusters by adding a member (replica) on the specified node for all matching queues"
def banner([node, strategy], _) do
"Growing #{strategy} quorum queues on #{node}..."
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/peek_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/peek_command.ex
index a159c119e9..02260bbfb4 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/peek_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/peek_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Queues.Commands.PeekCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/quorum_status_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/quorum_status_command.ex
index 01a61b2536..5a3bd3f90c 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/quorum_status_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/quorum_status_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Queues.Commands.QuorumStatusCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/rebalance_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/rebalance_command.ex
index 1416a7c570..b122a5162e 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/rebalance_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/rebalance_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Queues.Commands.RebalanceCommand do
alias RabbitMQ.CLI.Core.DocGuide
@@ -75,10 +75,18 @@ defmodule RabbitMQ.CLI.Queues.Commands.RebalanceCommand do
def help_section, do: :cluster_management
- def description, do: "Rebalances queues."
+ def description, do: "Re-balances leaders of replicated queues across up-and-running cluster nodes"
+ def banner([:all], _) do
+ "Re-balancing leaders of all replicated queues..."
+ end
+ def banner([:classic], _) do
+ "Re-balancing leaders of replicated (mirrored, non-exclusive) classic queues..."
+ end
+ def banner([:quorum], _) do
+ "Re-balancing leaders of quorum queues..."
+ end
def banner([type], _) do
- "Rebalancing #{type} queues..."
+ "Re-balancing leaders of #{type} queues..."
end
-
end
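The banner clauses above cover the three accepted rebalancing strategies; for reference, the corresponding invocations look roughly like:

    rabbitmq-queues rebalance all
    rabbitmq-queues rebalance classic
    rabbitmq-queues rebalance quorum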
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/reclaim_quorum_memory_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/reclaim_quorum_memory_command.ex
index 3452cc8741..571aa6ff9e 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/reclaim_quorum_memory_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/reclaim_quorum_memory_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Queues.Commands.ReclaimQuorumMemoryCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/shrink_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/shrink_command.ex
index 1bff2b9a1c..d576a00e68 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/shrink_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/queues/commands/shrink_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Queues.Commands.ShrinkCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/streams/commands/add_replica_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/streams/commands/add_replica_command.ex
index 8dc6da0281..260833e48b 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/streams/commands/add_replica_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/streams/commands/add_replica_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Streams.Commands.AddReplicaCommand do
alias RabbitMQ.CLI.Core.DocGuide
@@ -48,7 +48,7 @@ defmodule RabbitMQ.CLI.Streams.Commands.AddReplicaCommand do
def usage_doc_guides() do
[
- DocGuide.stream_queues()
+ DocGuide.streams()
]
end
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/streams/commands/delete_replica_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/streams/commands/delete_replica_command.ex
index ca15282949..5a3a3511cd 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/streams/commands/delete_replica_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/streams/commands/delete_replica_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Streams.Commands.DeleteReplicaCommand do
alias RabbitMQ.CLI.Core.DocGuide
@@ -29,6 +29,8 @@ defmodule RabbitMQ.CLI.Streams.Commands.DeleteReplicaCommand do
{:error, :quorum_queue_not_supported} ->
{:error, "Cannot delete replicas from a quorum queue"}
+ {:error, :last_stream_member} ->
+ {:error, "Cannot delete the last member of a stream"}
other ->
other
end
@@ -47,7 +49,7 @@ defmodule RabbitMQ.CLI.Streams.Commands.DeleteReplicaCommand do
def usage_doc_guides() do
[
- DocGuide.stream_queues()
+ DocGuide.streams()
]
end
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/streams/commands/set_stream_retention_policy_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/streams/commands/set_stream_retention_policy_command.ex
index ee89a1ec57..db32f2de62 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/streams/commands/set_stream_retention_policy_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/streams/commands/set_stream_retention_policy_command.ex
@@ -1,17 +1,9 @@
-## The contents of this file are subject to the Mozilla Public License
-## Version 1.1 (the "License"); you may not use this file except in
-## compliance with the License. You may obtain a copy of the License
-## at https://www.mozilla.org/MPL/
+## This Source Code Form is subject to the terms of the Mozilla Public
+## License, v. 2.0. If a copy of the MPL was not distributed with this
+## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Software distributed under the License is distributed on an "AS IS"
-## basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-## the License for the specific language governing rights and
-## limitations under the License.
-##
-## The Original Code is RabbitMQ.
-##
-## The Initial Developer of the Original Code is GoPivotal, Inc.
-## Copyright (c) 2007-2020 Pivotal Software, Inc. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
+
defmodule RabbitMQ.CLI.Streams.Commands.SetStreamRetentionPolicyCommand do
alias RabbitMQ.CLI.Core.DocGuide
@@ -48,7 +40,7 @@ defmodule RabbitMQ.CLI.Streams.Commands.SetStreamRetentionPolicyCommand do
def usage_doc_guides() do
[
- DocGuide.stream_queues()
+ DocGuide.streams()
]
end
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/streams/commands/stream_status_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/streams/commands/stream_status_command.ex
new file mode 100644
index 0000000000..6f5bf510c0
--- /dev/null
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/streams/commands/stream_status_command.ex
@@ -0,0 +1,71 @@
+## This Source Code Form is subject to the terms of the Mozilla Public
+## License, v. 2.0. If a copy of the MPL was not distributed with this
+## file, You can obtain one at https://mozilla.org/MPL/2.0/.
+##
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
+
+defmodule RabbitMQ.CLI.Streams.Commands.StreamStatusCommand do
+ alias RabbitMQ.CLI.Core.DocGuide
+
+ @behaviour RabbitMQ.CLI.CommandBehaviour
+ def scopes(), do: [:diagnostics, :queues, :streams]
+
+ def merge_defaults(args, opts), do: {args, Map.merge(%{tracking: false, vhost: "/"}, opts)}
+
+ def switches(), do: [tracking: :boolean]
+
+ use RabbitMQ.CLI.Core.AcceptsOnePositionalArgument
+ use RabbitMQ.CLI.Core.RequiresRabbitAppRunning
+
+ def run([name] = _args, %{node: node_name, vhost: vhost, tracking: :false}) do
+ case :rabbit_misc.rpc_call(node_name, :rabbit_stream_queue, :status, [vhost, name]) do
+ {:error, :classic_queue_not_supported} ->
+ {:error, "Cannot get stream status of a classic queue"}
+
+ {:error, :quorum_queue_not_supported} ->
+ {:error, "Cannot get stream status of a quorum queue"}
+
+ other ->
+ other
+ end
+ end
+ def run([name] = _args, %{node: node_name, vhost: vhost, tracking: :true}) do
+ case :rabbit_misc.rpc_call(node_name, :rabbit_stream_queue, :tracking_status, [vhost, name]) do
+ {:error, :classic_queue_not_supported} ->
+ {:error, "Cannot get stream status of a classic queue"}
+
+ {:error, :quorum_queue_not_supported} ->
+ {:error, "Cannot get stream status of a quorum queue"}
+
+ other ->
+ other
+ end
+ end
+
+ use RabbitMQ.CLI.DefaultOutput
+
+ def formatter(), do: RabbitMQ.CLI.Formatters.PrettyTable
+
+ def usage() do
+ "stream_status [--vhost <vhost>] [--tracking] <stream>"
+ end
+
+ def usage_additional do
+ [
+ ["<stream>", "Name of the stream"]
+ ]
+ end
+
+ def usage_doc_guides() do
+ [
+ DocGuide.streams()
+ ]
+ end
+
+ def help_section(), do: :observability_and_health_checks
+
+ def description(), do: "Displays the status of a stream"
+
+ def banner([name], %{node: node_name}),
+ do: "Status of stream #{name} on node #{node_name} ..."
+end
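A minimal sketch of calling the new status command module directly; the stream and node names are hypothetical, and merge_defaults/2 normally supplies vhost and tracking:

    # roughly: rabbitmq-streams stream_status --vhost / my-stream
    RabbitMQ.CLI.Streams.Commands.StreamStatusCommand.run(
      ["my-stream"],
      %{node: :"rabbit@target-host", vhost: "/", tracking: false}
    )
    # passing tracking: true routes to :rabbit_stream_queue.tracking_status/2 instead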
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/time_unit.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/time_unit.ex
index fa08c4befe..b04b6e667f 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/time_unit.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/time_unit.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.TimeUnit do
require MapSet
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/upgrade/commands/await_online_quorum_plus_one_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/upgrade/commands/await_online_quorum_plus_one_command.ex
index ca00ddbbb7..f592fd19ca 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/upgrade/commands/await_online_quorum_plus_one_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/upgrade/commands/await_online_quorum_plus_one_command.ex
@@ -2,10 +2,11 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Upgrade.Commands.AwaitOnlineQuorumPlusOneCommand do
alias RabbitMQ.CLI.Core.DocGuide
+ import RabbitMQ.CLI.Core.Config, only: [output_less?: 1]
@behaviour RabbitMQ.CLI.CommandBehaviour
@@ -28,19 +29,38 @@ defmodule RabbitMQ.CLI.Upgrade.Commands.AwaitOnlineQuorumPlusOneCommand do
def run([], %{node: node_name, timeout: timeout}) do
rpc_timeout = timeout + 500
- case :rabbit_misc.rpc_call(node_name, :rabbit_upgrade_preparation, :await_online_quorum_plus_one, [timeout], rpc_timeout) do
- {:error, _} = err -> err
- {:error, _, _} = err -> err
- {:badrpc, _} = err -> err
+ case :rabbit_misc.rpc_call(node_name, :rabbit_nodes, :is_single_node_cluster, [], rpc_timeout) do
+ # if target node is the only one in the cluster, the command makes little sense
+ # and false positives can be misleading
+ true -> {:ok, :single_node_cluster}
+ false ->
+ case :rabbit_misc.rpc_call(node_name, :rabbit_upgrade_preparation, :await_online_quorum_plus_one, [timeout], rpc_timeout) do
+ {:error, _} = err -> err
+ {:error, _, _} = err -> err
+ {:badrpc, _} = err -> err
- true -> :ok
- false -> {:error, "time is up, no quorum + 1 online replicas came online for at least some quorum queues"}
+ true -> :ok
+ false -> {:error, "time is up, no quorum + 1 online replicas came online for at least some quorum queues"}
+ end
+ other -> other
end
end
+ def output({:ok, :single_node_cluster}, %{formatter: "json"}) do
+ {:ok, %{
+ "result" => "ok",
+ "message" => "Target node seems to be the only one in a single node cluster, the check does not apply"
+ }}
+ end
def output({:error, msg}, %{node: node_name, formatter: "json"}) do
{:error, %{"result" => "error", "node" => node_name, "message" => msg}}
end
+ def output({:ok, :single_node_cluster}, opts) do
+ case output_less?(opts) do
+ true -> :ok;
+ false -> {:ok, "Target node seems to be the only one in a single node cluster, the command does not apply"}
+ end
+ end
use RabbitMQ.CLI.DefaultOutput
def usage, do: "await_online_quorum_plus_one"
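A usage sketch for the updated check; the timeout value is illustrative:

    # on a single node cluster this now returns an informational message instead of failing
    rabbitmq-upgrade await_online_quorum_plus_one --timeout 120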
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/upgrade/commands/await_online_synchronized_mirror_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/upgrade/commands/await_online_synchronized_mirror_command.ex
index d6fb40bad2..e3f7f8c061 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/upgrade/commands/await_online_synchronized_mirror_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/upgrade/commands/await_online_synchronized_mirror_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Upgrade.Commands.AwaitOnlineSynchronizedMirrorCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/upgrade/commands/drain_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/upgrade/commands/drain_command.ex
index c6d2fc86eb..a67ca2dbde 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/upgrade/commands/drain_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/upgrade/commands/drain_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Upgrade.Commands.DrainCommand do
@moduledoc """
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/upgrade/commands/post_upgrade_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/upgrade/commands/post_upgrade_command.ex
index 76453ce9f3..bbab8aaabb 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/upgrade/commands/post_upgrade_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/upgrade/commands/post_upgrade_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Upgrade.Commands.PostUpgradeCommand do
alias RabbitMQ.CLI.Core.DocGuide
diff --git a/deps/rabbitmq_cli/lib/rabbitmq/cli/upgrade/commands/revive_command.ex b/deps/rabbitmq_cli/lib/rabbitmq/cli/upgrade/commands/revive_command.ex
index a594561a55..8568df5edd 100644
--- a/deps/rabbitmq_cli/lib/rabbitmq/cli/upgrade/commands/revive_command.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmq/cli/upgrade/commands/revive_command.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Upgrade.Commands.ReviveCommand do
@moduledoc """
diff --git a/deps/rabbitmq_cli/lib/rabbitmqctl.ex b/deps/rabbitmq_cli/lib/rabbitmqctl.ex
index 42a0f20434..3d6b4c520c 100644
--- a/deps/rabbitmq_cli/lib/rabbitmqctl.ex
+++ b/deps/rabbitmq_cli/lib/rabbitmqctl.ex
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQCtl do
alias RabbitMQ.CLI.Core.{
diff --git a/deps/rabbitmq_cli/mix.exs b/deps/rabbitmq_cli/mix.exs
index 09bbda3846..75ff435d4a 100644
--- a/deps/rabbitmq_cli/mix.exs
+++ b/deps/rabbitmq_cli/mix.exs
@@ -11,7 +11,7 @@ defmodule RabbitMQCtl.MixfileBase do
[
app: :rabbitmqctl,
version: "3.8.0-dev",
- elixir: ">= 1.10.4 and < 1.12.0",
+ elixir: ">= 1.10.4 and < 1.13.0",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
escript: [main_module: RabbitMQCtl,
@@ -36,7 +36,6 @@ defmodule RabbitMQCtl.MixfileBase do
:rabbit_event,
:rabbit_file,
:rabbit_net,
- :rabbit_lager,
:rabbit_log,
:rabbit_misc,
:rabbit_mnesia,
@@ -61,7 +60,9 @@ defmodule RabbitMQCtl.MixfileBase do
'rabbitmq-diagnostics': :diagnostics,
'rabbitmq-queues': :queues,
'rabbitmq-streams': :streams,
- 'rabbitmq-upgrade': :upgrade]]
+ 'rabbitmq-upgrade': :upgrade,
+ 'rabbitmq-tanzu': :tanzu
+ ]]
]
|> add_modules(Mix.env)
end
@@ -118,12 +119,12 @@ defmodule RabbitMQCtl.MixfileBase do
# don't have the equivalent for other methods.
defp deps() do
elixir_deps = [
- {:json, "~> 1.2.0"},
- {:csv, "~> 2.3.0"},
+ {:json, "~> 1.4.1"},
+ {:csv, "~> 2.4.0"},
{:stdout_formatter, "~> 0.2.3"},
- {:observer_cli, "~> 1.5.0"},
+ {:observer_cli, "~> 1.7.1"},
- {:amqp, "~> 1.2.0", only: :test},
+ {:amqp, "~> 2.1.0", only: :test},
{:dialyxir, "~> 0.5", only: :test, runtime: false},
{:temp, "~> 0.4", only: :test},
{:x509, "~> 0.7", only: :test}
@@ -133,8 +134,8 @@ defmodule RabbitMQCtl.MixfileBase do
nil ->
# rabbitmq_cli is built as a standalone Elixir application.
[
- {:rabbit_common, "~> 3.7.0"},
- {:amqp_client, "~> 3.7.0", only: :test}
+ {:rabbit_common, "~> 3.8.0"},
+ {:amqp_client, "~> 3.8.0", only: :test}
]
deps_dir ->
# rabbitmq_cli is built as part of RabbitMQ.
@@ -158,18 +159,6 @@ defmodule RabbitMQCtl.MixfileBase do
override: true
},
{
- :goldrush,
- path: Path.join(deps_dir, "goldrush"),
- compile: false,
- override: true
- },
- {
- :lager,
- path: Path.join(deps_dir, "lager"),
- compile: false,
- override: true
- },
- {
:amqp_client,
path: Path.join(deps_dir, "amqp_client"),
compile: false,
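Note on the mix.exs hunks above: with Lager removed, the goldrush and lager path overrides are dropped, and for standalone builds the Hex pins for rabbit_common and amqp_client move from the 3.7.x to the 3.8.x series. A minimal, hypothetical sketch of that standalone-versus-umbrella selection (names simplified; the real deps/0 switches on the deps directory exactly as the hunk shows):

# Hypothetical, simplified form of the dependency selection in deps/0:
# pinned Hex releases for a standalone build, local sibling checkouts when
# built inside the rabbitmq-server umbrella (where a deps directory exists).
defmodule DepsSelectionSketch do
  def deps(deps_dir \\ System.get_env("DEPS_DIR")) do
    case deps_dir do
      nil ->
        [
          {:rabbit_common, "~> 3.8.0"},
          {:amqp_client, "~> 3.8.0", only: :test}
        ]

      dir ->
        for app <- [:rabbit_common, :amqp_client] do
          {app, path: Path.join(dir, Atom.to_string(app)), compile: false, override: true}
        end
    end
  end
end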
diff --git a/deps/rabbitmq_cli/rabbitmq-components.mk b/deps/rabbitmq_cli/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/rabbitmq_cli/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Define default goal to `all` because this file defines some targets
-# before the inclusion of erlang.mk leading to the wrong target becoming
-# the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
-
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to checkout branches which match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch or fallback to `stable` or `master` whichever was the
-# base of the topic branch.
-
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
-
-# Third-party dependencies version pinning.
-#
-# We do that in this file, which is copied in all projects, to ensure
-# all projects use the same versions. It avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# Erlang.mk does not rebuild dependencies by default, once they were
-# compiled once, except for those listed in the `$(FORCE_REBUILD)`
-# variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this eases
-# the work on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project
-# repository URL, if it's a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name is replaced by the
-# target's properties:
-# eg. rabbitmq-common is replaced by rabbitmq-codegen
-# eg. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fallback to RabbitMQ
-# upstream which is GitHub.
-
-# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following pattern:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
-
-# Macro to replace both the project's name (eg. "rabbit_common") and
-# repository name (eg. "rabbitmq-common") by the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespaces in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level's one.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is a coincidence that we found a
-# `rabbitmq-components.mk` up upper directories.
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
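Editorial note on the file removed above: rabbitmq-components.mk pinned third-party versions and derived each sibling repository URL from the current clone's origin by substituting the repository name (the subst_repo_name macro rewrote /foo.git, /foo and /foo/ into the target name). A rough, illustrative Elixir rendering of that substitution, not part of any build:

# Illustrative only: approximates what the removed subst_repo_name macro did
# with GNU make patsubst -- rewrite the repository name inside a Git URL.
defmodule RepoNameSketch do
  def subst(url, from, to) do
    url
    |> String.replace("/#{from}.git", "/#{to}.git")
    |> String.replace("/#{from}/", "/#{to}/")
    |> String.replace_suffix("/#{from}", "/#{to}")
  end
end

# RepoNameSketch.subst("https://github.com/rabbitmq/rabbitmq-common.git",
#                      "rabbitmq-common", "rabbitmq-codegen")
# returns "https://github.com/rabbitmq/rabbitmq-codegen.git"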
diff --git a/deps/rabbitmq_cli/rabbitmqctl.bzl b/deps/rabbitmq_cli/rabbitmqctl.bzl
new file mode 100644
index 0000000000..f28c1fa692
--- /dev/null
+++ b/deps/rabbitmq_cli/rabbitmqctl.bzl
@@ -0,0 +1,163 @@
+load("@bazel-erlang//:erlang_home.bzl", "ErlangHomeProvider", "ErlangVersionProvider")
+load(
+ "@bazel-erlang//:bazel_erlang_lib.bzl",
+ "BEGINS_WITH_FUN",
+ "ErlangLibInfo",
+ "QUERY_ERL_VERSION",
+ "flat_deps",
+ "path_join",
+)
+load("//:elixir_home.bzl", "ElixirHomeProvider")
+
+MIX_DEPS_DIR = "mix_deps"
+
+def _impl(ctx):
+ erlang_version = ctx.attr._erlang_version[ErlangVersionProvider].version
+ erlang_home = ctx.attr._erlang_home[ErlangHomeProvider].path
+ elixir_home = ctx.attr._elixir_home[ElixirHomeProvider].path
+
+ escript = ctx.actions.declare_file(path_join("escript", "rabbitmqctl"))
+ ebin = ctx.actions.declare_directory("ebin")
+
+ copy_compiled_deps_commands = []
+ copy_compiled_deps_commands.append("mkdir ${{MIX_INVOCATION_DIR}}/{}".format(MIX_DEPS_DIR))
+ for dep in ctx.attr.deps:
+ lib_info = dep[ErlangLibInfo]
+ if lib_info.erlang_version != erlang_version:
+ fail("Mismatched erlang versions", erlang_version, lib_info.erlang_version)
+
+ dest_dir = path_join("${MIX_INVOCATION_DIR}", MIX_DEPS_DIR, lib_info.lib_name)
+ copy_compiled_deps_commands.append(
+ "mkdir {}".format(dest_dir),
+ )
+ copy_compiled_deps_commands.append(
+ "mkdir {}".format(path_join(dest_dir, "include")),
+ )
+ copy_compiled_deps_commands.append(
+ "mkdir {}".format(path_join(dest_dir, "ebin")),
+ )
+ for hdr in lib_info.include:
+ copy_compiled_deps_commands.append(
+ "cp ${{PWD}}/{source} {target}".format(
+ source = hdr.path,
+ target = path_join(dest_dir, "include", hdr.basename),
+ ),
+ )
+ for beam in lib_info.beam:
+ copy_compiled_deps_commands.append(
+ "cp ${{PWD}}/{source} {target}".format(
+ source = beam.path,
+ target = path_join(dest_dir, "ebin", beam.basename),
+ ),
+ )
+
+ mix_invocation_dir = ctx.actions.declare_directory("{}_mix".format(ctx.label.name))
+
+ package_dir = ctx.label.package
+ if ctx.label.workspace_root != "":
+ package_dir = path_join(ctx.label.workspace_root, package_dir)
+
+ script = """
+ set -euo pipefail
+
+ export LANG="en_US.UTF-8"
+ export LC_ALL="en_US.UTF-8"
+
+ export PATH={elixir_home}/bin:{erlang_home}/bin:${{PATH}}
+
+ MIX_INVOCATION_DIR="{mix_invocation_dir}"
+
+ cp -R ${{PWD}}/{package_dir}/config ${{MIX_INVOCATION_DIR}}/config
+ # cp -R ${{PWD}}/{package_dir}/include ${{MIX_INVOCATION_DIR}}/include # rabbitmq_cli's include directory is empty
+ cp -R ${{PWD}}/{package_dir}/lib ${{MIX_INVOCATION_DIR}}/lib
+ cp ${{PWD}}/{package_dir}/mix.exs ${{MIX_INVOCATION_DIR}}/mix.exs
+
+ {copy_compiled_deps_command}
+
+ cd ${{MIX_INVOCATION_DIR}}
+ export HOME=${{PWD}}
+
+ {begins_with_fun}
+ V=$({query_erlang_version})
+ if ! beginswith "{erlang_version}" "$V"; then
+ echo "Erlang version mismatch (Expected {erlang_version}, found $V)"
+ exit 1
+ fi
+
+ export DEPS_DIR={mix_deps_dir}
+ export ERL_COMPILER_OPTIONS=deterministic
+ mix local.hex --force
+ mix local.rebar --force
+ mix make_all
+
+ cd ${{OLDPWD}}
+ cp ${{MIX_INVOCATION_DIR}}/escript/rabbitmqctl {escript_path}
+
+ mkdir -p {ebin_dir}
+ mv ${{MIX_INVOCATION_DIR}}/_build/dev/lib/rabbitmqctl/ebin/* {ebin_dir}
+ mv ${{MIX_INVOCATION_DIR}}/_build/dev/lib/rabbitmqctl/consolidated/* {ebin_dir}
+
+ rm -dR ${{MIX_INVOCATION_DIR}}
+ mkdir ${{MIX_INVOCATION_DIR}}
+ touch ${{MIX_INVOCATION_DIR}}/placeholder
+ """.format(
+ begins_with_fun = BEGINS_WITH_FUN,
+ query_erlang_version = QUERY_ERL_VERSION,
+ erlang_version = erlang_version,
+ erlang_home = erlang_home,
+ elixir_home = elixir_home,
+ mix_invocation_dir = mix_invocation_dir.path,
+ package_dir = package_dir,
+ copy_compiled_deps_command = " && ".join(copy_compiled_deps_commands),
+ mix_deps_dir = MIX_DEPS_DIR,
+ escript_path = escript.path,
+ ebin_dir = ebin.path,
+ )
+
+ inputs = []
+ inputs.extend(ctx.files.srcs)
+ for dep in ctx.attr.deps:
+ lib_info = dep[ErlangLibInfo]
+ inputs.extend(lib_info.include)
+ inputs.extend(lib_info.beam)
+
+ ctx.actions.run_shell(
+ inputs = inputs,
+ outputs = [escript, ebin, mix_invocation_dir],
+ command = script,
+ mnemonic = "MIX",
+ )
+
+ deps = flat_deps(ctx.attr.deps)
+
+ runfiles = ctx.runfiles([ebin])
+ for dep in deps:
+ runfiles = runfiles.merge(dep[DefaultInfo].default_runfiles)
+
+ return [
+ DefaultInfo(
+ executable = escript,
+ files = depset([ebin]),
+ runfiles = runfiles,
+ ),
+ ErlangLibInfo(
+ lib_name = ctx.attr.name,
+ erlang_version = erlang_version,
+ include = [],
+ beam = [ebin],
+ priv = [],
+ deps = deps,
+ ),
+ ]
+
+rabbitmqctl = rule(
+ implementation = _impl,
+ attrs = {
+ "srcs": attr.label_list(allow_files = True),
+ "deps": attr.label_list(providers = [ErlangLibInfo]),
+ "_erlang_version": attr.label(default = "@bazel-erlang//:erlang_version"),
+ "_erlang_home": attr.label(default = "@bazel-erlang//:erlang_home"),
+ "_elixir_home": attr.label(default = "//:elixir_home"),
+ },
+ executable = True,
+)
diff --git a/deps/rabbitmq_cli/rabbitmqctl_test.bzl b/deps/rabbitmq_cli/rabbitmqctl_test.bzl
new file mode 100644
index 0000000000..2f91a6e64e
--- /dev/null
+++ b/deps/rabbitmq_cli/rabbitmqctl_test.bzl
@@ -0,0 +1,151 @@
+load("@bazel-erlang//:erlang_home.bzl", "ErlangHomeProvider", "ErlangVersionProvider")
+load("@bazel-erlang//:bazel_erlang_lib.bzl", "BEGINS_WITH_FUN", "ErlangLibInfo", "QUERY_ERL_VERSION", "path_join")
+load("@bazel-erlang//:ct.bzl", "code_paths")
+load("//:elixir_home.bzl", "ElixirHomeProvider")
+load(":rabbitmqctl.bzl", "MIX_DEPS_DIR")
+
+def _lib_dirs(dep):
+ return [path_join(p, "..") for p in code_paths(dep)]
+
+def _impl(ctx):
+ erlang_version = ctx.attr._erlang_version[ErlangVersionProvider].version
+ erlang_home = ctx.attr._erlang_home[ErlangHomeProvider].path
+ elixir_home = ctx.attr._elixir_home[ElixirHomeProvider].path
+
+ copy_compiled_deps_commands = []
+ copy_compiled_deps_commands.append("mkdir ${{TEST_UNDECLARED_OUTPUTS_DIR}}/{}".format(MIX_DEPS_DIR))
+ for dep in ctx.attr.deps:
+ lib_info = dep[ErlangLibInfo]
+ if lib_info.erlang_version != erlang_version:
+ fail("Mismatched erlang versions", erlang_version, lib_info.erlang_version)
+
+ dest_dir = path_join("${TEST_UNDECLARED_OUTPUTS_DIR}", MIX_DEPS_DIR, lib_info.lib_name)
+ copy_compiled_deps_commands.append(
+ "mkdir {}".format(dest_dir),
+ )
+ copy_compiled_deps_commands.append(
+ "mkdir {}".format(path_join(dest_dir, "include")),
+ )
+ copy_compiled_deps_commands.append(
+ "mkdir {}".format(path_join(dest_dir, "ebin")),
+ )
+ for hdr in lib_info.include:
+ copy_compiled_deps_commands.append(
+ "cp ${{PWD}}/{source} {target}".format(
+ source = hdr.short_path,
+ target = path_join(dest_dir, "include", hdr.basename),
+ ),
+ )
+ for beam in lib_info.beam:
+ copy_compiled_deps_commands.append(
+ "cp ${{PWD}}/{source} {target}".format(
+ source = beam.short_path,
+ target = path_join(dest_dir, "ebin", beam.basename),
+ ),
+ )
+
+ erl_libs = ":".join(
+ [path_join("${TEST_SRCDIR}/${TEST_WORKSPACE}", d) for dep in ctx.attr.deps for d in _lib_dirs(dep)],
+ )
+
+ script = """
+ set -euo pipefail
+
+ export LANG="en_US.UTF-8"
+ export LC_ALL="en_US.UTF-8"
+
+ export PATH={elixir_home}/bin:{erlang_home}/bin:${{PATH}}
+
+ INITIAL_DIR=${{PWD}}
+
+ ln -s ${{PWD}}/{package_dir}/config ${{TEST_UNDECLARED_OUTPUTS_DIR}}
+ # ln -s ${{PWD}}/{package_dir}/include ${{TEST_UNDECLARED_OUTPUTS_DIR}}
+ ln -s ${{PWD}}/{package_dir}/lib ${{TEST_UNDECLARED_OUTPUTS_DIR}}
+ ln -s ${{PWD}}/{package_dir}/test ${{TEST_UNDECLARED_OUTPUTS_DIR}}
+ ln -s ${{PWD}}/{package_dir}/mix.exs ${{TEST_UNDECLARED_OUTPUTS_DIR}}
+
+ {copy_compiled_deps_command}
+
+ cd ${{TEST_UNDECLARED_OUTPUTS_DIR}}
+
+ export HOME=${{PWD}}
+
+ {begins_with_fun}
+ V=$({query_erlang_version})
+ if ! beginswith "{erlang_version}" "$V"; then
+ echo "Erlang version mismatch (Expected {erlang_version}, found $V)"
+ exit 1
+ fi
+
+ export DEPS_DIR={mix_deps_dir}
+ export ERL_COMPILER_OPTIONS=deterministic
+ mix local.hex --force
+ mix local.rebar --force
+ mix make_deps
+
+ # due to https://github.com/elixir-lang/elixir/issues/7699 we
+ # "run" the tests, but skip them all, in order to trigger
+ # compilation of all *_test.exs files before we actually run them
+ mix test --exclude test
+
+ export TEST_TMPDIR=${{TEST_UNDECLARED_OUTPUTS_DIR}}
+
+ # we need a running broker with certain plugins for this to pass
+ trap 'catch $?' EXIT
+ catch() {{
+ pid=$(cat ${{TEST_TMPDIR}}/*/*.pid)
+ kill -TERM "${{pid}}"
+ }}
+ cd ${{INITIAL_DIR}}
+ ./{rabbitmq_run_cmd} start-background-broker
+ cd ${{TEST_UNDECLARED_OUTPUTS_DIR}}
+
+ # The test cases will need to be able to load code from the deps
+ # directly, so we set ERL_LIBS
+ export ERL_LIBS={erl_libs}
+
+ # run the actual tests
+ mix test --trace --max-failures 1
+ """.format(
+ begins_with_fun = BEGINS_WITH_FUN,
+ query_erlang_version = QUERY_ERL_VERSION,
+ erlang_version = erlang_version,
+ erlang_home = erlang_home,
+ elixir_home = elixir_home,
+ package_dir = ctx.label.package,
+ copy_compiled_deps_command = " && ".join(copy_compiled_deps_commands),
+ mix_deps_dir = MIX_DEPS_DIR,
+ erl_libs = erl_libs,
+ rabbitmq_run_cmd = ctx.attr.rabbitmq_run[DefaultInfo].files_to_run.executable.short_path,
+ )
+
+ ctx.actions.write(
+ output = ctx.outputs.executable,
+ content = script,
+ )
+
+ runfiles = ctx.runfiles(ctx.files.srcs)
+ runfiles = runfiles.merge(ctx.runfiles(ctx.files.data))
+ for dep in ctx.attr.deps:
+ lib_info = dep[ErlangLibInfo]
+ runfiles = runfiles.merge(ctx.runfiles(lib_info.include + lib_info.beam))
+ runfiles = runfiles.merge(ctx.attr.rabbitmq_run[DefaultInfo].default_runfiles)
+
+ return [DefaultInfo(runfiles = runfiles)]
+
+rabbitmqctl_test = rule(
+ implementation = _impl,
+ attrs = {
+ "srcs": attr.label_list(allow_files = [".ex", ".exs"]),
+ "data": attr.label_list(allow_files = True),
+ "deps": attr.label_list(providers = [ErlangLibInfo]),
+ "rabbitmq_run": attr.label(
+ executable = True,
+ cfg = "target",
+ ),
+ "_erlang_version": attr.label(default = "@bazel-erlang//:erlang_version"),
+ "_erlang_home": attr.label(default = "@bazel-erlang//:erlang_home"),
+ "_elixir_home": attr.label(default = "//:elixir_home"),
+ },
+ test = True,
+)
diff --git a/deps/rabbitmq_cli/test/ctl/clear_global_parameter_command_test.exs b/deps/rabbitmq_cli/test/ctl/clear_global_parameter_command_test.exs
index adadc3c223..425911ad18 100644
--- a/deps/rabbitmq_cli/test/ctl/clear_global_parameter_command_test.exs
+++ b/deps/rabbitmq_cli/test/ctl/clear_global_parameter_command_test.exs
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule ClearGlobalParameterCommandTest do
@@ -15,7 +15,7 @@ defmodule ClearGlobalParameterCommandTest do
setup_all do
RabbitMQ.CLI.Core.Distribution.start()
-
+
:ok
end
diff --git a/deps/rabbitmq_cli/test/ctl/clear_policy_command_test.exs b/deps/rabbitmq_cli/test/ctl/clear_policy_command_test.exs
index f36f65d25f..2a38d5c50b 100644
--- a/deps/rabbitmq_cli/test/ctl/clear_policy_command_test.exs
+++ b/deps/rabbitmq_cli/test/ctl/clear_policy_command_test.exs
@@ -2,7 +2,7 @@
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
defmodule ClearPolicyCommandTest do
@@ -17,7 +17,7 @@ defmodule ClearPolicyCommandTest do
setup_all do
RabbitMQ.CLI.Core.Distribution.start()
-
+
add_vhost @vhost
enable_federation_plugin()
diff --git a/deps/rabbitmq_cli/test/ctl/close_all_user_connections_command_test.exs b/deps/rabbitmq_cli/test/ctl/close_all_user_connections_command_test.exs
new file mode 100644
index 0000000000..5d75fc7931
--- /dev/null
+++ b/deps/rabbitmq_cli/test/ctl/close_all_user_connections_command_test.exs
@@ -0,0 +1,90 @@
+## This Source Code Form is subject to the terms of the Mozilla Public
+## License, v. 2.0. If a copy of the MPL was not distributed with this
+## file, You can obtain one at https://mozilla.org/MPL/2.0/.
+##
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
+
+defmodule CloseAllUserConnectionsCommandTest do
+ use ExUnit.Case, async: false
+ import TestHelper
+
+ @command RabbitMQ.CLI.Ctl.Commands.CloseAllUserConnectionsCommand
+
+ @vhost "/"
+
+ setup_all do
+ RabbitMQ.CLI.Core.Distribution.start()
+
+ close_all_connections(get_rabbit_hostname())
+
+ on_exit([], fn ->
+ close_all_connections(get_rabbit_hostname())
+ end)
+
+ :ok
+ end
+
+ setup context do
+ {:ok, opts: %{
+ node: get_rabbit_hostname(),
+ timeout: context[:test_timeout] || 30000
+ }}
+ end
+
+ test "validate: with an invalid number of arguments returns an arg count error", context do
+ assert @command.validate(["username", "explanation", "extra"], context[:opts]) ==
+ {:validation_failure, :too_many_args}
+
+ assert @command.validate(["username"], context[:opts]) ==
+ {:validation_failure, :not_enough_args}
+ end
+
+ test "validate: with the correct number of arguments returns ok", context do
+ assert @command.validate(["username", "test"], context[:opts]) == :ok
+ end
+
+ test "run: a close connections request on a user with open connections", context do
+ Application.ensure_all_started(:amqp)
+ # open a localhost connection with default username
+ {:ok, _conn} = AMQP.Connection.open(virtual_host: @vhost)
+
+ await_condition(fn ->
+ conns = fetch_user_connections("guest", context)
+ length(conns) > 0
+ end, 10000)
+
+ # make sure there is a connection to close
+ conns = fetch_user_connections("guest", context)
+ assert length(conns) > 0
+
+ # make sure closing yeti's connections doesn't affect guest's connections
+ assert :ok == @command.run(["yeti", "test"], context[:opts])
+ Process.sleep(500)
+ conns = fetch_user_connections("guest", context)
+ assert length(conns) > 0
+
+ # finally, make sure we can close guest's connections
+ assert :ok == @command.run(["guest", "test"], context[:opts])
+ await_condition(fn ->
+ conns = fetch_user_connections("guest", context)
+ length(conns) == 0
+ end, 10000)
+
+ conns = fetch_user_connections("guest", context)
+ assert length(conns) == 0
+ end
+
+ test "run: a close connections request on for a non existing user returns successfully", context do
+ assert match?(
+ :ok,
+ @command.run(["yeti", "test"], context[:opts])
+ )
+ end
+
+ test "banner", context do
+ s = @command.banner(["username", "some reason"], context[:opts])
+ assert s =~ ~r/Closing connections/
+ assert s =~ ~r/user username/
+ assert s =~ ~r/reason: some reason/
+ end
+end
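The new CloseAllUserConnectionsCommandTest above polls the broker through a TestHelper await_condition/2 until the expected connection count is observed. A hypothetical sketch of such a polling helper (the real helper is provided by the CLI test support code and may differ):

# Hypothetical polling helper in the spirit of await_condition/2 used above:
# evaluate fun repeatedly until it returns a truthy value or the timeout elapses.
defmodule AwaitSketch do
  def await_condition(fun, timeout_ms, interval_ms \\ 100) do
    deadline = System.monotonic_time(:millisecond) + timeout_ms
    loop(fun, deadline, interval_ms)
  end

  defp loop(fun, deadline, interval_ms) do
    cond do
      fun.() -> :ok
      System.monotonic_time(:millisecond) >= deadline -> {:error, :timeout}
      true ->
        Process.sleep(interval_ms)
        loop(fun, deadline, interval_ms)
    end
  end
end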
diff --git a/deps/rabbitmq_cli/test/ctl/resume_listeners_command_test.exs b/deps/rabbitmq_cli/test/ctl/resume_listeners_command_test.exs
index 3aad0b355b..9a37638ca5 100644
--- a/deps/rabbitmq_cli/test/ctl/resume_listeners_command_test.exs
+++ b/deps/rabbitmq_cli/test/ctl/resume_listeners_command_test.exs
@@ -1,17 +1,9 @@
-## The contents of this file are subject to the Mozilla Public License
-## Version 1.1 (the "License"); you may not use this file except in
-## compliance with the License. You may obtain a copy of the License
-## at https://www.mozilla.org/MPL/
+## This Source Code Form is subject to the terms of the Mozilla Public
+## License, v. 2.0. If a copy of the MPL was not distributed with this
+## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Software distributed under the License is distributed on an "AS IS"
-## basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-## the License for the specific language governing rights and
-## limitations under the License.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
##
-## The Original Code is RabbitMQ.
-##
-## The Initial Developer of the Original Code is GoPivotal, Inc.
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
defmodule ResumeListenersCommandTest do
use ExUnit.Case, async: false
diff --git a/deps/rabbitmq_cli/test/ctl/rotate_logs_command_test.exs b/deps/rabbitmq_cli/test/ctl/rotate_logs_command_test.exs
deleted file mode 100644
index 13eed87d43..0000000000
--- a/deps/rabbitmq_cli/test/ctl/rotate_logs_command_test.exs
+++ /dev/null
@@ -1,40 +0,0 @@
-## This Source Code Form is subject to the terms of the Mozilla Public
-## License, v. 2.0. If a copy of the MPL was not distributed with this
-## file, You can obtain one at https://mozilla.org/MPL/2.0/.
-##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
-
-
-defmodule RotateLogsCommandTest do
- use ExUnit.Case, async: false
- import TestHelper
-
- @command RabbitMQ.CLI.Ctl.Commands.RotateLogsCommand
-
- setup_all do
- RabbitMQ.CLI.Core.Distribution.start()
-
- :ok
- end
-
- setup do
- {:ok, opts: %{node: get_rabbit_hostname()}}
- end
-
- test "validate: with extra arguments returns an arg count error", context do
- assert @command.validate(["extra"], context[:opts]) == {:validation_failure, :too_many_args}
- end
-
- test "run: request to a named, active node succeeds", context do
- assert @command.run([], context[:opts]) == :ok
- end
-
- test "run: request to a non-existent node returns a badrpc" do
- opts = %{node: :jake@thedog, timeout: 200}
- assert match?({:badrpc, _}, @command.run([], opts))
- end
-
- test "banner", context do
- assert @command.banner([], context[:opts]) =~ ~r/Rotating logs for node #{get_rabbit_hostname()}/
- end
-end
diff --git a/deps/rabbitmq_cli/test/ctl/set_user_tags_command_test.exs b/deps/rabbitmq_cli/test/ctl/set_user_tags_command_test.exs
index cdc51e673f..74f9b2a3f2 100644
--- a/deps/rabbitmq_cli/test/ctl/set_user_tags_command_test.exs
+++ b/deps/rabbitmq_cli/test/ctl/set_user_tags_command_test.exs
@@ -40,10 +40,10 @@ defmodule SetUserTagsCommandTest do
test "run: throws a badrpc when instructed to contact an unreachable RabbitMQ node" do
opts = %{node: :jake@thedog, timeout: 200}
- assert match?({:badrpc, _}, @command.run([@user, :imperator], opts))
+ assert match?({:badrpc, _}, @command.run([@user, :emperor], opts))
end
- @tag user: @user, tags: [:imperator]
+ @tag user: @user, tags: [:emperor]
test "run: on a single optional argument, add a flag to the user", context do
@command.run(
[context[:user] | context[:tags]],
@@ -58,16 +58,16 @@ defmodule SetUserTagsCommandTest do
assert result[:tags] == context[:tags]
end
- @tag user: "interloper", tags: [:imperator]
- test "run: on an invalid user, get a no such user error", context do
+ @tag user: "interloper", tags: [:emperor]
+ test "run: when user does not exist, returns an error", context do
assert @command.run(
[context[:user] | context[:tags]],
context[:opts]
) == {:error, {:no_such_user, context[:user]}}
end
- @tag user: @user, tags: [:imperator, :generalissimo]
- test "run: on multiple optional arguments, add all flags to the user", context do
+ @tag user: @user, tags: [:emperor, :generalissimo]
+ test "run: with multiple optional arguments, adds multiple tags", context do
@command.run(
[context[:user] | context[:tags]],
context[:opts]
@@ -81,8 +81,8 @@ defmodule SetUserTagsCommandTest do
assert result[:tags] == context[:tags]
end
- @tag user: @user, tags: [:imperator]
- test "run: with no optional arguments, clear user tags", context do
+ @tag user: @user, tags: [:emperor]
+ test "run: without optional arguments, clears user tags", context do
set_user_tags(context[:user], context[:tags])
@@ -96,7 +96,7 @@ defmodule SetUserTagsCommandTest do
assert result[:tags] == []
end
- @tag user: @user, tags: [:imperator]
+ @tag user: @user, tags: [:emperor]
test "run: identical calls are idempotent", context do
set_user_tags(context[:user], context[:tags])
@@ -114,8 +114,8 @@ defmodule SetUserTagsCommandTest do
assert result[:tags] == context[:tags]
end
- @tag user: @user, old_tags: [:imperator], new_tags: [:generalissimo]
- test "run: if different tags exist, overwrite them", context do
+ @tag user: @user, old_tags: [:emperor], new_tags: [:generalissimo]
+ test "run: overwrites existing tags", context do
set_user_tags(context[:user], context[:old_tags])
diff --git a/deps/rabbitmq_cli/test/ctl/set_vhost_tags_command_test.exs b/deps/rabbitmq_cli/test/ctl/set_vhost_tags_command_test.exs
new file mode 100644
index 0000000000..44cde783c9
--- /dev/null
+++ b/deps/rabbitmq_cli/test/ctl/set_vhost_tags_command_test.exs
@@ -0,0 +1,139 @@
+## This Source Code Form is subject to the terms of the Mozilla Public
+## License, v. 2.0. If a copy of the MPL was not distributed with this
+## file, You can obtain one at https://mozilla.org/MPL/2.0/.
+##
+## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+
+
+defmodule SetVhostTagsCommandTest do
+ use ExUnit.Case, async: false
+ import TestHelper
+
+ @command RabbitMQ.CLI.Ctl.Commands.SetVhostTagsCommand
+
+ @vhost "vhost99-tests"
+
+ setup_all do
+ RabbitMQ.CLI.Core.Distribution.start()
+
+ add_vhost(@vhost)
+
+ on_exit([], fn ->
+ delete_vhost(@vhost)
+ end)
+
+ :ok
+ end
+
+ setup context do
+ add_vhost(context[:vhost])
+ on_exit([], fn -> delete_vhost(context[:vhost]) end)
+
+ {:ok, opts: %{node: get_rabbit_hostname()}}
+ end
+
+ test "validate: on an incorrect number of arguments, returns an error" do
+ assert @command.validate([], %{}) == {:validation_failure, :not_enough_args}
+ end
+
+ test "run: throws a badrpc when instructed to contact an unreachable RabbitMQ node" do
+ opts = %{node: :jake@thedog, timeout: 200}
+
+ assert match?({:badrpc, _}, @command.run([@vhost, :qa], opts))
+ end
+
+  @tag vhost: @vhost, tags: [:qa]
+ test "run: with a single optional argument, adds a single tag", context do
+ @command.run([context[:vhost], :qa], context[:opts])
+
+ result = Enum.find(
+ list_vhosts(),
+ fn(record) -> record[:vhost] == context[:vhost] end
+ )
+
+ assert result[:tags] == context[:tags]
+ end
+
+ @tag vhost: "non/ex1st3nT"
+ test "run: when virtual host does not exist, reports an error", context do
+ delete_vhost(context[:vhost])
+
+ assert @command.run(
+ [context[:vhost]],
+ context[:opts]
+ ) == {:error, {:no_such_vhost, context[:vhost]}}
+ end
+
+  @tag vhost: @vhost, tags: [:qa, :limited]
+ test "run: with multiple optional arguments, adds multiple tags", context do
+ @command.run(
+ [context[:vhost] | context[:tags]],
+ context[:opts]
+ )
+
+ result = Enum.find(
+ list_vhosts(),
+ fn(record) -> record[:vhost] == context[:vhost] end
+ )
+
+ assert result[:tags] == context[:tags]
+ end
+
+  @tag vhost: @vhost, tags: [:qa]
+ test "run: with no optional arguments, clears virtual host tags", context do
+ set_vhost_tags(context[:vhost], context[:tags])
+
+ @command.run([context[:vhost]], context[:opts])
+
+ result = Enum.find(
+ list_vhosts(),
+ fn(record) -> record[:vhost] == context[:vhost] end
+ )
+
+ assert result[:tags] == []
+ end
+
+  @tag vhost: @vhost, tags: [:qa]
+ test "run: identical calls are idempotent", context do
+ set_vhost_tags(context[:vhost], context[:tags])
+
+ assert @command.run(
+ [context[:vhost] | context[:tags]],
+ context[:opts]
+ ) == :ok
+
+ result = Enum.find(
+ list_vhosts(),
+ fn(record) -> record[:vhost] == context[:vhost] end
+ )
+
+ assert result[:tags] == context[:tags]
+ end
+
+  @tag vhost: @vhost, old_tags: [:qa], new_tags: [:limited]
+  test "run: overwrites existing tags", context do
+ set_vhost_tags(context[:vhost], context[:old_tags])
+
+ assert @command.run(
+ [context[:vhost] | context[:new_tags]],
+ context[:opts]
+ ) == :ok
+
+ result = Enum.find(
+ list_vhosts(),
+ fn(record) -> record[:vhost] == context[:vhost] end
+ )
+
+ assert result[:tags] == context[:new_tags]
+ end
+
+ @tag user: @vhost, tags: ["abc"]
+ test "banner", context do
+ assert @command.banner(
+ [context[:vhost] | context[:tags]],
+ context[:opts]
+ )
+ =~ ~r/Setting tags for virtual host \"#{context[:vhost]}\" to \[#{context[:tags]}\] \.\.\./
+ end
+
+end
diff --git a/deps/rabbitmq_cli/test/ctl/suspend_listeners_command_test.exs b/deps/rabbitmq_cli/test/ctl/suspend_listeners_command_test.exs
index 602cdf9f8b..cd4a5e602a 100644
--- a/deps/rabbitmq_cli/test/ctl/suspend_listeners_command_test.exs
+++ b/deps/rabbitmq_cli/test/ctl/suspend_listeners_command_test.exs
@@ -1,17 +1,9 @@
-## The contents of this file are subject to the Mozilla Public License
-## Version 1.1 (the "License"); you may not use this file except in
-## compliance with the License. You may obtain a copy of the License
-## at https://www.mozilla.org/MPL/
+## This Source Code Form is subject to the terms of the Mozilla Public
+## License, v. 2.0. If a copy of the MPL was not distributed with this
+## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Software distributed under the License is distributed on an "AS IS"
-## basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-## the License for the specific language governing rights and
-## limitations under the License.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
##
-## The Original Code is RabbitMQ.
-##
-## The Initial Developer of the Original Code is GoPivotal, Inc.
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
defmodule SuspendListenersCommandTest do
use ExUnit.Case, async: false
diff --git a/deps/rabbitmq_cli/test/diagnostics/log_location_command_test.exs b/deps/rabbitmq_cli/test/diagnostics/log_location_command_test.exs
index 64a85fc519..4700b96a0f 100644
--- a/deps/rabbitmq_cli/test/diagnostics/log_location_command_test.exs
+++ b/deps/rabbitmq_cli/test/diagnostics/log_location_command_test.exs
@@ -50,49 +50,26 @@ defmodule LogLocationCommandTest do
end
test "run: prints default log location", context do
- # Let Lager's log message rate lapse or else some messages
- # we assert on might be dropped. MK.
- Process.sleep(1000)
{:ok, logfile} = @command.run([], context[:opts])
log_message = "file location"
- :rpc.call(get_rabbit_hostname(), :rabbit_log, :error, [log_message])
+ :rpc.call(get_rabbit_hostname(), :rabbit_log, :error, [to_charlist(log_message)])
wait_for_log_message(log_message, logfile)
{:ok, log_file_data} = File.read(logfile)
assert String.match?(log_file_data, Regex.compile!(log_message))
end
test "run: shows all log locations", context do
- # Let Lager's log message rate lapse or else some messages
- # we assert on might be dropped. MK.
- Process.sleep(1000)
# This assumes default configuration
- [logfile, upgrade_log_file] =
+ [logfile, upgrade_log_file | _] =
@command.run([], Map.merge(context[:opts], %{all: true}))
log_message = "checking the default log file when checking all"
- :rpc.call(get_rabbit_hostname(), :rabbit_log, :error, [log_message])
+ :rpc.call(get_rabbit_hostname(), :rabbit_log, :error, [to_charlist(log_message)])
wait_for_log_message(log_message, logfile)
log_message_upgrade = "checking the upgrade log file when checking all"
:rpc.call(get_rabbit_hostname(),
- :rabbit_log, :log, [:upgrade, :error, log_message_upgrade, []])
+ :rabbit_log, :log, [:upgrade, :error, to_charlist(log_message_upgrade), []])
wait_for_log_message(log_message_upgrade, upgrade_log_file)
end
-
- test "run: fails if there is no log file configured", context do
- {:ok, upgrade_file} = :rpc.call(get_rabbit_hostname(), :application, :get_env, [:rabbit, :lager_upgrade_file])
- {:ok, default_file} = :rpc.call(get_rabbit_hostname(), :application, :get_env, [:rabbit, :lager_default_file])
- on_exit([], fn ->
- :rpc.call(get_rabbit_hostname(), :application, :set_env, [:rabbit, :lager_upgrade_file, upgrade_file])
- :rpc.call(get_rabbit_hostname(), :application, :set_env, [:rabbit, :lager_default_file, default_file])
- :rpc.call(get_rabbit_hostname(), :rabbit_lager, :configure_lager, [])
- start_rabbitmq_app()
- end)
- stop_rabbitmq_app()
- :rpc.call(get_rabbit_hostname(), :application, :unset_env, [:rabbit, :lager_upgrade_file])
- :rpc.call(get_rabbit_hostname(), :application, :unset_env, [:rabbit, :lager_default_file])
- :rpc.call(get_rabbit_hostname(), :application, :unset_env, [:rabbit, :log])
- :rpc.call(get_rabbit_hostname(), :rabbit_lager, :configure_lager, [])
- {:error, "No log files configured on the node"} = @command.run([], context[:opts])
- end
end
diff --git a/deps/rabbitmq_cli/test/diagnostics/log_tail_command_test.exs b/deps/rabbitmq_cli/test/diagnostics/log_tail_command_test.exs
index fb19821d55..c9502ca6d5 100644
--- a/deps/rabbitmq_cli/test/diagnostics/log_tail_command_test.exs
+++ b/deps/rabbitmq_cli/test/diagnostics/log_tail_command_test.exs
@@ -50,15 +50,12 @@ defmodule LogTailCommandTest do
end
test "run: shows last 50 lines from the log by default", context do
- # Let Lager's log message rate lapse or else some messages
- # we assert on might be dropped. MK.
- Process.sleep(1000)
clear_log_files()
log_messages =
Enum.map(:lists.seq(1, 50),
fn(n) ->
message = "Getting log tail #{n}"
- :rpc.call(get_rabbit_hostname(), :rabbit_log, :error, [message])
+ :rpc.call(get_rabbit_hostname(), :rabbit_log, :error, [to_charlist(message)])
message
end)
wait_for_log_message("Getting log tail 50")
@@ -72,15 +69,11 @@ defmodule LogTailCommandTest do
end
test "run: returns N lines", context do
- # Let Lager's log message rate lapse or else some messages
- # we assert on might be dropped. MK.
- Process.sleep(1000)
-
## Log a bunch of lines
Enum.map(:lists.seq(1, 50),
fn(n) ->
message = "More lines #{n}"
- :rpc.call(get_rabbit_hostname(), :rabbit_log, :error, [message])
+ :rpc.call(get_rabbit_hostname(), :rabbit_log, :error, [to_charlist(message)])
message
end)
wait_for_log_message("More lines 50")
@@ -90,15 +83,12 @@ defmodule LogTailCommandTest do
end
test "run: may return less than N lines if N is high", context do
- # Let Lager's log message rate lapse or else some messages
- # we assert on might be dropped. MK.
- Process.sleep(1000)
clear_log_files()
## Log a bunch of lines
Enum.map(:lists.seq(1, 100),
fn(n) ->
message = "More lines #{n}"
- :rpc.call(get_rabbit_hostname(), :rabbit_log, :error, [message])
+ :rpc.call(get_rabbit_hostname(), :rabbit_log, :error, [to_charlist(message)])
message
end)
wait_for_log_message("More lines 50")
@@ -107,9 +97,12 @@ defmodule LogTailCommandTest do
end
def clear_log_files() do
- [_|_] = logs = :rpc.call(get_rabbit_hostname(), :rabbit_lager, :log_locations, [])
+ [_|_] = logs = :rpc.call(get_rabbit_hostname(), :rabbit, :log_locations, [])
Enum.map(logs, fn(log) ->
- File.write(log, "")
+ case log do
+ '<stdout>' -> :ok
+ _ -> File.write(log, "")
+ end
end)
end
end
diff --git a/deps/rabbitmq_cli/test/diagnostics/log_tail_stream_command_test.exs b/deps/rabbitmq_cli/test/diagnostics/log_tail_stream_command_test.exs
index 4ad2785604..e0e4dc06f6 100644
--- a/deps/rabbitmq_cli/test/diagnostics/log_tail_stream_command_test.exs
+++ b/deps/rabbitmq_cli/test/diagnostics/log_tail_stream_command_test.exs
@@ -54,10 +54,10 @@ defmodule LogTailStreamCommandTest do
time_before = System.system_time(:second)
stream = @command.run([], Map.merge(context[:opts], %{duration: 15}))
- :rpc.call(get_rabbit_hostname(), :rabbit_log, :error, ["Message"])
- :rpc.call(get_rabbit_hostname(), :rabbit_log, :error, ["Message1"])
- :rpc.call(get_rabbit_hostname(), :rabbit_log, :error, ["Message2"])
- :rpc.call(get_rabbit_hostname(), :rabbit_log, :error, ["Message3"])
+ :rpc.call(get_rabbit_hostname(), :rabbit_log, :error, [to_charlist("Message")])
+ :rpc.call(get_rabbit_hostname(), :rabbit_log, :error, [to_charlist("Message1")])
+ :rpc.call(get_rabbit_hostname(), :rabbit_log, :error, [to_charlist("Message2")])
+ :rpc.call(get_rabbit_hostname(), :rabbit_log, :error, [to_charlist("Message3")])
# This may take a long time and fail with an ExUnit timeout
data = Enum.join(stream)
@@ -81,7 +81,7 @@ defmodule LogTailStreamCommandTest do
end
def ensure_log_file() do
- [log|_] = :rpc.call(get_rabbit_hostname(), :rabbit_lager, :log_locations, [])
+ [log|_] = :rpc.call(get_rabbit_hostname(), :rabbit, :log_locations, [])
ensure_file(log, 100)
end
@@ -92,14 +92,14 @@ defmodule LogTailStreamCommandTest do
case File.exists?(log) do
true -> :ok
false ->
- :rpc.call(get_rabbit_hostname(), :rabbit_log, :error, ["Ping"])
+ :rpc.call(get_rabbit_hostname(), :rabbit_log, :error, [to_charlist("Ping")])
:timer.sleep(100)
ensure_file(log, attempts - 1)
end
end
def delete_log_files() do
- [_|_] = logs = :rpc.call(get_rabbit_hostname(), :rabbit_lager, :log_locations, [])
+ [_|_] = logs = :rpc.call(get_rabbit_hostname(), :rabbit, :log_locations, [])
Enum.map(logs, fn(log) ->
File.rm(log)
end)
diff --git a/deps/rabbitmq_cli/test/diagnostics/remote_shell_command_test.exs b/deps/rabbitmq_cli/test/diagnostics/remote_shell_command_test.exs
new file mode 100644
index 0000000000..ad03cd06f4
--- /dev/null
+++ b/deps/rabbitmq_cli/test/diagnostics/remote_shell_command_test.exs
@@ -0,0 +1,40 @@
+## This Source Code Form is subject to the terms of the Mozilla Public
+## License, v. 2.0. If a copy of the MPL was not distributed with this
+## file, You can obtain one at https://mozilla.org/MPL/2.0/.
+##
+## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+
+
+defmodule RemoteShellCommandTest do
+ use ExUnit.Case, async: false
+ import TestHelper
+
+ @command RabbitMQ.CLI.Diagnostics.Commands.RemoteShellCommand
+
+ setup_all do
+ RabbitMQ.CLI.Core.Distribution.start()
+
+ :ok
+ end
+
+ setup context do
+ {:ok, opts: %{
+ node: get_rabbit_hostname(),
+ timeout: context[:test_timeout] || 15000
+ }}
+ end
+
+ test "merge_defaults: nothing to do" do
+ assert @command.merge_defaults([], %{}) == {[], %{}}
+ end
+
+ test "validate: treats positional arguments as a failure" do
+ assert @command.validate(["extra-arg"], %{}) == {:validation_failure, :too_many_args}
+ end
+
+ test "validate: treats empty positional arguments and default switches as a success" do
+ assert @command.validate([], %{}) == :ok
+ end
+
+ # Unreachable nodes will leave the remote_shell open with an error.
+end
diff --git a/deps/rabbitmq_cli/test/fixtures/plugins/plugins_with_version_requirements/mock_rabbitmq_plugin_for_3_8-0.1.0/ebin/mock_rabbitmq_plugin_for_3_8.app b/deps/rabbitmq_cli/test/fixtures/plugins/plugins_with_version_requirements/mock_rabbitmq_plugin_for_3_8-0.1.0/ebin/mock_rabbitmq_plugin_for_3_8.app
index dae70550b6..c2ca666d83 100644
--- a/deps/rabbitmq_cli/test/fixtures/plugins/plugins_with_version_requirements/mock_rabbitmq_plugin_for_3_8-0.1.0/ebin/mock_rabbitmq_plugin_for_3_8.app
+++ b/deps/rabbitmq_cli/test/fixtures/plugins/plugins_with_version_requirements/mock_rabbitmq_plugin_for_3_8-0.1.0/ebin/mock_rabbitmq_plugin_for_3_8.app
@@ -1,4 +1,4 @@
-{application, mock_rabbitmq_plugin_for_3_8, [
+{application, mock_rabbitmq_plugin_for_3_9, [
{description, "New project"},
{vsn, "0.1.0"},
{modules, ['mock_rabbitmq_plugins_01_app','mock_rabbitmq_plugins_01_sup']},
@@ -6,5 +6,5 @@
{applications, [kernel,stdlib,rabbit]},
{mod, {mock_rabbitmq_plugins_01_app, []}},
{env, []},
- {broker_version_requirements, ["3.8.0", "3.9.0"]}
+ {broker_version_requirements, ["3.8.0", "3.9.0", "3.10.0"]}
]}.
diff --git a/deps/rabbitmq_cli/test/plugins/enable_plugins_command_test.exs b/deps/rabbitmq_cli/test/plugins/enable_plugins_command_test.exs
index 09aaf38351..5ac976fb46 100644
--- a/deps/rabbitmq_cli/test/plugins/enable_plugins_command_test.exs
+++ b/deps/rabbitmq_cli/test/plugins/enable_plugins_command_test.exs
@@ -200,12 +200,12 @@ defmodule EnablePluginsCommandTest do
switch_plugins_directories(context[:opts][:plugins_dir], opts[:plugins_dir])
{:stream, _} =
- @command.run(["mock_rabbitmq_plugin_for_3_8"], opts)
- check_plugins_enabled([:mock_rabbitmq_plugin_for_3_8], context)
+ @command.run(["mock_rabbitmq_plugin_for_3_9"], opts)
+ check_plugins_enabled([:mock_rabbitmq_plugin_for_3_9], context)
# Not changed
{:error, _version_error} = @command.run(["mock_rabbitmq_plugin_for_3_7"], opts)
- check_plugins_enabled([:mock_rabbitmq_plugin_for_3_8], context)
+ check_plugins_enabled([:mock_rabbitmq_plugin_for_3_9], context)
end
diff --git a/deps/rabbitmq_cli/test/plugins/set_plugins_command_test.exs b/deps/rabbitmq_cli/test/plugins/set_plugins_command_test.exs
index 3ebc3dfc98..8229415b38 100644
--- a/deps/rabbitmq_cli/test/plugins/set_plugins_command_test.exs
+++ b/deps/rabbitmq_cli/test/plugins/set_plugins_command_test.exs
@@ -148,10 +148,10 @@ defmodule SetPluginsCommandTest do
switch_plugins_directories(context[:opts][:plugins_dir], opts[:plugins_dir])
- {:stream, _} = @command.run(["mock_rabbitmq_plugin_for_3_8"], opts)
- check_plugins_enabled([:mock_rabbitmq_plugin_for_3_8], context)
+ {:stream, _} = @command.run(["mock_rabbitmq_plugin_for_3_9"], opts)
+ check_plugins_enabled([:mock_rabbitmq_plugin_for_3_9], context)
{:error, _version_error} = @command.run(["mock_rabbitmq_plugin_for_3_7"], opts)
- check_plugins_enabled([:mock_rabbitmq_plugin_for_3_8], context)
+ check_plugins_enabled([:mock_rabbitmq_plugin_for_3_9], context)
end
end
diff --git a/deps/rabbitmq_cli/test/queues/stream_status_command_test.exs b/deps/rabbitmq_cli/test/queues/stream_status_command_test.exs
new file mode 100644
index 0000000000..edcdd6e2a2
--- /dev/null
+++ b/deps/rabbitmq_cli/test/queues/stream_status_command_test.exs
@@ -0,0 +1,45 @@
+## This Source Code Form is subject to the terms of the Mozilla Public
+## License, v. 2.0. If a copy of the MPL was not distributed with this
+## file, You can obtain one at https://mozilla.org/MPL/2.0/.
+##
+## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+
+defmodule RabbitMQ.CLI.Streams.Commands.StreamStatusCommandTest do
+ use ExUnit.Case, async: false
+ import TestHelper
+
+ @command RabbitMQ.CLI.Streams.Commands.StreamStatusCommand
+
+ setup_all do
+ RabbitMQ.CLI.Core.Distribution.start()
+
+ :ok
+ end
+
+ setup context do
+ {:ok, opts: %{
+ node: get_rabbit_hostname(),
+ timeout: context[:test_timeout] || 30000
+ }}
+ end
+
+
+ test "validate: treats no arguments as a failure" do
+ assert @command.validate([], %{}) == {:validation_failure, :not_enough_args}
+ end
+
+ test "validate: accepts a single positional argument" do
+ assert @command.validate(["stream-queue-a"], %{}) == :ok
+ end
+
+ test "validate: when two or more arguments are provided, returns a failure" do
+ assert @command.validate(["stream-queue-a", "one-extra-arg"], %{}) == {:validation_failure, :too_many_args}
+ assert @command.validate(["stream-queue-a", "extra-arg", "another-extra-arg"], %{}) == {:validation_failure, :too_many_args}
+ end
+
+ @tag test_timeout: 3000
+ test "run: targeting an unreachable node throws a badrpc" do
+ assert match?({:badrpc, _}, @command.run(["stream-queue-a"],
+ %{node: :jake@thedog, vhost: "/", timeout: 200, tracking: false}))
+ end
+end
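
The new suite above pins down the StreamStatusCommand argument contract: exactly one positional argument (the stream name) is accepted, and an unreachable node surfaces as a badrpc. A minimal sketch of a validate/2 consistent with those assertions, hedged because the shipped command module does more than this:

    defmodule StreamStatusValidateSketch do
      # Minimal argument contract implied by the assertions above; the real
      # RabbitMQ.CLI.Streams.Commands.StreamStatusCommand is richer.
      def validate([], _opts), do: {:validation_failure, :not_enough_args}
      def validate([_stream], _opts), do: :ok
      def validate([_ | _], _opts), do: {:validation_failure, :too_many_args}
    end
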
diff --git a/deps/rabbitmq_cli/test/streams/add_replica_command_test.exs b/deps/rabbitmq_cli/test/streams/add_replica_command_test.exs
index cffcd2e34d..1377fd661b 100644
--- a/deps/rabbitmq_cli/test/streams/add_replica_command_test.exs
+++ b/deps/rabbitmq_cli/test/streams/add_replica_command_test.exs
@@ -1,16 +1,9 @@
-## The contents of this file are subject to the Mozilla Public License
-## Version 1.1 (the "License"); you may not use this file except in
-## compliance with the License. You may obtain a copy of the License
-## at https://www.mozilla.org/MPL/
+## This Source Code Form is subject to the terms of the Mozilla Public
+## License, v. 2.0. If a copy of the MPL was not distributed with this
+## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Software distributed under the License is distributed on an "AS IS"
-## basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-## the License for the specific language governing rights and
-## limitations under the License.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
##
-## The Original Code is RabbitMQ.
-##
-## Copyright (c) 2012-2020 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Streams.Commands.AddReplicaCommandTest do
use ExUnit.Case, async: false
diff --git a/deps/rabbitmq_cli/test/streams/delete_replica_command_test.exs b/deps/rabbitmq_cli/test/streams/delete_replica_command_test.exs
index cf6bcbe20d..b7fe17edb0 100644
--- a/deps/rabbitmq_cli/test/streams/delete_replica_command_test.exs
+++ b/deps/rabbitmq_cli/test/streams/delete_replica_command_test.exs
@@ -1,16 +1,9 @@
-## The contents of this file are subject to the Mozilla Public License
-## Version 1.1 (the "License"); you may not use this file except in
-## compliance with the License. You may obtain a copy of the License
-## at https://www.mozilla.org/MPL/
+## This Source Code Form is subject to the terms of the Mozilla Public
+## License, v. 2.0. If a copy of the MPL was not distributed with this
+## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Software distributed under the License is distributed on an "AS IS"
-## basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-## the License for the specific language governing rights and
-## limitations under the License.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
##
-## The Original Code is RabbitMQ.
-##
-## Copyright (c) 2012-2020 VMware, Inc. or its affiliates. All rights reserved.
defmodule RabbitMQ.CLI.Streams.Commands.DeleteReplicaCommandTest do
use ExUnit.Case, async: false
@@ -27,7 +20,8 @@ defmodule RabbitMQ.CLI.Streams.Commands.DeleteReplicaCommandTest do
setup context do
{:ok, opts: %{
node: get_rabbit_hostname(),
- timeout: context[:test_timeout] || 30000
+ timeout: context[:test_timeout] || 30000,
+ vhost: "/"
}}
end
@@ -49,6 +43,12 @@ defmodule RabbitMQ.CLI.Streams.Commands.DeleteReplicaCommandTest do
assert @command.validate(["stream-queue-a", "rabbit@new-node"], %{}) == :ok
end
+ test "run: trying to delete the last member of a stream should fail and return a meaningful message", context do
+ declare_stream("test_stream_1", "/")
+ assert @command.run(["test_stream_1", get_rabbit_hostname()], context[:opts]) ==
+ {:error, "Cannot delete the last member of a stream"}
+ end
+
@tag test_timeout: 3000
test "run: targeting an unreachable node throws a badrpc" do
assert match?({:badrpc, _}, @command.run(["stream-queue-a", "rabbit@new-node"],
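
The added test asserts a safety rail: removing the only remaining member of a stream must fail with a readable message instead of shrinking the membership to zero. Schematically (illustrative only; the real safeguard is enforced server-side by the stream machinery, not by the CLI):

    defmodule LastMemberGuardSketch do
      # Schematic guard mirroring the expected outcome of the test above.
      def delete_replica_allowed?(members) when length(members) <= 1 do
        {:error, "Cannot delete the last member of a stream"}
      end

      def delete_replica_allowed?(_members), do: :ok
    end
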
diff --git a/deps/rabbitmq_cli/test/streams/set_stream_retention_policy_command_test.exs b/deps/rabbitmq_cli/test/streams/set_stream_retention_policy_command_test.exs
index 56f960320b..2cfbe410e1 100644
--- a/deps/rabbitmq_cli/test/streams/set_stream_retention_policy_command_test.exs
+++ b/deps/rabbitmq_cli/test/streams/set_stream_retention_policy_command_test.exs
@@ -1,17 +1,9 @@
-## The contents of this file are subject to the Mozilla Public License
-## Version 1.1 (the "License"); you may not use this file except in
-## compliance with the License. You may obtain a copy of the License
-## at https://www.mozilla.org/MPL/
+## This Source Code Form is subject to the terms of the Mozilla Public
+## License, v. 2.0. If a copy of the MPL was not distributed with this
+## file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Software distributed under the License is distributed on an "AS IS"
-## basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-## the License for the specific language governing rights and
-## limitations under the License.
+## Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
##
-## The Original Code is RabbitMQ.
-##
-## The Initial Developer of the Original Code is GoPivotal, Inc.
-## Copyright (c) 2007-2020 Pivotal Software, Inc. All rights reserved.
defmodule RabbitMQ.CLI.Streams.Commands.SetStreamRetentionPolicyCommandTest do
use ExUnit.Case, async: false
diff --git a/deps/rabbitmq_cli/test/test_helper.exs b/deps/rabbitmq_cli/test/test_helper.exs
index fca68e57bd..0ad6044729 100644
--- a/deps/rabbitmq_cli/test/test_helper.exs
+++ b/deps/rabbitmq_cli/test/test_helper.exs
@@ -86,8 +86,11 @@ defmodule TestHelper do
end
def set_user_tags(name, tags) do
- :rpc.call(get_rabbit_hostname(), :rabbit_auth_backend_internal, :set_tags,
- [name, tags, "acting-user"])
+ :rpc.call(get_rabbit_hostname(), :rabbit_auth_backend_internal, :set_tags, [name, tags, "acting-user"])
+ end
+
+ def set_vhost_tags(name, tags) do
+ :rpc.call(get_rabbit_hostname(), :rabbit_vhost, :update_tags, [name, tags, "acting-user"])
end
def authenticate_user(name, password) do
@@ -159,6 +162,10 @@ defmodule TestHelper do
[queue_name, durable, auto_delete, args, owner, "acting-user"])
end
+ def declare_stream(name, vhost) do
+ declare_queue(name, vhost, true, false, [{"x-queue-type", :longstr, "stream"}])
+ end
+
def delete_queue(name, vhost) do
queue_name = :rabbit_misc.r(vhost, :queue, name)
:rpc.call(get_rabbit_hostname(),
@@ -376,6 +383,14 @@ defmodule TestHelper do
end
end
+ def fetch_user_connections(username, context) do
+ node = Helpers.normalise_node(context[:node], :shortnames)
+
+ :rabbit_misc.rpc_call(node, :rabbit_connection_tracking, :list_of_user, [
+ username
+ ])
+ end
+
def close_all_connections(node) do
# we intentionally use connections_local/0 here because connections/0,
# the cluster-wide version, loads some bits around cluster membership
@@ -600,7 +615,7 @@ defmodule TestHelper do
## Assume default log is the first one
log_file = case file do
nil ->
- [default_log | _] = :rpc.call(get_rabbit_hostname(), :rabbit_lager, :log_locations, [])
+ [default_log | _] = :rpc.call(get_rabbit_hostname(), :rabbit, :log_locations, [])
default_log
_ -> file
end
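
The TestHelper additions above are thin :rpc wrappers (declare_stream builds on the existing declare_queue with an x-queue-type of "stream"). An illustrative way a test body might combine them, with the stream name, vhost tag and user made up for the example:

    # Illustrative only; assumes the default "/" vhost and "guest" user.
    import TestHelper

    declare_stream("sketch_stream", "/")
    set_vhost_tags("/", [:qa])
    _conns = fetch_user_connections("guest", %{node: get_rabbit_hostname()})
    delete_queue("sketch_stream", "/")
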
diff --git a/deps/rabbitmq_cli/test/upgrade/drain_command_test.exs b/deps/rabbitmq_cli/test/upgrade/drain_command_test.exs
index 3533f7feff..90e6c7aeb5 100644
--- a/deps/rabbitmq_cli/test/upgrade/drain_command_test.exs
+++ b/deps/rabbitmq_cli/test/upgrade/drain_command_test.exs
@@ -23,6 +23,8 @@ defmodule DrainCommandTest do
end
setup context do
+ enable_feature_flag(:maintenance_mode_status)
+
{:ok, opts: %{
node: get_rabbit_hostname(),
timeout: context[:test_timeout] || 5000
diff --git a/deps/rabbitmq_cli/test/upgrade/revive_command_test.exs b/deps/rabbitmq_cli/test/upgrade/revive_command_test.exs
index 6d43d59b83..ba39c90469 100644
--- a/deps/rabbitmq_cli/test/upgrade/revive_command_test.exs
+++ b/deps/rabbitmq_cli/test/upgrade/revive_command_test.exs
@@ -23,6 +23,8 @@ defmodule ReviveCommandTest do
end
setup context do
+ enable_feature_flag(:maintenance_mode_status)
+
{:ok, opts: %{
node: get_rabbit_hostname(),
timeout: context[:test_timeout] || 5000
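
Both maintenance-mode suites now enable the maintenance_mode_status feature flag in their setup, since the drain and revive commands depend on it. The helper is presumably a thin RPC wrapper; a hedged sketch with the node passed explicitly (the actual TestHelper definition may differ):

    defmodule FeatureFlagHelperSketch do
      # Flip a feature flag on the target node over RPC.
      # rabbit_feature_flags:enable/1 is the broker-side API.
      def enable_feature_flag(node, feature_flag) do
        :rpc.call(node, :rabbit_feature_flags, :enable, [feature_flag])
      end
    end
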
diff --git a/deps/rabbitmq_codegen/BUILD.bazel b/deps/rabbitmq_codegen/BUILD.bazel
new file mode 100644
index 0000000000..6aa6461d0f
--- /dev/null
+++ b/deps/rabbitmq_codegen/BUILD.bazel
@@ -0,0 +1,18 @@
+exports_files([
+ "amqp-1.0/messaging.xml",
+ "amqp-1.0/security.xml",
+ "amqp-1.0/transactions.xml",
+ "amqp-1.0/transport.xml",
+])
+
+exports_files([
+ "amqp-rabbitmq-0.9.1.json",
+ "credit_extension.json",
+ "amqp-rabbitmq-0.8.json",
+])
+
+py_library(
+ name = "amqp_codegen",
+ srcs = ["amqp_codegen.py"],
+ visibility = ["//visibility:public"],
+)
diff --git a/deps/rabbitmq_codegen/CONTRIBUTING.md b/deps/rabbitmq_codegen/CONTRIBUTING.md
index 23a92fef9c..9722e973fb 100644
--- a/deps/rabbitmq_codegen/CONTRIBUTING.md
+++ b/deps/rabbitmq_codegen/CONTRIBUTING.md
@@ -13,7 +13,7 @@ The process is fairly standard:
* Create a branch with a descriptive name in the relevant repositories
* Make your changes, run tests, commit with a [descriptive message](https://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
* Submit pull requests with an explanation what has been changed and **why**
- * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Submit a filled out and signed [Contributor Agreement](https://cla.pivotal.io/) if needed (see below)
* Be patient. We will get to your pull request eventually
If what you are going to work on is a substantial change, please first ask the core team
@@ -28,7 +28,7 @@ See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
## Contributor Agreement
If you want to contribute a non-trivial change, please submit a signed copy of our
-[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+[Contributor Agreement](https://cla.pivotal.io/) around the time
you submit your pull request. This will make it much easier (in some cases, possible)
for the RabbitMQ team at Pivotal to merge your contribution.
diff --git a/deps/rabbitmq_consistent_hash_exchange/BUILD.bazel b/deps/rabbitmq_consistent_hash_exchange/BUILD.bazel
new file mode 100644
index 0000000000..e6e640b752
--- /dev/null
+++ b/deps/rabbitmq_consistent_hash_exchange/BUILD.bazel
@@ -0,0 +1,58 @@
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze")
+load(
+ "//:rabbitmq.bzl",
+ "RABBITMQ_DIALYZER_OPTS",
+ "assert_suites",
+ "broker_for_integration_suites",
+ "rabbitmq_integration_suite",
+ "rabbitmq_lib",
+)
+
+APP_NAME = "rabbitmq_consistent_hash_exchange"
+
+APP_DESCRIPTION = "Consistent Hash Exchange Type"
+
+BUILD_DEPS = [
+ "//deps/rabbitmq_cli:rabbitmqctl",
+]
+
+DEPS = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+]
+
+RUNTIME_DEPS = [
+ "//deps/rabbit:bazel_erlang_lib",
+]
+
+rabbitmq_lib(
+ app_description = APP_DESCRIPTION,
+ app_name = APP_NAME,
+ build_deps = BUILD_DEPS,
+ runtime_deps = RUNTIME_DEPS,
+ deps = DEPS,
+)
+
+xref(tags = ["xref"])
+
+dialyze(
+ dialyzer_opts = RABBITMQ_DIALYZER_OPTS,
+ plt = "//:base_plt",
+ tags = ["dialyze"],
+)
+
+broker_for_integration_suites()
+
+PACKAGE = "deps/rabbitmq_consistent_hash_exchange"
+
+suites = [
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "rabbit_exchange_type_consistent_hash_SUITE",
+ ),
+]
+
+assert_suites(
+ suites,
+ glob(["test/**/*_SUITE.erl"]),
+)
diff --git a/deps/rabbitmq_consistent_hash_exchange/Makefile b/deps/rabbitmq_consistent_hash_exchange/Makefile
index c63fd515a5..b1b5b07f68 100644
--- a/deps/rabbitmq_consistent_hash_exchange/Makefile
+++ b/deps/rabbitmq_consistent_hash_exchange/Makefile
@@ -17,5 +17,5 @@ DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
ERLANG_MK_COMMIT = rabbitmq-tmp
-include rabbitmq-components.mk
-include erlang.mk
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
diff --git a/deps/rabbitmq_consistent_hash_exchange/erlang.mk b/deps/rabbitmq_consistent_hash_exchange/erlang.mk
deleted file mode 100644
index fce4be0b0a..0000000000
--- a/deps/rabbitmq_consistent_hash_exchange/erlang.mk
+++ /dev/null
@@ -1,7808 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
-ERLANG_MK_WITHOUT =
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
-
-# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
-
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
-
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of statc BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another a key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstact time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simply writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating Github-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = erlang library for JSON Web Token
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple non intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is a pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elasticsearch's REST interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = redis erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += esh_mk
-pkg_esh_mk_name = esh_mk
-pkg_esh_mk_description = esh template engine plugin for erlang.mk
-pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
-pkg_esh_mk_fetch = git
-pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
-pkg_esh_mk_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = TOML language erlang parser
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = http://fixprotocol.org/ implementation.
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - an Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = The guards and for Erlang parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
-pkg_gun_homepage = http://ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang irc client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library, written in C, for efficiently decoding and encoding JSON.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = erlang driver for rethinkdb
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = library to handle mimetypes
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang Oauth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transformer for using new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between record and proplist
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = automatic Erlang node discovery via redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = pipelined erlang redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang Rest Client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy as nif for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an id generator for message service.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elastic Search
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX style broken HTML parser in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = transit format for erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU based collation Erlang module
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtension for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = a simple erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler svn repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based yaml loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = ZAB protocol implemented in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
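As a quick usage sketch of the search target above (assuming a project Makefile that already includes this erlang.mk; the query string is only an illustration), the package index can be queried case-insensitively by name or description, which prints, among any other matching packages, entries such as:

    $ make search q=yaml
    App name: yamerl
    Description: YAML 1.2 parser in pure Erlang
    Home page: https://github.com/yakaz/yamerl
    Fetch with: git
    Repository: https://github.com/yakaz/yamerl
    Commit: master

Running `make search` without q prints every entry in the index.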
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
-
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
-
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# They both use the core_dep_plugin macro.
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
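A minimal sketch of how DEP_EARLY_PLUGINS is used, with a hypothetical dependency name and file path: an entry without a slash loads <dep>/early-plugins.mk from $(DEPS_DIR), an entry with a slash loads that exact file.

    BUILD_DEPS += my_plugins
    dep_my_plugins = git https://example.com/my_plugins master

    # Loads $(DEPS_DIR)/my_plugins/early-plugins.mk:
    DEP_EARLY_PLUGINS = my_plugins
    # Or load one specific file from the dependency:
    # DEP_EARLY_PLUGINS = my_plugins/mk/early-plugins.mk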
-
-# Query functions.
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
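To make the query functions above concrete, here is a sketch with two illustrative dependency declarations (names, URLs and versions are examples only) and the values the queries resolve to:

    DEPS = cowboy jsx
    dep_cowboy = git https://github.com/ninenines/cowboy 2.9.0
    dep_jsx = hex 3.1.0

    # $(call query_fetch_method,cowboy) -> git
    # $(call query_repo,cowboy)         -> https://github.com/ninenines/cowboy
    # $(call query_version,cowboy)      -> 2.9.0
    # $(call query_repo,jsx)            -> https://hex.pm/packages/jsx
    # $(call query_version,jsx)         -> 3.1.0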
-
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
-
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly we don't want to include it here,
-# otherwise it'll be treated both as an app and as a top-level project.
-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
-
-export NO_AUTOPATCH
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
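The *_verbose_$(V) pattern used here (and for the other verbosity variables further below) selects the recipe prefix from the V variable, which erlang.mk normally defaults to 0 earlier in this file; assuming that default:

    make          # V=0: compact progress lines such as "  DEP    cowboy (2.9.0)"
    make V=1      # the _verbose prefix expands to nothing, so make echoes the full commands
    make V=2      # recipes additionally run under `set -x`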
-
-# Optimization: don't recompile deps unless truly necessary.
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create the ebin directory for all apps to make sure Erlang recognizes them
-# as proper OTP applications when using -include_lib. This is a temporary
-# fix; a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
-
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies after they have been compiled
-# once. Developers working on the top-level project and on some of its
-# dependencies at the same time may want to change this behavior.
-# There are two solutions, illustrated after this block:
-#     1. Set `FULL=1` so that all dependencies are visited and
-#        recursively recompiled if necessary.
-#     2. Set `FORCE_REBUILD=` to the specific list of dependencies that
-#        should be recompiled (instead of the whole set).
-
-FORCE_REBUILD ?=
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
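For instance (dependency names are illustrative), either of the following overrides the build-once behaviour described above on the next run:

    make FULL=1                        # visit and, if needed, rebuild every dependency
    make FORCE_REBUILD="cowboy ranch"  # force only these dependencies to be rebuilt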
-
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
-
-# Deps related targets.
-
-# @todo Rename GNUmakefile and makefile into Makefile first, if they exist.
-# While the makefile could also be named GNUmakefile or makefile,
-# in practice only Makefile has been needed so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
-# if given. Do it for all 3 possible Makefile file names.
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
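Two knobs control this autopatching; a short sketch with illustrative dependency names:

    # Skip the autopatch step entirely for the listed dependencies:
    NO_AUTOPATCH = cowboy ranch

    # Leave the dependencies' own bundled erlang.mk untouched instead of
    # redirecting it to the parent Erlang.mk as done above:
    NO_AUTOPATCH_ERLANG_MK = 1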
-
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-# Hex dependencies only carry a package version, so there is no need to look
-# them up in the Erlang.mk package index.
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
-
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility with older Erlang.mk configurations.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
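Each dep_fetch_* definition above corresponds to one fetch method that can be named in a dep_* declaration; a sketch with illustrative names, URLs, versions and paths:

    DEPS = cowboy jsx local_lib
    dep_cowboy    = git https://github.com/ninenines/cowboy 2.9.0
    dep_jsx       = hex 3.1.0
    dep_local_lib = ln /path/to/local_lib

    # Other methods recognized above: git-subfolder, git-submodule, hg, svn, cp.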
-
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
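These variables are normally set in the project Makefile; a minimal sketch with illustrative module names (note that assigning ERLC_OPTS replaces the default shown above rather than extending it):

    ERLC_OPTS = +debug_info +warn_export_vars +warn_shadow_vars +warn_missing_spec
    COMPILE_FIRST = my_behaviour
    ERLC_EXCLUDE = my_generated_module

COMPILE_FIRST makes src/my_behaviour.erl compile before the other modules, while ERLC_EXCLUDE keeps src/my_generated_module.erl out of the erlc invocation.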
-
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
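The placeholders in the two app_file templates above come from variables that are usually set at the top of the project Makefile; a minimal sketch with illustrative values:

    PROJECT = my_app
    PROJECT_DESCRIPTION = Example application
    PROJECT_VERSION = 1.0.0
    # Having src/my_app_app.erl (matched via PROJECT_MOD) selects the second
    # template above, which adds the {mod, ...} entry:
    PROJECT_MOD = my_app_app
    PROJECT_ENV = [{pool_size, 10}]
    PROJECT_REGISTERED = my_app_event_manager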
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- Output0 = [
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ],
- Output = case "é" of
- [233] -> unicode:characters_to_binary(Output0);
- _ -> Output0
- end,
- ok = file:write_file("$(1)", Output),
- halt()
-endef
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
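The makedep.erl script above regenerates $(PROJECT).d by scanning the sources for -include, -include_lib, behaviours, parse_transforms and imports; when the dependency file already exists it can be reused as-is:

    make NO_MAKEDEP=1     # skip rescanning and keep the existing $(PROJECT).d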
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
- echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
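When src/$(PROJECT).app.src is present, the recipe above expects a literal empty modules list (which it then fills in) and optionally substitutes the git describe output; a minimal sketch of such a file, with an illustrative application name:

    {application, my_app, [
        {description, "Example application"},
        {vsn, "1.0.0"},
        {id, "git"},
        {modules, []},
        {registered, []},
        {applications, [kernel, stdlib]}
    ]}.

The {modules, []} entry is rewritten with the compiled module list, and {id, "git"} is replaced with the git describe output.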
-
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
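Test-only dependencies are declared like regular ones but under TEST_DEPS, and during test builds the project's own sources plus everything under $(TEST_DIR) (test/ by default) are compiled with TEST_ERLC_OPTS; a sketch with an illustrative dependency:

    TEST_DEPS = meck
    dep_meck = git https://github.com/eproxus/meck 0.9.2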
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
- @:
-
-test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
-test_erlc_verbose_2 = set -x;
-test_erlc_verbose = $(test_erlc_verbose_$(V))
-
-define compile_test_erl
- $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(1)
-endef
-
-ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
-$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
- $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want to fail due to
-# warnings when used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
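Assuming illustrative declarations such as DEPS = cowboy with dep_cowboy = git https://github.com/ninenines/cowboy 2.9.0, running the target above:

    make rebar.config

would render roughly the following (the -Werror flag is stripped and each +option loses its leading plus sign):

    {deps, [
        {cowboy,".*",{git,"https://github.com/ninenines/cowboy","2.9.0"}}
    ]}.
    {erl_opts, [debug_info,warn_export_vars,warn_shadow_vars,warn_obsolete_guard]}.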
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
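A usage sketch for the AsciiDoc plugin above, assuming asciideck is listed as a doc dependency, doc/src/guide/book.asciidoc and doc/src/manual/*.asciidoc exist, and a2x is installed:

    DOC_DEPS = asciideck

    make docs                                           # builds doc/guide.pdf, doc/html/ and the man pages
    make install-docs MAN_INSTALL_PATH=$(HOME)/.local/share/man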
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
- " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
- " list-templates List available templates"
-
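A short usage sketch for these bootstrap targets (application and module names are illustrative; PROJECT normally defaults to the current directory name elsewhere in erlang.mk):

    make -f erlang.mk bootstrap bootstrap-rel        # skeleton OTP application plus release files
    make new-app in=my_backend                       # add an application under $(APPS_DIR)
    make new t=gen_server n=my_worker in=my_backend
    make list-templates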
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
-
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
-
-list-templates:
- $(verbose) @echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
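-
-# Illustrative usage of the templates above (module and application names
-# below are placeholders):
-#   make new t=gen_statem n=my_statem
-#   make new t=ranch_protocol n=my_protocol in=my_app
-# "make list-templates" prints every available template name.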
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code. The "gcc" MSYS2 package also doesn't.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
-		"The CI_OTP variable must be defined with the Erlang versions" \
-		"to be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
-
-# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifdef CONCUERROR_TESTS
-
-.PHONY: concuerror distclean-concuerror
-
-# Configuration
-
-CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
-CONCUERROR_OPTS ?=
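-
-# Illustrative only (module and test names are placeholders):
-# CONCUERROR_TESTS is a space-separated list of module:test pairs, e.g.
-#   CONCUERROR_TESTS = my_module:my_test other_module:other_test
-# Extra Concuerror command line flags can be passed through CONCUERROR_OPTS.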
-
-# Core targets.
-
-check:: concuerror
-
-ifndef KEEP_LOGS
-distclean:: distclean-concuerror
-endif
-
-# Plugin-specific targets.
-
-$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
- $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
- $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
-
-$(CONCUERROR_LOGS_DIR):
- $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
-
-define concuerror_html_report
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="utf-8">
-<title>Concuerror HTML report</title>
-</head>
-<body>
-<h1>Concuerror HTML report</h1>
-<p>Generated on $(concuerror_date)</p>
-<ul>
-$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
-</ul>
-</body>
-</html>
-endef
-
-concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
- $(eval concuerror_date := $(shell date))
- $(eval concuerror_targets := $^)
- $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
-
-define concuerror_target
-.PHONY: concuerror-$1-$2
-
-concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
- $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
- --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
- -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
- $$(CONCUERROR_OPTS) -m $1 -t $2
-endef
-
-$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
-
-distclean-concuerror:
- $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
-
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ct apps-ct distclean-ct
-
-# Configuration.
-
-CT_OPTS ?=
-
-ifneq ($(wildcard $(TEST_DIR)),)
-ifndef CT_SUITES
-CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
-endif
-endif
-CT_SUITES ?=
-CT_LOGS_DIR ?= $(CURDIR)/logs
-
-# Core targets.
-
-tests:: ct
-
-ifndef KEEP_LOGS
-distclean:: distclean-ct
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Common_test targets:" \
- " ct Run all the common_test suites for this project" \
- "" \
-		"Each of your common_test suites has an associated target." \
-		"A suite named http_SUITE can be run using the ct-http target."
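-
-# Illustrative only (suite, group and case names are placeholders):
-#   make ct-http                  # run http_SUITE
-#   make ct-http t=admin          # run only the admin group
-#   make ct-http t=admin:login    # run only the login case of the admin group
-#   make ct-http c=login          # run only the login case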
-
-# Plugin-specific targets.
-
-CT_RUN = ct_run \
- -no_auto_compile \
- -noinput \
- -pa $(CURDIR)/ebin $(TEST_DIR) \
- -dir $(TEST_DIR) \
- -logdir $(CT_LOGS_DIR)
-
-ifeq ($(CT_SUITES),)
-ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
-else
-# We do not run tests if we are in an apps/* with no test directory.
-ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
-ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
-endif
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-define ct_app_target
-apps-ct-$1: test-build
- $$(MAKE) -C $1 ct IS_APP=1
-endef
-
-$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
-
-apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
-endif
-
-ifdef t
-ifeq (,$(findstring :,$t))
-CT_EXTRA = -group $t
-else
-t_words = $(subst :, ,$t)
-CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
-endif
-else
-ifdef c
-CT_EXTRA = -case $c
-else
-CT_EXTRA =
-endif
-endif
-
-define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
-endef
-
-$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
-
-distclean-ct:
- $(gen_verbose) rm -rf $(CT_LOGS_DIR)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: plt distclean-plt dialyze
-
-# Configuration.
-
-DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
-export DIALYZER_PLT
-
-PLT_APPS ?=
-DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-DIALYZER_PLT_OPTS ?=
-
-# Core targets.
-
-check:: dialyze
-
-distclean:: distclean-plt
-
-help::
- $(verbose) printf "%s\n" "" \
- "Dialyzer targets:" \
- " plt Build a PLT file for this project" \
- " dialyze Analyze the project using Dialyzer"
-
-# Plugin-specific targets.
-
-define filter_opts.erl
- Opts = init:get_plain_arguments(),
- {Filtered, _} = lists:foldl(fun
- (O, {Os, true}) -> {[O|Os], false};
- (O = "-D", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-I", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-pa", {Os, _}) -> {[O|Os], true};
- (_, Acc) -> Acc
- end, {[], false}, Opts),
- io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
- halt().
-endef
-
-# DIALYZER_PLT is a variable understood directly by Dialyzer.
-#
-# We append the path to erts at the end of the PLT. This works
-# because the PLT file is in the external term format and the
-# function binary_to_term/1 ignores any trailing data.
-$(DIALYZER_PLT): deps app
- $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
- while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
- $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
- erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
- $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
-
-plt: $(DIALYZER_PLT)
-
-distclean-plt:
- $(gen_verbose) rm -f $(DIALYZER_PLT)
-
-ifneq ($(wildcard $(DIALYZER_PLT)),)
-dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
- $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
- grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
- rm $(DIALYZER_PLT); \
- $(MAKE) plt; \
- fi
-else
-dialyze: $(DIALYZER_PLT)
-endif
- $(verbose) dialyzer --no_native `$(ERL) \
- -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
- -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
- " escript Build an executable escript archive" \
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
-	$(verbose) mkdir -p $(dir $(ESCRIPT_ZIP_FILE))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: eunit apps-eunit
-
-# Configuration
-
-EUNIT_OPTS ?=
-EUNIT_ERL_OPTS ?=
-
-# Core targets.
-
-tests:: eunit
-
-help::
- $(verbose) printf "%s\n" "" \
- "EUnit targets:" \
- " eunit Run all the EUnit tests for this project"
-
-# Plugin-specific targets.
-
-define eunit.erl
- $(call cover.erl)
- CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
- ok -> ok;
- error -> halt(2)
- end,
- CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
- halt()
-endef
-
-EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
-else
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
-endif
-else
-EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
-EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
-
-EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
- $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
-
-eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
-ifneq ($(wildcard src/ $(TEST_DIR)),)
- $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-apps-eunit: test-build
- $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
- [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
- exit $$eunit_retcode
-endif
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper)
-.PHONY: proper
-
-# Targets.
-
-tests:: proper
-
-define proper_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- Module = fun(M) ->
- [true] =:= lists:usort([
- case atom_to_list(F) of
- "prop_" ++ _ ->
- io:format("Testing ~p:~p/0~n", [M, F]),
- proper:quickcheck(M:F(), nocolors);
- _ ->
- true
- end
- || {F, 0} <- M:module_info(exports)])
- end,
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]);
- module -> Module($(2));
- function -> proper:quickcheck($(2), nocolors)
- end,
- CoverExport("$(COVER_DATA_DIR)/proper.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-proper: test-build cover-data-dir
- $(verbose) $(call erlang,$(call proper_check.erl,module,$(t)))
-else
-proper: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)()))
-endif
-else
-proper: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Verbosity.
-
-proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
-proto_verbose = $(proto_verbose_$(V))
-
-# Core targets.
-
-ifneq ($(wildcard src/),)
-ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
-PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
-ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
-
-ifeq ($(PROTO_FILES),)
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
- $(verbose) :
-else
-# Rebuild proto files when the Makefile changes.
-# We exclude $(PROJECT).d to avoid a circular dependency.
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(PROTO_FILES); \
- fi
- $(verbose) touch $@
-
-$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
-endif
-
-ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
-define compile_proto.erl
- [begin
- protobuffs_compile:generate_source(F, [
- {output_include_dir, "./include"},
- {output_src_dir, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-else
-define compile_proto.erl
- [begin
- gpb_compile:file(F, [
- {include_as_lib, true},
- {module_name_suffix, "_pb"},
- {o_hrl, "./include"},
- {o_erl, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-endif
-
-ifneq ($(PROTO_FILES),)
-$(PROJECT).d:: $(PROTO_FILES)
- $(verbose) mkdir -p ebin/ include/
- $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
-endif
-endif
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
-
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
-		"  shell              Run an Erlang shell with SHELL_OPTS or reasonable defaults"
-
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
-
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
-		"ReST sources and the 'conf.py' file are expected in the directory pointed to by" \
-		"SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists the formats to build (only" \
-		"the 'html' format is generated by default); the target directory can be chosen by" \
-		'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
-		"Additional Sphinx options can be set in SPHINX_OPTS."
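-
-# Illustrative only: several formats can be built in one run by overriding
-# SPHINX_FORMATS, and each format's output directory can be redirected, e.g.
-#   make sphinx SPHINX_FORMATS="html man"
-#   make sphinx sphinx_html_output=public/html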
-
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
-
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
-.PHONY: triq
-
-# Targets.
-
-tests:: triq
-
-define triq_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
- module -> triq:check($(2));
- function -> triq:check($(2))
- end,
- CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-triq: test-build cover-data-dir
- $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
-else
-triq: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
-endif
-else
-triq: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
-		'  xref        Run Xrefr using $$XREF_CONFIG as the config file, if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-COVER_REPORT_DIR ?= cover
-COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
-
-ifdef COVER
-COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
-COVER_DEPS ?=
-endif
-
-# Code coverage for Common Test.
-
-ifdef COVER
-ifdef CT_RUN
-ifneq ($(wildcard $(TEST_DIR)),)
-test-build:: $(TEST_DIR)/ct.cover.spec
-
-$(TEST_DIR)/ct.cover.spec: cover-data-dir
- $(gen_verbose) printf "%s\n" \
- "{incl_app, '$(PROJECT)', details}." \
- "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
- $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
- $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
-
-CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
-endif
-endif
-endif
-
-# Code coverage for other tools.
-
-ifdef COVER
-define cover.erl
- CoverSetup = fun() ->
- Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
- $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
- $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
- end
- end || Dir <- Dirs]
- end,
- CoverExport = fun(Filename) -> cover:export(Filename) end,
-endef
-else
-define cover.erl
- CoverSetup = fun() -> ok end,
- CoverExport = fun(_) -> ok end,
-endef
-endif
-
-# Core targets
-
-ifdef COVER
-ifneq ($(COVER_REPORT_DIR),)
-tests::
- $(verbose) $(MAKE) --no-print-directory cover-report
-endif
-
-cover-data-dir: | $(COVER_DATA_DIR)
-
-$(COVER_DATA_DIR):
- $(verbose) mkdir -p $(COVER_DATA_DIR)
-else
-cover-data-dir:
-endif
-
-clean:: coverdata-clean
-
-ifneq ($(COVER_REPORT_DIR),)
-distclean:: cover-report-clean
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Cover targets:" \
-		"  cover-report  Generate an HTML coverage report from previously collected" \
- " cover data." \
- " all.coverdata Merge all coverdata files into all.coverdata." \
- "" \
- "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
-		"target tests additionally generates an HTML coverage report from the combined" \
- "coverdata files from each of these testing tools. HTML reports can be disabled" \
- "by setting COVER_REPORT_DIR to empty."
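-
-# Illustrative only: a typical invocation is
-#   make tests COVER=1
-# which writes .coverdata files into COVER_DATA_DIR and renders the HTML
-# report into COVER_REPORT_DIR; "make all.coverdata" merges the collected
-# coverdata files into a single file.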
-
-# Plugin specific targets
-
-COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
-
-.PHONY: coverdata-clean
-coverdata-clean:
- $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
-
-# Merge all coverdata files into one.
-define cover_export.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
-endef
-
-all.coverdata: $(COVERDATA) cover-data-dir
- $(gen_verbose) $(call erlang,$(cover_export.erl))
-
-# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
-# empty if you want the coverdata files but not the HTML report.
-ifneq ($(COVER_REPORT_DIR),)
-
-.PHONY: cover-report-clean cover-report
-
-cover-report-clean:
- $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
-ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
- $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
-endif
-
-ifeq ($(COVERDATA),)
-cover-report:
-else
-
-# Modules that include eunit.hrl always contain one line without coverage
-# because eunit defines test/0, which is never called. We compensate for this.
-EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
- grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
- | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
-
-define cover_report.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- Ms = cover:imported_modules(),
- [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
- ++ ".COVER.html", [html]) || M <- Ms],
- Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
- EunitHrlMods = [$(EUNIT_HRL_MODS)],
- Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
- true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
- TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
- TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
- Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
- TotalPerc = Perc(TotalY, TotalN),
- {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
- io:format(F, "<!DOCTYPE html><html>~n"
- "<head><meta charset=\"UTF-8\">~n"
- "<title>Coverage report</title></head>~n"
- "<body>~n", []),
- io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
- io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
- [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
- "<td>~p%</td></tr>~n",
- [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
- How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
- Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
- io:format(F, "</table>~n"
- "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
- "</body></html>", [How, Date]),
- halt().
-endef
-
-cover-report:
- $(verbose) mkdir -p $(COVER_REPORT_DIR)
- $(gen_verbose) $(call erlang,$(cover_report.erl))
-
-endif
-endif # ifneq ($(COVER_REPORT_DIR),)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
-
-endif
-endif
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
-
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
-
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are also included
-# regardless of the type of dependencies requested.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
-
-# Allow fetch-deps to be used together with $(DEP_TYPES) to fetch
-# multiple types of dependencies with a single target.
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
-
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
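-# Illustrative only: QUERY selects the fields printed for each dependency, e.g.
-#   make query-deps QUERY="name version"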
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
diff --git a/deps/rabbitmq_consistent_hash_exchange/rabbitmq-components.mk b/deps/rabbitmq_consistent_hash_exchange/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/rabbitmq_consistent_hash_exchange/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Define default goal to `all` because this file defines some targets
-# before the inclusion of erlang.mk leading to the wrong target becoming
-# the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
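-
-# For illustration (hypothetical tag and commit): a git-describe output such as
-# "rabbitmq_v3_8_9-12-gabc1234" is rewritten by the sed expressions above into
-# "3.8.9+12.gabc1234".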
-
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to check out branches that match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch, or fall back to `stable` or `master`, whichever was the
-# base of the topic branch.
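-#
-# For example (illustration only): when this repository is checked out at a
-# release tag such as v3.8.9, the dep_* entries below resolve to the same tag
-# in each component's repository; on a topic branch, the branch of the same
-# name is tried first, then the fallback refs listed at the end of each entry.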
-
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
-
-# Third-party dependencies version pinning.
-#
-# We do that in this file, which is copied into all projects, to ensure
-# all projects use the same versions. It avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# Erlang.mk does not rebuild dependencies by default, once they were
-# compiled once, except for those listed in the `$(FORCE_REBUILD)`
-# variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this eases
-# the work on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project
-# repository URL, if it's a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name is replaced by the
-# target's properties:
-# eg. rabbitmq-common is replaced by rabbitmq-codegen
-# eg. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fallback to RabbitMQ
-# upstream which is GitHub.
-
-# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following pattern:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
-
-# Macro to replace both the project's name (eg. "rabbit_common") and
-# repository name (eg. "rabbitmq-common") by the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespaces in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
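To make the URL inference described above concrete, here is an illustrative expansion of dep_rmq_repo, assuming the current project is rabbit_common and its origin remote points at a hypothetical fork:

#   PROJECT                      = rabbit_common
#   RABBITMQ_COMPONENT_REPO_NAME = rabbitmq-common
#   RABBITMQ_CURRENT_FETCH_URL   = git@gitlab.example.com:team/rabbitmq-common.git
#
#   $(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),rabbitmq_codegen)
#     => git@gitlab.example.com:team/rabbitmq-codegen.git
#
# subst_repo_name swaps the repository name (and the project name, if it
# appears) inside the URL; if cloning from the computed location fails,
# dep_fetch_git_rmq below falls back to the GitHub upstream URL.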
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level's one.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is a coincidence that we found a
-# `rabbitmq-components.mk` further up the directory tree.
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
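For reference, the two checks above match directory layouts like the following (paths are illustrative):

#   Case 1: this project lives directly under the monorepo's deps/ directory
#     <top>/rabbitmq-components.mk
#     <top>/deps/<this project>/Makefile             ($(abspath ..) = <top>/deps)
#
#   Case 2: this project was fetched as a dependency of another component
#     <top>/rabbitmq-components.mk
#     <top>/deps/<component>/deps/<this project>/Makefile
#                                                    ($(abspath ../../..) = <top>/deps)
#
# In both cases DEPS_DIR is pointed at <top>/deps and distclean is disabled.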
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
diff --git a/deps/rabbitmq_consistent_hash_exchange/src/Elixir.RabbitMQ.CLI.Diagnostics.Commands.ConsistentHashExchangeRingStateCommand.erl b/deps/rabbitmq_consistent_hash_exchange/src/Elixir.RabbitMQ.CLI.Diagnostics.Commands.ConsistentHashExchangeRingStateCommand.erl
index 8f61b9726c..bb00ccf01e 100644
--- a/deps/rabbitmq_consistent_hash_exchange/src/Elixir.RabbitMQ.CLI.Diagnostics.Commands.ConsistentHashExchangeRingStateCommand.erl
+++ b/deps/rabbitmq_consistent_hash_exchange/src/Elixir.RabbitMQ.CLI.Diagnostics.Commands.ConsistentHashExchangeRingStateCommand.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module('Elixir.RabbitMQ.CLI.Diagnostics.Commands.ConsistentHashExchangeRingStateCommand').
@@ -12,6 +12,11 @@
-behaviour('Elixir.RabbitMQ.CLI.CommandBehaviour').
+-ignore_xref([
+ {'Elixir.RabbitMQ.CLI.Core.ExitCodes', exit_dataerr, 0},
+ {'Elixir.RabbitMQ.CLI.DefaultOutput', output, 1}
+]).
+
-export([
usage/0,
usage_additional/0,
diff --git a/deps/rabbitmq_consistent_hash_exchange/src/rabbit_exchange_type_consistent_hash.erl b/deps/rabbitmq_consistent_hash_exchange/src/rabbit_exchange_type_consistent_hash.erl
index d33a79db2d..44d7dca3b2 100644
--- a/deps/rabbitmq_consistent_hash_exchange/src/rabbit_exchange_type_consistent_hash.erl
+++ b/deps/rabbitmq_consistent_hash_exchange/src/rabbit_exchange_type_consistent_hash.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_exchange_type_consistent_hash).
@@ -53,7 +53,7 @@ init() ->
{attributes, record_info(fields, chx_hash_ring)},
{type, ordered_set}]),
mnesia:add_table_copy(?HASH_RING_STATE_TABLE, node(), ram_copies),
- mnesia:wait_for_tables([?HASH_RING_STATE_TABLE], 30000),
+ rabbit_table:wait([?HASH_RING_STATE_TABLE]),
recover(),
ok.
@@ -227,7 +227,6 @@ remove_bindings(none, X, Bindings) ->
ok.
remove_binding(#binding{source = S, destination = D, key = RK}) ->
- Weight = rabbit_data_coercion:to_integer(RK),
rabbit_log:debug("Consistent hashing exchange: removing binding "
"from exchange '~p' to destination '~p' with routing key '~s'",
[rabbit_misc:rs(S), rabbit_misc:rs(D), RK]),
@@ -237,7 +236,7 @@ remove_binding(#binding{source = S, destination = D, key = RK}) ->
next_bucket_number = NexN0}] ->
%% Buckets with lower numbers stay as is; buckets that
%% belong to this binding are removed; buckets with
- %% greater numbers are updated (their numbers are adjusted downwards by weight)
+ %% greater numbers are updated (their numbers are adjusted downwards)
BucketsOfThisBinding = maps:filter(fun (_K, V) -> V =:= D end, BM0),
case maps:size(BucketsOfThisBinding) of
0 -> ok;
@@ -251,10 +250,10 @@ remove_binding(#binding{source = S, destination = D, key = RK}) ->
%% final state with "down the ring" buckets updated
NewBucketsDownTheRing = maps:fold(
fun(K0, V, Acc) ->
- maps:put(K0 - Weight, V, Acc)
+ maps:put(K0 - N, V, Acc)
end, #{}, BucketsDownTheRing),
BM1 = maps:merge(UnchangedBuckets, NewBucketsDownTheRing),
- NextN = NexN0 - Weight,
+ NextN = NexN0 - N,
State = State0#chx_hash_ring{bucket_map = BM1,
next_bucket_number = NextN},
diff --git a/deps/rabbitmq_consistent_hash_exchange/test/rabbit_exchange_type_consistent_hash_SUITE.erl b/deps/rabbitmq_consistent_hash_exchange/test/rabbit_exchange_type_consistent_hash_SUITE.erl
index 644f15a38a..1439c45b6d 100644
--- a/deps/rabbitmq_consistent_hash_exchange/test/rabbit_exchange_type_consistent_hash_SUITE.erl
+++ b/deps/rabbitmq_consistent_hash_exchange/test/rabbit_exchange_type_consistent_hash_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_exchange_type_consistent_hash_SUITE).
@@ -41,7 +41,9 @@ groups() ->
test_hash_ring_updates_when_exclusive_queues_are_deleted_due_to_connection_closure_case5,
test_hash_ring_updates_when_exclusive_queues_are_deleted_due_to_connection_closure_case6,
test_hash_ring_updates_when_exchange_is_deleted,
- test_hash_ring_updates_when_queue_is_unbound
+ test_hash_ring_updates_when_queue_is_unbound,
+ test_hash_ring_updates_when_duplicate_binding_is_created_and_queue_is_deleted,
+ test_hash_ring_updates_when_duplicate_binding_is_created_and_binding_is_deleted
]}
].
@@ -557,6 +559,99 @@ test_hash_ring_updates_when_queue_is_unbound(Config) ->
rabbit_ct_client_helpers:close_channel(Chan),
ok.
+test_hash_ring_updates_when_duplicate_binding_is_created_and_queue_is_deleted(Config) ->
+ Chan = rabbit_ct_client_helpers:open_channel(Config, 0),
+
+ X = <<"test_hash_ring_updates_when_duplicate_binding_is_created_and_queue_is_deleted">>,
+ amqp_channel:call(Chan, #'exchange.delete' {exchange = X}),
+
+ Declare = #'exchange.declare'{exchange = X,
+ type = <<"x-consistent-hash">>},
+ #'exchange.declare_ok'{} = amqp_channel:call(Chan, Declare),
+
+ Q1 = <<"f-q1">>,
+ #'queue.declare_ok'{} =
+ amqp_channel:call(Chan, #'queue.declare'{
+ queue = Q1, durable = true, exclusive = false}),
+ #'queue.bind_ok'{} =
+ amqp_channel:call(Chan, #'queue.bind'{queue = Q1,
+ exchange = X,
+ routing_key = <<"2">>}),
+
+ #'queue.bind_ok'{} =
+ amqp_channel:call(Chan, #'queue.bind'{queue = Q1,
+ exchange = X,
+ routing_key = <<"3">>}),
+
+ ?assertEqual(5, count_buckets_of_exchange(Config, X)),
+ assert_ring_consistency(Config, X),
+
+ Q2 = <<"f-q2">>,
+ #'queue.declare_ok'{} =
+ amqp_channel:call(Chan, #'queue.declare'{
+ queue = Q2, durable = true, exclusive = false}),
+ #'queue.bind_ok'{} =
+ amqp_channel:call(Chan, #'queue.bind'{queue = Q2,
+ exchange = X,
+ routing_key = <<"4">>}),
+
+ ?assertEqual(9, count_buckets_of_exchange(Config, X)),
+ assert_ring_consistency(Config, X),
+
+ amqp_channel:call(Chan, #'queue.delete' {queue = Q1}),
+ ?assertEqual(4, count_buckets_of_exchange(Config, X)),
+ assert_ring_consistency(Config, X),
+
+ clean_up_test_topology(Config, X, [Q1, Q2]),
+ rabbit_ct_client_helpers:close_channel(Chan),
+ ok.
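(For context on the expected counts: the routing key of a binding to an x-consistent-hash exchange is its weight in buckets, so binding Q1 with keys "2" and "3" creates 2 + 3 = 5 buckets, adding Q2 with key "4" brings the total to 9, and deleting Q1 removes its 5 buckets, leaving 4.)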
+
+test_hash_ring_updates_when_duplicate_binding_is_created_and_binding_is_deleted(Config) ->
+ Chan = rabbit_ct_client_helpers:open_channel(Config, 0),
+
+ X = <<"test_hash_ring_updates_when_duplicate_binding_is_created_and_binding_is_deleted">>,
+ amqp_channel:call(Chan, #'exchange.delete' {exchange = X}),
+
+ Declare = #'exchange.declare'{exchange = X,
+ type = <<"x-consistent-hash">>},
+ #'exchange.declare_ok'{} = amqp_channel:call(Chan, Declare),
+
+ Q1 = <<"f-q1">>,
+ #'queue.declare_ok'{} =
+ amqp_channel:call(Chan, #'queue.declare'{
+ queue = Q1, durable = true, exclusive = false}),
+ #'queue.bind_ok'{} =
+ amqp_channel:call(Chan, #'queue.bind'{queue = Q1,
+ exchange = X,
+ routing_key = <<"2">>}),
+
+ #'queue.bind_ok'{} =
+ amqp_channel:call(Chan, #'queue.bind'{queue = Q1,
+ exchange = X,
+ routing_key = <<"3">>}),
+
+ Q2 = <<"f-q2">>,
+ #'queue.declare_ok'{} =
+ amqp_channel:call(Chan, #'queue.declare'{
+ queue = Q2, durable = true, exclusive = false}),
+ #'queue.bind_ok'{} =
+ amqp_channel:call(Chan, #'queue.bind'{queue = Q2,
+ exchange = X,
+ routing_key = <<"4">>}),
+
+ ?assertEqual(9, count_buckets_of_exchange(Config, X)),
+ assert_ring_consistency(Config, X),
+
+ %% Both bindings to Q1 will be deleted
+ amqp_channel:call(Chan, #'queue.unbind'{queue = Q1,
+ exchange = X,
+ routing_key = <<"3">>}),
+ ?assertEqual(4, count_buckets_of_exchange(Config, X)),
+ assert_ring_consistency(Config, X),
+
+ clean_up_test_topology(Config, X, [Q1, Q2]),
+ rabbit_ct_client_helpers:close_channel(Chan),
+ ok.
%%
%% Helpers
diff --git a/deps/rabbitmq_ct_client_helpers/.gitignore b/deps/rabbitmq_ct_client_helpers/.gitignore
new file mode 100644
index 0000000000..987a3071d0
--- /dev/null
+++ b/deps/rabbitmq_ct_client_helpers/.gitignore
@@ -0,0 +1,23 @@
+*~
+.sw?
+.*.sw?
+*.beam
+/.erlang.mk/
+/cover/
+/deps/
+/doc/
+/ebin/
+/escript/
+/escript.lock
+/logs/
+/plugins/
+/plugins.lock
+/sbin/
+/sbin.lock
+/xrefr
+
+/rabbitmq_ct_client_helpers.d
+/.rabbitmq_ct_client_helpers.plt
+
+/.bazelrc
+/bazel-*
diff --git a/deps/rabbitmq_ct_client_helpers/BUILD.bazel b/deps/rabbitmq_ct_client_helpers/BUILD.bazel
new file mode 100644
index 0000000000..785214e407
--- /dev/null
+++ b/deps/rabbitmq_ct_client_helpers/BUILD.bazel
@@ -0,0 +1,11 @@
+load("@bazel-erlang//:bazel_erlang_lib.bzl", "erlang_lib")
+
+erlang_lib(
+ app_name = "rabbitmq_ct_client_helpers",
+ app_version = "master",
+ deps = [
+ "//deps/amqp_client:bazel_erlang_lib",
+ "//deps/rabbit_common:bazel_erlang_lib",
+ "//deps/rabbitmq_ct_helpers:bazel_erlang_lib",
+ ],
+)
diff --git a/deps/rabbitmq_ct_client_helpers/CODE_OF_CONDUCT.md b/deps/rabbitmq_ct_client_helpers/CODE_OF_CONDUCT.md
new file mode 100644
index 0000000000..1f6ef1c576
--- /dev/null
+++ b/deps/rabbitmq_ct_client_helpers/CODE_OF_CONDUCT.md
@@ -0,0 +1,44 @@
+# Contributor Code of Conduct
+
+As contributors and maintainers of this project, and in the interest of fostering an open
+and welcoming community, we pledge to respect all people who contribute through reporting
+issues, posting feature requests, updating documentation, submitting pull requests or
+patches, and other activities.
+
+We are committed to making participation in this project a harassment-free experience for
+everyone, regardless of level of experience, gender, gender identity and expression,
+sexual orientation, disability, personal appearance, body size, race, ethnicity, age,
+religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+ * The use of sexualized language or imagery
+ * Personal attacks
+ * Trolling or insulting/derogatory comments
+ * Public or private harassment
+ * Publishing other's private information, such as physical or electronic addresses,
+ without explicit permission
+ * Other unethical or unprofessional conduct
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments,
+commits, code, wiki edits, issues, and other contributions that are not aligned to this
+Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors
+that they deem inappropriate, threatening, offensive, or harmful.
+
+By adopting this Code of Conduct, project maintainers commit themselves to fairly and
+consistently applying these principles to every aspect of managing this project. Project
+maintainers who do not follow or enforce the Code of Conduct may be permanently removed
+from the project team.
+
+This Code of Conduct applies both within project spaces and in public spaces when an
+individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
+contacting a project maintainer at [info@rabbitmq.com](mailto:info@rabbitmq.com). All complaints will
+be reviewed and investigated and will result in a response that is deemed necessary and
+appropriate to the circumstances. Maintainers are obligated to maintain confidentiality
+with regard to the reporter of an incident.
+
+This Code of Conduct is adapted from the
+[Contributor Covenant](http://contributor-covenant.org), version 1.3.0, available at
+[contributor-covenant.org/version/1/3/0/](http://contributor-covenant.org/version/1/3/0/)
diff --git a/deps/rabbitmq_ct_client_helpers/CONTRIBUTING.md b/deps/rabbitmq_ct_client_helpers/CONTRIBUTING.md
new file mode 100644
index 0000000000..45bbcbe62e
--- /dev/null
+++ b/deps/rabbitmq_ct_client_helpers/CONTRIBUTING.md
@@ -0,0 +1,38 @@
+## Overview
+
+RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
+Pull requests is the primary place of discussing code changes.
+
+## How to Contribute
+
+The process is fairly standard:
+
+ * Fork the repository or repositories you plan on contributing to
+ * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
+ * `cd umbrella`, `make co`
+ * Create a branch with a descriptive name in the relevant repositories
+ * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
+ * Submit pull requests with an explanation what has been changed and **why**
+ * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Be patient. We will get to your pull request eventually
+
+If what you are going to work on is a substantial change, please first ask the core team
+for their opinion on the [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Code of Conduct
+
+See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
+
+
+## Contributor Agreement
+
+If you want to contribute a non-trivial change, please submit a signed copy of our
+[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+you submit your pull request. This will make it much easier (in some cases, possible)
+for the RabbitMQ team at Pivotal to merge your contribution.
+
+
+## Where to Ask Questions
+
+If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/deps/rabbitmq_ct_client_helpers/LICENSE b/deps/rabbitmq_ct_client_helpers/LICENSE
new file mode 100644
index 0000000000..f2da65d175
--- /dev/null
+++ b/deps/rabbitmq_ct_client_helpers/LICENSE
@@ -0,0 +1,4 @@
+This package is licensed under the MPL 2.0. For the MPL 2.0, please see LICENSE-MPL-RabbitMQ.
+
+If you have any questions regarding licensing, please contact us at
+info@rabbitmq.com.
diff --git a/deps/rabbitmq_ct_client_helpers/LICENSE-MPL-RabbitMQ b/deps/rabbitmq_ct_client_helpers/LICENSE-MPL-RabbitMQ
new file mode 100644
index 0000000000..14e2f777f6
--- /dev/null
+++ b/deps/rabbitmq_ct_client_helpers/LICENSE-MPL-RabbitMQ
@@ -0,0 +1,373 @@
+Mozilla Public License Version 2.0
+==================================
+
+1. Definitions
+--------------
+
+1.1. "Contributor"
+ means each individual or legal entity that creates, contributes to
+ the creation of, or owns Covered Software.
+
+1.2. "Contributor Version"
+ means the combination of the Contributions of others (if any) used
+ by a Contributor and that particular Contributor's Contribution.
+
+1.3. "Contribution"
+ means Covered Software of a particular Contributor.
+
+1.4. "Covered Software"
+ means Source Code Form to which the initial Contributor has attached
+ the notice in Exhibit A, the Executable Form of such Source Code
+ Form, and Modifications of such Source Code Form, in each case
+ including portions thereof.
+
+1.5. "Incompatible With Secondary Licenses"
+ means
+
+ (a) that the initial Contributor has attached the notice described
+ in Exhibit B to the Covered Software; or
+
+ (b) that the Covered Software was made available under the terms of
+ version 1.1 or earlier of the License, but not also under the
+ terms of a Secondary License.
+
+1.6. "Executable Form"
+ means any form of the work other than Source Code Form.
+
+1.7. "Larger Work"
+ means a work that combines Covered Software with other material, in
+ a separate file or files, that is not Covered Software.
+
+1.8. "License"
+ means this document.
+
+1.9. "Licensable"
+ means having the right to grant, to the maximum extent possible,
+ whether at the time of the initial grant or subsequently, any and
+ all of the rights conveyed by this License.
+
+1.10. "Modifications"
+ means any of the following:
+
+ (a) any file in Source Code Form that results from an addition to,
+ deletion from, or modification of the contents of Covered
+ Software; or
+
+ (b) any new file in Source Code Form that contains any Covered
+ Software.
+
+1.11. "Patent Claims" of a Contributor
+ means any patent claim(s), including without limitation, method,
+ process, and apparatus claims, in any patent Licensable by such
+ Contributor that would be infringed, but for the grant of the
+ License, by the making, using, selling, offering for sale, having
+ made, import, or transfer of either its Contributions or its
+ Contributor Version.
+
+1.12. "Secondary License"
+ means either the GNU General Public License, Version 2.0, the GNU
+ Lesser General Public License, Version 2.1, the GNU Affero General
+ Public License, Version 3.0, or any later versions of those
+ licenses.
+
+1.13. "Source Code Form"
+ means the form of the work preferred for making modifications.
+
+1.14. "You" (or "Your")
+ means an individual or a legal entity exercising rights under this
+ License. For legal entities, "You" includes any entity that
+ controls, is controlled by, or is under common control with You. For
+ purposes of this definition, "control" means (a) the power, direct
+ or indirect, to cause the direction or management of such entity,
+ whether by contract or otherwise, or (b) ownership of more than
+ fifty percent (50%) of the outstanding shares or beneficial
+ ownership of such entity.
+
+2. License Grants and Conditions
+--------------------------------
+
+2.1. Grants
+
+Each Contributor hereby grants You a world-wide, royalty-free,
+non-exclusive license:
+
+(a) under intellectual property rights (other than patent or trademark)
+ Licensable by such Contributor to use, reproduce, make available,
+ modify, display, perform, distribute, and otherwise exploit its
+ Contributions, either on an unmodified basis, with Modifications, or
+ as part of a Larger Work; and
+
+(b) under Patent Claims of such Contributor to make, use, sell, offer
+ for sale, have made, import, and otherwise transfer either its
+ Contributions or its Contributor Version.
+
+2.2. Effective Date
+
+The licenses granted in Section 2.1 with respect to any Contribution
+become effective for each Contribution on the date the Contributor first
+distributes such Contribution.
+
+2.3. Limitations on Grant Scope
+
+The licenses granted in this Section 2 are the only rights granted under
+this License. No additional rights or licenses will be implied from the
+distribution or licensing of Covered Software under this License.
+Notwithstanding Section 2.1(b) above, no patent license is granted by a
+Contributor:
+
+(a) for any code that a Contributor has removed from Covered Software;
+ or
+
+(b) for infringements caused by: (i) Your and any other third party's
+ modifications of Covered Software, or (ii) the combination of its
+ Contributions with other software (except as part of its Contributor
+ Version); or
+
+(c) under Patent Claims infringed by Covered Software in the absence of
+ its Contributions.
+
+This License does not grant any rights in the trademarks, service marks,
+or logos of any Contributor (except as may be necessary to comply with
+the notice requirements in Section 3.4).
+
+2.4. Subsequent Licenses
+
+No Contributor makes additional grants as a result of Your choice to
+distribute the Covered Software under a subsequent version of this
+License (see Section 10.2) or under the terms of a Secondary License (if
+permitted under the terms of Section 3.3).
+
+2.5. Representation
+
+Each Contributor represents that the Contributor believes its
+Contributions are its original creation(s) or it has sufficient rights
+to grant the rights to its Contributions conveyed by this License.
+
+2.6. Fair Use
+
+This License is not intended to limit any rights You have under
+applicable copyright doctrines of fair use, fair dealing, or other
+equivalents.
+
+2.7. Conditions
+
+Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
+in Section 2.1.
+
+3. Responsibilities
+-------------------
+
+3.1. Distribution of Source Form
+
+All distribution of Covered Software in Source Code Form, including any
+Modifications that You create or to which You contribute, must be under
+the terms of this License. You must inform recipients that the Source
+Code Form of the Covered Software is governed by the terms of this
+License, and how they can obtain a copy of this License. You may not
+attempt to alter or restrict the recipients' rights in the Source Code
+Form.
+
+3.2. Distribution of Executable Form
+
+If You distribute Covered Software in Executable Form then:
+
+(a) such Covered Software must also be made available in Source Code
+ Form, as described in Section 3.1, and You must inform recipients of
+ the Executable Form how they can obtain a copy of such Source Code
+ Form by reasonable means in a timely manner, at a charge no more
+ than the cost of distribution to the recipient; and
+
+(b) You may distribute such Executable Form under the terms of this
+ License, or sublicense it under different terms, provided that the
+ license for the Executable Form does not attempt to limit or alter
+ the recipients' rights in the Source Code Form under this License.
+
+3.3. Distribution of a Larger Work
+
+You may create and distribute a Larger Work under terms of Your choice,
+provided that You also comply with the requirements of this License for
+the Covered Software. If the Larger Work is a combination of Covered
+Software with a work governed by one or more Secondary Licenses, and the
+Covered Software is not Incompatible With Secondary Licenses, this
+License permits You to additionally distribute such Covered Software
+under the terms of such Secondary License(s), so that the recipient of
+the Larger Work may, at their option, further distribute the Covered
+Software under the terms of either this License or such Secondary
+License(s).
+
+3.4. Notices
+
+You may not remove or alter the substance of any license notices
+(including copyright notices, patent notices, disclaimers of warranty,
+or limitations of liability) contained within the Source Code Form of
+the Covered Software, except that You may alter any license notices to
+the extent required to remedy known factual inaccuracies.
+
+3.5. Application of Additional Terms
+
+You may choose to offer, and to charge a fee for, warranty, support,
+indemnity or liability obligations to one or more recipients of Covered
+Software. However, You may do so only on Your own behalf, and not on
+behalf of any Contributor. You must make it absolutely clear that any
+such warranty, support, indemnity, or liability obligation is offered by
+You alone, and You hereby agree to indemnify every Contributor for any
+liability incurred by such Contributor as a result of warranty, support,
+indemnity or liability terms You offer. You may include additional
+disclaimers of warranty and limitations of liability specific to any
+jurisdiction.
+
+4. Inability to Comply Due to Statute or Regulation
+---------------------------------------------------
+
+If it is impossible for You to comply with any of the terms of this
+License with respect to some or all of the Covered Software due to
+statute, judicial order, or regulation then You must: (a) comply with
+the terms of this License to the maximum extent possible; and (b)
+describe the limitations and the code they affect. Such description must
+be placed in a text file included with all distributions of the Covered
+Software under this License. Except to the extent prohibited by statute
+or regulation, such description must be sufficiently detailed for a
+recipient of ordinary skill to be able to understand it.
+
+5. Termination
+--------------
+
+5.1. The rights granted under this License will terminate automatically
+if You fail to comply with any of its terms. However, if You become
+compliant, then the rights granted under this License from a particular
+Contributor are reinstated (a) provisionally, unless and until such
+Contributor explicitly and finally terminates Your grants, and (b) on an
+ongoing basis, if such Contributor fails to notify You of the
+non-compliance by some reasonable means prior to 60 days after You have
+come back into compliance. Moreover, Your grants from a particular
+Contributor are reinstated on an ongoing basis if such Contributor
+notifies You of the non-compliance by some reasonable means, this is the
+first time You have received notice of non-compliance with this License
+from such Contributor, and You become compliant prior to 30 days after
+Your receipt of the notice.
+
+5.2. If You initiate litigation against any entity by asserting a patent
+infringement claim (excluding declaratory judgment actions,
+counter-claims, and cross-claims) alleging that a Contributor Version
+directly or indirectly infringes any patent, then the rights granted to
+You by any and all Contributors for the Covered Software under Section
+2.1 of this License shall terminate.
+
+5.3. In the event of termination under Sections 5.1 or 5.2 above, all
+end user license agreements (excluding distributors and resellers) which
+have been validly granted by You or Your distributors under this License
+prior to termination shall survive termination.
+
+************************************************************************
+* *
+* 6. Disclaimer of Warranty *
+* ------------------------- *
+* *
+* Covered Software is provided under this License on an "as is" *
+* basis, without warranty of any kind, either expressed, implied, or *
+* statutory, including, without limitation, warranties that the *
+* Covered Software is free of defects, merchantable, fit for a *
+* particular purpose or non-infringing. The entire risk as to the *
+* quality and performance of the Covered Software is with You. *
+* Should any Covered Software prove defective in any respect, You *
+* (not any Contributor) assume the cost of any necessary servicing, *
+* repair, or correction. This disclaimer of warranty constitutes an *
+* essential part of this License. No use of any Covered Software is *
+* authorized under this License except under this disclaimer. *
+* *
+************************************************************************
+
+************************************************************************
+* *
+* 7. Limitation of Liability *
+* -------------------------- *
+* *
+* Under no circumstances and under no legal theory, whether tort *
+* (including negligence), contract, or otherwise, shall any *
+* Contributor, or anyone who distributes Covered Software as *
+* permitted above, be liable to You for any direct, indirect, *
+* special, incidental, or consequential damages of any character *
+* including, without limitation, damages for lost profits, loss of *
+* goodwill, work stoppage, computer failure or malfunction, or any *
+* and all other commercial damages or losses, even if such party *
+* shall have been informed of the possibility of such damages. This *
+* limitation of liability shall not apply to liability for death or *
+* personal injury resulting from such party's negligence to the *
+* extent applicable law prohibits such limitation. Some *
+* jurisdictions do not allow the exclusion or limitation of *
+* incidental or consequential damages, so this exclusion and *
+* limitation may not apply to You. *
+* *
+************************************************************************
+
+8. Litigation
+-------------
+
+Any litigation relating to this License may be brought only in the
+courts of a jurisdiction where the defendant maintains its principal
+place of business and such litigation shall be governed by laws of that
+jurisdiction, without reference to its conflict-of-law provisions.
+Nothing in this Section shall prevent a party's ability to bring
+cross-claims or counter-claims.
+
+9. Miscellaneous
+----------------
+
+This License represents the complete agreement concerning the subject
+matter hereof. If any provision of this License is held to be
+unenforceable, such provision shall be reformed only to the extent
+necessary to make it enforceable. Any law or regulation which provides
+that the language of a contract shall be construed against the drafter
+shall not be used to construe this License against a Contributor.
+
+10. Versions of the License
+---------------------------
+
+10.1. New Versions
+
+Mozilla Foundation is the license steward. Except as provided in Section
+10.3, no one other than the license steward has the right to modify or
+publish new versions of this License. Each version will be given a
+distinguishing version number.
+
+10.2. Effect of New Versions
+
+You may distribute the Covered Software under the terms of the version
+of the License under which You originally received the Covered Software,
+or under the terms of any subsequent version published by the license
+steward.
+
+10.3. Modified Versions
+
+If you create software not governed by this License, and you want to
+create a new license for such software, you may create and use a
+modified version of this License if you rename the license and remove
+any references to the name of the license steward (except to note that
+such modified license differs from this License).
+
+10.4. Distributing Source Code Form that is Incompatible With Secondary
+Licenses
+
+If You choose to distribute Source Code Form that is Incompatible With
+Secondary Licenses under the terms of this version of the License, the
+notice described in Exhibit B of this License must be attached.
+
+Exhibit A - Source Code Form License Notice
+-------------------------------------------
+
+ This Source Code Form is subject to the terms of the Mozilla Public
+ License, v. 2.0. If a copy of the MPL was not distributed with this
+ file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+If it is not possible or desirable to put the notice in a particular
+file, then You may include the notice in a location (such as a LICENSE
+file in a relevant directory) where a recipient would be likely to look
+for such a notice.
+
+You may add additional accurate notices of copyright ownership.
+
+Exhibit B - "Incompatible With Secondary Licenses" Notice
+---------------------------------------------------------
+
+ This Source Code Form is "Incompatible With Secondary Licenses", as
+ defined by the Mozilla Public License, v. 2.0.
diff --git a/deps/rabbitmq_ct_client_helpers/Makefile b/deps/rabbitmq_ct_client_helpers/Makefile
new file mode 100644
index 0000000000..9cc749fd09
--- /dev/null
+++ b/deps/rabbitmq_ct_client_helpers/Makefile
@@ -0,0 +1,16 @@
+PROJECT = rabbitmq_ct_client_helpers
+PROJECT_DESCRIPTION = Common Test helpers for RabbitMQ (client-side helpers)
+
+DEPS = rabbit_common rabbitmq_ct_helpers amqp_client
+
+# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be
+# reviewed and merged.
+
+ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
+ERLANG_MK_COMMIT = rabbitmq-tmp
+
+DEP_PLUGINS = rabbit_common/mk/rabbitmq-build.mk \
+ rabbit_common/mk/rabbitmq-tools.mk
+
+include rabbitmq-components.mk
+include erlang.mk
diff --git a/deps/rabbitmq_ct_client_helpers/WORKSPACE.bazel b/deps/rabbitmq_ct_client_helpers/WORKSPACE.bazel
new file mode 100644
index 0000000000..ae6897a168
--- /dev/null
+++ b/deps/rabbitmq_ct_client_helpers/WORKSPACE.bazel
@@ -0,0 +1,24 @@
+load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
+
+http_archive(
+ name = "bazel-erlang",
+ sha256 = "422a9222522216f59a01703a13f578c601d6bddf5617bee8da3c43e3b299fc4e",
+ strip_prefix = "bazel-erlang-1.1.0",
+ urls = ["https://github.com/rabbitmq/bazel-erlang/archive/refs/tags/1.1.0.zip"],
+)
+
+http_archive(
+ name = "rabbitmq-server",
+ strip_prefix = "rabbitmq-server-master",
+ urls = ["https://github.com/rabbitmq/rabbitmq-server/archive/master.zip"],
+)
+
+http_archive(
+ name = "rabbitmq_ct_helpers",
+ strip_prefix = "rabbitmq-ct-helpers-master",
+ urls = ["https://github.com/rabbitmq/rabbitmq-ct-helpers/archive/master.zip"],
+)
+
+load("@rabbitmq-server//:workspace_helpers.bzl", "rabbitmq_external_deps")
+
+rabbitmq_external_deps()
diff --git a/deps/amqp10_client/erlang.mk b/deps/rabbitmq_ct_client_helpers/erlang.mk
index fce4be0b0a..ad3ddbf813 100644
--- a/deps/amqp10_client/erlang.mk
+++ b/deps/rabbitmq_ct_client_helpers/erlang.mk
@@ -17,7 +17,7 @@
ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
export ERLANG_MK_FILENAME
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
+ERLANG_MK_VERSION = 2019.07.01-53-gd80984c
ERLANG_MK_WITHOUT =
# Make 3.81 and 3.82 are deprecated.
@@ -955,10 +955,10 @@ pkg_cr_commit = master
PACKAGES += cuttlefish
pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
+pkg_cuttlefish_description = cuttlefish configuration abstraction
+pkg_cuttlefish_homepage = https://github.com/Kyorai/cuttlefish
pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
+pkg_cuttlefish_repo = https://github.com/Kyorai/cuttlefish
pkg_cuttlefish_commit = master
PACKAGES += damocles
@@ -2335,7 +2335,7 @@ pkg_jsx_description = An Erlang application for consuming, producing and manipul
pkg_jsx_homepage = https://github.com/talentdeficit/jsx
pkg_jsx_fetch = git
pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
+pkg_jsx_commit = main
PACKAGES += kafka
pkg_kafka_name = kafka
@@ -2823,7 +2823,7 @@ pkg_mysql_description = MySQL client library for Erlang/OTP
pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
pkg_mysql_fetch = git
pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
+pkg_mysql_commit = 1.7.0
PACKAGES += n2o
pkg_n2o_name = n2o
@@ -4692,8 +4692,19 @@ define dep_autopatch_rebar.erl
case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
{ok, Lock} ->
io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
+ LockPkgs = case lists:keyfind("1.2.0", 1, Lock) of
+ {_, LP} ->
+ LP;
+ _ ->
+ case lists:keyfind("1.1.0", 1, Lock) of
+ {_, LP} ->
+ LP;
+ _ ->
+ false
+ end
+ end,
+ if
+ is_list(LockPkgs) ->
io:format("~p~n", [LockPkgs]),
case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
{_, {pkg, _, Vsn}, _} ->
@@ -4702,7 +4713,7 @@ define dep_autopatch_rebar.erl
_ ->
false
end;
- _ ->
+ true ->
false
end;
_ ->
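The lock-file handling patched above reflects a rebar.lock format bump: newer rebar3 releases write a "1.2.0" header instead of "1.1.0". An abridged lock file for a hypothetical package illustrates the shape the code expects:

#   {"1.2.0",
#    [{<<"cowlib">>,{pkg,<<"cowlib">>,<<"2.9.1">>},0}]}.
#   [{pkg_hash,[{<<"cowlib">>,<<"...">>}]}].
#
# The patch accepts either format version and otherwise behaves as before.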
@@ -6987,6 +6998,343 @@ apps-eunit: test-build
endif
endif
+# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+HEX_CORE_GIT ?= https://github.com/hexpm/hex_core
+HEX_CORE_COMMIT ?= v0.7.0
+
+PACKAGES += hex_core
+pkg_hex_core_name = hex_core
+pkg_hex_core_description = Reference implementation of Hex specifications
+pkg_hex_core_homepage = $(HEX_CORE_GIT)
+pkg_hex_core_fetch = git
+pkg_hex_core_repo = $(HEX_CORE_GIT)
+pkg_hex_core_commit = $(HEX_CORE_COMMIT)
+
+# We automatically depend on hex_core when the project doesn't already.
+$(if $(filter hex_core,$(DEPS) $(BUILD_DEPS) $(DOC_DEPS) $(REL_DEPS) $(TEST_DEPS)),,\
+ $(eval $(call dep_target,hex_core)))
+
+hex-core: $(DEPS_DIR)/hex_core
+ $(verbose) if [ ! -e $(DEPS_DIR)/hex_core/ebin/dep_built ]; then \
+ $(MAKE) -C $(DEPS_DIR)/hex_core IS_DEP=1; \
+ touch $(DEPS_DIR)/hex_core/ebin/dep_built; \
+ fi
+
+# @todo This must also apply to fetching.
+HEX_CONFIG ?=
+
+define hex_config.erl
+ begin
+ Config0 = hex_core:default_config(),
+ Config0$(HEX_CONFIG)
+ end
+endef
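Since $(HEX_CONFIG) is appended directly to Config0, it has to be an Erlang map-update expression. For example, a self-hosted Hex-compatible registry (hypothetical URL) could be targeted with:

#   make hex-release-publish HEX_CONFIG='#{api_url => <<"https://hex.internal.example.com/api">>}'
#
# hex_core:default_config() already contains an api_url key; the expression
# above merely overrides it.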
+
+define hex_user_create.erl
+ {ok, _} = application:ensure_all_started(ssl),
+ {ok, _} = application:ensure_all_started(inets),
+ Config = $(hex_config.erl),
+ case hex_api_user:create(Config, <<"$(strip $1)">>, <<"$(strip $2)">>, <<"$(strip $3)">>) of
+ {ok, {201, _, #{<<"email">> := Email, <<"url">> := URL, <<"username">> := Username}}} ->
+ io:format("User ~s (~s) created at ~s~n"
+ "Please check your inbox for a confirmation email.~n"
+ "You must confirm before you are allowed to publish packages.~n",
+ [Username, Email, URL]),
+ halt(0);
+ {ok, {Status, _, Errors}} ->
+ io:format("Error ~b: ~0p~n", [Status, Errors]),
+ halt(80)
+ end
+endef
+
+# The $(info ) call inserts a new line after the password prompt.
+hex-user-create: hex-core
+ $(if $(HEX_USERNAME),,$(eval HEX_USERNAME := $(shell read -p "Username: " username; echo $$username)))
+ $(if $(HEX_PASSWORD),,$(eval HEX_PASSWORD := $(shell stty -echo; read -p "Password: " password; stty echo; echo $$password) $(info )))
+ $(if $(HEX_EMAIL),,$(eval HEX_EMAIL := $(shell read -p "Email: " email; echo $$email)))
+ $(gen_verbose) $(call erlang,$(call hex_user_create.erl,$(HEX_USERNAME),$(HEX_PASSWORD),$(HEX_EMAIL)))
+
+define hex_key_add.erl
+ {ok, _} = application:ensure_all_started(ssl),
+ {ok, _} = application:ensure_all_started(inets),
+ Config = $(hex_config.erl),
+ ConfigF = Config#{api_key => iolist_to_binary([<<"Basic ">>, base64:encode(<<"$(strip $1):$(strip $2)">>)])},
+ Permissions = [
+ case string:split(P, <<":">>) of
+ [D] -> #{domain => D};
+ [D, R] -> #{domain => D, resource => R}
+ end
+ || P <- string:split(<<"$(strip $4)">>, <<",">>, all)],
+ case hex_api_key:add(ConfigF, <<"$(strip $3)">>, Permissions) of
+ {ok, {201, _, #{<<"secret">> := Secret}}} ->
+ io:format("Key ~s created for user ~s~nSecret: ~s~n"
+ "Please store the secret in a secure location, such as a password store.~n"
+ "The secret will be requested for most Hex-related operations.~n",
+ [<<"$(strip $3)">>, <<"$(strip $1)">>, Secret]),
+ halt(0);
+ {ok, {Status, _, Errors}} ->
+ io:format("Error ~b: ~0p~n", [Status, Errors]),
+ halt(81)
+ end
+endef
+
+hex-key-add: hex-core
+ $(if $(HEX_USERNAME),,$(eval HEX_USERNAME := $(shell read -p "Username: " username; echo $$username)))
+ $(if $(HEX_PASSWORD),,$(eval HEX_PASSWORD := $(shell stty -echo; read -p "Password: " password; stty echo; echo $$password) $(info )))
+ $(gen_verbose) $(call erlang,$(call hex_key_add.erl,$(HEX_USERNAME),$(HEX_PASSWORD),\
+ $(if $(name),$(name),$(shell hostname)-erlang-mk),\
+ $(if $(perm),$(perm),api)))
+
+HEX_TARBALL_EXTRA_METADATA ?=
+
+# @todo Check that we can += files
+HEX_TARBALL_FILES ?= \
+ $(wildcard early-plugins.mk) \
+ $(wildcard ebin/$(PROJECT).app) \
+ $(wildcard ebin/$(PROJECT).appup) \
+ $(wildcard $(notdir $(ERLANG_MK_FILENAME))) \
+ $(sort $(call core_find,include/,*.hrl)) \
+ $(wildcard LICENSE*) \
+ $(wildcard Makefile) \
+ $(wildcard plugins.mk) \
+ $(sort $(call core_find,priv/,*)) \
+ $(wildcard README*) \
+ $(wildcard rebar.config) \
+ $(sort $(call core_find,src/,*))
+
+HEX_TARBALL_OUTPUT_FILE ?= $(ERLANG_MK_TMP)/$(PROJECT).tar
+
+# @todo Need to check for rebar.config and/or the absence of DEPS to know
+# whether a project will work with Rebar.
+#
+# @todo contributors licenses links in HEX_TARBALL_EXTRA_METADATA
+
+# In order to build the requirements metadata we look into DEPS.
+# We do not require that the project use Hex dependencies, however
+# Hex.pm does require that the package name and version numbers
+# correspond to a real Hex package.
+define hex_tarball_create.erl
+ Files0 = [$(call comma_list,$(patsubst %,"%",$(HEX_TARBALL_FILES)))],
+ Requirements0 = #{
+ $(foreach d,$(DEPS),
+ <<"$(if $(subst hex,,$(call query_fetch_method,$d)),$d,$(if $(word 3,$(dep_$d)),$(word 3,$(dep_$d)),$d))">> => #{
+ <<"app">> => <<"$d">>,
+ <<"optional">> => false,
+ <<"requirement">> => <<"$(call query_version,$d)">>
+ },)
+ $(if $(DEPS),dummy => dummy)
+ },
+ Requirements = maps:remove(dummy, Requirements0),
+ Metadata0 = #{
+ app => <<"$(strip $(PROJECT))">>,
+ build_tools => [<<"make">>, <<"rebar3">>],
+ description => <<"$(strip $(PROJECT_DESCRIPTION))">>,
+ files => [unicode:characters_to_binary(F) || F <- Files0],
+ name => <<"$(strip $(PROJECT))">>,
+ requirements => Requirements,
+ version => <<"$(strip $(PROJECT_VERSION))">>
+ },
+ Metadata = Metadata0$(HEX_TARBALL_EXTRA_METADATA),
+ Files = [case file:read_file(F) of
+ {ok, Bin} ->
+ {F, Bin};
+ {error, Reason} ->
+ io:format("Error trying to open file ~0p: ~0p~n", [F, Reason]),
+ halt(82)
+ end || F <- Files0],
+ case hex_tarball:create(Metadata, Files) of
+ {ok, #{tarball := Tarball}} ->
+ ok = file:write_file("$(strip $(HEX_TARBALL_OUTPUT_FILE))", Tarball),
+ halt(0);
+ {error, Reason} ->
+ io:format("Error ~0p~n", [Reason]),
+ halt(83)
+ end
+endef
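To relate this metadata to a real project, a minimal hypothetical Makefile producing a single Hex requirement could look like the sketch below; make hex-tarball-create would then write $(ERLANG_MK_TMP)/my_plugin.tar:

#   PROJECT = my_plugin
#   PROJECT_DESCRIPTION = Example plugin
#   PROJECT_VERSION = 1.0.0
#   DEPS = cowlib
#   dep_cowlib = hex 2.9.1
#   include erlang.mk
#
# The requirements map in the generated metadata would then contain roughly:
#   <<"cowlib">> => #{<<"app">> => <<"cowlib">>, <<"optional">> => false,
#                     <<"requirement">> => <<"2.9.1">>}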
+
+hex_tar_verbose_0 = @echo " TAR $(notdir $(ERLANG_MK_TMP))/$(@F)";
+hex_tar_verbose_2 = set -x;
+hex_tar_verbose = $(hex_tar_verbose_$(V))
+
+$(HEX_TARBALL_OUTPUT_FILE): hex-core app
+ $(hex_tar_verbose) $(call erlang,$(call hex_tarball_create.erl))
+
+hex-tarball-create: $(HEX_TARBALL_OUTPUT_FILE)
+
+define hex_release_publish_summary.erl
+ {ok, Tarball} = erl_tar:open("$(strip $(HEX_TARBALL_OUTPUT_FILE))", [read]),
+ ok = erl_tar:extract(Tarball, [{cwd, "$(ERLANG_MK_TMP)"}, {files, ["metadata.config"]}]),
+ {ok, Metadata} = file:consult("$(ERLANG_MK_TMP)/metadata.config"),
+ #{
+ <<"name">> := Name,
+ <<"version">> := Version,
+ <<"files">> := Files,
+ <<"requirements">> := Deps
+ } = maps:from_list(Metadata),
+ io:format("Publishing ~s ~s~n Dependencies:~n", [Name, Version]),
+ case Deps of
+ [] ->
+ io:format(" (none)~n");
+ _ ->
+ [begin
+ #{<<"app">> := DA, <<"requirement">> := DR} = maps:from_list(D),
+ io:format(" ~s ~s~n", [DA, DR])
+ end || {_, D} <- Deps]
+ end,
+ io:format(" Included files:~n"),
+ [io:format(" ~s~n", [F]) || F <- Files],
+ io:format("You may also review the contents of the tarball file.~n"
+ "Please enter your secret key to proceed.~n"),
+ halt(0)
+endef
+
+define hex_release_publish.erl
+ {ok, _} = application:ensure_all_started(ssl),
+ {ok, _} = application:ensure_all_started(inets),
+ Config = $(hex_config.erl),
+ ConfigF = Config#{api_key => <<"$(strip $1)">>},
+ {ok, Tarball} = file:read_file("$(strip $(HEX_TARBALL_OUTPUT_FILE))"),
+ case hex_api_release:publish(ConfigF, Tarball, [{replace, $2}]) of
+ {ok, {200, _, #{}}} ->
+ io:format("Release replaced~n"),
+ halt(0);
+ {ok, {201, _, #{}}} ->
+ io:format("Release published~n"),
+ halt(0);
+ {ok, {Status, _, Errors}} ->
+ io:format("Error ~b: ~0p~n", [Status, Errors]),
+ halt(84)
+ end
+endef
+
+hex-release-tarball: hex-core $(HEX_TARBALL_OUTPUT_FILE)
+ $(verbose) $(call erlang,$(call hex_release_publish_summary.erl))
+
+hex-release-publish: hex-core hex-release-tarball
+ $(if $(HEX_SECRET),,$(eval HEX_SECRET := $(shell stty -echo; read -p "Secret: " secret; stty echo; echo $$secret) $(info )))
+ $(gen_verbose) $(call erlang,$(call hex_release_publish.erl,$(HEX_SECRET),false))
+
+hex-release-replace: hex-core hex-release-tarball
+ $(if $(HEX_SECRET),,$(eval HEX_SECRET := $(shell stty -echo; read -p "Secret: " secret; stty echo; echo $$secret) $(info )))
+ $(gen_verbose) $(call erlang,$(call hex_release_publish.erl,$(HEX_SECRET),true))
+
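Taken together, the targets above give a publish flow along these lines (the user and key names are hypothetical):

#   make hex-key-add HEX_USERNAME=alice name=ci perm=api   # once, to obtain a key secret
#   make hex-release-tarball                               # build and review the tarball
#   make hex-release-publish HEX_SECRET=<key secret>       # upload the release
#
# hex-release-replace and hex-release-delete can later amend or remove it.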
+define hex_release_delete.erl
+ {ok, _} = application:ensure_all_started(ssl),
+ {ok, _} = application:ensure_all_started(inets),
+ Config = $(hex_config.erl),
+ ConfigF = Config#{api_key => <<"$(strip $1)">>},
+ case hex_api_release:delete(ConfigF, <<"$(strip $(PROJECT))">>, <<"$(strip $(PROJECT_VERSION))">>) of
+ {ok, {204, _, _}} ->
+ io:format("Release $(strip $(PROJECT_VERSION)) deleted~n"),
+ halt(0);
+ {ok, {Status, _, Errors}} ->
+ io:format("Error ~b: ~0p~n", [Status, Errors]),
+ halt(85)
+ end
+endef
+
+hex-release-delete: hex-core
+ $(if $(HEX_SECRET),,$(eval HEX_SECRET := $(shell stty -echo; read -p "Secret: " secret; stty echo; echo $$secret) $(info )))
+ $(gen_verbose) $(call erlang,$(call hex_release_delete.erl,$(HEX_SECRET)))
+
+define hex_release_retire.erl
+ {ok, _} = application:ensure_all_started(ssl),
+ {ok, _} = application:ensure_all_started(inets),
+ Config = $(hex_config.erl),
+ ConfigF = Config#{api_key => <<"$(strip $1)">>},
+ Params = #{<<"reason">> => <<"$(strip $3)">>, <<"message">> => <<"$(strip $4)">>},
+ case hex_api_release:retire(ConfigF, <<"$(strip $(PROJECT))">>, <<"$(strip $2)">>, Params) of
+ {ok, {204, _, _}} ->
+ io:format("Release $(strip $2) has been retired~n"),
+ halt(0);
+ {ok, {Status, _, Errors}} ->
+ io:format("Error ~b: ~0p~n", [Status, Errors]),
+ halt(86)
+ end
+endef
+
+hex-release-retire: hex-core
+ $(if $(HEX_SECRET),,$(eval HEX_SECRET := $(shell stty -echo; read -p "Secret: " secret; stty echo; echo $$secret) $(info )))
+ $(gen_verbose) $(call erlang,$(call hex_release_retire.erl,$(HEX_SECRET),\
+ $(if $(HEX_VERSION),$(HEX_VERSION),$(PROJECT_VERSION)),\
+ $(if $(HEX_REASON),$(HEX_REASON),invalid),\
+ $(HEX_MESSAGE)))
+
+define hex_release_unretire.erl
+ {ok, _} = application:ensure_all_started(ssl),
+ {ok, _} = application:ensure_all_started(inets),
+ Config = $(hex_config.erl),
+ ConfigF = Config#{api_key => <<"$(strip $1)">>},
+ case hex_api_release:unretire(ConfigF, <<"$(strip $(PROJECT))">>, <<"$(strip $2)">>) of
+ {ok, {204, _, _}} ->
+ io:format("Release $(strip $2) is not retired anymore~n"),
+ halt(0);
+ {ok, {Status, _, Errors}} ->
+ io:format("Error ~b: ~0p~n", [Status, Errors]),
+ halt(87)
+ end
+endef
+
+hex-release-unretire: hex-core
+ $(if $(HEX_SECRET),,$(eval HEX_SECRET := $(shell stty -echo; read -p "Secret: " secret; stty echo; echo $$secret) $(info )))
+ $(gen_verbose) $(call erlang,$(call hex_release_unretire.erl,$(HEX_SECRET),\
+ $(if $(HEX_VERSION),$(HEX_VERSION),$(PROJECT_VERSION))))
+
+HEX_DOCS_DOC_DIR ?= doc/
+HEX_DOCS_TARBALL_FILES ?= $(sort $(call core_find,$(HEX_DOCS_DOC_DIR),*))
+HEX_DOCS_TARBALL_OUTPUT_FILE ?= $(ERLANG_MK_TMP)/$(PROJECT)-docs.tar.gz
+
+$(HEX_DOCS_TARBALL_OUTPUT_FILE): hex-core app docs
+ $(hex_tar_verbose) tar czf $(HEX_DOCS_TARBALL_OUTPUT_FILE) -C $(HEX_DOCS_DOC_DIR) \
+ $(HEX_DOCS_TARBALL_FILES:$(HEX_DOCS_DOC_DIR)%=%)
+
+hex-docs-tarball-create: $(HEX_DOCS_TARBALL_OUTPUT_FILE)
+
+define hex_docs_publish.erl
+ {ok, _} = application:ensure_all_started(ssl),
+ {ok, _} = application:ensure_all_started(inets),
+ Config = $(hex_config.erl),
+ ConfigF = Config#{api_key => <<"$(strip $1)">>},
+ {ok, Tarball} = file:read_file("$(strip $(HEX_DOCS_TARBALL_OUTPUT_FILE))"),
+ case hex_api:post(ConfigF,
+ ["packages", "$(strip $(PROJECT))", "releases", "$(strip $(PROJECT_VERSION))", "docs"],
+ {"application/octet-stream", Tarball}) of
+ {ok, {Status, _, _}} when Status >= 200, Status < 300 ->
+ io:format("Docs published~n"),
+ halt(0);
+ {ok, {Status, _, Errors}} ->
+ io:format("Error ~b: ~0p~n", [Status, Errors]),
+ halt(88)
+ end
+endef
+
+hex-docs-publish: hex-core hex-docs-tarball-create
+ $(if $(HEX_SECRET),,$(eval HEX_SECRET := $(shell stty -echo; read -p "Secret: " secret; stty echo; echo $$secret) $(info )))
+ $(gen_verbose) $(call erlang,$(call hex_docs_publish.erl,$(HEX_SECRET)))
+
+define hex_docs_delete.erl
+ {ok, _} = application:ensure_all_started(ssl),
+ {ok, _} = application:ensure_all_started(inets),
+ Config = $(hex_config.erl),
+ ConfigF = Config#{api_key => <<"$(strip $1)">>},
+ case hex_api:delete(ConfigF,
+ ["packages", "$(strip $(PROJECT))", "releases", "$(strip $2)", "docs"]) of
+ {ok, {Status, _, _}} when Status >= 200, Status < 300 ->
+ io:format("Docs removed~n"),
+ halt(0);
+ {ok, {Status, _, Errors}} ->
+ io:format("Error ~b: ~0p~n", [Status, Errors]),
+ halt(89)
+ end
+endef
+
+hex-docs-delete: hex-core
+ $(if $(HEX_SECRET),,$(eval HEX_SECRET := $(shell stty -echo; read -p "Secret: " secret; stty echo; echo $$secret) $(info )))
+ $(gen_verbose) $(call erlang,$(call hex_docs_delete.erl,$(HEX_SECRET),\
+ $(if $(HEX_VERSION),$(HEX_VERSION),$(PROJECT_VERSION))))
+
# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
# This file is part of erlang.mk and subject to the terms of the ISC License.
diff --git a/deps/amqp10_client/rabbitmq-components.mk b/deps/rabbitmq_ct_client_helpers/rabbitmq-components.mk
index b2a3be8b35..21edf8fcbd 100644
--- a/deps/amqp10_client/rabbitmq-components.mk
+++ b/deps/rabbitmq_ct_client_helpers/rabbitmq-components.mk
@@ -117,7 +117,7 @@ dep_cowlib = hex 2.9.1
dep_jsx = hex 2.11.0
dep_lager = hex 3.8.0
dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
+dep_ra = git https://github.com/rabbitmq/ra.git main
dep_ranch = hex 1.7.1
dep_recon = hex 2.5.1
dep_observer_cli = hex 1.5.4
diff --git a/deps/rabbitmq_ct_client_helpers/src/rabbit_ct_client_helpers.erl b/deps/rabbitmq_ct_client_helpers/src/rabbit_ct_client_helpers.erl
new file mode 100644
index 0000000000..d084337189
--- /dev/null
+++ b/deps/rabbitmq_ct_client_helpers/src/rabbit_ct_client_helpers.erl
@@ -0,0 +1,302 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(rabbit_ct_client_helpers).
+
+-include_lib("common_test/include/ct.hrl").
+-include_lib("amqp_client/include/amqp_client.hrl").
+
+-export([
+ setup_steps/0,
+ teardown_steps/0,
+ start_channels_managers/1,
+ stop_channels_managers/1,
+
+ open_connection/2,
+ open_unmanaged_connection/1, open_unmanaged_connection/2,
+ open_unmanaged_connection/3, open_unmanaged_connection/4, open_unmanaged_connection/5,
+ open_unmanaged_connection_direct/1, open_unmanaged_connection_direct/2,
+ open_unmanaged_connection_direct/3, open_unmanaged_connection_direct/4,
+ open_unmanaged_connection_direct/5,
+ open_unmanaged_connection/6,
+ close_connection/1, open_channel/2, open_channel/1,
+ close_channel/1,
+ open_connection_and_channel/2, open_connection_and_channel/1,
+ close_connection_and_channel/2,
+ close_channels_and_connection/2,
+
+ publish/3, consume/3, consume_without_acknowledging/3, fetch/3
+ ]).
+
+%% -------------------------------------------------------------------
+%% Client setup/teardown steps.
+%% -------------------------------------------------------------------
+
+setup_steps() ->
+ [
+ fun start_channels_managers/1
+ ].
+
+teardown_steps() ->
+ [
+ fun stop_channels_managers/1
+ ].
+
+start_channels_managers(Config) ->
+ ok = application:set_env(amqp_client, gen_server_call_timeout, infinity),
+ NodeConfigs = rabbit_ct_broker_helpers:get_node_configs(Config),
+ NodeConfigs1 = [start_channels_manager(NC) || NC <- NodeConfigs],
+ rabbit_ct_helpers:set_config(Config, {rmq_nodes, NodeConfigs1}).
+
+start_channels_manager(NodeConfig) ->
+ Pid = erlang:spawn(
+ fun() -> channels_manager(NodeConfig, undefined, []) end),
+ rabbit_ct_helpers:set_config(NodeConfig, {channels_manager, Pid}).
+
+stop_channels_managers(Config) ->
+ NodeConfigs = rabbit_ct_broker_helpers:get_node_configs(Config),
+ NodeConfigs1 = [stop_channels_manager(NC) || NC <- NodeConfigs],
+ rabbit_ct_helpers:set_config(Config, {rmq_nodes, NodeConfigs1}).
+
+stop_channels_manager(NodeConfig) ->
+ Pid = ?config(channels_manager, NodeConfig),
+ Pid ! stop,
+ proplists:delete(channels_manager, NodeConfig).
+
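+%% channels_manager/3 is a small per-node bookkeeping process. It owns at
+%% most one managed AMQP connection per node plus the channels opened on
+%% it, and understands the following messages (see the receive clauses
+%% below):
+%%
+%%   {open_connection, From}  - reply with a connection pid, (re)opening
+%%                              the connection if it is missing or dead
+%%   {open_channel, From}     - same, but also open and monitor a new
+%%                              channel and reply with its pid
+%%   {close_everything, From} - close all tracked channels and the
+%%                              connection, then reply `ok'
+%%   stop                     - close everything and terminate
+%%
+%% 'DOWN' messages from the monitors are used to forget connections and
+%% channels that died on their own.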
+channels_manager(NodeConfig, ConnTuple, Channels) ->
+ receive
+ {open_connection, From} ->
+ {Conn1, _} = ConnTuple1 = open_conn(NodeConfig, ConnTuple),
+ From ! Conn1,
+ channels_manager(NodeConfig, ConnTuple1, Channels);
+ {open_channel, From} ->
+ {Conn1, _} = ConnTuple1 = open_conn(NodeConfig, ConnTuple),
+ {ok, Ch} = amqp_connection:open_channel(Conn1),
+ ChMRef = erlang:monitor(process, Ch),
+ From ! Ch,
+ channels_manager(NodeConfig, ConnTuple1,
+ [{Ch, ChMRef} | Channels]);
+ {close_everything, From} ->
+ close_everything(ConnTuple, Channels),
+ From ! ok,
+ channels_manager(NodeConfig, undefined, []);
+ {'DOWN', ConnMRef, process, Conn, _}
+ when {Conn, ConnMRef} =:= ConnTuple ->
+ channels_manager(NodeConfig, undefined, Channels);
+ {'DOWN', ChMRef, process, Ch, _} ->
+ Channels1 = Channels -- [{Ch, ChMRef}],
+ channels_manager(NodeConfig, ConnTuple, Channels1);
+ stop ->
+ close_everything(ConnTuple, Channels);
+ Unhandled ->
+ ct:pal(?LOW_IMPORTANCE,
+ "Channels manager ~p: unhandled message: ~p",
+ [self(), Unhandled]),
+ channels_manager(NodeConfig, ConnTuple, Channels)
+ end.
+
+open_conn(NodeConfig, undefined) ->
+ Port = ?config(tcp_port_amqp, NodeConfig),
+ Params = #amqp_params_network{port = Port},
+ {ok, Conn} = amqp_connection:start(Params),
+ MRef = erlang:monitor(process, Conn),
+ {Conn, MRef};
+open_conn(NodeConfig, {Conn, _} = ConnTuple) ->
+ case erlang:is_process_alive(Conn) of
+ true -> ConnTuple;
+ false -> open_conn(NodeConfig, undefined)
+ end.
+
+close_everything(Conn, [{Ch, MRef} | Rest]) ->
+ case erlang:is_process_alive(Ch) of
+ true ->
+ amqp_channel:close(Ch),
+ receive
+ {'DOWN', MRef, _, Ch, Info} ->
+ ct:pal("Channel ~p closed: ~p~n", [Ch, Info])
+ end;
+ false ->
+ ok
+ end,
+ close_everything(Conn, Rest);
+close_everything({Conn, MRef}, []) ->
+ case erlang:is_process_alive(Conn) of
+ true ->
+ amqp_connection:close(Conn),
+ receive
+ {'DOWN', MRef, _, Conn, Info} ->
+ ct:pal("Connection ~p closed: ~p~n", [Conn, Info])
+ end;
+ false ->
+ ok
+ end;
+close_everything(undefined, []) ->
+ ok.
+
+%% -------------------------------------------------------------------
+%% Public API.
+%% -------------------------------------------------------------------
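+%%
+%% A typical Common Test suite wires these helpers in via the step lists
+%% above and then uses the connection/channel helpers from test cases.
+%% A minimal sketch (assuming the usual rabbit_ct_helpers:run_setup_steps/2
+%% flow from rabbitmq_ct_helpers and an already-declared queue <<"q">>):
+%%
+%%   init_per_suite(Config) ->
+%%       rabbit_ct_helpers:run_setup_steps(Config,
+%%         rabbit_ct_broker_helpers:setup_steps() ++
+%%         rabbit_ct_client_helpers:setup_steps()).
+%%
+%%   some_test(Config) ->
+%%       {Conn, Ch} = rabbit_ct_client_helpers:open_connection_and_channel(Config, 0),
+%%       rabbit_ct_client_helpers:publish(Ch, <<"q">>, 10),
+%%       ok = rabbit_ct_client_helpers:consume(Ch, <<"q">>, 10),
+%%       rabbit_ct_client_helpers:close_connection_and_channel(Conn, Ch).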
+
+open_connection(Config, Node) ->
+ Pid = rabbit_ct_broker_helpers:get_node_config(Config, Node,
+ channels_manager),
+ Pid ! {open_connection, self()},
+ receive
+ Conn when is_pid(Conn) -> Conn
+ end.
+
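+%% "Unmanaged" connections are opened directly for the caller and are not
+%% tracked by the per-node channels manager, so the caller is responsible
+%% for closing them. The *_direct variants connect via Erlang distribution
+%% (#amqp_params_direct{}) instead of plain AMQP over TCP
+%% (#amqp_params_network{}).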
+open_unmanaged_connection(Config) ->
+ open_unmanaged_connection(Config, 0).
+
+open_unmanaged_connection(Config, Node) ->
+ open_unmanaged_connection(Config, Node, ?config(rmq_vhost, Config)).
+
+open_unmanaged_connection(Config, Node, VHost) ->
+ open_unmanaged_connection(Config, Node, VHost,
+ ?config(rmq_username, Config), ?config(rmq_password, Config)).
+
+open_unmanaged_connection(Config, Node, Username, Password) ->
+ open_unmanaged_connection(Config, Node, ?config(rmq_vhost, Config),
+ Username, Password).
+
+open_unmanaged_connection(Config, Node, VHost, Username, Password) ->
+ open_unmanaged_connection(Config, Node, VHost, Username, Password,
+ network).
+
+open_unmanaged_connection_direct(Config) ->
+ open_unmanaged_connection_direct(Config, 0).
+
+open_unmanaged_connection_direct(Config, Node) ->
+ open_unmanaged_connection_direct(Config, Node, ?config(rmq_vhost, Config)).
+
+open_unmanaged_connection_direct(Config, Node, VHost) ->
+ open_unmanaged_connection_direct(Config, Node, VHost,
+ ?config(rmq_username, Config), ?config(rmq_password, Config)).
+
+open_unmanaged_connection_direct(Config, Node, Username, Password) ->
+ open_unmanaged_connection_direct(Config, Node, ?config(rmq_vhost, Config),
+ Username, Password).
+
+open_unmanaged_connection_direct(Config, Node, VHost, Username, Password) ->
+ open_unmanaged_connection(Config, Node, VHost, Username, Password, direct).
+
+open_unmanaged_connection(Config, Node, VHost, Username, Password, Type) ->
+ Params = case Type of
+ network ->
+ Port = rabbit_ct_broker_helpers:get_node_config(Config, Node,
+ tcp_port_amqp),
+ #amqp_params_network{port = Port,
+ virtual_host = VHost,
+ username = Username,
+ password = Password};
+ direct ->
+ NodeName = rabbit_ct_broker_helpers:get_node_config(Config, Node,
+ nodename),
+ #amqp_params_direct{node = NodeName,
+ virtual_host = VHost,
+ username = Username,
+ password = Password}
+ end,
+ case amqp_connection:start(Params) of
+ {ok, Conn} -> Conn;
+ {error, _} = Error -> Error
+ end.
+
+open_channel(Config) ->
+ open_channel(Config, 0).
+
+open_channel(Config, Node) ->
+ Pid = rabbit_ct_broker_helpers:get_node_config(Config, Node,
+ channels_manager),
+ Pid ! {open_channel, self()},
+ receive
+ Ch when is_pid(Ch) -> Ch
+ end.
+
+open_connection_and_channel(Config) ->
+ open_connection_and_channel(Config, 0).
+
+open_connection_and_channel(Config, Node) ->
+ Conn = open_connection(Config, Node),
+ Ch = open_channel(Config, Node),
+ {Conn, Ch}.
+
+close_channel(Ch) ->
+ case is_process_alive(Ch) of
+ true -> amqp_channel:close(Ch);
+ false -> ok
+ end.
+
+close_connection(Conn) ->
+ case is_process_alive(Conn) of
+ true -> amqp_connection:close(Conn);
+ false -> ok
+ end.
+
+close_connection_and_channel(Conn, Ch) ->
+ _ = close_channel(Ch),
+ case close_connection(Conn) of
+ ok -> ok;
+ closing -> ok
+ end.
+
+close_channels_and_connection(Config, Node) ->
+ Pid = rabbit_ct_broker_helpers:get_node_config(Config, Node,
+ channels_manager),
+ Pid ! {close_everything, self()},
+ receive
+ ok -> ok
+ end.
+
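+%% The helpers below publish and consume simple numbered payloads
+%% (<<"1">>, <<"2">>, ...): publish/3 publishes Count persistent messages
+%% using publisher confirms, consume/3 asserts that the same payloads are
+%% delivered in order, consume_without_acknowledging/3 returns the
+%% delivery tags it received without acking them, and fetch/3 pulls Count
+%% messages with basic.get.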
+publish(Ch, QName, Count) ->
+ amqp_channel:call(Ch, #'confirm.select'{}),
+ [amqp_channel:call(Ch,
+ #'basic.publish'{routing_key = QName},
+ #amqp_msg{props = #'P_basic'{delivery_mode = 2},
+ payload = list_to_binary(integer_to_list(I))})
+ || I <- lists:seq(1, Count)],
+ amqp_channel:wait_for_confirms(Ch).
+
+consume(Ch, QName, Count) ->
+ amqp_channel:subscribe(Ch, #'basic.consume'{queue = QName, no_ack = true},
+ self()),
+ CTag = receive #'basic.consume_ok'{consumer_tag = C} -> C end,
+ [begin
+ Exp = list_to_binary(integer_to_list(I)),
+ receive {#'basic.deliver'{consumer_tag = CTag},
+ #amqp_msg{payload = Exp}} ->
+ ok
+ after 5000 ->
+ exit(timeout)
+ end
+ end || I <- lists:seq(1, Count)],
+ amqp_channel:call(Ch, #'basic.cancel'{consumer_tag = CTag}),
+ ok.
+
+consume_without_acknowledging(Ch, QName, Count) ->
+ amqp_channel:subscribe(Ch, #'basic.consume'{queue = QName, no_ack = false},
+ self()),
+ CTag = receive #'basic.consume_ok'{consumer_tag = C} -> C end,
+ accumulate_without_acknowledging(Ch, CTag, Count, []).
+
+accumulate_without_acknowledging(Ch, CTag, Remaining, Acc) when Remaining =:= 0 ->
+ amqp_channel:call(Ch, #'basic.cancel'{consumer_tag = CTag}),
+ lists:reverse(Acc);
+accumulate_without_acknowledging(Ch, CTag, Remaining, Acc) ->
+ receive {#'basic.deliver'{consumer_tag = CTag, delivery_tag = DTag}, _Msg} ->
+ accumulate_without_acknowledging(Ch, CTag, Remaining - 1, [DTag | Acc])
+ after 5000 ->
+ amqp_channel:call(Ch, #'basic.cancel'{consumer_tag = CTag}),
+ exit(timeout)
+ end.
+
+
+fetch(Ch, QName, Count) ->
+ [{#'basic.get_ok'{}, _} =
+ amqp_channel:call(Ch, #'basic.get'{queue = QName}) ||
+ _ <- lists:seq(1, Count)],
+ ok.
diff --git a/deps/rabbitmq_ct_helpers/.gitignore b/deps/rabbitmq_ct_helpers/.gitignore
new file mode 100644
index 0000000000..85195da77a
--- /dev/null
+++ b/deps/rabbitmq_ct_helpers/.gitignore
@@ -0,0 +1,27 @@
+*~
+.sw?
+.*.sw?
+*.beam
+.terraform/
+.terraform-*
+terraform.tfstate*
+*terraform.lock*
+/.erlang.mk/
+/cover/
+/deps/
+/doc/
+/ebin/
+/escript/
+/escript.lock
+/logs/
+/plugins/
+/plugins.lock
+/sbin/
+/sbin.lock
+/xrefr
+
+/rabbitmq_ct_helpers.d
+/.rabbitmq_ct_helpers.plt
+
+/.bazelrc
+/bazel-*
diff --git a/deps/rabbitmq_ct_helpers/BUILD.bazel b/deps/rabbitmq_ct_helpers/BUILD.bazel
new file mode 100644
index 0000000000..994b7ac438
--- /dev/null
+++ b/deps/rabbitmq_ct_helpers/BUILD.bazel
@@ -0,0 +1,14 @@
+load("@bazel-erlang//:bazel_erlang_lib.bzl", "erlang_lib")
+
+erlang_lib(
+ app_name = "rabbitmq_ct_helpers",
+ app_version = "master",
+ extra_priv = [
+ "tools/tls-certs/Makefile",
+ "tools/tls-certs/openssl.cnf.in",
+ ],
+ deps = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+ "@proper//:bazel_erlang_lib",
+ ],
+)
diff --git a/deps/rabbitmq_ct_helpers/CODE_OF_CONDUCT.md b/deps/rabbitmq_ct_helpers/CODE_OF_CONDUCT.md
new file mode 100644
index 0000000000..08697906fd
--- /dev/null
+++ b/deps/rabbitmq_ct_helpers/CODE_OF_CONDUCT.md
@@ -0,0 +1,44 @@
+# Contributor Code of Conduct
+
+As contributors and maintainers of this project, and in the interest of fostering an open
+and welcoming community, we pledge to respect all people who contribute through reporting
+issues, posting feature requests, updating documentation, submitting pull requests or
+patches, and other activities.
+
+We are committed to making participation in this project a harassment-free experience for
+everyone, regardless of level of experience, gender, gender identity and expression,
+sexual orientation, disability, personal appearance, body size, race, ethnicity, age,
+religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+ * The use of sexualized language or imagery
+ * Personal attacks
+ * Trolling or insulting/derogatory comments
+ * Public or private harassment
+ * Publishing others' private information, such as physical or electronic addresses,
+ without explicit permission
+ * Other unethical or unprofessional conduct
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments,
+commits, code, wiki edits, issues, and other contributions that are not aligned to this
+Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors
+that they deem inappropriate, threatening, offensive, or harmful.
+
+By adopting this Code of Conduct, project maintainers commit themselves to fairly and
+consistently applying these principles to every aspect of managing this project. Project
+maintainers who do not follow or enforce the Code of Conduct may be permanently removed
+from the project team.
+
+This Code of Conduct applies both within project spaces and in public spaces when an
+individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
+contacting a project maintainer at [info@rabbitmq.com](mailto:info@rabbitmq.com). All complaints will
+be reviewed and investigated and will result in a response that is deemed necessary and
+appropriate to the circumstances. Maintainers are obligated to maintain confidentiality
+with regard to the reporter of an incident.
+
+This Code of Conduct is adapted from the
+[Contributor Covenant](https://contributor-covenant.org), version 1.3.0, available at
+[contributor-covenant.org/version/1/3/0/](https://contributor-covenant.org/version/1/3/0/)
diff --git a/deps/rabbitmq_ct_helpers/CONTRIBUTING.md b/deps/rabbitmq_ct_helpers/CONTRIBUTING.md
new file mode 100644
index 0000000000..23a92fef9c
--- /dev/null
+++ b/deps/rabbitmq_ct_helpers/CONTRIBUTING.md
@@ -0,0 +1,38 @@
+## Overview
+
+RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
+Pull requests are the primary place to discuss code changes.
+
+## How to Contribute
+
+The process is fairly standard:
+
+ * Fork the repository or repositories you plan on contributing to
+ * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
+ * `cd umbrella`, `make co`
+ * Create a branch with a descriptive name in the relevant repositories
+ * Make your changes, run tests, commit with a [descriptive message](https://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
+ * Submit pull requests with an explanation of what has been changed and **why**
+ * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Be patient. We will get to your pull request eventually
+
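+A minimal shell sketch of the steps above (paths, remote and branch names are
+illustrative, and components are assumed to be checked out under `deps/`):
+
+```
+git clone https://github.com/rabbitmq/rabbitmq-public-umbrella.git umbrella
+cd umbrella
+make co
+cd deps/rabbit_common            # or whichever component you are changing
+git checkout -b descriptive-branch-name
+# make your changes, then:
+make tests
+git push my-fork descriptive-branch-name
+```
+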
+If what you are going to work on is a substantial change, please first ask the core team
+for their opinion on the [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Code of Conduct
+
+See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
+
+
+## Contributor Agreement
+
+If you want to contribute a non-trivial change, please submit a signed copy of our
+[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+you submit your pull request. This will make it much easier (in some cases, possible)
+for the RabbitMQ team at Pivotal to merge your contribution.
+
+
+## Where to Ask Questions
+
+If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/deps/rabbitmq_ct_helpers/LICENSE b/deps/rabbitmq_ct_helpers/LICENSE
new file mode 100644
index 0000000000..5bba9af374
--- /dev/null
+++ b/deps/rabbitmq_ct_helpers/LICENSE
@@ -0,0 +1,12 @@
+This package, rabbitmq_ct_helpers, is dual-licensed under
+the Apache License v2 and the Mozilla Public License v2.0.
+
+For the Apache License, please see the file LICENSE-APACHE2.
+
+For the Mozilla Public License, please see the file LICENSE-MPL-RabbitMQ.
+
+For attribution of copyright and other details of provenance, please
+refer to the source code.
+
+If you have any questions regarding licensing, please contact us at
+info@rabbitmq.com.
diff --git a/deps/rabbitmq_ct_helpers/LICENSE-APACHE2 b/deps/rabbitmq_ct_helpers/LICENSE-APACHE2
new file mode 100644
index 0000000000..7017ed9108
--- /dev/null
+++ b/deps/rabbitmq_ct_helpers/LICENSE-APACHE2
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ https://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2017-2020 VMware, Inc. or its affiliates.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ https://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/deps/rabbitmq_ct_helpers/LICENSE-MPL-RabbitMQ b/deps/rabbitmq_ct_helpers/LICENSE-MPL-RabbitMQ
new file mode 100644
index 0000000000..14e2f777f6
--- /dev/null
+++ b/deps/rabbitmq_ct_helpers/LICENSE-MPL-RabbitMQ
@@ -0,0 +1,373 @@
+Mozilla Public License Version 2.0
+==================================
+
+1. Definitions
+--------------
+
+1.1. "Contributor"
+ means each individual or legal entity that creates, contributes to
+ the creation of, or owns Covered Software.
+
+1.2. "Contributor Version"
+ means the combination of the Contributions of others (if any) used
+ by a Contributor and that particular Contributor's Contribution.
+
+1.3. "Contribution"
+ means Covered Software of a particular Contributor.
+
+1.4. "Covered Software"
+ means Source Code Form to which the initial Contributor has attached
+ the notice in Exhibit A, the Executable Form of such Source Code
+ Form, and Modifications of such Source Code Form, in each case
+ including portions thereof.
+
+1.5. "Incompatible With Secondary Licenses"
+ means
+
+ (a) that the initial Contributor has attached the notice described
+ in Exhibit B to the Covered Software; or
+
+ (b) that the Covered Software was made available under the terms of
+ version 1.1 or earlier of the License, but not also under the
+ terms of a Secondary License.
+
+1.6. "Executable Form"
+ means any form of the work other than Source Code Form.
+
+1.7. "Larger Work"
+ means a work that combines Covered Software with other material, in
+ a separate file or files, that is not Covered Software.
+
+1.8. "License"
+ means this document.
+
+1.9. "Licensable"
+ means having the right to grant, to the maximum extent possible,
+ whether at the time of the initial grant or subsequently, any and
+ all of the rights conveyed by this License.
+
+1.10. "Modifications"
+ means any of the following:
+
+ (a) any file in Source Code Form that results from an addition to,
+ deletion from, or modification of the contents of Covered
+ Software; or
+
+ (b) any new file in Source Code Form that contains any Covered
+ Software.
+
+1.11. "Patent Claims" of a Contributor
+ means any patent claim(s), including without limitation, method,
+ process, and apparatus claims, in any patent Licensable by such
+ Contributor that would be infringed, but for the grant of the
+ License, by the making, using, selling, offering for sale, having
+ made, import, or transfer of either its Contributions or its
+ Contributor Version.
+
+1.12. "Secondary License"
+ means either the GNU General Public License, Version 2.0, the GNU
+ Lesser General Public License, Version 2.1, the GNU Affero General
+ Public License, Version 3.0, or any later versions of those
+ licenses.
+
+1.13. "Source Code Form"
+ means the form of the work preferred for making modifications.
+
+1.14. "You" (or "Your")
+ means an individual or a legal entity exercising rights under this
+ License. For legal entities, "You" includes any entity that
+ controls, is controlled by, or is under common control with You. For
+ purposes of this definition, "control" means (a) the power, direct
+ or indirect, to cause the direction or management of such entity,
+ whether by contract or otherwise, or (b) ownership of more than
+ fifty percent (50%) of the outstanding shares or beneficial
+ ownership of such entity.
+
+2. License Grants and Conditions
+--------------------------------
+
+2.1. Grants
+
+Each Contributor hereby grants You a world-wide, royalty-free,
+non-exclusive license:
+
+(a) under intellectual property rights (other than patent or trademark)
+ Licensable by such Contributor to use, reproduce, make available,
+ modify, display, perform, distribute, and otherwise exploit its
+ Contributions, either on an unmodified basis, with Modifications, or
+ as part of a Larger Work; and
+
+(b) under Patent Claims of such Contributor to make, use, sell, offer
+ for sale, have made, import, and otherwise transfer either its
+ Contributions or its Contributor Version.
+
+2.2. Effective Date
+
+The licenses granted in Section 2.1 with respect to any Contribution
+become effective for each Contribution on the date the Contributor first
+distributes such Contribution.
+
+2.3. Limitations on Grant Scope
+
+The licenses granted in this Section 2 are the only rights granted under
+this License. No additional rights or licenses will be implied from the
+distribution or licensing of Covered Software under this License.
+Notwithstanding Section 2.1(b) above, no patent license is granted by a
+Contributor:
+
+(a) for any code that a Contributor has removed from Covered Software;
+ or
+
+(b) for infringements caused by: (i) Your and any other third party's
+ modifications of Covered Software, or (ii) the combination of its
+ Contributions with other software (except as part of its Contributor
+ Version); or
+
+(c) under Patent Claims infringed by Covered Software in the absence of
+ its Contributions.
+
+This License does not grant any rights in the trademarks, service marks,
+or logos of any Contributor (except as may be necessary to comply with
+the notice requirements in Section 3.4).
+
+2.4. Subsequent Licenses
+
+No Contributor makes additional grants as a result of Your choice to
+distribute the Covered Software under a subsequent version of this
+License (see Section 10.2) or under the terms of a Secondary License (if
+permitted under the terms of Section 3.3).
+
+2.5. Representation
+
+Each Contributor represents that the Contributor believes its
+Contributions are its original creation(s) or it has sufficient rights
+to grant the rights to its Contributions conveyed by this License.
+
+2.6. Fair Use
+
+This License is not intended to limit any rights You have under
+applicable copyright doctrines of fair use, fair dealing, or other
+equivalents.
+
+2.7. Conditions
+
+Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
+in Section 2.1.
+
+3. Responsibilities
+-------------------
+
+3.1. Distribution of Source Form
+
+All distribution of Covered Software in Source Code Form, including any
+Modifications that You create or to which You contribute, must be under
+the terms of this License. You must inform recipients that the Source
+Code Form of the Covered Software is governed by the terms of this
+License, and how they can obtain a copy of this License. You may not
+attempt to alter or restrict the recipients' rights in the Source Code
+Form.
+
+3.2. Distribution of Executable Form
+
+If You distribute Covered Software in Executable Form then:
+
+(a) such Covered Software must also be made available in Source Code
+ Form, as described in Section 3.1, and You must inform recipients of
+ the Executable Form how they can obtain a copy of such Source Code
+ Form by reasonable means in a timely manner, at a charge no more
+ than the cost of distribution to the recipient; and
+
+(b) You may distribute such Executable Form under the terms of this
+ License, or sublicense it under different terms, provided that the
+ license for the Executable Form does not attempt to limit or alter
+ the recipients' rights in the Source Code Form under this License.
+
+3.3. Distribution of a Larger Work
+
+You may create and distribute a Larger Work under terms of Your choice,
+provided that You also comply with the requirements of this License for
+the Covered Software. If the Larger Work is a combination of Covered
+Software with a work governed by one or more Secondary Licenses, and the
+Covered Software is not Incompatible With Secondary Licenses, this
+License permits You to additionally distribute such Covered Software
+under the terms of such Secondary License(s), so that the recipient of
+the Larger Work may, at their option, further distribute the Covered
+Software under the terms of either this License or such Secondary
+License(s).
+
+3.4. Notices
+
+You may not remove or alter the substance of any license notices
+(including copyright notices, patent notices, disclaimers of warranty,
+or limitations of liability) contained within the Source Code Form of
+the Covered Software, except that You may alter any license notices to
+the extent required to remedy known factual inaccuracies.
+
+3.5. Application of Additional Terms
+
+You may choose to offer, and to charge a fee for, warranty, support,
+indemnity or liability obligations to one or more recipients of Covered
+Software. However, You may do so only on Your own behalf, and not on
+behalf of any Contributor. You must make it absolutely clear that any
+such warranty, support, indemnity, or liability obligation is offered by
+You alone, and You hereby agree to indemnify every Contributor for any
+liability incurred by such Contributor as a result of warranty, support,
+indemnity or liability terms You offer. You may include additional
+disclaimers of warranty and limitations of liability specific to any
+jurisdiction.
+
+4. Inability to Comply Due to Statute or Regulation
+---------------------------------------------------
+
+If it is impossible for You to comply with any of the terms of this
+License with respect to some or all of the Covered Software due to
+statute, judicial order, or regulation then You must: (a) comply with
+the terms of this License to the maximum extent possible; and (b)
+describe the limitations and the code they affect. Such description must
+be placed in a text file included with all distributions of the Covered
+Software under this License. Except to the extent prohibited by statute
+or regulation, such description must be sufficiently detailed for a
+recipient of ordinary skill to be able to understand it.
+
+5. Termination
+--------------
+
+5.1. The rights granted under this License will terminate automatically
+if You fail to comply with any of its terms. However, if You become
+compliant, then the rights granted under this License from a particular
+Contributor are reinstated (a) provisionally, unless and until such
+Contributor explicitly and finally terminates Your grants, and (b) on an
+ongoing basis, if such Contributor fails to notify You of the
+non-compliance by some reasonable means prior to 60 days after You have
+come back into compliance. Moreover, Your grants from a particular
+Contributor are reinstated on an ongoing basis if such Contributor
+notifies You of the non-compliance by some reasonable means, this is the
+first time You have received notice of non-compliance with this License
+from such Contributor, and You become compliant prior to 30 days after
+Your receipt of the notice.
+
+5.2. If You initiate litigation against any entity by asserting a patent
+infringement claim (excluding declaratory judgment actions,
+counter-claims, and cross-claims) alleging that a Contributor Version
+directly or indirectly infringes any patent, then the rights granted to
+You by any and all Contributors for the Covered Software under Section
+2.1 of this License shall terminate.
+
+5.3. In the event of termination under Sections 5.1 or 5.2 above, all
+end user license agreements (excluding distributors and resellers) which
+have been validly granted by You or Your distributors under this License
+prior to termination shall survive termination.
+
+************************************************************************
+* *
+* 6. Disclaimer of Warranty *
+* ------------------------- *
+* *
+* Covered Software is provided under this License on an "as is" *
+* basis, without warranty of any kind, either expressed, implied, or *
+* statutory, including, without limitation, warranties that the *
+* Covered Software is free of defects, merchantable, fit for a *
+* particular purpose or non-infringing. The entire risk as to the *
+* quality and performance of the Covered Software is with You. *
+* Should any Covered Software prove defective in any respect, You *
+* (not any Contributor) assume the cost of any necessary servicing, *
+* repair, or correction. This disclaimer of warranty constitutes an *
+* essential part of this License. No use of any Covered Software is *
+* authorized under this License except under this disclaimer. *
+* *
+************************************************************************
+
+************************************************************************
+* *
+* 7. Limitation of Liability *
+* -------------------------- *
+* *
+* Under no circumstances and under no legal theory, whether tort *
+* (including negligence), contract, or otherwise, shall any *
+* Contributor, or anyone who distributes Covered Software as *
+* permitted above, be liable to You for any direct, indirect, *
+* special, incidental, or consequential damages of any character *
+* including, without limitation, damages for lost profits, loss of *
+* goodwill, work stoppage, computer failure or malfunction, or any *
+* and all other commercial damages or losses, even if such party *
+* shall have been informed of the possibility of such damages. This *
+* limitation of liability shall not apply to liability for death or *
+* personal injury resulting from such party's negligence to the *
+* extent applicable law prohibits such limitation. Some *
+* jurisdictions do not allow the exclusion or limitation of *
+* incidental or consequential damages, so this exclusion and *
+* limitation may not apply to You. *
+* *
+************************************************************************
+
+8. Litigation
+-------------
+
+Any litigation relating to this License may be brought only in the
+courts of a jurisdiction where the defendant maintains its principal
+place of business and such litigation shall be governed by laws of that
+jurisdiction, without reference to its conflict-of-law provisions.
+Nothing in this Section shall prevent a party's ability to bring
+cross-claims or counter-claims.
+
+9. Miscellaneous
+----------------
+
+This License represents the complete agreement concerning the subject
+matter hereof. If any provision of this License is held to be
+unenforceable, such provision shall be reformed only to the extent
+necessary to make it enforceable. Any law or regulation which provides
+that the language of a contract shall be construed against the drafter
+shall not be used to construe this License against a Contributor.
+
+10. Versions of the License
+---------------------------
+
+10.1. New Versions
+
+Mozilla Foundation is the license steward. Except as provided in Section
+10.3, no one other than the license steward has the right to modify or
+publish new versions of this License. Each version will be given a
+distinguishing version number.
+
+10.2. Effect of New Versions
+
+You may distribute the Covered Software under the terms of the version
+of the License under which You originally received the Covered Software,
+or under the terms of any subsequent version published by the license
+steward.
+
+10.3. Modified Versions
+
+If you create software not governed by this License, and you want to
+create a new license for such software, you may create and use a
+modified version of this License if you rename the license and remove
+any references to the name of the license steward (except to note that
+such modified license differs from this License).
+
+10.4. Distributing Source Code Form that is Incompatible With Secondary
+Licenses
+
+If You choose to distribute Source Code Form that is Incompatible With
+Secondary Licenses under the terms of this version of the License, the
+notice described in Exhibit B of this License must be attached.
+
+Exhibit A - Source Code Form License Notice
+-------------------------------------------
+
+ This Source Code Form is subject to the terms of the Mozilla Public
+ License, v. 2.0. If a copy of the MPL was not distributed with this
+ file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+If it is not possible or desirable to put the notice in a particular
+file, then You may include the notice in a location (such as a LICENSE
+file in a relevant directory) where a recipient would be likely to look
+for such a notice.
+
+You may add additional accurate notices of copyright ownership.
+
+Exhibit B - "Incompatible With Secondary Licenses" Notice
+---------------------------------------------------------
+
+ This Source Code Form is "Incompatible With Secondary Licenses", as
+ defined by the Mozilla Public License, v. 2.0.
diff --git a/deps/rabbitmq_ct_helpers/Makefile b/deps/rabbitmq_ct_helpers/Makefile
new file mode 100644
index 0000000000..07c7d7dbf1
--- /dev/null
+++ b/deps/rabbitmq_ct_helpers/Makefile
@@ -0,0 +1,25 @@
+PROJECT = rabbitmq_ct_helpers
+PROJECT_DESCRIPTION = Common Test helpers for RabbitMQ
+
+DEPS = rabbit_common proper inet_tcp_proxy
+TEST_DEPS = rabbit
+
+dep_rabbit_common = git-subfolder https://github.com/rabbitmq/rabbitmq-server master deps/rabbit_common
+dep_rabbit = git-subfolder https://github.com/rabbitmq/rabbitmq-server master deps/rabbit
+dep_inet_tcp_proxy = git https://github.com/rabbitmq/inet_tcp_proxy master
+
+# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be
+# reviewed and merged.
+
+ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
+ERLANG_MK_COMMIT = rabbitmq-tmp
+
+DEP_PLUGINS = rabbit_common/mk/rabbitmq-build.mk \
+ rabbit_common/mk/rabbitmq-dist.mk \
+ rabbit_common/mk/rabbitmq-run.mk \
+ rabbit_common/mk/rabbitmq-tools.mk
+
+include rabbitmq-components.mk
+include erlang.mk
+
+ERLC_OPTS += +nowarn_export_all
diff --git a/deps/rabbitmq_ct_helpers/WORKSPACE.bazel b/deps/rabbitmq_ct_helpers/WORKSPACE.bazel
new file mode 100644
index 0000000000..e863f6c2b8
--- /dev/null
+++ b/deps/rabbitmq_ct_helpers/WORKSPACE.bazel
@@ -0,0 +1,18 @@
+load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
+
+http_archive(
+ name = "bazel-erlang",
+ sha256 = "422a9222522216f59a01703a13f578c601d6bddf5617bee8da3c43e3b299fc4e",
+ strip_prefix = "bazel-erlang-1.1.0",
+ urls = ["https://github.com/rabbitmq/bazel-erlang/archive/refs/tags/1.1.0.zip"],
+)
+
+http_archive(
+ name = "rabbitmq-server",
+ strip_prefix = "rabbitmq-server-master",
+ urls = ["https://github.com/rabbitmq/rabbitmq-server/archive/master.zip"],
+)
+
+load("@rabbitmq-server//:workspace_helpers.bzl", "rabbitmq_external_deps")
+
+rabbitmq_external_deps()
diff --git a/deps/amqp10_common/erlang.mk b/deps/rabbitmq_ct_helpers/erlang.mk
index fce4be0b0a..ad3ddbf813 100644
--- a/deps/amqp10_common/erlang.mk
+++ b/deps/rabbitmq_ct_helpers/erlang.mk
@@ -17,7 +17,7 @@
ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
export ERLANG_MK_FILENAME
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
+ERLANG_MK_VERSION = 2019.07.01-53-gd80984c
ERLANG_MK_WITHOUT =
# Make 3.81 and 3.82 are deprecated.
@@ -955,10 +955,10 @@ pkg_cr_commit = master
PACKAGES += cuttlefish
pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
+pkg_cuttlefish_description = cuttlefish configuration abstraction
+pkg_cuttlefish_homepage = https://github.com/Kyorai/cuttlefish
pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
+pkg_cuttlefish_repo = https://github.com/Kyorai/cuttlefish
pkg_cuttlefish_commit = master
PACKAGES += damocles
@@ -2335,7 +2335,7 @@ pkg_jsx_description = An Erlang application for consuming, producing and manipul
pkg_jsx_homepage = https://github.com/talentdeficit/jsx
pkg_jsx_fetch = git
pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
+pkg_jsx_commit = main
PACKAGES += kafka
pkg_kafka_name = kafka
@@ -2823,7 +2823,7 @@ pkg_mysql_description = MySQL client library for Erlang/OTP
pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
pkg_mysql_fetch = git
pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
+pkg_mysql_commit = 1.7.0
PACKAGES += n2o
pkg_n2o_name = n2o
@@ -4692,8 +4692,19 @@ define dep_autopatch_rebar.erl
case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
{ok, Lock} ->
io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
+ LockPkgs = case lists:keyfind("1.2.0", 1, Lock) of
+ {_, LP} ->
+ LP;
+ _ ->
+ case lists:keyfind("1.1.0", 1, Lock) of
+ {_, LP} ->
+ LP;
+ _ ->
+ false
+ end
+ end,
+ if
+ is_list(LockPkgs) ->
io:format("~p~n", [LockPkgs]),
case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
{_, {pkg, _, Vsn}, _} ->
@@ -4702,7 +4713,7 @@ define dep_autopatch_rebar.erl
_ ->
false
end;
- _ ->
+ true ->
false
end;
_ ->
@@ -6987,6 +6998,343 @@ apps-eunit: test-build
endif
endif
+# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+HEX_CORE_GIT ?= https://github.com/hexpm/hex_core
+HEX_CORE_COMMIT ?= v0.7.0
+
+PACKAGES += hex_core
+pkg_hex_core_name = hex_core
+pkg_hex_core_description = Reference implementation of Hex specifications
+pkg_hex_core_homepage = $(HEX_CORE_GIT)
+pkg_hex_core_fetch = git
+pkg_hex_core_repo = $(HEX_CORE_GIT)
+pkg_hex_core_commit = $(HEX_CORE_COMMIT)
+
+# We automatically depend on hex_core when the project doesn't already depend on it.
+$(if $(filter hex_core,$(DEPS) $(BUILD_DEPS) $(DOC_DEPS) $(REL_DEPS) $(TEST_DEPS)),,\
+ $(eval $(call dep_target,hex_core)))
+
+hex-core: $(DEPS_DIR)/hex_core
+ $(verbose) if [ ! -e $(DEPS_DIR)/hex_core/ebin/dep_built ]; then \
+ $(MAKE) -C $(DEPS_DIR)/hex_core IS_DEP=1; \
+ touch $(DEPS_DIR)/hex_core/ebin/dep_built; \
+ fi
+
+# @todo This must also apply to fetching.
+HEX_CONFIG ?=
+
+define hex_config.erl
+ begin
+ Config0 = hex_core:default_config(),
+ Config0$(HEX_CONFIG)
+ end
+endef
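+
+# HEX_CONFIG is appended verbatim to hex_core's default config map above,
+# which can be used, for instance, to point at a self-hosted repository.
+# A hypothetical example (the hash must be escaped in a Makefile
+# assignment):
+#   HEX_CONFIG = \#{api_url => <<"https://hex.example.org/api">>}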
+
+define hex_user_create.erl
+ {ok, _} = application:ensure_all_started(ssl),
+ {ok, _} = application:ensure_all_started(inets),
+ Config = $(hex_config.erl),
+ case hex_api_user:create(Config, <<"$(strip $1)">>, <<"$(strip $2)">>, <<"$(strip $3)">>) of
+ {ok, {201, _, #{<<"email">> := Email, <<"url">> := URL, <<"username">> := Username}}} ->
+ io:format("User ~s (~s) created at ~s~n"
+ "Please check your inbox for a confirmation email.~n"
+ "You must confirm before you are allowed to publish packages.~n",
+ [Username, Email, URL]),
+ halt(0);
+ {ok, {Status, _, Errors}} ->
+ io:format("Error ~b: ~0p~n", [Status, Errors]),
+ halt(80)
+ end
+endef
+
+# The $(info ) call inserts a new line after the password prompt.
+hex-user-create: hex-core
+ $(if $(HEX_USERNAME),,$(eval HEX_USERNAME := $(shell read -p "Username: " username; echo $$username)))
+ $(if $(HEX_PASSWORD),,$(eval HEX_PASSWORD := $(shell stty -echo; read -p "Password: " password; stty echo; echo $$password) $(info )))
+ $(if $(HEX_EMAIL),,$(eval HEX_EMAIL := $(shell read -p "Email: " email; echo $$email)))
+ $(gen_verbose) $(call erlang,$(call hex_user_create.erl,$(HEX_USERNAME),$(HEX_PASSWORD),$(HEX_EMAIL)))
+
+define hex_key_add.erl
+ {ok, _} = application:ensure_all_started(ssl),
+ {ok, _} = application:ensure_all_started(inets),
+ Config = $(hex_config.erl),
+ ConfigF = Config#{api_key => iolist_to_binary([<<"Basic ">>, base64:encode(<<"$(strip $1):$(strip $2)">>)])},
+ Permissions = [
+ case string:split(P, <<":">>) of
+ [D] -> #{domain => D};
+ [D, R] -> #{domain => D, resource => R}
+ end
+ || P <- string:split(<<"$(strip $4)">>, <<",">>, all)],
+ case hex_api_key:add(ConfigF, <<"$(strip $3)">>, Permissions) of
+ {ok, {201, _, #{<<"secret">> := Secret}}} ->
+ io:format("Key ~s created for user ~s~nSecret: ~s~n"
+ "Please store the secret in a secure location, such as a password store.~n"
+ "The secret will be requested for most Hex-related operations.~n",
+ [<<"$(strip $3)">>, <<"$(strip $1)">>, Secret]),
+ halt(0);
+ {ok, {Status, _, Errors}} ->
+ io:format("Error ~b: ~0p~n", [Status, Errors]),
+ halt(81)
+ end
+endef
+
+hex-key-add: hex-core
+ $(if $(HEX_USERNAME),,$(eval HEX_USERNAME := $(shell read -p "Username: " username; echo $$username)))
+ $(if $(HEX_PASSWORD),,$(eval HEX_PASSWORD := $(shell stty -echo; read -p "Password: " password; stty echo; echo $$password) $(info )))
+ $(gen_verbose) $(call erlang,$(call hex_key_add.erl,$(HEX_USERNAME),$(HEX_PASSWORD),\
+ $(if $(name),$(name),$(shell hostname)-erlang-mk),\
+ $(if $(perm),$(perm),api)))
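+
+# Example invocation (key name and permissions are illustrative; the
+# default permission is "api"):
+#   make hex-key-add name=my-ci-key perm=api:write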
+
+HEX_TARBALL_EXTRA_METADATA ?=
+
+# @todo Check that we can += files
+HEX_TARBALL_FILES ?= \
+ $(wildcard early-plugins.mk) \
+ $(wildcard ebin/$(PROJECT).app) \
+ $(wildcard ebin/$(PROJECT).appup) \
+ $(wildcard $(notdir $(ERLANG_MK_FILENAME))) \
+ $(sort $(call core_find,include/,*.hrl)) \
+ $(wildcard LICENSE*) \
+ $(wildcard Makefile) \
+ $(wildcard plugins.mk) \
+ $(sort $(call core_find,priv/,*)) \
+ $(wildcard README*) \
+ $(wildcard rebar.config) \
+ $(sort $(call core_find,src/,*))
+
+HEX_TARBALL_OUTPUT_FILE ?= $(ERLANG_MK_TMP)/$(PROJECT).tar
+
+# @todo Need to check for rebar.config and/or the absence of DEPS to know
+# whether a project will work with Rebar.
+#
+# @todo contributors licenses links in HEX_TARBALL_EXTRA_METADATA
+
+# In order to build the requirements metadata we look into DEPS.
+# We do not require that the project use Hex dependencies; however,
+# Hex.pm does require that package names and version numbers
+# correspond to real Hex packages.
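+#
+# For example, "DEPS = cowlib" together with "dep_cowlib = hex 2.9.1"
+# would roughly translate into the following requirements entry:
+#   <<"cowlib">> => #{<<"app">> => <<"cowlib">>,
+#                     <<"optional">> => false,
+#                     <<"requirement">> => <<"2.9.1">>}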
+define hex_tarball_create.erl
+ Files0 = [$(call comma_list,$(patsubst %,"%",$(HEX_TARBALL_FILES)))],
+ Requirements0 = #{
+ $(foreach d,$(DEPS),
+ <<"$(if $(subst hex,,$(call query_fetch_method,$d)),$d,$(if $(word 3,$(dep_$d)),$(word 3,$(dep_$d)),$d))">> => #{
+ <<"app">> => <<"$d">>,
+ <<"optional">> => false,
+ <<"requirement">> => <<"$(call query_version,$d)">>
+ },)
+ $(if $(DEPS),dummy => dummy)
+ },
+ Requirements = maps:remove(dummy, Requirements0),
+ Metadata0 = #{
+ app => <<"$(strip $(PROJECT))">>,
+ build_tools => [<<"make">>, <<"rebar3">>],
+ description => <<"$(strip $(PROJECT_DESCRIPTION))">>,
+ files => [unicode:characters_to_binary(F) || F <- Files0],
+ name => <<"$(strip $(PROJECT))">>,
+ requirements => Requirements,
+ version => <<"$(strip $(PROJECT_VERSION))">>
+ },
+ Metadata = Metadata0$(HEX_TARBALL_EXTRA_METADATA),
+ Files = [case file:read_file(F) of
+ {ok, Bin} ->
+ {F, Bin};
+ {error, Reason} ->
+ io:format("Error trying to open file ~0p: ~0p~n", [F, Reason]),
+ halt(82)
+ end || F <- Files0],
+ case hex_tarball:create(Metadata, Files) of
+ {ok, #{tarball := Tarball}} ->
+ ok = file:write_file("$(strip $(HEX_TARBALL_OUTPUT_FILE))", Tarball),
+ halt(0);
+ {error, Reason} ->
+ io:format("Error ~0p~n", [Reason]),
+ halt(83)
+ end
+endef
+
+hex_tar_verbose_0 = @echo " TAR $(notdir $(ERLANG_MK_TMP))/$(@F)";
+hex_tar_verbose_2 = set -x;
+hex_tar_verbose = $(hex_tar_verbose_$(V))
+
+$(HEX_TARBALL_OUTPUT_FILE): hex-core app
+ $(hex_tar_verbose) $(call erlang,$(call hex_tarball_create.erl))
+
+hex-tarball-create: $(HEX_TARBALL_OUTPUT_FILE)
+
+define hex_release_publish_summary.erl
+ {ok, Tarball} = erl_tar:open("$(strip $(HEX_TARBALL_OUTPUT_FILE))", [read]),
+ ok = erl_tar:extract(Tarball, [{cwd, "$(ERLANG_MK_TMP)"}, {files, ["metadata.config"]}]),
+ {ok, Metadata} = file:consult("$(ERLANG_MK_TMP)/metadata.config"),
+ #{
+ <<"name">> := Name,
+ <<"version">> := Version,
+ <<"files">> := Files,
+ <<"requirements">> := Deps
+ } = maps:from_list(Metadata),
+ io:format("Publishing ~s ~s~n Dependencies:~n", [Name, Version]),
+ case Deps of
+ [] ->
+ io:format(" (none)~n");
+ _ ->
+ [begin
+ #{<<"app">> := DA, <<"requirement">> := DR} = maps:from_list(D),
+ io:format(" ~s ~s~n", [DA, DR])
+ end || {_, D} <- Deps]
+ end,
+ io:format(" Included files:~n"),
+ [io:format(" ~s~n", [F]) || F <- Files],
+ io:format("You may also review the contents of the tarball file.~n"
+ "Please enter your secret key to proceed.~n"),
+ halt(0)
+endef
+
+define hex_release_publish.erl
+ {ok, _} = application:ensure_all_started(ssl),
+ {ok, _} = application:ensure_all_started(inets),
+ Config = $(hex_config.erl),
+ ConfigF = Config#{api_key => <<"$(strip $1)">>},
+ {ok, Tarball} = file:read_file("$(strip $(HEX_TARBALL_OUTPUT_FILE))"),
+ case hex_api_release:publish(ConfigF, Tarball, [{replace, $2}]) of
+ {ok, {200, _, #{}}} ->
+ io:format("Release replaced~n"),
+ halt(0);
+ {ok, {201, _, #{}}} ->
+ io:format("Release published~n"),
+ halt(0);
+ {ok, {Status, _, Errors}} ->
+ io:format("Error ~b: ~0p~n", [Status, Errors]),
+ halt(84)
+ end
+endef
+
+hex-release-tarball: hex-core $(HEX_TARBALL_OUTPUT_FILE)
+ $(verbose) $(call erlang,$(call hex_release_publish_summary.erl))
+
+hex-release-publish: hex-core hex-release-tarball
+ $(if $(HEX_SECRET),,$(eval HEX_SECRET := $(shell stty -echo; read -p "Secret: " secret; stty echo; echo $$secret) $(info )))
+ $(gen_verbose) $(call erlang,$(call hex_release_publish.erl,$(HEX_SECRET),false))
+
+hex-release-replace: hex-core hex-release-tarball
+ $(if $(HEX_SECRET),,$(eval HEX_SECRET := $(shell stty -echo; read -p "Secret: " secret; stty echo; echo $$secret) $(info )))
+ $(gen_verbose) $(call erlang,$(call hex_release_publish.erl,$(HEX_SECRET),true))
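+
+# Example invocations (the secret is the one printed by hex-key-add):
+#   make hex-release-publish HEX_SECRET=<secret>
+#   make hex-release-replace HEX_SECRET=<secret>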
+
+define hex_release_delete.erl
+ {ok, _} = application:ensure_all_started(ssl),
+ {ok, _} = application:ensure_all_started(inets),
+ Config = $(hex_config.erl),
+ ConfigF = Config#{api_key => <<"$(strip $1)">>},
+ case hex_api_release:delete(ConfigF, <<"$(strip $(PROJECT))">>, <<"$(strip $(PROJECT_VERSION))">>) of
+ {ok, {204, _, _}} ->
+ io:format("Release $(strip $(PROJECT_VERSION)) deleted~n"),
+ halt(0);
+ {ok, {Status, _, Errors}} ->
+ io:format("Error ~b: ~0p~n", [Status, Errors]),
+ halt(85)
+ end
+endef
+
+hex-release-delete: hex-core
+ $(if $(HEX_SECRET),,$(eval HEX_SECRET := $(shell stty -echo; read -p "Secret: " secret; stty echo; echo $$secret) $(info )))
+ $(gen_verbose) $(call erlang,$(call hex_release_delete.erl,$(HEX_SECRET)))
+
+define hex_release_retire.erl
+ {ok, _} = application:ensure_all_started(ssl),
+ {ok, _} = application:ensure_all_started(inets),
+ Config = $(hex_config.erl),
+ ConfigF = Config#{api_key => <<"$(strip $1)">>},
+ Params = #{<<"reason">> => <<"$(strip $3)">>, <<"message">> => <<"$(strip $4)">>},
+ case hex_api_release:retire(ConfigF, <<"$(strip $(PROJECT))">>, <<"$(strip $2)">>, Params) of
+ {ok, {204, _, _}} ->
+ io:format("Release $(strip $2) has been retired~n"),
+ halt(0);
+ {ok, {Status, _, Errors}} ->
+ io:format("Error ~b: ~0p~n", [Status, Errors]),
+ halt(86)
+ end
+endef
+
+hex-release-retire: hex-core
+ $(if $(HEX_SECRET),,$(eval HEX_SECRET := $(shell stty -echo; read -p "Secret: " secret; stty echo; echo $$secret) $(info )))
+ $(gen_verbose) $(call erlang,$(call hex_release_retire.erl,$(HEX_SECRET),\
+ $(if $(HEX_VERSION),$(HEX_VERSION),$(PROJECT_VERSION)),\
+ $(if $(HEX_REASON),$(HEX_REASON),invalid),\
+ $(HEX_MESSAGE)))
+
+define hex_release_unretire.erl
+ {ok, _} = application:ensure_all_started(ssl),
+ {ok, _} = application:ensure_all_started(inets),
+ Config = $(hex_config.erl),
+ ConfigF = Config#{api_key => <<"$(strip $1)">>},
+ case hex_api_release:unretire(ConfigF, <<"$(strip $(PROJECT))">>, <<"$(strip $2)">>) of
+ {ok, {204, _, _}} ->
+ io:format("Release $(strip $2) is not retired anymore~n"),
+ halt(0);
+ {ok, {Status, _, Errors}} ->
+ io:format("Error ~b: ~0p~n", [Status, Errors]),
+ halt(87)
+ end
+endef
+
+hex-release-unretire: hex-core
+ $(if $(HEX_SECRET),,$(eval HEX_SECRET := $(shell stty -echo; read -p "Secret: " secret; stty echo; echo $$secret) $(info )))
+ $(gen_verbose) $(call erlang,$(call hex_release_unretire.erl,$(HEX_SECRET),\
+ $(if $(HEX_VERSION),$(HEX_VERSION),$(PROJECT_VERSION))))
+
+HEX_DOCS_DOC_DIR ?= doc/
+HEX_DOCS_TARBALL_FILES ?= $(sort $(call core_find,$(HEX_DOCS_DOC_DIR),*))
+HEX_DOCS_TARBALL_OUTPUT_FILE ?= $(ERLANG_MK_TMP)/$(PROJECT)-docs.tar.gz
+
+$(HEX_DOCS_TARBALL_OUTPUT_FILE): hex-core app docs
+ $(hex_tar_verbose) tar czf $(HEX_DOCS_TARBALL_OUTPUT_FILE) -C $(HEX_DOCS_DOC_DIR) \
+ $(HEX_DOCS_TARBALL_FILES:$(HEX_DOCS_DOC_DIR)%=%)
+
+hex-docs-tarball-create: $(HEX_DOCS_TARBALL_OUTPUT_FILE)
+
+define hex_docs_publish.erl
+ {ok, _} = application:ensure_all_started(ssl),
+ {ok, _} = application:ensure_all_started(inets),
+ Config = $(hex_config.erl),
+ ConfigF = Config#{api_key => <<"$(strip $1)">>},
+ {ok, Tarball} = file:read_file("$(strip $(HEX_DOCS_TARBALL_OUTPUT_FILE))"),
+ case hex_api:post(ConfigF,
+ ["packages", "$(strip $(PROJECT))", "releases", "$(strip $(PROJECT_VERSION))", "docs"],
+ {"application/octet-stream", Tarball}) of
+ {ok, {Status, _, _}} when Status >= 200, Status < 300 ->
+ io:format("Docs published~n"),
+ halt(0);
+ {ok, {Status, _, Errors}} ->
+ io:format("Error ~b: ~0p~n", [Status, Errors]),
+ halt(88)
+ end
+endef
+
+hex-docs-publish: hex-core hex-docs-tarball-create
+ $(if $(HEX_SECRET),,$(eval HEX_SECRET := $(shell stty -echo; read -p "Secret: " secret; stty echo; echo $$secret) $(info )))
+ $(gen_verbose) $(call erlang,$(call hex_docs_publish.erl,$(HEX_SECRET)))
+
+define hex_docs_delete.erl
+ {ok, _} = application:ensure_all_started(ssl),
+ {ok, _} = application:ensure_all_started(inets),
+ Config = $(hex_config.erl),
+ ConfigF = Config#{api_key => <<"$(strip $1)">>},
+ case hex_api:delete(ConfigF,
+ ["packages", "$(strip $(PROJECT))", "releases", "$(strip $2)", "docs"]) of
+ {ok, {Status, _, _}} when Status >= 200, Status < 300 ->
+ io:format("Docs removed~n"),
+ halt(0);
+ {ok, {Status, _, Errors}} ->
+ io:format("Error ~b: ~0p~n", [Status, Errors]),
+ halt(89)
+ end
+endef
+
+hex-docs-delete: hex-core
+ $(if $(HEX_SECRET),,$(eval HEX_SECRET := $(shell stty -echo; read -p "Secret: " secret; stty echo; echo $$secret) $(info )))
+ $(gen_verbose) $(call erlang,$(call hex_docs_delete.erl,$(HEX_SECRET),\
+ $(if $(HEX_VERSION),$(HEX_VERSION),$(PROJECT_VERSION))))
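+
+# Illustrative usage of the targets above (not part of erlang.mk itself): the
+# Hex API key may be passed on the command line to skip the interactive
+# prompt, e.g.:
+#
+#   make hex-release-publish HEX_SECRET=<api key>
+#
+# hex-release-replace republishes an already published version, and
+# hex-release-retire honours HEX_VERSION, HEX_REASON and HEX_MESSAGE overrides.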
+
# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
# This file is part of erlang.mk and subject to the terms of the ISC License.
diff --git a/deps/rabbitmq_ct_helpers/include/rabbit_assert.hrl b/deps/rabbitmq_ct_helpers/include/rabbit_assert.hrl
new file mode 100644
index 0000000000..e1981ac4b3
--- /dev/null
+++ b/deps/rabbitmq_ct_helpers/include/rabbit_assert.hrl
@@ -0,0 +1,49 @@
+-define(AWAIT_MATCH_DEFAULT_POLLING_INTERVAL, 50).
+
+-define(awaitMatch(Guard, Expr, Timeout, PollingInterval),
+ begin
+ ((fun AwaitMatchFilter(AwaitMatchHorizon) ->
+ AwaitMatchResult = Expr,
+ case (AwaitMatchResult) of
+ Guard -> AwaitMatchResult;
+ __V -> case erlang:system_time(millisecond) of
+ AwaitMatchNow when AwaitMatchNow < AwaitMatchHorizon ->
+ timer:sleep(
+ min(PollingInterval,
+ AwaitMatchHorizon - AwaitMatchNow)),
+ AwaitMatchFilter(AwaitMatchHorizon);
+ _ ->
+ erlang:error({awaitMatch,
+ [{module, ?MODULE},
+ {line, ?LINE},
+ {expression, (??Expr)},
+ {pattern, (??Guard)},
+ {value, __V}]})
+ end
+ end
+ end)(erlang:system_time(millisecond) + Timeout))
+ end).
+
+-define(awaitMatch(Guard, Expr, Timeout),
+ begin
+ ((fun AwaitMatchFilter(AwaitMatchHorizon) ->
+ AwaitMatchResult = Expr,
+ case (AwaitMatchResult) of
+ Guard -> AwaitMatchResult;
+ __V -> case erlang:system_time(millisecond) of
+ AwaitMatchNow when AwaitMatchNow < AwaitMatchHorizon ->
+ timer:sleep(
+ min(?AWAIT_MATCH_DEFAULT_POLLING_INTERVAL,
+ AwaitMatchHorizon - AwaitMatchNow)),
+ AwaitMatchFilter(AwaitMatchHorizon);
+ _ ->
+ erlang:error({awaitMatch,
+ [{module, ?MODULE},
+ {line, ?LINE},
+ {expression, (??Expr)},
+ {pattern, (??Guard)},
+ {value, __V}]})
+ end
+ end
+ end)(erlang:system_time(millisecond) + Timeout))
+ end).
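+
+%% Illustrative usage (queue_ready_count/2 below is a hypothetical helper):
+%%
+%%     %% Poll every 500 ms, for at most 30 seconds, until the expression
+%%     %% evaluates to a value matching the pattern `1'.
+%%     ?awaitMatch(1, queue_ready_count(Config, QName), 30000, 500)
+%%
+%% The three-argument form behaves the same but polls every
+%% ?AWAIT_MATCH_DEFAULT_POLLING_INTERVAL (50) milliseconds.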
diff --git a/deps/rabbitmq_ct_helpers/include/rabbit_mgmt_test.hrl b/deps/rabbitmq_ct_helpers/include/rabbit_mgmt_test.hrl
new file mode 100644
index 0000000000..47509a6164
--- /dev/null
+++ b/deps/rabbitmq_ct_helpers/include/rabbit_mgmt_test.hrl
@@ -0,0 +1,11 @@
+-include_lib("eunit/include/eunit.hrl").
+
+-define(OK, 200).
+-define(CREATED, 201).
+-define(NO_CONTENT, 204).
+-define(SEE_OTHER, 303).
+-define(BAD_REQUEST, 400).
+-define(NOT_AUTHORISED, 401).
+%% NOT_FOUND is not defined here: amqp_client.hrl already defines it as 404.
+%% httpc seems to get racy when using HTTP 1.1
+-define(HTTPC_OPTS, [{version, "HTTP/1.0"}, {autoredirect, false}]).
diff --git a/deps/amqp10_common/rabbitmq-components.mk b/deps/rabbitmq_ct_helpers/rabbitmq-components.mk
index b2a3be8b35..52198c0313 100644
--- a/deps/amqp10_common/rabbitmq-components.mk
+++ b/deps/rabbitmq_ct_helpers/rabbitmq-components.mk
@@ -40,8 +40,8 @@ endif
dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
+# dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
+# dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
@@ -117,7 +117,7 @@ dep_cowlib = hex 2.9.1
dep_jsx = hex 2.11.0
dep_lager = hex 3.8.0
dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
+dep_ra = git https://github.com/rabbitmq/ra.git main
dep_ranch = hex 1.7.1
dep_recon = hex 2.5.1
dep_observer_cli = hex 1.5.4
diff --git a/deps/rabbitmq_ct_helpers/src/rabbit_control_helper.erl b/deps/rabbitmq_ct_helpers/src/rabbit_control_helper.erl
new file mode 100644
index 0000000000..72083f5734
--- /dev/null
+++ b/deps/rabbitmq_ct_helpers/src/rabbit_control_helper.erl
@@ -0,0 +1,46 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(rabbit_control_helper).
+
+-export([command/2, command/3, command/4, command_with_output/4, format_command/4]).
+
+command(Command, Node, Args) ->
+ command(Command, Node, Args, []).
+
+command(Command, Node) ->
+ command(Command, Node, [], []).
+
+command(Command, Node, Args, Opts) ->
+ case command_with_output(Command, Node, Args, Opts) of
+ {ok, _} -> ok;
+ ok -> ok;
+ Error -> Error
+ end.
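+
+%% Illustrative example: rabbit_control_helper:command(stop_app, Node) is
+%% roughly equivalent to running "rabbitmqctl --node <Node> stop_app" through
+%% the embedded CLI, and returns ok on success.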
+
+command_with_output(Command, Node, Args, Opts) ->
+ Formatted = format_command(Command, Node, Args, Opts),
+ CommandResult = 'Elixir.RabbitMQCtl':exec_command(
+ Formatted, fun(Output,_,_) -> Output end),
+ ct:pal("Executed command ~p against node ~p~nResult: ~p~n", [Formatted, Node, CommandResult]),
+ CommandResult.
+
+format_command(Command, Node, Args, Opts) ->
+ Formatted = io_lib:format("~tp ~ts ~ts",
+ [Command,
+ format_args(Args),
+ format_options([{"--node", Node} | Opts])]),
+ 'Elixir.OptionParser':split(iolist_to_binary(Formatted)).
+
+format_args(Args) ->
+ iolist_to_binary([ io_lib:format("~tp ", [Arg]) || Arg <- Args ]).
+
+format_options(Opts) ->
+ EffectiveOpts = [{"--script-name", "rabbitmqctl"} | Opts],
+ iolist_to_binary([io_lib:format("~s=~tp ", [Key, Value])
+ || {Key, Value} <- EffectiveOpts ]).
+
diff --git a/deps/rabbitmq_ct_helpers/src/rabbit_ct_broker_helpers.erl b/deps/rabbitmq_ct_helpers/src/rabbit_ct_broker_helpers.erl
new file mode 100644
index 0000000000..86a33146bb
--- /dev/null
+++ b/deps/rabbitmq_ct_helpers/src/rabbit_ct_broker_helpers.erl
@@ -0,0 +1,1921 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(rabbit_ct_broker_helpers).
+
+-include_lib("common_test/include/ct.hrl").
+-include_lib("kernel/include/inet.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
+-include_lib("eunit/include/eunit.hrl").
+
+-export([
+ setup_steps/0,
+ setup_steps_for_vms/0,
+ teardown_steps/0,
+ teardown_steps_for_vms/0,
+ run_make_dist/1,
+ start_rabbitmq_nodes/1,
+ start_rabbitmq_nodes_on_vms/1,
+ stop_rabbitmq_nodes/1,
+ stop_rabbitmq_nodes_on_vms/1,
+ rewrite_node_config_file/2,
+ cluster_nodes/1, cluster_nodes/2,
+
+ get_node_configs/1, get_node_configs/2,
+ get_node_config/2, get_node_config/3, set_node_config/3,
+ nodename_to_index/2,
+ node_uri/2, node_uri/3,
+
+ control_action/2, control_action/3, control_action/4,
+ rabbitmqctl/3, rabbitmqctl/4, rabbitmqctl_list/3,
+ rabbitmq_queues/3,
+
+ add_code_path_to_node/2,
+ add_code_path_to_all_nodes/2,
+ rpc/5, rpc/6,
+ rpc_all/4, rpc_all/5,
+
+ start_node/2,
+ start_broker/2,
+ restart_broker/2,
+ stop_broker/2,
+ restart_node/2,
+ stop_node/2,
+ stop_node_after/3,
+ kill_node/2,
+ kill_node_after/3,
+
+ reset_node/2,
+ force_reset_node/2,
+
+ forget_cluster_node/3,
+ forget_cluster_node/4,
+
+ cluster_members_online/2,
+
+ is_feature_flag_supported/2,
+ is_feature_flag_supported/3,
+ enable_feature_flag/2,
+ enable_feature_flag/3,
+
+ drain_node/2,
+ revive_node/2,
+ mark_as_being_drained/2,
+ unmark_as_being_drained/2,
+ is_being_drained_local_read/2,
+ is_being_drained_local_read/3,
+ is_being_drained_consistent_read/2,
+ is_being_drained_consistent_read/3,
+
+ set_partition_handling_mode/3,
+ set_partition_handling_mode_globally/2,
+ configure_dist_proxy/1,
+ block_traffic_between/2,
+ allow_traffic_between/2,
+
+ get_connection_pids/1,
+ close_all_connections/3,
+
+ set_policy/6,
+ set_policy/7,
+ set_policy_in_vhost/7,
+ set_policy_in_vhost/8,
+
+ clear_policy/3,
+ clear_policy/4,
+ set_operator_policy/6,
+ clear_operator_policy/3,
+ set_ha_policy/4, set_ha_policy/5,
+ set_ha_policy_all/1,
+ set_ha_policy_all/2,
+ set_ha_policy_two_pos/1,
+ set_ha_policy_two_pos_batch_sync/1,
+
+ set_parameter/5,
+ set_parameter/6,
+ set_parameter/7,
+ clear_parameter/4,
+ clear_parameter/5,
+ clear_parameter/6,
+
+ set_global_parameter/3,
+ set_global_parameter/4,
+ clear_global_parameter/2,
+ clear_global_parameter/3,
+
+ add_vhost/2,
+ add_vhost/3,
+ add_vhost/4,
+ delete_vhost/2,
+ delete_vhost/3,
+ delete_vhost/4,
+
+ force_vhost_failure/2,
+ force_vhost_failure/3,
+
+ set_alarm/3,
+ get_alarms/2,
+ get_local_alarms/2,
+ clear_alarm/3,
+ clear_all_alarms/2,
+
+ add_user/2,
+ add_user/3,
+ add_user/4,
+ add_user/5,
+ set_user_tags/4,
+ set_user_tags/5,
+
+ delete_user/2,
+ delete_user/3,
+ delete_user/4,
+
+ change_password/5,
+ clear_password/4,
+
+ change_password/3,
+
+ switch_credential_validator/2,
+ switch_credential_validator/3,
+
+ set_permissions/6,
+ set_permissions/7,
+ set_permissions/8,
+ set_full_permissions/2,
+ set_full_permissions/3,
+ set_full_permissions/4,
+
+ clear_permissions/2,
+ clear_permissions/3,
+ clear_permissions/4,
+ clear_permissions/5,
+
+ set_vhost_limit/5,
+
+ set_user_limits/3,
+ set_user_limits/4,
+ clear_user_limits/3,
+ clear_user_limits/4,
+
+ enable_plugin/3,
+ disable_plugin/3,
+
+ test_channel/0,
+ test_writer/1,
+ user/1
+ ]).
+
+%% Internal functions exported to be used by rpc:call/4.
+-export([
+ do_restart_broker/0
+ ]).
+
+-define(DEFAULT_USER, "guest").
+-define(NODE_START_ATTEMPTS, 10).
+
+-define(TCP_PORTS_BASE, 21000).
+-define(TCP_PORTS_LIST, [
+ tcp_port_amqp,
+ tcp_port_amqp_tls,
+ tcp_port_mgmt,
+ tcp_port_erlang_dist,
+ tcp_port_erlang_dist_proxy,
+ tcp_port_mqtt,
+ tcp_port_mqtt_tls,
+ tcp_port_web_mqtt,
+ tcp_port_stomp,
+ tcp_port_stomp_tls,
+ tcp_port_web_stomp,
+ tcp_port_web_stomp_tls,
+ tcp_port_stream,
+ tcp_port_stream_tls,
+ tcp_port_prometheus
+ ]).
+
+%% -------------------------------------------------------------------
+%% Broker setup/teardown steps.
+%% -------------------------------------------------------------------
+
+setup_steps() ->
+ case os:getenv("RABBITMQ_RUN") of
+ false ->
+ [
+ fun run_make_dist/1,
+ fun rabbit_ct_helpers:ensure_rabbitmqctl_cmd/1,
+ fun rabbit_ct_helpers:ensure_rabbitmqctl_app/1,
+ fun rabbit_ct_helpers:ensure_rabbitmq_plugins_cmd/1,
+ fun set_lager_flood_limit/1,
+ fun start_rabbitmq_nodes/1,
+ fun share_dist_and_proxy_ports_map/1
+ ];
+ _ ->
+ [
+ fun rabbit_ct_helpers:ensure_rabbitmqctl_cmd/1,
+ fun rabbit_ct_helpers:load_rabbitmqctl_app/1,
+ fun rabbit_ct_helpers:ensure_rabbitmq_plugins_cmd/1,
+ fun set_lager_flood_limit/1,
+ fun start_rabbitmq_nodes/1,
+ fun share_dist_and_proxy_ports_map/1
+ ]
+ end.
+
+teardown_steps() ->
+ [
+ fun stop_rabbitmq_nodes/1
+ ].
+
+setup_steps_for_vms() ->
+ [
+ fun rabbit_ct_helpers:ensure_rabbitmqctl_cmd/1,
+ fun rabbit_ct_helpers:ensure_rabbitmqctl_app/1,
+ fun rabbit_ct_helpers:ensure_rabbitmq_plugins_cmd/1,
+ fun start_rabbitmq_nodes_on_vms/1,
+ fun maybe_cluster_nodes/1
+ ].
+
+teardown_steps_for_vms() ->
+ [
+ fun stop_rabbitmq_nodes_on_vms/1
+ ].
+
+run_make_dist(Config) ->
+ LockId = {make_dist, self()},
+ global:set_lock(LockId, [node()]),
+ case os:getenv("SKIP_MAKE_TEST_DIST") of
+ false ->
+ SrcDir = ?config(current_srcdir, Config),
+ case rabbit_ct_helpers:make(Config, SrcDir, ["test-dist"]) of
+ {ok, _} ->
+ %% The caller can set $SKIP_MAKE_TEST_DIST to manually skip
+ %% this step, which can be time consuming. We also use this
+ %% variable to record the fact that we already went through
+ %% it, so redundant calls are avoided.
+ os:putenv("SKIP_MAKE_TEST_DIST", "true"),
+ global:del_lock(LockId, [node()]),
+ Config;
+ _ ->
+ global:del_lock(LockId, [node()]),
+ {skip, "Failed to run \"make test-dist\""}
+ end;
+ _ ->
+ global:del_lock(LockId, [node()]),
+ ct:pal(?LOW_IMPORTANCE, "(skip `$MAKE test-dist`)", []),
+ Config
+ end.
+
+set_lager_flood_limit(Config) ->
+ rabbit_ct_helpers:merge_app_env(Config,
+ {lager, [{error_logger_hwm, 10000}]}).
+
+start_rabbitmq_nodes_on_vms(Config) ->
+ ConfigsPerVM = configs_per_vm(Config),
+ start_rabbitmq_nodes_on_vms(Config, ConfigsPerVM, []).
+
+start_rabbitmq_nodes_on_vms(Config, [{Node, C} | Rest], NodeConfigsList) ->
+ Config1 = rabbit_ct_helpers:set_config(Config, {rmq_nodes_clustered, false}),
+ Ret = rabbit_ct_vm_helpers:rpc(Config1,
+ Node,
+ ?MODULE,
+ start_rabbitmq_nodes,
+ [C]),
+ case Ret of
+ {skip, _} = Error ->
+ Error;
+ _ ->
+ NodeConfigs = get_node_configs(Ret),
+ start_rabbitmq_nodes_on_vms(Config, Rest,
+ [NodeConfigs | NodeConfigsList])
+ end;
+start_rabbitmq_nodes_on_vms(Config, [], NodeConfigsList) ->
+ merge_node_configs(Config, lists:reverse(NodeConfigsList)).
+
+start_rabbitmq_nodes(Config) ->
+ Config0 = rabbit_ct_helpers:set_config(Config, [
+ {rmq_username, list_to_binary(?DEFAULT_USER)},
+ {rmq_password, list_to_binary(?DEFAULT_USER)},
+ {rmq_vhost, <<"/">>},
+ {rmq_channel_max, 0}]),
+ Config1 = case rabbit_ct_helpers:get_config(Config0, rmq_hostname) of
+ undefined ->
+ rabbit_ct_helpers:set_config(
+ Config0, {rmq_hostname, "localhost"});
+ _ ->
+ Config0
+ end,
+ NodesCount = get_nodes_count(Config1),
+ Clustered0 = rabbit_ct_helpers:get_config(Config1, rmq_nodes_clustered),
+ Clustered = case Clustered0 of
+ undefined -> true;
+ C when is_boolean(C) -> C
+ end,
+ Master = self(),
+ Starters = [
+ spawn_link(fun() -> start_rabbitmq_node(Master, Config1, [], I) end)
+ || I <- lists:seq(0, NodesCount - 1)
+ ],
+ wait_for_rabbitmq_nodes(Config1, Starters, [], Clustered).
+
+get_nodes_count(Config) ->
+ NodesCount = rabbit_ct_helpers:get_config(Config, rmq_nodes_count),
+ case NodesCount of
+ undefined -> 1;
+ N when is_integer(N) andalso N >= 1 -> N;
+ L when is_list(L) andalso length(L) >= 1 -> length(L)
+ end.
+
+set_nodes_count(Config, NodesCount) ->
+ rabbit_ct_helpers:set_config(Config, {rmq_nodes_count, NodesCount}).
+
+configs_per_vm(Config) ->
+ CTPeers = rabbit_ct_vm_helpers:get_ct_peers(Config),
+ NodesCount = get_nodes_count(Config),
+ InstanceCount = length(CTPeers),
+ NodesPerVM = NodesCount div InstanceCount,
+ Remaining = NodesCount rem InstanceCount,
+ configs_per_vm(CTPeers, Config, [], NodesPerVM, Remaining).
+
+configs_per_vm([CTPeer | Rest], Config, ConfigsPerVM, NodesPerVM, Remaining) ->
+ Hostname = rabbit_ct_helpers:nodename_to_hostname(CTPeer),
+ Config0 = rabbit_ct_helpers:set_config(Config, {rmq_hostname, Hostname}),
+ NodesCount = if
+ Remaining > 0 -> NodesPerVM + 1;
+ true -> NodesPerVM
+ end,
+ if
+ NodesCount > 0 ->
+ Config1 = set_nodes_count(Config0, NodesCount),
+ configs_per_vm(Rest, Config, [{CTPeer, Config1} | ConfigsPerVM],
+ NodesPerVM, Remaining - 1);
+ true ->
+ configs_per_vm(Rest, Config, ConfigsPerVM,
+ NodesPerVM, Remaining)
+ end;
+configs_per_vm([], _, ConfigsPerVM, _, _) ->
+ lists:reverse(ConfigsPerVM).
+
+merge_node_configs(Config, NodeConfigsList) ->
+ merge_node_configs(Config, NodeConfigsList, []).
+
+merge_node_configs(Config, [], MergedNodeConfigs) ->
+ rabbit_ct_helpers:set_config(Config, {rmq_nodes, MergedNodeConfigs});
+merge_node_configs(Config, NodeConfigsList, MergedNodeConfigs) ->
+ HeadsAndTails = [{H, T} || [H | T] <- NodeConfigsList],
+ Heads = [H || {H, _} <- HeadsAndTails],
+ Tails = [T || {_, T} <- HeadsAndTails],
+ merge_node_configs(Config, Tails, MergedNodeConfigs ++ Heads).
+
+wait_for_rabbitmq_nodes(Config, [], NodeConfigs, Clustered) ->
+ NodeConfigs1 = [NC || {_, NC} <- lists:keysort(1, NodeConfigs)],
+ Config1 = rabbit_ct_helpers:set_config(Config, {rmq_nodes, NodeConfigs1}),
+ if
+ Clustered -> cluster_nodes(Config1);
+ true -> Config1
+ end;
+wait_for_rabbitmq_nodes(Config, Starting, NodeConfigs, Clustered) ->
+ receive
+ {_, {skip, _} = Error} ->
+ NodeConfigs1 = [NC || {_, NC} <- NodeConfigs],
+ Config1 = rabbit_ct_helpers:set_config(Config,
+ {rmq_nodes, NodeConfigs1}),
+ stop_rabbitmq_nodes(Config1),
+ Error;
+ {Pid, I, NodeConfig} ->
+ wait_for_rabbitmq_nodes(Config, Starting -- [Pid],
+ [{I, NodeConfig} | NodeConfigs], Clustered)
+ end.
+
+%% To start a RabbitMQ node, we need to:
+%% 1. Pick TCP port numbers
+%% 2. Generate a node name
+%% 3. Write a configuration file
+%% 4. Start the node
+%%
+%% If this fails (usually because the node name is taken or a TCP port
+%% is already in use), we start again with another set of TCP ports. The
+%% node name is derived from the AMQP TCP port so a new node name is
+%% generated.
+
+start_rabbitmq_node(Master, Config, NodeConfig, I) ->
+ Attempts0 = rabbit_ct_helpers:get_config(NodeConfig, failed_boot_attempts),
+ Attempts = case Attempts0 of
+ undefined -> 0;
+ N -> N
+ end,
+ NodeConfig1 = init_tcp_port_numbers(Config, NodeConfig, I),
+ NodeConfig2 = init_nodename(Config, NodeConfig1, I),
+ NodeConfig3 = init_config_filename(Config, NodeConfig2, I),
+ Steps = [
+ fun write_config_file/3,
+ fun do_start_rabbitmq_node/3
+ ],
+ case run_node_steps(Config, NodeConfig3, I, Steps) of
+ {skip, _} = Error
+ when Attempts >= ?NODE_START_ATTEMPTS ->
+ %% It's unlikely we'll ever succeed to start RabbitMQ.
+ Master ! {self(), Error},
+ unlink(Master);
+ {skip, _} ->
+ %% Try again with another TCP port numbers base.
+ NodeConfig4 = move_nonworking_nodedir_away(NodeConfig3),
+ NodeConfig5 = rabbit_ct_helpers:set_config(NodeConfig4,
+ {failed_boot_attempts, Attempts + 1}),
+ start_rabbitmq_node(Master, Config, NodeConfig5, I);
+ NodeConfig4 ->
+ Master ! {self(), I, NodeConfig4},
+ unlink(Master)
+ end.
+
+run_node_steps(Config, NodeConfig, I, [Step | Rest]) ->
+ case Step(Config, NodeConfig, I) of
+ {skip, _} = Error -> Error;
+ NodeConfig1 -> run_node_steps(Config, NodeConfig1, I, Rest)
+ end;
+run_node_steps(_, NodeConfig, _, []) ->
+ NodeConfig.
+
+init_tcp_port_numbers(Config, NodeConfig, I) ->
+ %% If there is no TCP port numbers base previously calculated,
+ %% use the TCP port 21000. If a base was previously calculated,
+ %% increment it by the number of TCP ports we may open.
+ %%
+ %% Port 21000 is an arbitrary choice. We don't want to use the
+ %% default AMQP port of 5672, so that other AMQP clients on the same
+ %% host do not accidentally use the testsuite broker. There seems to
+ %% be no registered service around this port in /etc/services, and it
+ %% should be far enough away from the default ephemeral TCP port
+ %% range.
+ ExtraPorts = case rabbit_ct_helpers:get_config(Config, rmq_extra_tcp_ports) of
+ undefined -> [];
+ EP when is_list(EP) -> EP
+ end,
+ PortsCount = length(?TCP_PORTS_LIST) + length(ExtraPorts),
+ Base = case rabbit_ct_helpers:get_config(NodeConfig, tcp_ports_base) of
+ undefined -> tcp_port_base_for_broker(Config, I, PortsCount);
+ P -> P + PortsCount
+ end,
+ NodeConfig1 = rabbit_ct_helpers:set_config(NodeConfig,
+ {tcp_ports_base, Base}),
+ %% Now, compute all TCP port numbers from this base.
+ {NodeConfig2, _} = lists:foldl(
+ fun(PortName, {NewConfig, NextPort}) ->
+ {
+ rabbit_ct_helpers:set_config(NewConfig, {PortName, NextPort}),
+ NextPort + 1
+ }
+ end,
+ {NodeConfig1, Base}, ?TCP_PORTS_LIST ++ ExtraPorts),
+ %% Finally, update the RabbitMQ configuration with the computed TCP
+ %% port numbers. Extra TCP ports are not added automatically to the
+ %% configuration.
+ update_tcp_ports_in_rmq_config(NodeConfig2, ?TCP_PORTS_LIST).
+
+tcp_port_base_for_broker(Config, I, PortsCount) ->
+ Base = case rabbit_ct_helpers:get_config(Config, tcp_ports_base) of
+ undefined ->
+ ?TCP_PORTS_BASE;
+ {skip_n_nodes, N} ->
+ tcp_port_base_for_broker1(?TCP_PORTS_BASE, N, PortsCount);
+ B ->
+ B
+ end,
+ tcp_port_base_for_broker1(Base, I, PortsCount).
+
+tcp_port_base_for_broker1(Base, I, PortsCount) ->
+ Base + I * PortsCount * ?NODE_START_ATTEMPTS.
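+
+%% For example, with the 15 port names in ?TCP_PORTS_LIST and no extra ports
+%% (PortsCount = 15), node 0 gets the default base 21000 and node 1 gets
+%% 21000 + 1 * 15 * 10 = 21150. Each failed boot attempt shifts a node's base
+%% by PortsCount, so ?NODE_START_ATTEMPTS (10) attempts fit within a node's
+%% 150-port window without overlapping the next node's range.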
+
+update_tcp_ports_in_rmq_config(NodeConfig, [tcp_port_amqp = Key | Rest]) ->
+ NodeConfig1 = rabbit_ct_helpers:merge_app_env(NodeConfig,
+ {rabbit, [{tcp_listeners, [?config(Key, NodeConfig)]}]}),
+ update_tcp_ports_in_rmq_config(NodeConfig1, Rest);
+update_tcp_ports_in_rmq_config(NodeConfig, [tcp_port_amqp_tls = Key | Rest]) ->
+ NodeConfig1 = rabbit_ct_helpers:merge_app_env(NodeConfig,
+ {rabbit, [{ssl_listeners, [?config(Key, NodeConfig)]}]}),
+ update_tcp_ports_in_rmq_config(NodeConfig1, Rest);
+update_tcp_ports_in_rmq_config(NodeConfig, [tcp_port_mgmt = Key | Rest]) ->
+ NodeConfig1 = rabbit_ct_helpers:merge_app_env(NodeConfig,
+ {rabbitmq_management, [{tcp_config, [{port, ?config(Key, NodeConfig)}]}]}),
+ update_tcp_ports_in_rmq_config(NodeConfig1, Rest);
+update_tcp_ports_in_rmq_config(NodeConfig, [tcp_port_mqtt = Key | Rest]) ->
+ NodeConfig1 = rabbit_ct_helpers:merge_app_env(NodeConfig,
+ {rabbitmq_mqtt, [{tcp_listeners, [?config(Key, NodeConfig)]}]}),
+ update_tcp_ports_in_rmq_config(NodeConfig1, Rest);
+update_tcp_ports_in_rmq_config(NodeConfig, [tcp_port_mqtt_tls = Key | Rest]) ->
+ NodeConfig1 = rabbit_ct_helpers:merge_app_env(NodeConfig,
+ {rabbitmq_mqtt, [{ssl_listeners, [?config(Key, NodeConfig)]}]}),
+ update_tcp_ports_in_rmq_config(NodeConfig1, Rest);
+update_tcp_ports_in_rmq_config(NodeConfig, [tcp_port_web_mqtt = Key | Rest]) ->
+ NodeConfig1 = rabbit_ct_helpers:merge_app_env(NodeConfig,
+ {rabbitmq_web_mqtt, [{tcp_config, [{port, ?config(Key, NodeConfig)}]}]}),
+ update_tcp_ports_in_rmq_config(NodeConfig1, Rest);
+update_tcp_ports_in_rmq_config(NodeConfig, [tcp_port_web_stomp = Key | Rest]) ->
+ NodeConfig1 = rabbit_ct_helpers:merge_app_env(NodeConfig,
+ {rabbitmq_web_stomp, [{tcp_config, [{port, ?config(Key, NodeConfig)}]}]}),
+ update_tcp_ports_in_rmq_config(NodeConfig1, Rest);
+update_tcp_ports_in_rmq_config(NodeConfig, [tcp_port_web_stomp_tls | Rest]) ->
+ %% Skip this one: configuring it requires more than just a port number.
+ update_tcp_ports_in_rmq_config(NodeConfig, Rest);
+update_tcp_ports_in_rmq_config(NodeConfig, [tcp_port_stomp = Key | Rest]) ->
+ NodeConfig1 = rabbit_ct_helpers:merge_app_env(NodeConfig,
+ {rabbitmq_stomp, [{tcp_listeners, [?config(Key, NodeConfig)]}]}),
+ update_tcp_ports_in_rmq_config(NodeConfig1, Rest);
+update_tcp_ports_in_rmq_config(NodeConfig, [tcp_port_stomp_tls = Key | Rest]) ->
+ NodeConfig1 = rabbit_ct_helpers:merge_app_env(NodeConfig,
+ {rabbitmq_stomp, [{ssl_listeners, [?config(Key, NodeConfig)]}]}),
+ update_tcp_ports_in_rmq_config(NodeConfig1, Rest);
+update_tcp_ports_in_rmq_config(NodeConfig, [tcp_port_stream = Key | Rest]) ->
+ NodeConfig1 = rabbit_ct_helpers:merge_app_env(NodeConfig,
+ {rabbitmq_stream, [{tcp_listeners, [?config(Key, NodeConfig)]}]}),
+ update_tcp_ports_in_rmq_config(NodeConfig1, Rest);
+update_tcp_ports_in_rmq_config(NodeConfig, [tcp_port_stream_tls = Key | Rest]) ->
+ NodeConfig1 = rabbit_ct_helpers:merge_app_env(NodeConfig,
+ {rabbitmq_stream, [{ssl_listeners, [?config(Key, NodeConfig)]}]}),
+ update_tcp_ports_in_rmq_config(NodeConfig1, Rest);
+update_tcp_ports_in_rmq_config(NodeConfig, [tcp_port_erlang_dist | Rest]) ->
+ %% The Erlang distribution port doesn't appear in the configuration file.
+ update_tcp_ports_in_rmq_config(NodeConfig, Rest);
+update_tcp_ports_in_rmq_config(NodeConfig, [tcp_port_erlang_dist_proxy | Rest]) ->
+ %% inet_proxy_dist port doesn't appear in the configuration file.
+ update_tcp_ports_in_rmq_config(NodeConfig, Rest);
+update_tcp_ports_in_rmq_config(NodeConfig, [tcp_port_prometheus = Key | Rest]) ->
+ NodeConfig1 = rabbit_ct_helpers:merge_app_env(NodeConfig,
+ {rabbitmq_prometheus, [{tcp_config, [{port, ?config(Key, NodeConfig)}]}]}),
+ update_tcp_ports_in_rmq_config(NodeConfig1, Rest);
+update_tcp_ports_in_rmq_config(NodeConfig, []) ->
+ NodeConfig.
+
+init_nodename(Config, NodeConfig, I) ->
+ Hostname = ?config(rmq_hostname, Config),
+ Nodename0 = case rabbit_ct_helpers:get_config(Config, rmq_nodes_count) of
+ NodesList when is_list(NodesList) ->
+ Name = lists:nth(I + 1, NodesList),
+ rabbit_misc:format("~s@~s", [Name, Hostname]);
+ _ ->
+ Base = ?config(tcp_ports_base, NodeConfig),
+ Suffix0 = rabbit_ct_helpers:get_config(Config,
+ rmq_nodename_suffix),
+ Suffix = case Suffix0 of
+ undefined -> "";
+ _ when is_atom(Suffix0) -> [$- | atom_to_list(Suffix0)];
+ _ -> [$- | Suffix0]
+ end,
+ rabbit_misc:format("rmq-ct~s-~b-~b@~s",
+ [Suffix, I + 1, Base, Hostname])
+ end,
+ Nodename = list_to_atom(Nodename0),
+ rabbit_ct_helpers:set_config(NodeConfig, [
+ {nodename, Nodename},
+ {initial_nodename, Nodename}
+ ]).
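+
+%% For example, the first node (I = 0) started from TCP port base 21000 on
+%% "localhost" is named 'rmq-ct-1-21000@localhost' when no rmq_nodename_suffix
+%% is configured, or 'rmq-ct-my_suite-1-21000@localhost' with a suffix of
+%% my_suite.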
+
+init_config_filename(Config, NodeConfig, _I) ->
+ PrivDir = ?config(priv_dir, Config),
+ Nodename = ?config(nodename, NodeConfig),
+ ConfigDir = filename:join(PrivDir, Nodename),
+ ConfigFile = filename:join(ConfigDir, Nodename),
+ rabbit_ct_helpers:set_config(NodeConfig,
+ {erlang_node_config_filename, ConfigFile}).
+
+write_config_file(Config, NodeConfig, _I) ->
+ %% Prepare a RabbitMQ configuration.
+ ErlangConfigBase = ?config(erlang_node_config, Config),
+ ErlangConfigOverlay = ?config(erlang_node_config, NodeConfig),
+ ErlangConfig = rabbit_ct_helpers:merge_app_env_in_erlconf(ErlangConfigBase,
+ ErlangConfigOverlay),
+ ConfigFile = ?config(erlang_node_config_filename, NodeConfig),
+ ConfigDir = filename:dirname(ConfigFile),
+ Ret1 = file:make_dir(ConfigDir),
+ Ret2 = file:write_file(ConfigFile ++ ".config",
+ rabbit_ct_helpers:convert_to_unicode_binary(
+ io_lib:format("% vim:ft=erlang:~n~n~p.~n", [ErlangConfig]))),
+ case {Ret1, Ret2} of
+ {ok, ok} ->
+ NodeConfig;
+ {{error, eexist}, ok} ->
+ NodeConfig;
+ {{error, Reason}, _} when Reason =/= eexist ->
+ {skip, "Failed to create Erlang node config directory \"" ++
+ ConfigDir ++ "\": " ++ file:format_error(Reason)};
+ {_, {error, Reason}} ->
+ {skip, "Failed to create Erlang node config file \"" ++
+ ConfigFile ++ "\": " ++ file:format_error(Reason)}
+ end.
+
+do_start_rabbitmq_node(Config, NodeConfig, I) ->
+ WithPlugins0 = rabbit_ct_helpers:get_config(Config,
+ broker_with_plugins),
+ WithPlugins = case is_list(WithPlugins0) of
+ true -> lists:nth(I + 1, WithPlugins0);
+ false -> WithPlugins0
+ end,
+ CanUseSecondary = (I + 1) rem 2 =:= 0,
+ UseSecondaryUmbrella = case ?config(secondary_umbrella, Config) of
+ false -> false;
+ _ -> CanUseSecondary
+ end,
+ SrcDir = case WithPlugins of
+ false when UseSecondaryUmbrella -> ?config(secondary_rabbit_srcdir,
+ Config);
+ false -> ?config(rabbit_srcdir, Config);
+ _ when UseSecondaryUmbrella -> ?config(secondary_current_srcdir,
+ Config);
+ _ -> ?config(current_srcdir, Config)
+ end,
+ PrivDir = ?config(priv_dir, Config),
+ Nodename = ?config(nodename, NodeConfig),
+ InitialNodename = ?config(initial_nodename, NodeConfig),
+ DistPort = ?config(tcp_port_erlang_dist, NodeConfig),
+ ConfigFile = ?config(erlang_node_config_filename, NodeConfig),
+ %% Use inet_proxy_dist to handle distribution. This is used by the
+ %% partitions testsuite.
+ DistMod = rabbit_ct_helpers:get_config(Config, erlang_dist_module),
+ StartArgs0 = case DistMod of
+ undefined ->
+ "";
+ _ ->
+ DistModS = atom_to_list(DistMod),
+ DistModPath = filename:absname(
+ filename:dirname(code:where_is_file(DistModS ++ ".beam"))),
+ DistArg = re:replace(DistModS, "_dist$", "", [{return, list}]),
+ "-pa \"" ++ DistModPath ++ "\" -proto_dist " ++ DistArg
+ end,
+ %% Set the net_ticktime.
+ CurrentTicktime = case net_kernel:get_net_ticktime() of
+ {ongoing_change_to, T} -> T;
+ T -> T
+ end,
+ StartArgs1 = case rabbit_ct_helpers:get_config(Config, net_ticktime) of
+ undefined ->
+ case CurrentTicktime of
+ 60 -> ok;
+ _ -> net_kernel:set_net_ticktime(60)
+ end,
+ StartArgs0;
+ Ticktime ->
+ case CurrentTicktime of
+ Ticktime -> ok;
+ _ -> net_kernel:set_net_ticktime(Ticktime)
+ end,
+ StartArgs0 ++ " -kernel net_ticktime " ++ integer_to_list(Ticktime)
+ end,
+ ExtraArgs0 = [],
+ ExtraArgs1 = case rabbit_ct_helpers:get_config(Config, rmq_plugins_dir) of
+ undefined ->
+ ExtraArgs0;
+ ExtraPluginsDir ->
+ [{"EXTRA_PLUGINS_DIR=~s", [ExtraPluginsDir]}
+ | ExtraArgs0]
+ end,
+ StartWithPluginsDisabled = rabbit_ct_helpers:get_config(
+ Config, start_rmq_with_plugins_disabled),
+ ExtraArgs2 = case StartWithPluginsDisabled of
+ true -> ["LEAVE_PLUGINS_DISABLED=yes" | ExtraArgs1];
+ _ -> ExtraArgs1
+ end,
+ KeepPidFile = rabbit_ct_helpers:get_config(
+ Config, keep_pid_file_on_exit),
+ ExtraArgs3 = case KeepPidFile of
+ true -> ["RABBITMQ_KEEP_PID_FILE_ON_EXIT=yes" | ExtraArgs2];
+ _ -> ExtraArgs2
+ end,
+ ExtraArgs4 = case WithPlugins of
+ false -> ExtraArgs3;
+ _ -> ["NOBUILD=1" | ExtraArgs3]
+ end,
+ ExtraArgs = case UseSecondaryUmbrella of
+ true ->
+ DepsDir = ?config(erlang_mk_depsdir, Config),
+ ErlLibs = os:getenv("ERL_LIBS"),
+ SecDepsDir = ?config(secondary_erlang_mk_depsdir,
+ Config),
+ SecErlLibs = lists:flatten(
+ string:replace(ErlLibs,
+ DepsDir,
+ SecDepsDir,
+ all)),
+ SecNewScriptsDir = filename:join([SecDepsDir,
+ SrcDir,
+ "sbin"]),
+ SecOldScriptsDir = filename:join([SecDepsDir,
+ "rabbit",
+ "scripts"]),
+ SecNewScriptsDirExists = filelib:is_dir(
+ SecNewScriptsDir),
+ SecScriptsDir = case SecNewScriptsDirExists of
+ true -> SecNewScriptsDir;
+ false -> SecOldScriptsDir
+ end,
+ [{"DEPS_DIR=~s", [SecDepsDir]},
+ {"REBAR_DEPS_DIR=~s", [SecDepsDir]},
+ {"ERL_LIBS=~s", [SecErlLibs]},
+ {"RABBITMQ_SCRIPTS_DIR=~s", [SecScriptsDir]},
+ {"RABBITMQ_SERVER=~s/rabbitmq-server", [SecScriptsDir]},
+ {"RABBITMQCTL=~s/rabbitmqctl", [SecScriptsDir]},
+ {"RABBITMQ_PLUGINS=~s/rabbitmq-plugins", [SecScriptsDir]}
+ | ExtraArgs4];
+ false ->
+ ExtraArgs4
+ end,
+ MakeVars = [
+ {"RABBITMQ_NODENAME=~s", [Nodename]},
+ {"RABBITMQ_NODENAME_FOR_PATHS=~s", [InitialNodename]},
+ {"RABBITMQ_DIST_PORT=~b", [DistPort]},
+ {"RABBITMQ_CONFIG_FILE=~s", [ConfigFile]},
+ {"RABBITMQ_SERVER_START_ARGS=~s", [StartArgs1]},
+ "RABBITMQ_SERVER_ADDITIONAL_ERL_ARGS=+S 2 +sbwt very_short +A 24",
+ "RABBITMQ_LOG=debug",
+ "RMQCTL_WAIT_TIMEOUT=180",
+ {"TEST_TMPDIR=~s", [PrivDir]}
+ | ExtraArgs],
+ Cmd = ["start-background-broker" | MakeVars],
+ case rabbit_ct_helpers:get_config(Config, rabbitmq_run_cmd) of
+ undefined ->
+ case rabbit_ct_helpers:make(Config, SrcDir, Cmd) of
+ {ok, _} ->
+ NodeConfig1 = rabbit_ct_helpers:set_config(
+ NodeConfig,
+ [{effective_srcdir, SrcDir},
+ {make_vars_for_node_startup, MakeVars}]),
+ query_node(Config, NodeConfig1);
+ _ ->
+ AbortCmd = ["stop-node" | MakeVars],
+ _ = rabbit_ct_helpers:make(Config, SrcDir, AbortCmd),
+ {skip, "Failed to initialize RabbitMQ"}
+ end;
+ RunCmd ->
+ UseSecondary = CanUseSecondary andalso
+ rabbit_ct_helpers:get_config(Config, rabbitmq_run_secondary_cmd) =/= undefined,
+ EnabledPluginsMakeVars = case {UseSecondary, WithPlugins} of
+ {_, false} ->
+ ["RABBITMQ_ENABLED_PLUGINS=rabbit"];
+ {true, _} ->
+ [{"RABBITMQ_ENABLED_PLUGINS=~s", [filename:basename(SrcDir)]}];
+ _ ->
+ []
+ end,
+ RmqRun = case CanUseSecondary of
+ false -> RunCmd;
+ _ -> rabbit_ct_helpers:get_config(Config, rabbitmq_run_secondary_cmd, RunCmd)
+ end,
+ case rabbit_ct_helpers:exec([RmqRun, "-C", SrcDir] ++ EnabledPluginsMakeVars ++ Cmd) of
+ {ok, _} ->
+ NodeConfig1 = rabbit_ct_helpers:set_config(
+ NodeConfig,
+ [{make_vars_for_node_startup, MakeVars}]),
+ query_node(Config, NodeConfig1);
+ _ ->
+ AbortCmd = ["stop-node" | MakeVars],
+ _ = rabbit_ct_helpers:exec([RunCmd | AbortCmd]),
+ {skip, "Failed to initialize RabbitMQ"}
+ end
+ end.
+
+query_node(Config, NodeConfig) ->
+ Nodename = ?config(nodename, NodeConfig),
+ PidFile = rpc(Config, Nodename, os, getenv, ["RABBITMQ_PID_FILE"]),
+ MnesiaDir = rpc(Config, Nodename, mnesia, system_info, [directory]),
+ {ok, PluginsDir} = rpc(Config, Nodename, application, get_env,
+ [rabbit, plugins_dir]),
+ {ok, EnabledPluginsFile} = rpc(Config, Nodename, application, get_env,
+ [rabbit, enabled_plugins_file]),
+ Vars0 = [{pid_file, PidFile},
+ {mnesia_dir, MnesiaDir},
+ {plugins_dir, PluginsDir},
+ {enabled_plugins_file, EnabledPluginsFile}],
+ Vars = try
+ EnabledFeatureFlagsFile = rpc(Config, Nodename,
+ rabbit_feature_flags,
+ enabled_feature_flags_list_file,
+ []),
+ [{enabled_feature_flags_list_file, EnabledFeatureFlagsFile}
+ | Vars0]
+ catch
+ exit:{undef, [{rabbit_feature_flags, _, _, _} | _]} ->
+ %% This happens if the queried node is a RabbitMQ
+ %% 3.7.x node. In this case, we can ignore the error
+ %% and leave the `enabled_feature_flags_list_file`
+ %% config variable unset.
+ ct:pal("NO RABBITMQ_FEATURE_FLAGS_FILE"),
+ Vars0
+ end,
+ rabbit_ct_helpers:set_config(NodeConfig, Vars).
+
+maybe_cluster_nodes(Config) ->
+ Clustered0 = rabbit_ct_helpers:get_config(Config, rmq_nodes_clustered),
+ Clustered = case Clustered0 of
+ undefined -> true;
+ C when is_boolean(C) -> C
+ end,
+ if
+ Clustered -> cluster_nodes(Config);
+ true -> Config
+ end.
+
+cluster_nodes(Config) ->
+ [NodeConfig1 | NodeConfigs] = get_node_configs(Config),
+ cluster_nodes1(Config, NodeConfig1, NodeConfigs).
+
+cluster_nodes(Config, Nodes) ->
+ [NodeConfig1 | NodeConfigs] = [
+ get_node_config(Config, Node) || Node <- Nodes],
+ cluster_nodes1(Config, NodeConfig1, NodeConfigs).
+
+cluster_nodes1(Config, NodeConfig1, [NodeConfig2 | Rest]) ->
+ case cluster_nodes(Config, NodeConfig2, NodeConfig1) of
+ ok -> cluster_nodes1(Config, NodeConfig1, Rest);
+ Error -> Error
+ end;
+cluster_nodes1(Config, _, []) ->
+ Config.
+
+cluster_nodes(Config, NodeConfig1, NodeConfig2) ->
+ Nodename1 = ?config(nodename, NodeConfig1),
+ Nodename2 = ?config(nodename, NodeConfig2),
+ Cmds = [
+ ["stop_app"],
+ ["join_cluster", Nodename2],
+ ["start_app"]
+ ],
+ cluster_nodes1(Config, Nodename1, Nodename2, Cmds).
+
+cluster_nodes1(Config, Nodename1, Nodename2, [Cmd | Rest]) ->
+ case rabbitmqctl(Config, Nodename1, Cmd) of
+ {ok, _} -> cluster_nodes1(Config, Nodename1, Nodename2, Rest);
+ _ -> {skip,
+ "Failed to cluster nodes \"" ++ atom_to_list(Nodename1) ++
+ "\" and \"" ++ atom_to_list(Nodename2) ++ "\""}
+ end;
+cluster_nodes1(_, _, _, []) ->
+ ok.
+
+handle_nodes_in_parallel(NodeConfigs, Fun) ->
+ T0 = erlang:timestamp(),
+ Parent = self(),
+ Procs = [
+ begin
+ timer:sleep(rand:uniform(1000)),
+ spawn_link(fun() ->
+ T1 = erlang:timestamp(),
+ Ret = Fun(NodeConfig),
+ T2 = erlang:timestamp(),
+ ct:pal(
+ ?LOW_IMPORTANCE,
+ "Time to run ~p for node ~s: ~b µs",
+ [Fun,
+ ?config(nodename, NodeConfig),
+ timer:now_diff(T2, T1)]),
+ Parent ! {parallel_handling_ret,
+ self(),
+ NodeConfig,
+ Ret}
+ end) end
+ || NodeConfig <- NodeConfigs
+ ],
+ wait_for_node_handling(Procs, Fun, T0, []).
+
+wait_for_node_handling([], Fun, T0, Results) ->
+ T3 = erlang:timestamp(),
+ ct:pal(
+ ?LOW_IMPORTANCE,
+ "Time to run ~p for all nodes: ~b µs",
+ [Fun, timer:now_diff(T3, T0)]),
+ Results;
+wait_for_node_handling(Procs, Fun, T0, Results) ->
+ receive
+ {parallel_handling_ret, Proc, NodeConfig, Ret} ->
+ Results1 = [{NodeConfig, Ret} | Results],
+ wait_for_node_handling(Procs -- [Proc], Fun, T0, Results1)
+ end.
+
+move_nonworking_nodedir_away(NodeConfig) ->
+ ConfigFile = ?config(erlang_node_config_filename, NodeConfig),
+ ConfigDir = filename:dirname(ConfigFile),
+ case os:getenv("RABBITMQ_CT_HELPERS_DELETE_UNUSED_NODES") =/= false
+ andalso ?OTP_RELEASE >= 23 of
+ true ->
+ file:del_dir_r(ConfigDir);
+ _ ->
+ NewName = filename:join(
+ filename:dirname(ConfigDir),
+ "_unused_nodedir_" ++ filename:basename(ConfigDir)),
+ file:rename(ConfigDir, NewName)
+ end,
+ lists:keydelete(erlang_node_config_filename, 1, NodeConfig).
+
+share_dist_and_proxy_ports_map(Config) ->
+ Map = [
+ {
+ ?config(tcp_port_erlang_dist, NodeConfig),
+ ?config(tcp_port_erlang_dist_proxy, NodeConfig)
+ } || NodeConfig <- get_node_configs(Config)],
+ rpc_all(Config,
+ application, set_env, [kernel, dist_and_proxy_ports_map, Map]),
+ Config.
+
+rewrite_node_config_file(Config, Node) ->
+ NodeConfig = get_node_config(Config, Node),
+ I = if
+ is_integer(Node) -> Node;
+ true -> nodename_to_index(Config, Node)
+ end,
+ %% Keep copies of the previous config file.
+ ConfigFile = ?config(erlang_node_config_filename, NodeConfig),
+ case rotate_config_file(ConfigFile) of
+ ok ->
+ ok;
+ {error, Reason} ->
+ ct:pal("Failed to rotate config file ~s: ~s",
+ [ConfigFile, file:format_error(Reason)])
+ end,
+ %% Now we can write the new file. The caller is responsible for
+ %% restarting the broker/node.
+ case write_config_file(Config, NodeConfig, I) of
+ {skip, Error} -> {error, Error};
+ _NodeConfig1 -> ok
+ end.
+
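+%% The previous "<file>.config" becomes "<file>.config.1"; existing numbered
+%% copies are shifted out of the way first ("<file>.config.1" becomes
+%% "<file>.config.2", and so on), so the highest number is the oldest copy.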
+rotate_config_file(ConfigFile) ->
+ rotate_config_file(ConfigFile, ConfigFile ++ ".config", 1).
+
+rotate_config_file(ConfigFile, OldName, Ext) ->
+ NewName = rabbit_misc:format("~s.config.~b", [ConfigFile, Ext]),
+ case filelib:is_file(NewName) of
+ true ->
+ case rotate_config_file(ConfigFile, NewName, Ext + 1) of
+ ok -> file:rename(OldName, NewName);
+ Error -> Error
+ end;
+ false ->
+ file:rename(OldName, NewName)
+ end.
+
+stop_rabbitmq_nodes_on_vms(Config) ->
+ NodeConfigs = get_node_configs(Config),
+ NodeConfigsPerCTPeer = [
+ {
+ rabbit_ct_helpers:nodename_to_hostname(CTPeer),
+ CTPeer,
+ []
+ }
+ || CTPeer <-
+ rabbit_ct_vm_helpers:get_ct_peers(Config)],
+ stop_rabbitmq_nodes_on_vms(Config, NodeConfigs, NodeConfigsPerCTPeer).
+
+stop_rabbitmq_nodes_on_vms(Config, [NodeConfig | Rest],
+ NodeConfigsPerCTPeer) ->
+ RabbitMQNode = ?config(nodename, NodeConfig),
+ Hostname = rabbit_ct_helpers:nodename_to_hostname(RabbitMQNode),
+ {H, N, NodeConfigs} = lists:keyfind(Hostname, 1, NodeConfigsPerCTPeer),
+ NewEntry = {H, N, [NodeConfig | NodeConfigs]},
+ NodeConfigsPerCTPeer1 = lists:keystore(Hostname, 1,
+ NodeConfigsPerCTPeer,
+ NewEntry),
+ stop_rabbitmq_nodes_on_vms(Config, Rest, NodeConfigsPerCTPeer1);
+stop_rabbitmq_nodes_on_vms(Config, [], NodeConfigsPerCTPeer) ->
+ lists:foreach(
+ fun({_, CTPeer, NodeConfigs}) ->
+ Config1 = rabbit_ct_helpers:set_config(Config,
+ {rmq_nodes,
+ NodeConfigs}),
+ rabbit_ct_vm_helpers:rpc(Config1,
+ CTPeer,
+ ?MODULE,
+ stop_rabbitmq_nodes,
+ [Config1])
+ end, NodeConfigsPerCTPeer),
+ rabbit_ct_helpers:delete_config(Config, rmq_nodes).
+
+stop_rabbitmq_nodes(Config) ->
+ NodeConfigs = get_node_configs(Config),
+ _ = handle_nodes_in_parallel(
+ NodeConfigs,
+ fun(NodeConfig) ->
+ stop_rabbitmq_node(Config, NodeConfig)
+ end),
+ proplists:delete(rmq_nodes, Config).
+
+stop_rabbitmq_node(Config, NodeConfig) ->
+ SrcDir = ?config(effective_srcdir, NodeConfig),
+ InitialMakeVars = ?config(make_vars_for_node_startup, NodeConfig),
+ Nodename = ?config(nodename, NodeConfig),
+ InitialNodename = ?config(initial_nodename, NodeConfig),
+ MakeVars = InitialMakeVars ++ [
+ {"RABBITMQ_NODENAME=~s", [Nodename]},
+ {"RABBITMQ_NODENAME_FOR_PATHS=~s", [InitialNodename]}
+ ],
+ Cmd = ["stop-node" | MakeVars],
+ case rabbit_ct_helpers:get_config(Config, rabbitmq_run_cmd) of
+ undefined ->
+ rabbit_ct_helpers:make(Config, SrcDir, Cmd);
+ RunCmd ->
+ rabbit_ct_helpers:exec([RunCmd | Cmd])
+ end,
+ NodeConfig.
+
+%% -------------------------------------------------------------------
+%% Helpers for partition simulation
+%% -------------------------------------------------------------------
+
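+%% configure_dist_proxy/1 must be applied to the suite Config before the
+%% broker nodes are started: it switches the Erlang distribution to
+%% inet_tcp_proxy_dist (see do_start_rabbitmq_node/3). Once the nodes are up,
+%% block_traffic_between/2 simulates a partition between two nodes and
+%% allow_traffic_between/2 heals it.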
+configure_dist_proxy(Config) ->
+ rabbit_ct_helpers:set_config(Config,
+ {erlang_dist_module, inet_tcp_proxy_dist}).
+
+block_traffic_between(NodeA, NodeB) ->
+ ct:pal(
+ ?LOW_IMPORTANCE,
+ "Blocking traffic between ~s and ~s",
+ [NodeA, NodeB]),
+ ?assertEqual(ok, rpc:call(NodeA, inet_tcp_proxy_dist, block, [NodeB])),
+ ?assertEqual(ok, rpc:call(NodeB, inet_tcp_proxy_dist, block, [NodeA])).
+
+allow_traffic_between(NodeA, NodeB) ->
+ ct:pal(
+ ?LOW_IMPORTANCE,
+ "Unblocking traffic between ~s and ~s",
+ [NodeA, NodeB]),
+ ?assertEqual(ok, rpc:call(NodeA, inet_tcp_proxy_dist, allow, [NodeB])),
+ ?assertEqual(ok, rpc:call(NodeB, inet_tcp_proxy_dist, allow, [NodeA])).
+
+set_partition_handling_mode_globally(Config, Mode) ->
+ rpc_all(Config,
+ application, set_env, [rabbit, cluster_partition_handling, Mode]).
+
+set_partition_handling_mode(Config, Nodes, Mode) ->
+ rpc(Config, Nodes,
+ application, set_env, [rabbit, cluster_partition_handling, Mode]).
+
+%% -------------------------------------------------------------------
+%% Calls to rabbitmqctl from Erlang.
+%% -------------------------------------------------------------------
+
+control_action(Command, Node) ->
+ control_action(Command, Node, [], []).
+
+control_action(Command, Node, Args) ->
+ control_action(Command, Node, Args, []).
+
+control_action(Command, Node, Args, Opts) ->
+ rabbit_control_helper:command(Command, Node, Args, Opts).
+
+%% Use rabbitmqctl(1) instead of using the Erlang API.
+
+rabbitmqctl(Config, Node, Args) ->
+ rabbitmqctl(Config, Node, Args, infinity).
+
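+%% For example, rabbitmqctl(Config, 0, ["status"]) invokes the rabbitmqctl
+%% script as "rabbitmqctl -n <nodename of node 0> status", with the node's pid
+%% file, Mnesia directory, plugins directory and enabled-plugins file exported
+%% in the environment.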
+rabbitmqctl(Config, Node, Args, Timeout) ->
+ Rabbitmqctl = ?config(rabbitmqctl_cmd, Config),
+ NodeConfig = get_node_config(Config, Node),
+ Nodename = ?config(nodename, NodeConfig),
+ Env0 = [
+ {"RABBITMQ_SCRIPTS_DIR", filename:dirname(Rabbitmqctl)},
+ {"RABBITMQ_PID_FILE", ?config(pid_file, NodeConfig)},
+ {"RABBITMQ_MNESIA_DIR", ?config(mnesia_dir, NodeConfig)},
+ {"RABBITMQ_PLUGINS_DIR", ?config(plugins_dir, NodeConfig)},
+ {"RABBITMQ_ENABLED_PLUGINS_FILE",
+ ?config(enabled_plugins_file, NodeConfig)}
+ ],
+ Ret = rabbit_ct_helpers:get_config(
+ NodeConfig, enabled_feature_flags_list_file),
+ Env = case Ret of
+ undefined ->
+ Env0;
+ EnabledFeatureFlagsFile ->
+ Env0 ++
+ [{"RABBITMQ_FEATURE_FLAGS_FILE", EnabledFeatureFlagsFile}]
+ end,
+ Cmd = [Rabbitmqctl, "-n", Nodename | Args],
+ rabbit_ct_helpers:exec(Cmd, [{env, Env}, {timeout, Timeout}]).
+
+rabbitmqctl_list(Config, Node, Args) ->
+ {ok, StdOut} = rabbitmqctl(Config, Node, Args),
+ [<<"Timeout:", _/binary>>,
+ <<"Listing", _/binary>>
+ | Rows] = re:split(StdOut, <<"\n">>, [trim]),
+ [re:split(Row, <<"\t">>) || Row <- Rows].
+
+rabbitmq_queues(Config, Node, Args) ->
+ RabbitmqQueues = ?config(rabbitmq_queues_cmd, Config),
+ NodeConfig = rabbit_ct_broker_helpers:get_node_config(Config, Node),
+ Nodename = ?config(nodename, NodeConfig),
+ Env0 = [
+ {"RABBITMQ_SCRIPTS_DIR", filename:dirname(RabbitmqQueues)},
+ {"RABBITMQ_PID_FILE", ?config(pid_file, NodeConfig)},
+ {"RABBITMQ_MNESIA_DIR", ?config(mnesia_dir, NodeConfig)},
+ {"RABBITMQ_PLUGINS_DIR", ?config(plugins_dir, NodeConfig)},
+ {"RABBITMQ_ENABLED_PLUGINS_FILE",
+ ?config(enabled_plugins_file, NodeConfig)}
+ ],
+ Ret = rabbit_ct_helpers:get_config(
+ NodeConfig, enabled_feature_flags_list_file),
+ Env = case Ret of
+ undefined ->
+ Env0;
+ EnabledFeatureFlagsFile ->
+ Env0 ++
+ [{"RABBITMQ_FEATURE_FLAGS_FILE", EnabledFeatureFlagsFile}]
+ end,
+ Cmd = [RabbitmqQueues, "-n", Nodename | Args],
+ rabbit_ct_helpers:exec(Cmd, [{env, Env}]).
+
+%% -------------------------------------------------------------------
+%% Other helpers.
+%% -------------------------------------------------------------------
+
+get_node_configs(Config) ->
+ ?config(rmq_nodes, Config).
+
+get_node_configs(Config, Key) ->
+ NodeConfigs = get_node_configs(Config),
+ [?config(Key, NodeConfig) || NodeConfig <- NodeConfigs].
+
+get_node_config(Config, Node) when is_atom(Node) andalso Node =/= undefined ->
+ NodeConfigs = get_node_configs(Config),
+ get_node_config1(NodeConfigs, Node);
+get_node_config(Config, I) when is_integer(I) andalso I >= 0 ->
+ NodeConfigs = get_node_configs(Config),
+ lists:nth(I + 1, NodeConfigs).
+
+get_node_config1([NodeConfig | Rest], Node) ->
+ case ?config(nodename, NodeConfig) of
+ Node -> NodeConfig;
+ _ -> case ?config(initial_nodename, NodeConfig) of
+ Node -> NodeConfig;
+ _ -> get_node_config1(Rest, Node)
+ end
+ end;
+get_node_config1([], Node) ->
+ exit({unknown_node, Node}).
+
+get_node_config(Config, Node, Key) ->
+ NodeConfig = get_node_config(Config, Node),
+ ?config(Key, NodeConfig).
+
+set_node_config(Config, Node, Tuples) ->
+ NodeConfig = get_node_config(Config, Node),
+ NodeConfig1 = rabbit_ct_helpers:set_config(NodeConfig, Tuples),
+ replace_entire_node_config(Config, Node, NodeConfig1).
+
+replace_entire_node_config(Config, Node, NewNodeConfig) ->
+ NodeConfigs = get_node_configs(Config),
+ NodeConfigs1 = lists:map(
+ fun(NodeConfig) ->
+ Match = case ?config(nodename, NodeConfig) of
+ Node -> true;
+ _ -> case ?config(initial_nodename, NodeConfig) of
+ Node -> true;
+ _ -> false
+ end
+ end,
+ if
+ Match -> NewNodeConfig;
+ true -> NodeConfig
+ end
+ end, NodeConfigs),
+ rabbit_ct_helpers:set_config(Config, {rmq_nodes, NodeConfigs1}).
+
+nodename_to_index(Config, Node) ->
+ NodeConfigs = get_node_configs(Config),
+ nodename_to_index1(NodeConfigs, Node, 0).
+
+nodename_to_index1([NodeConfig | Rest], Node, I) ->
+ case ?config(nodename, NodeConfig) of
+ Node -> I;
+ _ -> case ?config(initial_nodename, NodeConfig) of
+ Node -> I;
+ _ -> nodename_to_index1(Rest, Node, I + 1)
+ end
+ end;
+nodename_to_index1([], Node, _) ->
+ exit({unknown_node, Node}).
+
+node_uri(Config, Node) ->
+ node_uri(Config, Node, []).
+
+node_uri(Config, Node, amqp) ->
+ node_uri(Config, Node, []);
+node_uri(Config, Node, management) ->
+ node_uri(Config, Node, [
+ {scheme, "http"},
+ {tcp_port_name, tcp_port_mgmt}
+ ]);
+node_uri(Config, Node, Options) ->
+ Scheme = proplists:get_value(scheme, Options, "amqp"),
+ Hostname = case proplists:get_value(use_ipaddr, Options, false) of
+ true ->
+ {ok, Hostent} = inet:gethostbyname(?config(rmq_hostname, Config)),
+ format_ipaddr_for_uri(Hostent);
+ Family when Family =:= inet orelse Family =:= inet6 ->
+ {ok, Hostent} = inet:gethostbyname(?config(rmq_hostname, Config),
+ Family),
+ format_ipaddr_for_uri(Hostent);
+ false ->
+ ?config(rmq_hostname, Config)
+ end,
+ TcpPortName = proplists:get_value(tcp_port_name, Options, tcp_port_amqp),
+ TcpPort = get_node_config(Config, Node, TcpPortName),
+ UserPass = case proplists:get_value(with_user, Options, false) of
+ true ->
+ User = proplists:get_value(user, Options, "guest"),
+ Password = proplists:get_value(password, Options, "guest"),
+ io_lib:format("~s:~s@", [User, Password]);
+ false ->
+ ""
+ end,
+ list_to_binary(
+ rabbit_misc:format("~s://~s~s:~b",
+ [Scheme, UserPass, Hostname, TcpPort])).
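+
+%% For example, assuming the default "localhost" hostname, TCP port base 21000
+%% and no failed boot attempts, node_uri(Config, 0) returns
+%% <<"amqp://localhost:21000">> and node_uri(Config, 0, management) returns
+%% <<"http://localhost:21002">>.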
+
+format_ipaddr_for_uri(
+ #hostent{h_addrtype = inet, h_addr_list = [IPAddr | _]}) ->
+ {A, B, C, D} = IPAddr,
+ io_lib:format("~b.~b.~b.~b", [A, B, C, D]);
+format_ipaddr_for_uri(
+ #hostent{h_addrtype = inet6, h_addr_list = [IPAddr | _]}) ->
+ {A, B, C, D, E, F, G, H} = IPAddr,
+ Res0 = io_lib:format(
+ "~.16b:~.16b:~.16b:~.16b:~.16b:~.16b:~.16b:~.16b",
+ [A, B, C, D, E, F, G, H]),
+ Res1 = re:replace(Res0, "(^0(:0)+$|^(0:)+|(:0)+$)|:(0:)+", "::"),
+ "[" ++ Res1 ++ "]".
+
+
+%% Virtual host management
+
+add_vhost(Config, VHost) ->
+ add_vhost(Config, 0, VHost).
+
+add_vhost(Config, Node, VHost) ->
+ add_vhost(Config, Node, VHost, <<"acting-user">>).
+
+add_vhost(Config, Node, VHost, Username) ->
+ catch rpc(Config, Node, rabbit_vhost, add, [VHost, Username]).
+
+delete_vhost(Config, VHost) ->
+ delete_vhost(Config, 0, VHost).
+
+delete_vhost(Config, Node, VHost) ->
+ delete_vhost(Config, Node, VHost, <<"acting-user">>).
+
+delete_vhost(Config, Node, VHost, Username) ->
+ catch rpc(Config, Node, rabbit_vhost, delete, [VHost, Username]).
+
+force_vhost_failure(Config, VHost) -> force_vhost_failure(Config, 0, VHost).
+
+force_vhost_failure(Config, Node, VHost) ->
+ force_vhost_failure(Config, Node, VHost, 100).
+
+force_vhost_failure(_Config, _Node, VHost, 0) ->
+ error({failed_to_force_vhost_failure, no_more_attempts_left, VHost});
+force_vhost_failure(Config, Node, VHost, Attempts) ->
+ case rpc(Config, Node, rabbit_vhost_sup_sup, is_vhost_alive, [VHost]) of
+ true ->
+ try
+ MessageStorePid = get_message_store_pid(Config, Node, VHost),
+ rpc(Config, Node,
+ erlang, exit, [MessageStorePid, force_vhost_failure]),
+ %% Give it time to fail
+ timer:sleep(300),
+ force_vhost_failure(Config, Node, VHost, Attempts - 1)
+ catch
+ %% The vhost terminated while we were checking it.
+ exit:{exception, {shutdown, _}} ->
+ timer:sleep(300),
+ force_vhost_failure(Config, Node, VHost, Attempts - 1);
+ exit:{exception,
+ {badmatch,
+ {error,
+ {vhost_supervisor_not_running, VHost}}}} ->
+ %% This badmatch may occur in get_message_store_pid/3 as a
+ %% result of `{ok, VHostSup} = rpc(...)`.
+ timer:sleep(300),
+ force_vhost_failure(Config, Node, VHost, Attempts - 1)
+ end;
+ false -> ok
+ end.
+
+set_alarm(Config, Node, file_descriptor_limit = Resource) ->
+ rpc(Config, Node, rabbit_alarm, set_alarm, [{Resource, []}]);
+set_alarm(Config, Node, memory = Resource) ->
+ rpc(Config, Node, rabbit_alarm, set_alarm, [{{resource_limit, Resource, Node}, []}]);
+set_alarm(Config, Node, disk = Resource) ->
+ rpc(Config, Node, rabbit_alarm, set_alarm, [{{resource_limit, Resource, Node}, []}]).
+
+get_alarms(Config, Node) ->
+ rpc(Config, Node, rabbit_alarm, get_alarms, []).
+
+get_local_alarms(Config, Node) ->
+ rpc(Config, Node, rabbit_alarm, get_local_alarms, []).
+
+clear_alarm(Config, Node, file_descriptor_limit = Resource) ->
+ rpc(Config, Node, rabbit_alarm, clear_alarm, [Resource]);
+clear_alarm(Config, Node, memory = Resource) ->
+ rpc(Config, Node, rabbit_alarm, clear_alarm, [{resource_limit, Resource, Node}]);
+clear_alarm(Config, Node, disk = Resource) ->
+ rpc(Config, Node, rabbit_alarm, clear_alarm, [{resource_limit, Resource, Node}]).
+
+clear_all_alarms(Config, Node) ->
+ lists:foreach(fun ({file_descriptor_limit, _}) ->
+ clear_alarm(Config, Node, file_descriptor_limit);
+ ({{resource_limit, Resource, OnNode}, _}) when OnNode =:= Node ->
+ clear_alarm(Config, Node, Resource);
+ (_) -> ok
+ end, get_alarms(Config, Node)),
+ ok.
+
+get_message_store_pid(Config, Node, VHost) ->
+ {ok, VHostSup} = rpc(Config, Node,
+ rabbit_vhost_sup_sup, get_vhost_sup, [VHost]),
+ Children = rpc(Config, Node, supervisor, which_children, [VHostSup]),
+ [MsgStorePid] = [Pid || {Name, Pid, _, _} <- Children,
+ Name == msg_store_persistent],
+ MsgStorePid.
+
+add_user(Config, Username) ->
+ %% for many tests it is convenient that
+ %% the username and password match
+ add_user(Config, 0, Username, Username).
+
+add_user(Config, Username, Password) ->
+ add_user(Config, 0, Username, Password).
+
+add_user(Config, Node, Username, Password) ->
+ add_user(Config, Node, Username, Password, <<"acting-user">>).
+
+add_user(Config, Node, Username, Password, AuditUsername) ->
+ catch rpc(Config, Node, rabbit_auth_backend_internal, add_user,
+ [rabbit_data_coercion:to_binary(Username),
+ rabbit_data_coercion:to_binary(Password),
+ AuditUsername]).
+
+set_user_tags(Config, Node, Username, Tags) ->
+ set_user_tags(Config, Node, Username, Tags, <<"acting-user">>).
+
+set_user_tags(Config, Node, Username, Tags, AuditUsername) ->
+ catch rpc(Config, Node, rabbit_auth_backend_internal, set_tags,
+ [Username, Tags, AuditUsername]).
+
+delete_user(Config, Username) ->
+ delete_user(Config, 0, Username).
+
+delete_user(Config, Node, Username) ->
+ delete_user(Config, Node, Username, <<"acting-user">>).
+
+delete_user(Config, Node, Username, AuditUsername) ->
+ catch rpc(Config, Node, rabbit_auth_backend_internal, delete_user,
+ [Username, AuditUsername]).
+
+change_password(Config, Username, Password) ->
+ change_password(Config, 0, Username, Password, <<"acting-user">>).
+
+change_password(Config, Node, Username, Password, AuditUsername) ->
+ rpc(Config, Node, rabbit_auth_backend_internal, change_password,
+ [Username, Password, AuditUsername]).
+
+clear_password(Config, Node, Username, AuditUsername) ->
+ rpc(Config, Node, rabbit_auth_backend_internal, clear_password,
+ [Username, AuditUsername]).
+
+switch_credential_validator(Config, accept_everything) ->
+ rpc(Config, 0, application, set_env,
+ [rabbit, credential_validator,
+ [{validation_backend, rabbit_credential_validator_accept_everything}]]);
+
+switch_credential_validator(Config, min_length) ->
+ switch_credential_validator(Config, min_length, 5);
+
+switch_credential_validator(Config, regexp) ->
+ switch_credential_validator(Config, regexp, <<"^xyz\\d{10,12}$">>).
+
+
+switch_credential_validator(Config, min_length, MinLength) ->
+ ok = rpc(Config, 0, application, set_env,
+ [rabbit, credential_validator,
+ [{validation_backend, rabbit_credential_validator_min_password_length},
+ {min_length, MinLength}]]);
+
+switch_credential_validator(Config, regexp, RegExp) ->
+ ok = rpc(Config, 0, application, set_env,
+ [rabbit, credential_validator,
+ [{validation_backend, rabbit_credential_validator_password_regexp},
+ {regexp, RegExp}]]).
+
+set_full_permissions(Config, VHost) ->
+ set_permissions(Config, 0, <<"guest">>, VHost, <<".*">>, <<".*">>, <<".*">>).
+set_full_permissions(Config, Username, VHost) ->
+ set_permissions(Config, 0, Username, VHost, <<".*">>, <<".*">>, <<".*">>).
+set_full_permissions(Config, Node, Username, VHost) ->
+ set_permissions(Config, Node, Username, VHost, <<".*">>, <<".*">>, <<".*">>).
+
+set_permissions(Config, Username, VHost, ConfigurePerm, WritePerm, ReadPerm) ->
+ set_permissions(Config, 0, Username, VHost, ConfigurePerm, WritePerm, ReadPerm).
+
+set_permissions(Config, Node, Username, VHost, ConfigurePerm, WritePerm, ReadPerm) ->
+ set_permissions(Config, Node, Username, VHost, ConfigurePerm, WritePerm, ReadPerm,
+ <<"acting-user">>).
+
+set_permissions(Config, Node, Username, VHost, ConfigurePerm, WritePerm, ReadPerm,
+ ActingUser) ->
+ rpc(Config, Node,
+ rabbit_auth_backend_internal,
+ set_permissions,
+ [rabbit_data_coercion:to_binary(Username),
+ rabbit_data_coercion:to_binary(VHost),
+ rabbit_data_coercion:to_binary(ConfigurePerm),
+ rabbit_data_coercion:to_binary(WritePerm),
+ rabbit_data_coercion:to_binary(ReadPerm),
+ ActingUser]).
+
+clear_permissions(Config, VHost) ->
+ clear_permissions(Config, 0, <<"guest">>, VHost).
+clear_permissions(Config, Username, VHost) ->
+ clear_permissions(Config, 0, Username, VHost).
+
+clear_permissions(Config, Node, Username, VHost) ->
+ clear_permissions(Config, Node, Username, VHost, <<"acting-user">>).
+
+clear_permissions(Config, Node, Username, VHost, ActingUser) ->
+ catch rpc(Config, Node,
+ rabbit_auth_backend_internal,
+ clear_permissions,
+ [rabbit_data_coercion:to_binary(Username),
+ rabbit_data_coercion:to_binary(VHost),
+ ActingUser]).
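+
+%% Example (illustrative sketch): a testcase can create a throwaway user
+%% and grant it full permissions on a vhost; the user name and vhost
+%% below are assumptions, and the vhost must already exist.
+%%
+%%   add_user(Config, <<"alice">>),
+%%   set_full_permissions(Config, <<"alice">>, <<"test-vhost">>),
+%%   %% ... exercise the broker as that user ...
+%%   delete_user(Config, <<"alice">>).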
+
+set_vhost_limit(Config, Node, VHost, Limit0, Value) ->
+ Limit = case Limit0 of
+ max_connections -> <<"max-connections">>;
+ max_queues -> <<"max-queues">>;
+ Other -> rabbit_data_coercion:to_binary(Other)
+ end,
+ Definition = rabbit_json:encode(#{Limit => Value}),
+ rpc(Config, Node,
+ rabbit_vhost_limit,
+ set,
+ [VHost, Definition, <<"ct-tests">>]).
+
+set_user_limits(Config, Username, Limits) ->
+ set_user_limits(Config, 0, Username, Limits).
+
+set_user_limits(Config, Node, Username, Limits0) when is_map(Limits0) ->
+ Limits =
+ maps:fold(fun(Limit0, Val, Acc) ->
+ Limit = case Limit0 of
+ max_connections -> <<"max-connections">>;
+ max_channels -> <<"max-channels">>;
+ Other -> rabbit_data_coercion:to_binary(Other)
+ end,
+ maps:merge(#{Limit => Val}, Acc)
+ end, #{}, Limits0),
+ rpc(Config, Node,
+ rabbit_auth_backend_internal,
+ set_user_limits,
+ [Username, Limits, <<"ct-tests">>]).
+
+clear_user_limits(Config, Username, Limit) ->
+ clear_user_limits(Config, 0, Username, Limit).
+
+clear_user_limits(Config, Node, Username, Limit0) ->
+ Limit = case Limit0 of
+ all -> <<"all">>;
+ max_connections -> <<"max-connections">>;
+ max_channels -> <<"max-channels">>;
+ Other -> rabbit_data_coercion:to_binary(Other)
+ end,
+ rpc(Config, Node,
+ rabbit_auth_backend_internal,
+ clear_user_limits,
+ [Username, Limit, <<"ct-tests">>]).
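+
+%% Example (illustrative sketch): cap how many connections a user may
+%% open, then lift the limit; the user name is an assumption and the
+%% user must already exist.
+%%
+%%   set_user_limits(Config, <<"alice">>, #{max_connections => 2}),
+%%   %% ...
+%%   clear_user_limits(Config, <<"alice">>, max_connections).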
+
+%% Functions to execute code on a remote node/broker.
+
+add_code_path_to_node(Node, Module) ->
+ Path1 = filename:dirname(code:which(Module)),
+ Path2 = filename:dirname(code:which(?MODULE)),
+ Paths = filter_ct_helpers_and_testsuites_paths(
+ lists:usort([Path1, Path2])),
+ case Paths of
+ [] ->
+ ok;
+ _ ->
+ case rpc:call(Node, code, get_path, []) of
+ ExistingPaths when is_list(ExistingPaths) ->
+ lists:foreach(
+ fun(P) ->
+ case lists:member(P, ExistingPaths) of
+ true ->
+ ok;
+ false ->
+ case rpc:call(
+ Node, code, add_pathz, [P]) of
+ true ->
+ ok;
+ Error ->
+ erlang:error({Error, P})
+ end
+ end
+ end, Paths);
+ Error ->
+ ct:pal(?LOW_IMPORTANCE,
+ "Failed to retrieve current code path from node ~s: ~p~n",
+ [Node, Error]),
+ ok
+ end
+ end.
+
+filter_ct_helpers_and_testsuites_paths(CodePath) ->
+ lists:filter(
+ fun(Dir) ->
+ DirName = filename:basename(Dir),
+ ParentDirName = filename:basename(
+ filename:dirname(Dir)),
+ Dir =/= "." andalso
+ %% FIXME: This filtering is too naive. How to properly
+ %% distinguish RabbitMQ-related applications from
+ %% test-only modules?
+ ((ParentDirName =:= "rabbitmq_ct_helpers" andalso
+ DirName =:= "ebin") orelse
+ (ParentDirName =:= "rabbitmq_ct_client_helpers" andalso
+ DirName =:= "ebin") orelse
+ (DirName =:= "test"))
+ end, CodePath).
+
+add_code_path_to_all_nodes(Config, Module) ->
+ Nodenames = get_node_configs(Config, nodename),
+ [ok = add_code_path_to_node(Nodename, Module)
+ || Nodename <- Nodenames],
+ ok.
+
+rpc(Config, Node, Module, Function, Args)
+when is_atom(Node) andalso Node =/= undefined ->
+ rpc(Config, Node, Module, Function, Args, infinity);
+rpc(Config, I, Module, Function, Args)
+when is_integer(I) andalso I >= 0 ->
+ Node = get_node_config(Config, I, nodename),
+ rpc(Config, Node, Module, Function, Args);
+rpc(Config, Nodes, Module, Function, Args)
+when is_list(Nodes) ->
+ [rpc(Config, Node, Module, Function, Args) || Node <- Nodes].
+
+rpc(_Config, Node, Module, Function, Args, Timeout)
+when is_atom(Node) andalso Node =/= undefined ->
+ %% We add some directories to the broker node search path.
+ add_code_path_to_node(Node, Module),
+ erpc:call(Node, Module, Function, Args, Timeout);
+rpc(Config, I, Module, Function, Args, Timeout)
+when is_integer(I) andalso I >= 0 ->
+ Node = get_node_config(Config, I, nodename),
+ rpc(Config, Node, Module, Function, Args, Timeout);
+rpc(Config, Nodes, Module, Function, Args, Timeout)
+when is_list(Nodes) ->
+ [rpc(Config, Node, Module, Function, Args, Timeout) || Node <- Nodes].
+
+rpc_all(Config, Module, Function, Args) ->
+ Nodes = get_node_configs(Config, nodename),
+ rpc(Config, Nodes, Module, Function, Args).
+
+rpc_all(Config, Module, Function, Args, Timeout) ->
+ Nodes = get_node_configs(Config, nodename),
+ rpc(Config, Nodes, Module, Function, Args, Timeout).
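+
+%% Example (illustrative sketch): the rpc/5,6 helpers accept a node
+%% index, a node name or a list of either. Querying the Erlang/OTP
+%% release of node 0 of the cluster under test could look like this:
+%%
+%%   Release = rpc(Config, 0, erlang, system_info, [otp_release]).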
+
+%% Functions to start/restart/stop only the broker or the full Erlang
+%% node.
+
+start_node(Config, Node) ->
+ NodeConfig = get_node_config(Config, Node),
+ I = if
+ is_atom(Node) -> nodename_to_index(Config, Node);
+ true -> Node
+ end,
+ case do_start_rabbitmq_node(Config, NodeConfig, I) of
+ {skip, _} = Error -> {error, Error};
+ _ -> ok
+ end.
+
+start_broker(Config, Node) ->
+ ok = rpc(Config, Node, rabbit, start, []).
+
+restart_broker(Config, Node) ->
+ ok = rpc(Config, Node, ?MODULE, do_restart_broker, []).
+
+do_restart_broker() ->
+ ok = rabbit:stop(),
+ ok = rabbit:start().
+
+stop_broker(Config, Node) ->
+ ok = rpc(Config, Node, rabbit, stop, []).
+
+restart_node(Config, Node) ->
+ ok = stop_node(Config, Node),
+ ok = start_node(Config, Node).
+
+stop_node(Config, Node) ->
+ NodeConfig = get_node_config(Config, Node),
+ case stop_rabbitmq_node(Config, NodeConfig) of
+ {skip, _} = Error -> Error;
+ _ -> ok
+ end.
+
+stop_node_after(Config, Node, Sleep) ->
+ timer:sleep(Sleep),
+ stop_node(Config, Node).
+
+kill_node(Config, Node) ->
+ Pid = rpc(Config, Node, os, getpid, []),
+ %% FIXME maybe_flush_cover(Cfg),
+ Cmd = case os:type() of
+ {win32, _} ->
+ case os:find_executable("taskkill.exe") of
+ false ->
+ rabbit_misc:format(
+ "PowerShell -Command "
+ "\"Stop-Process -Id ~s -Force\"",
+ [Pid]);
+ _ ->
+ rabbit_misc:format("taskkill /PID ~s /F", [Pid])
+ end;
+ _ ->
+ rabbit_misc:format("kill -9 ~s", [Pid])
+ end,
+ os:cmd(Cmd),
+ await_os_pid_death(Pid).
+
+kill_node_after(Config, Node, Sleep) ->
+ timer:sleep(Sleep),
+ kill_node(Config, Node).
+
+cluster_members_online(Config, Node) ->
+ rpc(Config, Node, rabbit_nodes, all_running, []).
+
+await_os_pid_death(Pid) ->
+ case rabbit_misc:is_os_process_alive(Pid) of
+ true -> timer:sleep(100),
+ await_os_pid_death(Pid);
+ false -> ok
+ end.
+
+reset_node(Config, Node) ->
+ Name = rabbit_ct_broker_helpers:get_node_config(Config, Node, nodename),
+ rabbit_control_helper:command(reset, Name).
+
+force_reset_node(Config, Node) ->
+ Name = rabbit_ct_broker_helpers:get_node_config(Config, Node, nodename),
+ rabbit_control_helper:command(force_reset, Name).
+
+forget_cluster_node(Config, Node, NodeToForget) ->
+ forget_cluster_node(Config, Node, NodeToForget, []).
+forget_cluster_node(Config, Node, NodeToForget, Opts) ->
+ Name = rabbit_ct_broker_helpers:get_node_config(Config, Node, nodename),
+ NameToForget =
+ rabbit_ct_broker_helpers:get_node_config(Config, NodeToForget, nodename),
+ rabbit_control_helper:command(forget_cluster_node, Name, [NameToForget], Opts).
+
+is_feature_flag_supported(Config, FeatureName) ->
+ Nodes = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
+ is_feature_flag_supported(Config, Nodes, FeatureName).
+
+is_feature_flag_supported(Config, [Node1 | _] = Nodes, FeatureName) ->
+ rabbit_ct_broker_helpers:rpc(
+ Config, Node1,
+ rabbit_feature_flags, is_supported_remotely,
+ [Nodes, [FeatureName], 60000]).
+
+enable_feature_flag(Config, FeatureName) ->
+ Nodes = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
+ enable_feature_flag(Config, Nodes, FeatureName).
+
+enable_feature_flag(Config, [Node1 | _] = Nodes, FeatureName) ->
+ case is_feature_flag_supported(Config, Nodes, FeatureName) of
+ true ->
+ rabbit_ct_broker_helpers:rpc(
+ Config, Node1, rabbit_feature_flags, enable, [FeatureName]);
+ false ->
+ {skip,
+ lists:flatten(
+ io_lib:format("'~s' feature flag is unsupported",
+ [FeatureName]))}
+ end.
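+
+%% Example (illustrative sketch): enable a feature flag across the
+%% cluster, or skip the testcase when it is not supported; the flag
+%% name below is an assumption.
+%%
+%%   case enable_feature_flag(Config, quorum_queue) of
+%%       ok               -> Config;
+%%       {skip, _} = Skip -> Skip
+%%   end.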
+
+mark_as_being_drained(Config, Node) ->
+ rabbit_ct_broker_helpers:rpc(Config, Node, rabbit_maintenance, mark_as_being_drained, []).
+unmark_as_being_drained(Config, Node) ->
+ rabbit_ct_broker_helpers:rpc(Config, Node, rabbit_maintenance, unmark_as_being_drained, []).
+
+drain_node(Config, Node) ->
+ rabbit_ct_broker_helpers:rpc(Config, Node, rabbit_maintenance, drain, []).
+revive_node(Config, Node) ->
+ rabbit_ct_broker_helpers:rpc(Config, Node, rabbit_maintenance, revive, []).
+
+is_being_drained_consistent_read(Config, Node) ->
+ rabbit_ct_broker_helpers:rpc(Config, Node, rabbit_maintenance, is_being_drained_consistent_read, [Node]).
+is_being_drained_local_read(Config, Node) ->
+ rabbit_ct_broker_helpers:rpc(Config, Node, rabbit_maintenance, is_being_drained_local_read, [Node]).
+
+is_being_drained_consistent_read(Config, TargetNode, NodeToCheck) ->
+ rabbit_ct_broker_helpers:rpc(Config, TargetNode, rabbit_maintenance, is_being_drained_consistent_read, [NodeToCheck]).
+is_being_drained_local_read(Config, TargetNode, NodeToCheck) ->
+ rabbit_ct_broker_helpers:rpc(Config, TargetNode, rabbit_maintenance, is_being_drained_local_read, [NodeToCheck]).
+
+%% From a given list of gen_tcp client connections, return the list of
+%% corresponding connection handler PIDs in RabbitMQ.
+get_connection_pids(Connections) ->
+ ConnInfos = [
+ begin
+ {ok, {Addr, Port}} = inet:sockname(Connection),
+ [{peer_host, Addr}, {peer_port, Port}]
+ end || Connection <- Connections],
+ lists:filter(
+ fun(Conn) ->
+ ConnInfo = rabbit_networking:connection_info(Conn,
+ [peer_host, peer_port]),
+ %% On at least Mac OS X, for a connection on localhost, the
+ %% client side of the connection gives its IPv4 address
+ %% (127.0.0.1), but the server side gives some kind of
+ %% non-standard IPv6 address (::ffff:7f00:1, not even the
+ %% standard ::1). So let's test for this alternate form too.
+ AltConnInfo = case proplists:get_value(peer_host, ConnInfo) of
+ {0, 0, 0, 0, 0, 16#ffff, 16#7f00, N} ->
+ lists:keyreplace(peer_host, 1, ConnInfo,
+ {peer_host, {127, 0, 0, N}});
+ _ ->
+ ConnInfo
+ end,
+ lists:member(ConnInfo, ConnInfos) orelse
+ lists:member(AltConnInfo, ConnInfos)
+ end, rabbit_networking:connections()).
+
+close_all_connections(Config, Node, Reason) ->
+ rpc(Config, Node, rabbit_networking, close_all_connections, [Reason]),
+ ok.
+
+%% -------------------------------------------------------------------
+%% Policy helpers.
+%% -------------------------------------------------------------------
+
+set_policy(Config, Node, Name, Pattern, ApplyTo, Definition) ->
+ set_policy(Config, Node, Name, Pattern, ApplyTo, Definition, <<"acting-user">>).
+
+set_policy(Config, Node, Name, Pattern, ApplyTo, Definition, Username) ->
+ set_policy_in_vhost(Config, Node, <<"/">>, Name, Pattern, ApplyTo, Definition, Username).
+
+set_policy_in_vhost(Config, Node, VirtualHost, Name, Pattern, ApplyTo, Definition) ->
+ ok = rpc(Config, Node,
+ rabbit_policy, set, [VirtualHost, Name, Pattern, Definition, 0, ApplyTo,
+ <<"acting-user">>]).
+set_policy_in_vhost(Config, Node, VirtualHost, Name, Pattern, ApplyTo, Definition, Username) ->
+ ok = rpc(Config, Node,
+ rabbit_policy, set, [VirtualHost, Name, Pattern, Definition, 0, ApplyTo,
+ Username]).
+
+clear_policy(Config, Node, Name) ->
+ clear_policy(Config, Node, Name, <<"acting-user">>).
+
+clear_policy(Config, Node, Name, Username) ->
+ rpc(Config, Node,
+ rabbit_policy, delete, [<<"/">>, Name, Username]).
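+
+%% Example (illustrative sketch): apply a message TTL policy to all
+%% queues in the default vhost on node 0, then remove it; the policy
+%% name and definition are assumptions.
+%%
+%%   set_policy(Config, 0, <<"ttl">>, <<".*">>, <<"queues">>,
+%%              [{<<"message-ttl">>, 60000}]),
+%%   %% ...
+%%   clear_policy(Config, 0, <<"ttl">>).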
+
+set_operator_policy(Config, Node, Name, Pattern, ApplyTo, Definition) ->
+ ok = rpc(Config, Node,
+ rabbit_policy, set_op, [<<"/">>, Name, Pattern, Definition, 0, ApplyTo,
+ <<"acting-user">>]).
+
+clear_operator_policy(Config, Node, Name) ->
+ rpc(Config, Node,
+ rabbit_policy, delete_op, [<<"/">>, Name, <<"acting-user">>]).
+
+set_ha_policy(Config, Node, Pattern, Policy) ->
+ set_ha_policy(Config, Node, Pattern, Policy, []).
+
+set_ha_policy(Config, Node, Pattern, Policy, Extra) ->
+ set_policy(Config, Node, Pattern, Pattern, <<"queues">>,
+ ha_policy(Policy) ++ Extra).
+
+ha_policy(<<"all">>) -> [{<<"ha-mode">>, <<"all">>}];
+ha_policy({Mode, Params}) -> [{<<"ha-mode">>, Mode},
+ {<<"ha-params">>, Params}].
+
+set_ha_policy_all(Config) ->
+ set_ha_policy(Config, 0, <<".*">>, <<"all">>),
+ Config.
+
+set_ha_policy_all(Config, Extra) ->
+ set_ha_policy(Config, 0, <<".*">>, <<"all">>, Extra),
+ Config.
+
+set_ha_policy_two_pos(Config) ->
+ Members = [
+ rabbit_misc:atom_to_binary(N)
+ || N <- get_node_configs(Config, nodename)],
+ TwoNodes = [M || M <- lists:sublist(Members, 2)],
+ set_ha_policy(Config, 0, <<"^ha.two.">>, {<<"nodes">>, TwoNodes},
+ [{<<"ha-promote-on-shutdown">>, <<"always">>}]),
+ set_ha_policy(Config, 0, <<"^ha.auto.">>, {<<"nodes">>, TwoNodes},
+ [{<<"ha-sync-mode">>, <<"automatic">>},
+ {<<"ha-promote-on-shutdown">>, <<"always">>}]),
+ Config.
+
+set_ha_policy_two_pos_batch_sync(Config) ->
+ Members = [
+ rabbit_misc:atom_to_binary(N)
+ || N <- get_node_configs(Config, nodename)],
+ TwoNodes = [M || M <- lists:sublist(Members, 2)],
+ set_ha_policy(Config, 0, <<"^ha.two.">>, {<<"nodes">>, TwoNodes},
+ [{<<"ha-promote-on-shutdown">>, <<"always">>}]),
+ set_ha_policy(Config, 0, <<"^ha.auto.">>, {<<"nodes">>, TwoNodes},
+ [{<<"ha-sync-mode">>, <<"automatic">>},
+ {<<"ha-sync-batch-size">>, 200},
+ {<<"ha-promote-on-shutdown">>, <<"always">>}]),
+ Config.
+
+%% -------------------------------------------------------------------
+%% Parameter helpers.
+%% -------------------------------------------------------------------
+
+set_parameter(Config, Node, Component, Name, Value) ->
+ set_parameter(Config, Node, <<"/">>, Component, Name, Value, none).
+
+set_parameter(Config, Node, VHost, Component, Name, Value) ->
+ set_parameter(Config, Node, VHost, Component, Name, Value, none).
+
+set_parameter(Config, Node, VHost, Component, Name, Value, Username) ->
+ ok = rpc(Config, Node,
+ rabbit_runtime_parameters, set, [VHost, Component, Name, Value, Username]).
+
+clear_parameter(Config, Node, Component, Name) ->
+ clear_parameter(Config, Node, <<"/">>, Component, Name).
+
+clear_parameter(Config, Node, VHost, Component, Name) ->
+ clear_parameter(Config, Node, VHost, Component, Name, <<"acting-user">>).
+
+clear_parameter(Config, Node, VHost, Component, Name, Username) ->
+ ok = rpc(Config, Node,
+ rabbit_runtime_parameters, clear, [VHost, Component, Name, Username]).
+
+set_global_parameter(Config, Name, Value) ->
+ set_global_parameter(Config, 0, Name, Value).
+set_global_parameter(Config, Node, Name, Value) ->
+ ok = rpc(Config, Node,
+ rabbit_runtime_parameters, set_global, [Name, Value, <<"acting-user">>]).
+
+clear_global_parameter(Config, Name) ->
+ clear_global_parameter(Config, 0, Name).
+clear_global_parameter(Config, Node, Name) ->
+ ok = rpc(Config, Node,
+ rabbit_runtime_parameters, clear_global, [Name, <<"acting-user">>]).
+
+%% -------------------------------------------------------------------
+%% Plugin helpers.
+%% -------------------------------------------------------------------
+
+enable_plugin(Config, Node, Plugin) ->
+ plugin_action(Config, Node, [enable, Plugin]).
+
+disable_plugin(Config, Node, Plugin) ->
+ plugin_action(Config, Node, [disable, Plugin]).
+
+plugin_action(Config, Node, Args) ->
+ Rabbitmqplugins = ?config(rabbitmq_plugins_cmd, Config),
+ NodeConfig = get_node_config(Config, Node),
+ Nodename = ?config(nodename, NodeConfig),
+ Env = [
+ {"RABBITMQ_SCRIPTS_DIR", filename:dirname(Rabbitmqplugins)},
+ {"RABBITMQ_PID_FILE", ?config(pid_file, NodeConfig)},
+ {"RABBITMQ_MNESIA_DIR", ?config(mnesia_dir, NodeConfig)},
+ {"RABBITMQ_PLUGINS_DIR", ?config(plugins_dir, NodeConfig)},
+ {"RABBITMQ_ENABLED_PLUGINS_FILE",
+ ?config(enabled_plugins_file, NodeConfig)}
+ ],
+ Cmd = [Rabbitmqplugins, "-n", Nodename | Args],
+ {ok, _} = rabbit_ct_helpers:exec(Cmd, [{env, Env}]),
+ ok.
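+
+%% Example (illustrative sketch): toggle a plugin on node 0 via the
+%% rabbitmq-plugins script; the plugin name is an assumption and must
+%% be present in the node's plugins directory.
+%%
+%%   ok = enable_plugin(Config, 0, "rabbitmq_shovel"),
+%%   %% ...
+%%   ok = disable_plugin(Config, 0, "rabbitmq_shovel").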
+
+%% -------------------------------------------------------------------
+
+test_channel() ->
+ Me = self(),
+ Writer = spawn(fun () -> test_writer(Me) end),
+ {ok, Limiter} = rabbit_limiter:start_link(no_id),
+ {ok, Ch} = rabbit_channel:start_link(
+ 1, Me, Writer, Me, "", rabbit_framing_amqp_0_9_1,
+ user(<<"guest">>), <<"/">>, [], Me, Limiter),
+ {Writer, Limiter, Ch}.
+
+test_writer(Pid) ->
+ receive
+ {'$gen_call', From, flush} -> gen_server:reply(From, ok),
+ test_writer(Pid);
+ {send_command, Method} -> Pid ! Method,
+ test_writer(Pid);
+ shutdown -> ok
+ end.
+
+user(Username) ->
+ #user{username = Username,
+ tags = [administrator],
+ authz_backends = [{rabbit_auth_backend_internal, none}]}.
diff --git a/deps/rabbitmq_ct_helpers/src/rabbit_ct_config_schema.erl b/deps/rabbitmq_ct_helpers/src/rabbit_ct_config_schema.erl
new file mode 100644
index 0000000000..65656b0363
--- /dev/null
+++ b/deps/rabbitmq_ct_helpers/src/rabbit_ct_config_schema.erl
@@ -0,0 +1,107 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2017-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(rabbit_ct_config_schema).
+-include_lib("common_test/include/ct.hrl").
+
+-export([init_schemas/2]).
+-export([run_snippets/1]).
+
+init_schemas(App, Config) ->
+ ResultsDir = filename:join(?config(priv_dir, Config), "results"),
+ Snippets = filename:join(?config(data_dir, Config),
+ atom_to_list(App) ++ ".snippets"),
+ ok = file:make_dir(ResultsDir),
+ rabbit_ct_helpers:set_config(Config, [
+ {results_dir, ResultsDir},
+ {conf_snippets, Snippets}
+ ]).
+
+run_snippets(Config) ->
+ {ok, [Snippets]} = file:consult(?config(conf_snippets, Config)),
+ ct:pal("Loaded config schema snippets: ~p", [Snippets]),
+ lists:map(
+ fun({N, S, C, P}) -> ok = test_snippet(Config, {snippet_id(N), S, []}, C, P);
+ ({N, S, A, C, P}) -> ok = test_snippet(Config, {snippet_id(N), S, A}, C, P)
+ end,
+ Snippets),
+ ok.
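+
+%% Example (illustrative sketch): a config schema testsuite typically
+%% initialises the snippet and result directories in init_per_suite/1
+%% and runs the snippets from a testcase; the application name below
+%% is an assumption.
+%%
+%%   init_per_suite(Config) ->
+%%       rabbit_ct_config_schema:init_schemas(rabbitmq_shovel, Config).
+%%
+%%   run_snippets(Config) ->
+%%       ok = rabbit_ct_config_schema:run_snippets(Config).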
+
+snippet_id(N) when is_integer(N) ->
+ integer_to_list(N);
+snippet_id(F) when is_float(F) ->
+ float_to_list(F);
+snippet_id(A) when is_atom(A) ->
+ atom_to_list(A);
+snippet_id(L) when is_list(L) ->
+ L.
+
+test_snippet(Config, Snippet, Expected, _Plugins) ->
+ {ConfFile, AdvancedFile} = write_snippet(Config, Snippet),
+ %% We ignore the rabbit -> log portion of the config on v3.9+, where the lager
+ %% dependency has been dropped.
+ Generated = case code:which(lager) of
+ non_existing ->
+ without_rabbit_log(generate_config(ConfFile, AdvancedFile));
+ _ ->
+ generate_config(ConfFile, AdvancedFile)
+ end,
+ Gen = deepsort(Generated),
+ Exp = deepsort(Expected),
+ case Exp of
+ Gen -> ok;
+ _ ->
+ ct:pal("Expected: ~p~ngenerated: ~p", [Expected, Generated]),
+ ct:pal("Expected (sorted): ~p~ngenerated (sorted): ~p", [Exp, Gen]),
+ error({config_mismatch, Snippet, Exp, Gen})
+ end.
+
+write_snippet(Config, {Name, Conf, Advanced}) ->
+ ResultsDir = ?config(results_dir, Config),
+ file:make_dir(filename:join(ResultsDir, Name)),
+ ConfFile = filename:join([ResultsDir, Name, "config.conf"]),
+ AdvancedFile = filename:join([ResultsDir, Name, "advanced.config"]),
+
+ file:write_file(ConfFile, Conf),
+ rabbit_file:write_term_file(AdvancedFile, [Advanced]),
+ {ConfFile, AdvancedFile}.
+
+generate_config(ConfFile, AdvancedFile) ->
+ Context = rabbit_env:get_context(),
+ rabbit_prelaunch_conf:generate_config_from_cuttlefish_files(
+ Context, [ConfFile], AdvancedFile).
+
+without_rabbit_log(ErlangConfig) ->
+ case proplists:get_value(rabbit, ErlangConfig) of
+ undefined ->
+ ErlangConfig;
+ RabbitConfig ->
+ RabbitConfig1 = lists:keydelete(log, 1, RabbitConfig),
+ case RabbitConfig1 of
+ [] ->
+ lists:keydelete(rabbit, 1, ErlangConfig);
+ _ ->
+ lists:keystore(rabbit, 1, ErlangConfig,
+ {rabbit, RabbitConfig1})
+ end
+ end.
+
+deepsort(List) ->
+ case is_proplist(List) of
+ true ->
+ lists:keysort(1, lists:map(fun({K, V}) -> {K, deepsort(V)};
+ (V) -> V end,
+ List));
+ false ->
+ case is_list(List) of
+ true -> lists:sort(List);
+ false -> List
+ end
+ end.
+
+is_proplist([{_Key, _Val}|_] = List) -> lists:all(fun({_K, _V}) -> true; (_) -> false end, List);
+is_proplist(_) -> false.
diff --git a/deps/rabbitmq_ct_helpers/src/rabbit_ct_helpers.erl b/deps/rabbitmq_ct_helpers/src/rabbit_ct_helpers.erl
new file mode 100644
index 0000000000..12e7a158c8
--- /dev/null
+++ b/deps/rabbitmq_ct_helpers/src/rabbit_ct_helpers.erl
@@ -0,0 +1,1056 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(rabbit_ct_helpers).
+
+-include_lib("common_test/include/ct.hrl").
+
+-deprecated({is_mixed_versions,1,"Use is_mixed_versions/0 instead"}).
+
+-export([
+ log_environment/0,
+ run_steps/2,
+ run_setup_steps/1, run_setup_steps/2,
+ run_teardown_steps/1, run_teardown_steps/2,
+ register_teardown_step/2,
+ register_teardown_steps/2,
+ guess_tested_erlang_app_name/1,
+ ensure_application_srcdir/3,
+ ensure_application_srcdir/4,
+ ensure_rabbitmqctl_cmd/1,
+ ensure_rabbitmqctl_app/1,
+ load_rabbitmqctl_app/1,
+ ensure_rabbitmq_plugins_cmd/1,
+ ensure_rabbitmq_queues_cmd/1,
+ init_skip_as_error_flag/1,
+ start_long_running_testsuite_monitor/1,
+ stop_long_running_testsuite_monitor/1,
+ config_to_testcase_name/2,
+ testcases/1,
+ testcase_number/3,
+ testcase_absname/2, testcase_absname/3,
+ testcase_started/2, testcase_finished/2,
+ term_checksum/1,
+ random_term_checksum/0,
+ exec/1, exec/2,
+ make/3, make/4,
+ get_config/2, get_config/3, set_config/2, delete_config/2,
+ merge_app_env/2, merge_app_env_in_erlconf/2,
+ get_app_env/4,
+ nodename_to_hostname/1,
+ convert_to_unicode_binary/1,
+ cover_work_factor/2,
+
+ is_mixed_versions/0,
+ is_mixed_versions/1,
+
+ await_condition/1,
+ await_condition/2,
+ await_condition_with_retries/2
+ ]).
+
+-define(SSL_CERT_PASSWORD, "test").
+
+%% -------------------------------------------------------------------
+%% Testsuite internal helpers.
+%% -------------------------------------------------------------------
+
+log_environment() ->
+ Vars = lists:sort(fun(A, B) -> A =< B end, os:getenv()),
+ case file:native_name_encoding() of
+ latin1 ->
+ ct:pal(?LOW_IMPORTANCE, "Environment variables:~n~s",
+ [[io_lib:format(" ~s~n", [V]) || V <- Vars]]);
+ utf8 ->
+ ct:pal(?LOW_IMPORTANCE, "Environment variables:~n~ts",
+ [[io_lib:format(" ~ts~n", [V]) || V <- Vars]])
+ end.
+
+run_setup_steps(Config) ->
+ run_setup_steps(Config, []).
+
+run_setup_steps(Config, ExtraSteps) ->
+ Steps = case os:getenv("RABBITMQ_RUN") of
+ false ->
+ [
+ fun init_skip_as_error_flag/1,
+ fun guess_tested_erlang_app_name/1,
+ fun ensure_secondary_umbrella/1,
+ fun ensure_current_srcdir/1,
+ fun ensure_rabbitmq_ct_helpers_srcdir/1,
+ fun ensure_erlang_mk_depsdir/1,
+ fun ensure_secondary_erlang_mk_depsdir/1,
+ fun ensure_secondary_current_srcdir/1,
+ fun ensure_rabbit_common_srcdir/1,
+ fun ensure_rabbitmq_cli_srcdir/1,
+ fun ensure_rabbit_srcdir/1,
+ fun ensure_make_cmd/1,
+ fun ensure_erl_call_cmd/1,
+ fun ensure_ssl_certs/1,
+ fun start_long_running_testsuite_monitor/1,
+ fun load_elixir/1
+ ];
+ _ ->
+ [
+ fun init_skip_as_error_flag/1,
+ fun ensure_secondary_umbrella/1,
+ fun ensure_current_srcdir/1,
+ fun ensure_rabbitmq_ct_helpers_srcdir/1,
+ fun maybe_rabbit_srcdir/1,
+ fun ensure_make_cmd/1,
+ fun ensure_rabbitmq_run_cmd/1,
+ fun ensure_rabbitmq_run_secondary_cmd/1,
+ fun ensure_ssl_certs/1,
+ fun start_long_running_testsuite_monitor/1
+ ]
+ end,
+ run_steps(Config, Steps ++ ExtraSteps).
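+
+%% Example (illustrative sketch): a testsuite usually chains these steps
+%% with the broker and client helper steps in init_per_suite/1; the
+%% suite name, the rmq_nodename_suffix key and the companion modules'
+%% setup_steps/0 exports below are assumptions.
+%%
+%%   init_per_suite(Config) ->
+%%       Config1 = rabbit_ct_helpers:set_config(
+%%                   Config, {rmq_nodename_suffix, my_SUITE}),
+%%       rabbit_ct_helpers:run_setup_steps(
+%%         Config1,
+%%         rabbit_ct_broker_helpers:setup_steps() ++
+%%         rabbit_ct_client_helpers:setup_steps()).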
+
+run_teardown_steps(Config) ->
+ run_teardown_steps(Config, []).
+
+run_teardown_steps(Config, ExtraSteps) ->
+ RegisteredSteps = get_config(Config, teardown_steps, []),
+ Steps = [
+ fun stop_long_running_testsuite_monitor/1,
+ fun symlink_priv_dir/1
+ ],
+ run_steps(Config, ExtraSteps ++ RegisteredSteps ++ Steps).
+
+register_teardown_step(Config, Step) ->
+ register_teardown_steps(Config, [Step]).
+
+register_teardown_steps(Config, Steps) ->
+ RegisteredSteps = get_config(Config, teardown_steps, []),
+ set_config(Config, {teardown_steps, Steps ++ RegisteredSteps}).
+
+run_steps(Config, [Step | Rest]) ->
+ SkipAsError = case get_config(Config, skip_as_error) of
+ undefined -> false;
+ Value -> Value
+ end,
+ case Step(Config) of
+ {skip, Reason} when SkipAsError ->
+ run_teardown_steps(Config),
+ exit(Reason);
+ {skip, _} = Error ->
+ run_teardown_steps(Config),
+ Error;
+ Config1 when is_list(Config1) ->
+ run_steps(Config1, Rest);
+ Other ->
+ ct:pal(?LOW_IMPORTANCE,
+ "~p:~p/~p failed with ~p steps remaining (Config value ~p is not a proplist)",
+ [?MODULE, ?FUNCTION_NAME, ?FUNCTION_ARITY, length(Rest), Other]),
+ run_teardown_steps(Config),
+ exit("A setup step returned a non-proplist")
+ end;
+run_steps(Config, []) ->
+ Config.
+
+init_skip_as_error_flag(Config) ->
+ SkipAsError = case os:getenv("RABBITMQ_CT_SKIP_AS_ERROR") of
+ false -> false;
+ Value -> REOpts = [{capture, none}, caseless],
+ case re:run(Value, "^(1|yes|true)$", REOpts) of
+ nomatch -> false;
+ match -> true
+ end
+ end,
+ set_config(Config, {skip_as_error, SkipAsError}).
+
+guess_tested_erlang_app_name(Config) ->
+ case os:getenv("DIALYZER_PLT") of
+ false ->
+ {skip,
+ "plt file required, please set DIALYZER_PLT"};
+ Filename ->
+ AppName0 = filename:basename(Filename, ".plt"),
+ AppName = string:strip(AppName0, left, $.),
+ set_config(Config, {tested_erlang_app, list_to_atom(AppName)})
+ end.
+
+ensure_secondary_umbrella(Config) ->
+ Path = case get_config(Config, secondary_umbrella) of
+ undefined -> os:getenv("SECONDARY_UMBRELLA");
+ P -> P
+ end,
+ case Path =/= false andalso filelib:is_dir(Path) of
+ true -> set_config(Config, {secondary_umbrella, Path});
+ false -> set_config(Config, {secondary_umbrella, false})
+ end.
+
+ensure_current_srcdir(Config) ->
+ Path = case get_config(Config, current_srcdir) of
+ undefined -> os:getenv("PWD");
+ P -> P
+ end,
+ case filelib:is_dir(Path) of
+ true -> set_config(Config, {current_srcdir, Path});
+ false -> {skip,
+ "Current source directory required, " ++
+ "please set 'current_srcdir' in ct config"}
+ end.
+
+ensure_rabbitmq_ct_helpers_srcdir(Config) ->
+ Path = case get_config(Config, rabbitmq_ct_helpers_srcdir) of
+ undefined ->
+ filename:dirname(
+ filename:dirname(
+ code:which(?MODULE)));
+ P ->
+ P
+ end,
+ case filelib:is_dir(Path) of
+ true -> set_config(Config, {rabbitmq_ct_helpers_srcdir, Path});
+ false -> {skip,
+ "rabbitmq_ct_helpers source directory required, " ++
+ "please set 'rabbitmq_ct_helpers_srcdir' in ct config"}
+ end.
+
+ensure_erlang_mk_depsdir(Config) ->
+ Path = case get_config(Config, erlang_mk_depsdir) of
+ undefined ->
+ case os:getenv("DEPS_DIR") of
+ false ->
+ %% Try the common locations.
+ SrcDir = ?config(rabbitmq_ct_helpers_srcdir, Config),
+ Ds = [
+ filename:join(SrcDir, "deps"),
+ filename:join(SrcDir, "../../deps")
+ ],
+ case lists:filter(fun filelib:is_dir/1, Ds) of
+ [P |_] -> P;
+ [] -> false
+ end;
+ P ->
+ P
+ end;
+ P ->
+ P
+ end,
+ case Path =/= false andalso filelib:is_dir(Path) of
+ true -> set_config(Config, {erlang_mk_depsdir, Path});
+ false -> {skip,
+ "deps directory required, " ++
+ "please set DEPS_DIR or 'erlang_mk_depsdir' " ++
+ "in ct config"}
+ end.
+
+ensure_secondary_erlang_mk_depsdir(Config) ->
+ Path = case get_config(Config, secondary_erlang_mk_depsdir) of
+ undefined ->
+ case ?config(secondary_umbrella, Config) of
+ false -> ?config(erlang_mk_depsdir, Config);
+ SecUmbrella -> filename:join(SecUmbrella, "deps")
+ end;
+ P ->
+ P
+ end,
+ case filelib:is_dir(Path) of
+ true -> set_config(Config, {secondary_erlang_mk_depsdir, Path});
+ false -> {skip,
+ "Secondary deps directory required, " ++
+ "please set 'secondary_erlang_mk_depsdir' in ct config"}
+ end.
+
+ensure_secondary_current_srcdir(Config) ->
+ Path = case get_config(Config, secondary_current_srcdir) of
+ undefined ->
+ case ?config(secondary_umbrella, Config) of
+ false ->
+ ?config(current_srcdir, Config);
+ _ ->
+ TestedAppName = ?config(tested_erlang_app, Config),
+ filename:join(
+ ?config(secondary_erlang_mk_depsdir, Config),
+ TestedAppName)
+ end;
+ P ->
+ P
+ end,
+ case filelib:is_dir(Path) of
+ true -> set_config(Config, {secondary_current_srcdir, Path});
+ false -> {skip,
+ "Secondary current source directory required, " ++
+ "please set 'secondary_current_srcdir' in ct config"}
+ end.
+
+ensure_rabbit_common_srcdir(Config) ->
+ ensure_application_srcdir(Config, rabbit_common, rabbit_misc).
+
+ensure_rabbitmq_cli_srcdir(Config) ->
+ ensure_application_srcdir(Config, rabbitmq_cli, elixir, 'Elixir.RabbitMQCtl').
+
+ensure_rabbit_srcdir(Config) ->
+ ensure_application_srcdir(Config, rabbit, rabbit).
+
+maybe_rabbit_srcdir(Config) ->
+ %% Some tests under Bazel use this value, others do not.
+ %% By allowing this config key to be optional, we avoid making
+ %% more tests depend on rabbit.
+ case ensure_application_srcdir(Config, rabbit, rabbit) of
+ {skip, _} -> Config;
+ Config1 -> Config1
+ end.
+
+ensure_application_srcdir(Config, App, Module) ->
+ ensure_application_srcdir(Config, App, erlang, Module).
+
+ensure_application_srcdir(Config, App, Lang, Module) ->
+ AppS = atom_to_list(App),
+ Key = list_to_atom(AppS ++ "_srcdir"),
+ SecondaryKey = list_to_atom("secondary_" ++ AppS ++ "_srcdir"),
+ Path = case get_config(Config, Key) of
+ undefined ->
+ case code:which(Module) of
+ non_existing ->
+ filename:join(?config(erlang_mk_depsdir, Config), AppS);
+ P when Lang =:= erlang ->
+ %% P is $SRCDIR/ebin/$MODULE.beam.
+ filename:dirname(
+ filename:dirname(P));
+ P when Lang =:= elixir ->
+ %% P is $SRCDIR/_build/$MIX_ENV/lib/$APP/ebin/$MODULE.beam.
+ filename:dirname(
+ filename:dirname(
+ filename:dirname(
+ filename:dirname(
+ filename:dirname(
+ filename:dirname(P))))))
+ end;
+ P ->
+ P
+ end,
+ SecondaryPath = case ?config(secondary_umbrella, Config) of
+ false ->
+ Path;
+ _ ->
+ case get_config(Config, SecondaryKey) of
+ undefined ->
+ filename:join(
+ ?config(secondary_erlang_mk_depsdir,
+ Config),
+ AppS);
+ SP ->
+ SP
+ end
+ end,
+ case filelib:is_dir(Path) andalso filelib:is_dir(SecondaryPath) of
+ true -> set_config(Config,
+ [{Key, Path},
+ {SecondaryKey, SecondaryPath}]);
+ false -> {skip,
+ AppS ++ " source directory required, " ++
+ "please set '" ++ AppS ++ "_srcdir' in ct config"}
+ end.
+
+ensure_make_cmd(Config) ->
+ Make = case get_config(Config, make_cmd) of
+ undefined ->
+ case os:getenv("MAKE") of
+ false -> "make";
+ M -> M
+ end;
+ M ->
+ M
+ end,
+ Cmd = [Make, "--version"],
+ case exec(Cmd, [{match_stdout, "GNU Make"}]) of
+ {ok, _} -> set_config(Config, {make_cmd, Make});
+ _ -> {skip,
+ "GNU Make required, " ++
+ "please set MAKE or 'make_cmd' in ct config"}
+ end.
+
+ensure_rabbitmq_run_cmd(Config) ->
+ Path = os:getenv("RABBITMQ_RUN"),
+ case filelib:is_file(Path) of
+ true -> set_config(Config, {rabbitmq_run_cmd, Path});
+ false -> {skip,
+ "Bazel helper rabbitmq-run required, " ++
+ "please set RABBITMQ_RUN"}
+ end.
+
+ensure_rabbitmq_run_secondary_cmd(Config) ->
+ Path = os:getenv("RABBITMQ_RUN_SECONDARY"),
+ case filelib:is_file(Path) of
+ true ->
+ set_config(Config, {rabbitmq_run_secondary_cmd, Path});
+ false ->
+ Config
+ end.
+
+ensure_erl_call_cmd(Config) ->
+ ErlCallDir = code:lib_dir(erl_interface, bin),
+ ErlCall = filename:join(ErlCallDir, "erl_call"),
+ Cmd = [ErlCall],
+ case exec(Cmd, [{match_stdout, "Usage: "}]) of
+ {ok, _} -> set_config(Config, {erl_call_cmd, ErlCall});
+ _ -> {skip,
+ "erl_call required, " ++
+ "please set ERL_CALL or 'erl_call_cmd' in ct config"}
+ end.
+
+ensure_rabbitmqctl_cmd(Config) ->
+ Rabbitmqctl = case get_config(Config, rabbitmqctl_cmd) of
+ undefined ->
+ case os:getenv("RABBITMQCTL") of
+ false ->
+ find_script(Config, "rabbitmqctl");
+ R ->
+ ct:pal(?LOW_IMPORTANCE,
+ "Using rabbitmqctl from RABBITMQCTL: ~p~n", [R]),
+ R
+ end;
+ R ->
+ ct:pal(?LOW_IMPORTANCE,
+ "Using rabbitmqctl from rabbitmqctl_cmd: ~p~n", [R]),
+ R
+ end,
+ Error = {skip, "rabbitmqctl required, " ++
+ "please set RABBITMQCTL or 'rabbitmqctl_cmd' in ct config"},
+ case Rabbitmqctl of
+ false ->
+ Error;
+ _ ->
+ Cmd = [Rabbitmqctl],
+ Env = [
+ {"RABBITMQ_SCRIPTS_DIR", filename:dirname(Rabbitmqctl)}
+ ],
+ case exec(Cmd, [drop_stdout, {env, Env}]) of
+ {error, 64, _} ->
+ set_config(Config, {rabbitmqctl_cmd, Rabbitmqctl});
+ {error, Code, Reason} ->
+ ct:pal("Exec failed with exit code ~p: ~p", [Code, Reason]),
+ Error;
+ _ ->
+ Error
+ end
+ end.
+
+find_script(Config, Script) ->
+ Locations = [File
+ || File <- [new_script_location(Config, Script),
+ old_script_location(Config, Script)],
+ filelib:is_file(File)],
+ case Locations of
+ [Location | _] ->
+ ct:pal(?LOW_IMPORTANCE, "Using ~s at ~p~n", [Script, Location]),
+ Location;
+ [] ->
+ false
+ end.
+
+old_script_location(Config, Script) ->
+ SrcDir = ?config(rabbit_srcdir, Config),
+ filename:join([SrcDir, "scripts", Script]).
+
+new_script_location(Config, Script) ->
+ SrcDir = ?config(current_srcdir, Config),
+ filename:join([SrcDir, "sbin", Script]).
+
+ensure_rabbitmqctl_app(Config) ->
+ SrcDir = ?config(rabbitmq_cli_srcdir, Config),
+ MixEnv = os:getenv("MIX_ENV", "dev"),
+ EbinDir = filename:join(
+ [SrcDir, "_build", MixEnv, "lib", "rabbitmqctl", "ebin"]),
+ case filelib:is_file(filename:join(EbinDir, "rabbitmqctl.app")) of
+ true ->
+ true = code:add_path(EbinDir),
+ case application:load(rabbitmqctl) of
+ ok ->
+ Config;
+ {error, {already_loaded, rabbitmqctl}} ->
+ Config;
+ {error, _} ->
+ {skip, "Access to rabbitmq_cli ebin directory required, " ++
+ "please build rabbitmq_cli and set MIX_ENV"}
+ end;
+ false ->
+ {skip, "Access to rabbitmq_cli ebin directory required, " ++
+ "please build rabbitmq_cli and set MIX_ENV"}
+ end.
+
+load_rabbitmqctl_app(Config) ->
+ case application:load(rabbitmqctl) of
+ ok ->
+ Config;
+ {error, {already_loaded, rabbitmqctl}} ->
+ Config;
+ {error, Reason} ->
+ ct:pal(?LOW_IMPORTANCE,
+ "Failed to load rabbitmqctl application: ~p", [Reason]),
+ {skip, "Application rabbitmqctl could not be loaded, " ++
+ "please place compiled rabbitmq_cli on the code path"}
+ end.
+
+ensure_rabbitmq_plugins_cmd(Config) ->
+ Rabbitmqplugins = case get_config(Config, rabbitmq_plugins_cmd) of
+ undefined ->
+ case os:getenv("RABBITMQ_PLUGINS") of
+ false -> find_script(Config, "rabbitmq-plugins");
+ R -> R
+ end;
+ R ->
+ R
+ end,
+ Error = {skip, "rabbitmq_plugins required, " ++
+ "please set RABBITMQ_PLUGINS or 'rabbitmq_plugins_cmd' in ct config"},
+ case Rabbitmqplugins of
+ false ->
+ Error;
+ _ ->
+ Cmd = [Rabbitmqplugins],
+ Env = [
+ {"RABBITMQ_SCRIPTS_DIR", filename:dirname(Rabbitmqplugins)}
+ ],
+ case exec(Cmd, [drop_stdout, {env, Env}]) of
+ {error, 64, _} ->
+ set_config(Config, {rabbitmq_plugins_cmd, Rabbitmqplugins});
+ _ ->
+ Error
+ end
+ end.
+
+ensure_rabbitmq_queues_cmd(Config) ->
+ RabbitmqQueues = case get_config(Config, rabbitmq_queues_cmd) of
+ undefined ->
+ case os:getenv("RABBITMQ_QUEUES") of
+ false -> find_script(Config, "rabbitmq-queues");
+ R -> R
+ end;
+ R ->
+ ct:pal(?LOW_IMPORTANCE,
+ "Using rabbitmq-queues from rabbitmq_queues_cmd: ~p~n", [R]),
+ R
+ end,
+ Error = {skip, "rabbitmq-queues required, " ++
+ "please set RABBITMQ_QUEUES or 'rabbitmq_queues_cmd' in ct config"},
+ case RabbitmqQueues of
+ false ->
+ Error;
+ _ ->
+ Cmd = [RabbitmqQueues],
+ Env = [
+ {"RABBITMQ_SCRIPTS_DIR", filename:dirname(RabbitmqQueues)}
+ ],
+ case exec(Cmd, [drop_stdout, {env, Env}]) of
+ {error, 64, _} ->
+ set_config(Config,
+ {rabbitmq_queues_cmd,
+ RabbitmqQueues});
+ {error, Code, Reason} ->
+ ct:pal("Exec failed with exit code ~p: ~p", [Code, Reason]),
+ Error;
+ _ ->
+ Error
+ end
+ end.
+
+ensure_ssl_certs(Config) ->
+ SrcDir = ?config(rabbitmq_ct_helpers_srcdir, Config),
+ CertsMakeDir = filename:join([SrcDir, "tools", "tls-certs"]),
+ PrivDir = ?config(priv_dir, Config),
+ CertsDir = filename:join(PrivDir, "certs"),
+ CertsPwd = proplists:get_value(rmq_certspwd, Config, ?SSL_CERT_PASSWORD),
+ Cmd = [
+ "PASSWORD=" ++ CertsPwd,
+ "DIR=" ++ CertsDir],
+ case make(Config, CertsMakeDir, Cmd) of
+ {ok, _} ->
+ %% Add SSL certs to the broker configuration.
+ Verify = get_config(Config, rabbitmq_ct_tls_verify, verify_peer),
+ FailIfNoPeerCert = get_config(
+ Config,
+ rabbitmq_ct_tls_fail_if_no_peer_cert,
+ true) andalso Verify =/= verify_none,
+ Config1 = merge_app_env(Config,
+ {rabbit, [
+ {ssl_options, [
+ {cacertfile,
+ filename:join([CertsDir, "testca", "cacert.pem"])},
+ {certfile,
+ filename:join([CertsDir, "server", "cert.pem"])},
+ {keyfile,
+ filename:join([CertsDir, "server", "key.pem"])},
+ {verify, Verify},
+ {fail_if_no_peer_cert, FailIfNoPeerCert}
+ ]}]}),
+ set_config(Config1, {rmq_certsdir, CertsDir});
+ _ ->
+ {skip, "Failed to create SSL certificates"}
+ end.
+
+link_name(["deps", _ | Tail]) ->
+ case lists:reverse(Tail) of
+ ["logs" | Rest] ->
+ string:join(lists:reverse(["private_log" | Rest]), ".");
+ _ ->
+ string:join(Tail, ".")
+ end;
+link_name(X) -> X.
+
+get_selection_from_tc_logfile(["logs", _, S | _Tail]) ->
+ {ok, link_name(string:tokens(S, "."))};
+get_selection_from_tc_logfile([_ | Tail]) ->
+ get_selection_from_tc_logfile(Tail);
+get_selection_from_tc_logfile([]) -> not_found.
+
+get_selection(Config) ->
+ TcLogFile = ?config(tc_logfile, Config),
+ get_selection_from_tc_logfile(filename:split(TcLogFile)).
+
+
+symlink_priv_dir(Config) ->
+ case {os:type(), ?config(rabbitmq_run_cmd, Config)} of
+ {{win32, _}, _} ->
+ Config;
+ {_, Cmd} when Cmd =/= undefined ->
+ %% skip if bazel
+ Config;
+ _ ->
+ SrcDir = ?config(current_srcdir, Config),
+ PrivDir = ?config(priv_dir, Config),
+ case get_selection(Config) of
+ {ok, Name} ->
+ Target = filename:join([SrcDir, "logs", Name]),
+ case exec(["ln", "-snf", PrivDir, Target]) of
+ {ok, _} -> ok;
+ _ -> ct:pal(?LOW_IMPORTANCE,
+ "Failed to symlink private_log directory.")
+ end,
+ Config;
+ not_found ->
+ ct:pal(?LOW_IMPORTANCE,
+ "Failed to symlink private_log directory."),
+ Config
+ end
+ end.
+
+%% -------------------------------------------------------------------
+%% Process to log a message every minute during long testcases.
+%% -------------------------------------------------------------------
+
+-define(PING_CT_INTERVAL, 60 * 1000). %% In milliseconds.
+
+start_long_running_testsuite_monitor(Config) ->
+ Pid = spawn(
+ fun() ->
+ {ok, TimerRef} = timer:send_interval(?PING_CT_INTERVAL, ping_ct),
+ long_running_testsuite_monitor(TimerRef, [])
+ end),
+ set_config(Config, {long_running_testsuite_monitor, Pid}).
+
+load_elixir(Config) ->
+ case find_elixir_home() of
+ {skip, _} = Skip ->
+ Skip;
+ ElixirLibDir ->
+ ct:pal(?LOW_IMPORTANCE, "Elixir lib dir: ~s~n", [ElixirLibDir]),
+ true = code:add_pathz(ElixirLibDir),
+ application:load(elixir),
+ {ok, _} = application:ensure_all_started(elixir),
+ Config
+ end.
+
+find_elixir_home() ->
+ ElixirExe = case os:type() of
+ {unix, _} -> "elixir";
+ {win32, _} -> "elixir.bat"
+ end,
+ case os:find_executable(ElixirExe) of
+ false -> {skip, "Failed to locate Elixir executable"};
+ ExePath ->
+ {ok, ElixirLibDir} = exec([ExePath, "--eval", "IO.write(:code.lib_dir(:elixir, :ebin))"], []),
+ ElixirLibDir
+ end.
+
+stop_long_running_testsuite_monitor(Config) ->
+ case get_config(Config, long_running_testsuite_monitor) of
+ undefined -> ok;
+ Pid -> Pid ! stop
+ end,
+ Config.
+
+long_running_testsuite_monitor(TimerRef, Testcases) ->
+ receive
+ {started, Testcase} ->
+ Testcases1 = [{Testcase, erlang:monotonic_time(seconds)}
+ | Testcases],
+ long_running_testsuite_monitor(TimerRef, Testcases1);
+ {finished, Testcase} ->
+ Testcases1 = proplists:delete(Testcase, Testcases),
+ long_running_testsuite_monitor(TimerRef, Testcases1);
+ ping_ct ->
+ T1 = erlang:monotonic_time(seconds),
+ ct:pal(?STD_IMPORTANCE, "Testcases still in progress:~s",
+ [[
+ begin
+ TDiff = format_time_diff(T1, T0),
+ rabbit_misc:format("~n - ~s (~s)", [TC, TDiff])
+ end
+ || {TC, T0} <- Testcases
+ ]]),
+ long_running_testsuite_monitor(TimerRef, Testcases);
+ stop ->
+ timer:cancel(TimerRef)
+ end.
+
+format_time_diff(T1, T0) ->
+ Diff = T1 - T0,
+ Hours = Diff div 3600,
+ Diff1 = Diff rem 3600,
+ Minutes = Diff1 div 60,
+ Seconds = Diff1 rem 60,
+ rabbit_misc:format("~b:~2..0b:~2..0b", [Hours, Minutes, Seconds]).
+
+testcase_started(Config, Testcase) ->
+ Testcase1 = config_to_testcase_name(Config, Testcase),
+ ?config(long_running_testsuite_monitor, Config) ! {started, Testcase1},
+ Config.
+
+testcase_finished(Config, Testcase) ->
+ Testcase1 = config_to_testcase_name(Config, Testcase),
+ ?config(long_running_testsuite_monitor, Config) ! {finished, Testcase1},
+ Config.
+
+config_to_testcase_name(Config, Testcase) ->
+ testcase_absname(Config, Testcase).
+
+testcase_absname(Config, Testcase) ->
+ testcase_absname(Config, Testcase, "/").
+
+testcase_absname(Config, Testcase, Sep) ->
+ Name = rabbit_misc:format("~s", [Testcase]),
+ case get_config(Config, tc_group_properties) of
+ [] ->
+ Name;
+ Props ->
+ Name1 = case Name of
+ "" ->
+ rabbit_misc:format("~s",
+ [proplists:get_value(name, Props)]);
+ _ ->
+ rabbit_misc:format("~s~s~s",
+ [proplists:get_value(name, Props), Sep, Name])
+ end,
+ testcase_absname1(Name1,
+ get_config(Config, tc_group_path), Sep)
+ end.
+
+testcase_absname1(Name, [Props | Rest], Sep) ->
+ Name1 = rabbit_misc:format("~s~s~s",
+ [proplists:get_value(name, Props), Sep, Name]),
+ testcase_absname1(Name1, Rest, Sep);
+testcase_absname1(Name, [], _) ->
+ lists:flatten(Name).
+
+testcases(Testsuite) ->
+ All = Testsuite:all(),
+ testcases1(Testsuite, All, [], []).
+
+testcases1(Testsuite, [{group, GroupName} | Rest], CurrentPath, Testcases) ->
+ Group = {GroupName, _, _} = lists:keyfind(GroupName, 1, Testsuite:groups()),
+ testcases1(Testsuite, [Group | Rest], CurrentPath, Testcases);
+testcases1(Testsuite, [{GroupName, _, Children} | Rest],
+ CurrentPath, Testcases) ->
+ Testcases1 = testcases1(Testsuite, Children,
+ [[{name, GroupName}] | CurrentPath], Testcases),
+ testcases1(Testsuite, Rest, CurrentPath, Testcases1);
+testcases1(Testsuite, [Testcase | Rest], CurrentPath, Testcases)
+when is_atom(Testcase) ->
+ {Props, Path} = case CurrentPath of
+ [] -> {[], []};
+ [H | T] -> {H, T}
+ end,
+ Name = config_to_testcase_name([
+ {tc_group_properties, Props},
+ {tc_group_path, Path}
+ ], Testcase),
+ testcases1(Testsuite, Rest, CurrentPath, [Name | Testcases]);
+testcases1(_, [], [], Testcases) ->
+ lists:reverse(Testcases);
+testcases1(_, [], _, Testcases) ->
+ Testcases.
+
+testcase_number(Config, TestSuite, TestName) ->
+ Testcase = config_to_testcase_name(Config, TestName),
+ Testcases = testcases(TestSuite),
+ testcase_number1(Testcases, Testcase, 0).
+
+testcase_number1([Testcase | _], Testcase, N) ->
+ N;
+testcase_number1([_ | Rest], Testcase, N) ->
+ testcase_number1(Rest, Testcase, N + 1);
+testcase_number1([], _, N) ->
+ N.
+
+%% -------------------------------------------------------------------
+%% Helpers for helpers.
+%% -------------------------------------------------------------------
+
+term_checksum(Term) ->
+ Bin = term_to_binary(Term),
+ <<Checksum:128/big-unsigned-integer>> = erlang:md5(Bin),
+ rabbit_misc:format("~32.16.0b", [Checksum]).
+
+random_term_checksum() ->
+ term_checksum(rabbit_misc:random(1000000)).
+
+exec(Cmd) ->
+ exec(Cmd, []).
+
+exec([Cmd | Args], Options) when is_list(Cmd) orelse is_binary(Cmd) ->
+ Cmd0 = case (lists:member($/, Cmd) orelse lists:member($\\, Cmd)) of
+ true ->
+ Cmd;
+ false ->
+ case os:find_executable(Cmd) of
+ false -> Cmd;
+ Path -> Path
+ end
+ end,
+ Cmd1 = convert_to_unicode_binary(
+ string:trim(
+ rabbit_data_coercion:to_list(Cmd0))),
+ Args1 = [convert_to_unicode_binary(format_arg(Arg)) || Arg <- Args],
+ {LocalOptions, PortOptions} = lists:partition(
+ fun
+ ({match_stdout, _}) -> true;
+ ({timeout, _}) -> true;
+ (drop_stdout) -> true;
+ (_) -> false
+ end, Options),
+ PortOptions1 = case lists:member(nouse_stdio, PortOptions) of
+ true -> PortOptions;
+ false -> [use_stdio, stderr_to_stdout | PortOptions]
+ end,
+ Log = "+ ~s (pid ~p)",
+ ExportedEnvVars = ["ERL_INETRC"],
+ ExportedEnv = lists:foldl(
+ fun(Var, Env) ->
+ case os:getenv(Var) of
+ false -> Env;
+ Value -> [{Var, Value} | Env]
+ end
+ end, [], ExportedEnvVars),
+ {PortOptions2, Log1} = case proplists:get_value(env, PortOptions1) of
+ undefined ->
+ {[{env, ExportedEnv} | PortOptions1], Log};
+ Env ->
+ Env1 = [
+ begin
+ Key1 = format_arg(Key),
+ Value1 = format_arg(Value),
+ Value2 = case is_binary(Value1) of
+ true -> binary_to_list(Value1);
+ false -> Value1
+ end,
+ {Key1, Value2}
+ end
+ || {Key, Value} <- Env
+ ],
+ {
+ [{env, Env1 ++ ExportedEnv}
+ | proplists:delete(env, PortOptions1)],
+ Log ++ "~n~nEnvironment variables:~n" ++
+ string:join(
+ [rabbit_misc:format(" ~s=~s", [K, V]) || {K, V} <- Env1],
+ "~n")
+ }
+ end,
+ %% Because Args1 may contain binaries, we don't use string:join().
+ %% Instead we do a list comprehension.
+ ArgsIoList = [Cmd1, [[$\s, Arg] || Arg <- Args1]],
+ ct:pal(?LOW_IMPORTANCE, Log1, [ArgsIoList, self()]),
+ try
+ Port = erlang:open_port(
+ {spawn_executable, Cmd1}, [
+ {args, Args1},
+ exit_status
+ | PortOptions2]),
+
+ case lists:keytake(timeout, 1, LocalOptions) of
+ false ->
+ port_receive_loop(Port, "", LocalOptions, infinity);
+ {value, {timeout, infinity}, LocalOptions1} ->
+ port_receive_loop(Port, "", LocalOptions1, infinity);
+ {value, {timeout, Timeout}, LocalOptions1} ->
+ Until = erlang:system_time(millisecond) + Timeout,
+ port_receive_loop(Port, "", LocalOptions1, Until)
+ end
+ catch
+ error:Reason ->
+ ct:pal(?LOW_IMPORTANCE, "~s: ~s",
+ [Cmd1, file:format_error(Reason)]),
+ {error, Reason, file:format_error(Reason)}
+ end.
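+
+%% Example (illustrative sketch, assuming a Unix-like environment where
+%% echo(1) is on the PATH): run an external command with a time limit
+%% and require a given string in its output.
+%%
+%%   {ok, _Out} = exec(["echo", "hello"],
+%%                     [{match_stdout, "hello"}, {timeout, 30000}]).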
+
+format_arg({Format, FormatArgs}) ->
+ rabbit_misc:format(Format, FormatArgs);
+format_arg(Arg) when is_atom(Arg) ->
+ atom_to_list(Arg);
+format_arg(Arg) ->
+ Arg.
+
+port_receive_loop(Port, Stdout, Options, Until) ->
+ port_receive_loop(Port, Stdout, Options, Until, stdout_dump_timer()).
+
+port_receive_loop(Port, Stdout, Options, Until, DumpTimer) ->
+ Timeout = case Until of
+ infinity -> infinity;
+ _ -> max(0, Until - erlang:system_time(millisecond))
+ end,
+ receive
+ {Port, {exit_status, X}} ->
+ timer:cancel(DumpTimer),
+ DropStdout = lists:member(drop_stdout, Options) orelse
+ Stdout =:= "",
+ if
+ DropStdout ->
+ ct:pal(?LOW_IMPORTANCE, "Exit code: ~p (pid ~p)",
+ [X, self()]);
+ true ->
+ ct:pal(?LOW_IMPORTANCE, "~ts~nExit code: ~p (pid ~p)",
+ [Stdout, X, self()])
+ end,
+ case proplists:get_value(match_stdout, Options) of
+ undefined ->
+ case X of
+ 0 -> {ok, Stdout};
+ _ -> {error, X, Stdout}
+ end;
+ RE ->
+ case re:run(Stdout, RE, [{capture, none}]) of
+ match -> {ok, Stdout};
+ nomatch -> {error, X, Stdout}
+ end
+ end;
+ dump_output ->
+ DropStdout = lists:member(drop_stdout, Options) orelse
+ Stdout =:= "",
+ if
+ DropStdout ->
+ ok;
+ true ->
+ ct:pal(?LOW_IMPORTANCE, "~ts~n[Command still in progress] (pid ~p)",
+ [Stdout, self()])
+ end,
+ port_receive_loop(Port, Stdout, Options, Until, stdout_dump_timer());
+ {Port, {data, Out}} ->
+ port_receive_loop(Port, Stdout ++ Out, Options, Until, DumpTimer)
+ after
+ Timeout ->
+ {error, timeout, Stdout}
+ end.
+
+stdout_dump_timer() ->
+ {ok, TRef} = timer:send_after(30000, dump_output),
+ TRef.
+
+make(Config, Dir, Args) ->
+ make(Config, Dir, Args, []).
+
+make(Config, Dir, Args, Options) ->
+ Make = rabbit_ct_vm_helpers:get_current_vm_config(Config, make_cmd),
+ Verbosity = case os:getenv("V") of
+ false -> [];
+ V -> ["V=" ++ V]
+ end,
+ Cmd = [Make, "-C", Dir] ++ Verbosity ++ Args,
+ exec(Cmd, Options).
+
+%% This is the same as ?config(), except this one doesn't log a warning
+%% if the key is missing.
+get_config(Config, Key) ->
+ proplists:get_value(Key, Config).
+
+get_config(Config, Key, Default) ->
+ proplists:get_value(Key, Config, Default).
+
+set_config(Config, Tuple) when is_tuple(Tuple) ->
+ Key = element(1, Tuple),
+ lists:keystore(Key, 1, Config, Tuple);
+set_config(Config, [Tuple | Rest]) ->
+ Config1 = set_config(Config, Tuple),
+ set_config(Config1, Rest);
+set_config(Config, []) ->
+ Config.
+
+delete_config(Config, Key) ->
+ proplists:delete(Key, Config).
+
+get_app_env(Config, App, Key, Default) ->
+ ErlangConfig = proplists:get_value(erlang_node_config, Config, []),
+ AppConfig = proplists:get_value(App, ErlangConfig, []),
+ proplists:get_value(Key, AppConfig, Default).
+
+merge_app_env(Config, Env) ->
+ ErlangConfig = proplists:get_value(erlang_node_config, Config, []),
+ ErlangConfig1 = merge_app_env_in_erlconf(ErlangConfig, Env),
+ set_config(Config, {erlang_node_config, ErlangConfig1}).
+
+merge_app_env_in_erlconf(ErlangConfig, {App, Env}) ->
+ AppConfig = proplists:get_value(App, ErlangConfig, []),
+ AppConfig1 = lists:foldl(
+ fun({Key, _} = Tuple, AC) ->
+ lists:keystore(Key, 1, AC, Tuple)
+ end, AppConfig, Env),
+ lists:keystore(App, 1, ErlangConfig, {App, AppConfig1});
+merge_app_env_in_erlconf(ErlangConfig, [Env | Rest]) ->
+ ErlangConfig1 = merge_app_env_in_erlconf(ErlangConfig, Env),
+ merge_app_env_in_erlconf(ErlangConfig1, Rest);
+merge_app_env_in_erlconf(ErlangConfig, []) ->
+ ErlangConfig.
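+
+%% Example (illustrative sketch): stage an application parameter so that
+%% a later broker setup step starts the nodes with it; the parameter and
+%% value are assumptions.
+%%
+%%   Config1 = merge_app_env(Config, {rabbit, [{channel_max, 1024}]}).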
+
+nodename_to_hostname(Nodename) when is_atom(Nodename) ->
+ [_, Hostname] = string:tokens(atom_to_list(Nodename), "@"),
+ Hostname.
+
+convert_to_unicode_binary(Arg) when is_list(Arg) ->
+ unicode:characters_to_binary(Arg);
+convert_to_unicode_binary(Arg) when is_binary(Arg) ->
+ Arg.
+
+is_mixed_versions() ->
+ os:getenv("SECONDARY_UMBRELLA") =/= false
+ orelse os:getenv("RABBITMQ_RUN_SECONDARY") =/= false.
+
+is_mixed_versions(Config) ->
+ get_config(Config, secondary_umbrella, false) =/= false
+ orelse get_config(Config, rabbitmq_run_secondary_cmd, false) =/= false.
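+
+%% Example (illustrative sketch): skip a testcase that is known not to
+%% work against a mixed-version cluster.
+%%
+%%   case is_mixed_versions() of
+%%       true  -> {skip, "not supported in mixed-version clusters"};
+%%       false -> Config
+%%   end.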
+
+%% -------------------------------------------------------------------
+%% Assertions that retry
+%% -------------------------------------------------------------------
+
+await_condition(ConditionFun) ->
+ await_condition(ConditionFun, 10000).
+
+await_condition(ConditionFun, Timeout) ->
+ Retries = ceil(Timeout / 50),
+ await_condition_with_retries(ConditionFun, Retries).
+
+await_condition_with_retries(_ConditionFun, 0) ->
+ ct:fail("Condition did not materialize in the expected period of time");
+await_condition_with_retries(ConditionFun, RetriesLeft) ->
+ case ConditionFun() of
+ false ->
+ timer:sleep(50),
+ await_condition_with_retries(ConditionFun, RetriesLeft - 1);
+ true ->
+ ok
+ end.
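+
+%% Example (illustrative sketch): wait up to 30 seconds for a condition
+%% to hold; queue_count/1 is a hypothetical helper of the calling suite.
+%%
+%%   await_condition(fun() -> queue_count(Config) =:= 1 end, 30000).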
+
+%% -------------------------------------------------------------------
+%% Cover-related functions.
+%% -------------------------------------------------------------------
+
+%% TODO.
+cover_work_factor(_Config, Without) ->
+ Without.
diff --git a/deps/rabbitmq_ct_helpers/src/rabbit_ct_proper_helpers.erl b/deps/rabbitmq_ct_helpers/src/rabbit_ct_proper_helpers.erl
new file mode 100644
index 0000000000..53349284f8
--- /dev/null
+++ b/deps/rabbitmq_ct_helpers/src/rabbit_ct_proper_helpers.erl
@@ -0,0 +1,21 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(rabbit_ct_proper_helpers).
+
+-include_lib("common_test/include/ct.hrl").
+-include_lib("proper/include/proper.hrl").
+-include_lib("eunit/include/eunit.hrl").
+
+-export([run_proper/3]).
+
+run_proper(Fun, Args, NumTests) ->
+ ?assert(
+ proper:counterexample(erlang:apply(Fun, Args),
+ [{numtests, NumTests},
+ {on_output, fun(".", _) -> ok; % don't print the '.'s on new lines
+ (F, A) -> ct:pal(?LOW_IMPORTANCE, F, A) end}])).
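+
+%% Example (illustrative sketch): run a PropEr property 100 times from a
+%% testcase; the property below is an assumption, not part of this
+%% module.
+%%
+%%   prop_reverse_twice() ->
+%%       ?FORALL(L, list(integer()),
+%%               lists:reverse(lists:reverse(L)) =:= L).
+%%
+%%   reverse_twice(_Config) ->
+%%       rabbit_ct_proper_helpers:run_proper(
+%%         fun prop_reverse_twice/0, [], 100).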
diff --git a/deps/rabbitmq_ct_helpers/src/rabbit_ct_vm_helpers.erl b/deps/rabbitmq_ct_helpers/src/rabbit_ct_vm_helpers.erl
new file mode 100644
index 0000000000..ffdef1e62a
--- /dev/null
+++ b/deps/rabbitmq_ct_helpers/src/rabbit_ct_vm_helpers.erl
@@ -0,0 +1,1140 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2018-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(rabbit_ct_vm_helpers).
+
+-include_lib("common_test/include/ct.hrl").
+
+-export([setup_steps/0,
+ teardown_steps/0,
+
+ get_ct_peers/1,
+ get_ct_peer/2,
+ get_ct_peer_configs/2,
+ get_ct_peer_config/2, get_ct_peer_config/3,
+ get_current_vm_config/2,
+ rpc/4, rpc/5,
+ rpc_all/3, rpc_all/4,
+
+ ensure_terraform_cmd/1,
+ determine_erlang_version/1,
+ determine_erlang_git_ref/1,
+ determine_elixir_version/1,
+ compute_code_path/1,
+ find_terraform_ssh_key/1,
+ set_terraform_files_suffix/1,
+ set_terraform_config_dirs/1,
+ set_terraform_state/1,
+ set_terraform_aws_ec2_region/1,
+ init_terraform/1,
+ compute_vpc_cidr_block/1,
+ find_erlang_mk/1,
+ find_rabbitmq_components/1,
+ list_dirs_to_upload/1,
+ list_dirs_to_download/1,
+ maybe_prepare_dirs_to_upload_archive/1,
+ spawn_terraform_vms/1, destroy_terraform_vms/1,
+ query_terraform_uuid/1,
+ query_ct_peer_nodenames_and_ipaddrs/1,
+ set_inet_hosts/1,
+ write_inetrc/1,
+ wait_for_ct_peers/1,
+ set_ct_peers_code_path/1,
+ start_ct_logs_proxies/1,
+ configure_ct_peers_environment/1,
+ download_dirs/1,
+ stop_ct_peers/1,
+
+ aws_direct_vms_module/1,
+ aws_autoscaling_group_module/1,
+ vms_query_module/1,
+
+ do_setup_proxy/2, proxy_loop/1,
+ prepare_dirs_to_download_archives/1
+ ]).
+
+-define(UPLOAD_DIRS_ARCHIVE_PREFIX, "dirs-archive-").
+-define(ERLANG_REMOTE_NODENAME, "control").
+
+setup_steps() ->
+ [
+ fun ensure_terraform_cmd/1,
+ fun determine_erlang_version/1,
+ fun determine_erlang_git_ref/1,
+ fun determine_elixir_version/1,
+ fun compute_code_path/1,
+ fun find_terraform_ssh_key/1,
+ fun set_terraform_files_suffix/1,
+ fun set_terraform_config_dirs/1,
+ fun set_terraform_state/1,
+ fun set_terraform_aws_ec2_region/1,
+ fun init_terraform/1,
+ fun compute_vpc_cidr_block/1,
+ fun find_erlang_mk/1,
+ fun find_rabbitmq_components/1,
+ fun list_dirs_to_upload/1,
+ fun list_dirs_to_download/1,
+ fun maybe_prepare_dirs_to_upload_archive/1,
+ fun spawn_terraform_vms/1,
+ fun query_terraform_uuid/1,
+ fun query_ct_peer_nodenames_and_ipaddrs/1,
+ fun set_inet_hosts/1,
+ fun write_inetrc/1,
+ fun wait_for_ct_peers/1,
+ fun set_ct_peers_code_path/1,
+ fun start_ct_logs_proxies/1,
+ fun configure_ct_peers_environment/1
+ ].
+
+teardown_steps() ->
+ [
+ fun download_dirs/1,
+ fun stop_ct_peers/1,
+ fun destroy_terraform_vms/1
+ ].
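+
+%% Example (illustrative sketch): these step lists plug into
+%% rabbit_ct_helpers:run_steps/2 like the other helper modules do.
+%%
+%%   init_per_suite(Config) ->
+%%       rabbit_ct_helpers:run_setup_steps(
+%%         Config, rabbit_ct_vm_helpers:setup_steps()).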
+
+ensure_terraform_cmd(Config) ->
+ Terraform = case rabbit_ct_helpers:get_config(Config, terraform_cmd) of
+ undefined ->
+ case os:getenv("TERRAFORM") of
+ false -> "terraform";
+ T -> T
+ end;
+ T ->
+ T
+ end,
+ Cmd = [Terraform, "--version"],
+ case rabbit_ct_helpers:exec(Cmd, [{match_stdout, "Terraform"}]) of
+ {ok, _} ->
+ rabbit_ct_helpers:set_config(Config, {terraform_cmd, Terraform});
+ _ ->
+ {skip, "terraform(1) required, " ++
+ "please set TERRAFORM or 'terraform_cmd' in ct config"}
+ end.
+
+determine_erlang_version(Config) ->
+ Version = case rabbit_ct_helpers:get_config(Config, erlang_version) of
+ undefined ->
+ case os:getenv("ERLANG_VERSION") of
+ false -> rabbit_misc:otp_release();
+ V -> V
+ end;
+ V ->
+ V
+ end,
+ Regex = "([0-9]+\\.[0-9]+|R[0-9]+(?:[AB])[0-9]+).*",
+ ErlangVersion = re:replace(Version, Regex, "\\1"),
+ ct:pal(?LOW_IMPORTANCE, "Erlang version: ~s", [ErlangVersion]),
+ rabbit_ct_helpers:set_config(
+ Config, {erlang_version, ErlangVersion}).
+
+determine_erlang_git_ref(Config) ->
+ GitRef = case rabbit_ct_helpers:get_config(Config, erlang_git_ref) of
+ undefined ->
+ case os:getenv("ERLANG_GIT_REF") of
+ false ->
+ Version = erlang:system_info(system_version),
+ ReOpts = [{capture, all_but_first, list}],
+ Match = re:run(Version,
+ "source-([0-9a-fA-F]+)",
+ ReOpts),
+ case Match of
+ {match, [V]} -> V;
+ _ -> ""
+ end;
+ V ->
+ V
+ end;
+ V ->
+ V
+ end,
+ ct:pal(?LOW_IMPORTANCE, "Erlang Git reference: ~s", [GitRef]),
+ rabbit_ct_helpers:set_config(
+ Config, {erlang_git_ref, GitRef}).
+
+determine_elixir_version(Config) ->
+ Version = case rabbit_ct_helpers:get_config(Config, elixir_version) of
+ undefined ->
+ case os:getenv("ELIXIR_VERSION") of
+ false ->
+ Cmd = ["elixir", "-e", "IO.puts System.version"],
+ case rabbit_ct_helpers:exec(Cmd) of
+ {ok, Output} ->
+ string:strip(Output, right, $\n);
+ _ ->
+ ""
+ end;
+ V ->
+ V
+ end;
+ V ->
+ V
+ end,
+ ct:pal(?LOW_IMPORTANCE, "Elixir version: ~s", [Version]),
+ rabbit_ct_helpers:set_config(Config, {elixir_version, Version}).
+
+compute_code_path(Config) ->
+ EntireCodePath = code:get_path(),
+ CodePath = filter_out_erlang_code_path(EntireCodePath),
+ ct:pal(?LOW_IMPORTANCE, "Code path: ~p", [CodePath]),
+ rabbit_ct_helpers:set_config(Config, {erlang_code_path, CodePath}).
+
+filter_out_erlang_code_path(CodePath) ->
+ ErlangRoot = code:root_dir(),
+ ErlangRootLen = string:len(ErlangRoot),
+ lists:filter(
+ fun(Dir) ->
+ Dir =/= "." andalso
+ string:substr(Dir, 1, ErlangRootLen) =/= ErlangRoot
+ end, CodePath).
+
+find_terraform_ssh_key(Config) ->
+ Config1 =
+ case rabbit_ct_helpers:get_config(Config, terraform_ssh_key) of
+ undefined ->
+ case os:getenv("SSH_KEY") of
+ false ->
+ HomeDir = os:getenv("HOME"),
+ Glob = filename:join([HomeDir, ".ssh", "*terraform*"]),
+ Filenames = lists:sort(filelib:wildcard(Glob)),
+ PrivKeys = lists:filter(
+ fun(Filename) ->
+ filename:extension(Filename) =:= ""
+ andalso
+ test_ssh_key(Filename)
+ end, Filenames),
+ case PrivKeys of
+ [PrivKey | _] ->
+ rabbit_ct_helpers:set_config(
+ Config, {terraform_ssh_key, PrivKey});
+ _ ->
+ Config
+ end;
+ PrivKey ->
+ case test_ssh_key(PrivKey) of
+ true ->
+ rabbit_ct_helpers:set_config(
+ Config, {terraform_ssh_key, PrivKey});
+ false ->
+ Config
+ end
+ end;
+        PrivKey ->
+            case test_ssh_key(PrivKey) of
+                true ->
+                    Config;
+                false ->
+                    rabbit_ct_helpers:delete_config(
+                      Config, terraform_ssh_key)
+            end
+ end,
+ case rabbit_ct_helpers:get_config(Config1, terraform_ssh_key) of
+ undefined ->
+ {skip, "Private SSH key required, " ++
+ "please set SSH_KEY or terraform_ssh_key in ct config"};
+ _ ->
+ Config1
+ end.
+
+test_ssh_key(PrivKey) ->
+ filelib:is_regular(PrivKey)
+ andalso
+ filelib:is_regular(PrivKey ++ ".pub").
+
+set_terraform_files_suffix(Config) ->
+ case rabbit_ct_helpers:get_config(Config, terraform_files_suffix) of
+ undefined ->
+ Suffix = rabbit_ct_helpers:random_term_checksum(),
+ rabbit_ct_helpers:set_config(
+ Config, {terraform_files_suffix, Suffix});
+ _ ->
+ Config
+ end.
+
+aws_direct_vms_module(Config) ->
+ SrcDir = ?config(rabbitmq_ct_helpers_srcdir, Config),
+ filename:join([SrcDir, "tools", "terraform", "direct-vms"]).
+
+aws_autoscaling_group_module(Config) ->
+ SrcDir = ?config(rabbitmq_ct_helpers_srcdir, Config),
+ filename:join([SrcDir, "tools", "terraform", "autoscaling-group"]).
+
+vms_query_module(Config) ->
+ SrcDir = ?config(rabbitmq_ct_helpers_srcdir, Config),
+ filename:join([SrcDir, "tools", "terraform", "vms-query"]).
+
+set_terraform_config_dirs(Config) ->
+ SpawnTfConfigDir = aws_direct_vms_module(Config),
+ PollTfConfigDir = vms_query_module(Config),
+ Config1 = rabbit_ct_helpers:set_config(
+ Config, {terraform_poll_config_dir, PollTfConfigDir}),
+ SpawnTfConfigDir0 = rabbit_ct_helpers:get_config(
+ Config, terraform_config_dir),
+ case SpawnTfConfigDir0 of
+ undefined ->
+ rabbit_ct_helpers:set_config(
+ Config1, {terraform_config_dir, SpawnTfConfigDir});
+ _ ->
+ Config1
+ end.
+
+set_terraform_state(Config) ->
+ PrivDir = ?config(priv_dir, Config),
+ Suffix = ?config(terraform_files_suffix, Config),
+ SpawnDataDir = rabbit_misc:format(".terraform-~s", [Suffix]),
+ SpawnStateFilename = rabbit_misc:format("terraform-~s.tfstate",
+ [Suffix]),
+ PollDataDir = rabbit_misc:format(".terraform-query-~s", [Suffix]),
+ PollStateFilename = rabbit_misc:format("terraform-query-~s.tfstate",
+ [Suffix]),
+ SpawnTfState = filename:join(PrivDir, SpawnStateFilename),
+ PollTfState = filename:join(PrivDir, PollStateFilename),
+ ct:pal(?LOW_IMPORTANCE, "Terraform state: ~s", [SpawnTfState]),
+ rabbit_ct_helpers:set_config(
+ Config, [{terraform_state, SpawnTfState},
+ {terraform_data_dir, SpawnDataDir},
+ {terraform_poll_state, PollTfState},
+ {terraform_poll_data_dir, PollDataDir}]).
+
+set_terraform_aws_ec2_region(Config) ->
+ case rabbit_ct_helpers:get_config(Config, terraform_aws_ec2_region) of
+ undefined ->
+ EC2Region = "eu-west-1",
+ rabbit_ct_helpers:set_config(
+ Config, {terraform_aws_ec2_region, EC2Region});
+ _ ->
+ Config
+ end.
+
+init_terraform(Config) ->
+ SpawnDataDir = ?config(terraform_data_dir, Config),
+ SpawnTfConfigDir = ?config(terraform_config_dir, Config),
+ PollDataDir = ?config(terraform_poll_data_dir, Config),
+ PollTfConfigDir = ?config(terraform_poll_config_dir, Config),
+ init_terraform_dirs(Config, [{SpawnTfConfigDir, SpawnDataDir},
+ {PollTfConfigDir, PollDataDir}]).
+
+init_terraform_dirs(Config, [{ConfigDir, DataDir} | Rest]) ->
+ Terraform = ?config(terraform_cmd, Config),
+ Env = [
+ {"TF_DATA_DIR", DataDir}
+ ],
+ Cmd = [
+ Terraform,
+ "init",
+ ConfigDir
+ ],
+ case rabbit_ct_helpers:exec(Cmd, [{env, Env}]) of
+ {ok, _} -> init_terraform_dirs(Config, Rest);
+ _ -> {skip, "Failed to init Terraform"}
+ end;
+init_terraform_dirs(Config, []) ->
+ Config.
+
+compute_vpc_cidr_block(Config) ->
+ LockId = {compute_vpc_cidr_block, self()},
+ LockNodes = [node()],
+ global:set_lock(LockId, LockNodes),
+ Seq = case os:getenv("NEXT_VPC_CIDR_BLOCK_SEQ") of
+ false -> 1;
+ V -> erlang:list_to_integer(V)
+ end,
+ os:putenv("NEXT_VPC_CIDR_BLOCK_SEQ", integer_to_list(Seq + 1)),
+ global:del_lock(LockId, LockNodes),
+ CidrBlock = rabbit_misc:format("10.~b.0.0/16", [Seq]),
+ rabbit_ct_helpers:set_config(Config, {terraform_vpc_cidr_block, CidrBlock}).
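+
+%% For example, with NEXT_VPC_CIDR_BLOCK_SEQ unset, the first setup of a
+%% CT run gets "10.1.0.0/16", the second one "10.2.0.0/16", and so on.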
+
+find_in_srcdir_or_grandparent(Config, Name, ConfigKey) when is_atom(ConfigKey) ->
+ SrcDir = ?config(current_srcdir, Config),
+ SrcDirChild = filename:join([SrcDir, Name]),
+ SrcDirAncestor = filename:join([SrcDir, "..", "..", Name]),
+ case {filelib:is_regular(SrcDirChild), filelib:is_regular(SrcDirAncestor)} of
+ {true, _} -> rabbit_ct_helpers:set_config(Config, {ConfigKey, SrcDirChild});
+ {false, true} -> rabbit_ct_helpers:set_config(Config, {ConfigKey, SrcDirAncestor});
+ _ -> {skip, "Failed to find " ++ Name}
+ end.
+
+find_erlang_mk(Config) ->
+ find_in_srcdir_or_grandparent(Config, "erlang.mk", erlang_mk_path).
+
+find_rabbitmq_components(Config) ->
+ find_in_srcdir_or_grandparent(Config, "rabbitmq-components.mk", rabbitmq_components_path).
+
+list_dirs_to_upload(Config) ->
+ SrcDir = ?config(current_srcdir, Config),
+ LockId = {make_list_test_deps, self()},
+ LockNodes = [node()],
+    % `make list-test-deps` writes to a central file which we read
+    % later, so we protect that write+read sequence with a lock.
+ global:set_lock(LockId, LockNodes),
+ Ret = rabbit_ct_helpers:make(Config, SrcDir, ["list-test-deps"]),
+ case Ret of
+ {ok, _} ->
+ ListFile = filename:join([SrcDir,
+ ".erlang.mk",
+ "recursive-test-deps-list.log"]),
+ {ok, Content} = file:read_file(ListFile),
+ global:del_lock(LockId, LockNodes),
+ DepsDirs0 = string:tokens(binary_to_list(Content), "\n"),
+ DepsDirs = filter_out_subdirs(SrcDir, DepsDirs0),
+ ErlangMkPath = ?config(erlang_mk_path, Config),
+ RabbitmqComponentsPath = ?config(rabbitmq_components_path, Config),
+ AllDirs = lists:sort(
+ [SrcDir, ErlangMkPath, RabbitmqComponentsPath] ++ DepsDirs
+ ),
+ ct:pal(?LOW_IMPORTANCE, "Directories to upload: ~p", [AllDirs]),
+ rabbit_ct_helpers:set_config(Config, {dirs_to_upload, AllDirs});
+ _ ->
+ global:del_lock(LockId, LockNodes),
+ {skip, "Failed to get the list of test dependencies"}
+ end.
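+
+%% The recursive-test-deps-list.log file is assumed to contain one
+%% dependency directory per line (paths below are made up), e.g.:
+%%
+%%   /home/user/rabbitmq-server/deps/rabbit_common
+%%   /home/user/rabbitmq-server/deps/rabbitmq_ct_helpers
+%%
+%% Only the one-entry-per-line format matters to string:tokens/2 above.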
+
+list_dirs_to_download(Config) ->
+ PrivDir = ?config(priv_dir, Config),
+ PrivDirParent = filename:dirname(string:strip(PrivDir, right, $/)),
+ Dirs1 = case rabbit_ct_helpers:get_config(Config, dirs_to_download) of
+ undefined -> [PrivDirParent];
+ Dirs0 -> [PrivDirParent
+ | filter_out_subdirs(PrivDirParent, Dirs0)]
+ end,
+ Dirs = lists:sort(Dirs1),
+ ct:pal(?LOW_IMPORTANCE, "Directories to download: ~p", [Dirs]),
+ rabbit_ct_helpers:set_config(Config, {dirs_to_download, Dirs}).
+
+filter_out_subdirs(RootDir, Dirs) ->
+ RootDirLen = length(RootDir),
+ lists:filter(
+ fun(Dir) ->
+ Dir =/= RootDir andalso
+ string:sub_string(Dir, 1, RootDirLen + 1)
+ =/=
+ RootDir ++ "/"
+ end, Dirs).
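+
+%% For example:
+%%   filter_out_subdirs("/src/app",
+%%                      ["/src/app", "/src/app/deps/foo", "/elsewhere"])
+%% returns ["/elsewhere"]: the root directory itself and anything below
+%% it are dropped.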
+
+maybe_prepare_dirs_to_upload_archive(Config) ->
+ PrivDir = ?config(priv_dir, Config),
+ Dirs = lists:sort(?config(dirs_to_upload, Config)),
+ Checksum = rabbit_ct_helpers:term_checksum(Dirs),
+ Archive = filename:join(
+ PrivDir,
+ rabbit_misc:format(
+ ?UPLOAD_DIRS_ARCHIVE_PREFIX "~s.tar.xz", [Checksum])),
+ Config1 = rabbit_ct_helpers:set_config(
+ Config, {upload_dirs_archive, Archive}),
+ LockId = {{dirs_to_upload, Archive}, self()},
+ LockNodes = [node()],
+ % The upload dirs archive is unique per set of directories.
+ % Therefore it can be shared by multiple setups. We want to create
+ % the archive once and certainly don't want to create it multiple
+ % times in parallel. Thus the lock.
+ global:set_lock(LockId, LockNodes),
+ case filelib:is_regular(Archive) of
+ true ->
+ global:del_lock(LockId, LockNodes),
+ Config1;
+ false ->
+ Config2 = prepare_dirs_to_upload_archive(Config1, Archive, Dirs),
+ global:del_lock(LockId, LockNodes),
+ Config2
+ end.
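+
+%% The archive name is derived from a checksum of the sorted directory
+%% list, i.e. something like "dirs-archive-<checksum>.tar.xz"; the exact
+%% checksum format depends on rabbit_ct_helpers:term_checksum/1.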
+
+prepare_dirs_to_upload_archive(Config, Archive, Dirs) ->
+ DirsList = string:join(
+ [rabbit_misc:format("~p", [Dir])
+ || Dir <- Dirs,
+ filelib:is_dir(Dir) orelse filelib:is_regular(Dir)],
+ " "),
+ Cmd = rabbit_misc:format(
+ "tar cf - -P"
+ " --exclude '.terraform*'"
+ " --exclude '" ?UPLOAD_DIRS_ARCHIVE_PREFIX "*'"
+ " --exclude '" ?ERLANG_REMOTE_NODENAME "@*'"
+ " ~s"
+ " | xz --threads=0 > ~p",
+ [DirsList, Archive]),
+ ct:pal(
+ ?LOW_IMPORTANCE,
+ "Creating upload dirs archive `~s`:~n ~s",
+ [filename:basename(Archive), Cmd]),
+ case os:cmd(Cmd) of
+ "" ->
+ Config;
+ Output ->
+ ct:pal(
+ ?LOW_IMPORTANCE,
+ "Failed to create upload dirs archive:~n~s",
+ [Output]),
+ {skip, "Failed to create upload dirs archive"}
+ end.
+
+spawn_terraform_vms(Config) ->
+ TfConfigDir = ?config(terraform_config_dir, Config),
+ TfDataDir = ?config(terraform_data_dir, Config),
+ TfState = ?config(terraform_state, Config),
+ TfVarFlags = terraform_var_flags(Config),
+ Terraform = ?config(terraform_cmd, Config),
+ Env = [
+ {"TF_DATA_DIR", TfDataDir}
+ ],
+ Cmd = [
+ Terraform,
+ "apply",
+ "-auto-approve=true",
+ {"-state=~s", [TfState]}
+ ] ++ TfVarFlags ++ [
+ TfConfigDir
+ ],
+ case rabbit_ct_helpers:exec(Cmd, [{env, Env}]) of
+ {ok, _} ->
+ Config1 = rabbit_ct_helpers:set_config(
+ Config, {terraform_query_mode, direct}),
+ % FIXME: This `register_teardown_steps()` function is just
+ % wrong currently: when run_steps() is used at the end of
+ % e.g. a testcase to run testcase-specific teardown steps,
+ % the registered steps are not executed.
+ rabbit_ct_helpers:register_teardown_steps(
+ Config1, teardown_steps());
+ _ ->
+ destroy_terraform_vms(Config),
+ {skip, "Terraform failed to spawn VM"}
+ end.
+
+destroy_terraform_vms(Config) ->
+ TfConfigDir = ?config(terraform_config_dir, Config),
+ TfDataDir = ?config(terraform_data_dir, Config),
+ TfState = ?config(terraform_state, Config),
+ TfVarFlags = terraform_var_flags(Config),
+ Terraform = ?config(terraform_cmd, Config),
+ Env = [
+ {"TF_DATA_DIR", TfDataDir}
+ ],
+ Cmd = [
+ Terraform,
+ "destroy",
+ "-auto-approve=true",
+ {"-state=~s", [TfState]}
+ ] ++ TfVarFlags ++ [
+ TfConfigDir
+ ],
+ rabbit_ct_helpers:exec(Cmd, [{env, Env}]),
+ Config.
+
+terraform_var_flags(Config) ->
+ ErlangVersion = ?config(erlang_version, Config),
+ GitRef = ?config(erlang_git_ref, Config),
+ ElixirVersion = ?config(elixir_version, Config),
+ SshKey = ?config(terraform_ssh_key, Config),
+ Suffix = ?config(terraform_files_suffix, Config),
+ EC2Region = ?config(terraform_aws_ec2_region, Config),
+ InstanceCount = instance_count(Config),
+ InstanceName0 = rabbit_ct_helpers:get_config(Config, terraform_instance_name),
+ InstanceName = case InstanceName0 of
+ undefined -> Suffix;
+ _ -> InstanceName0
+ end,
+ CidrBlock = ?config(terraform_vpc_cidr_block, Config),
+ ErlangApp = rabbit_ct_helpers:get_config(Config, tested_erlang_app),
+ InstanceNamePrefix = case ErlangApp of
+ undefined ->
+ "RabbitMQ testing: ";
+ _ ->
+ rabbit_misc:format("~s: ", [ErlangApp])
+ end,
+ TestedApp = ?config(tested_erlang_app, Config),
+ _ = application:load(TestedApp),
+ InstanceNameSuffix = case application:get_key(TestedApp, vsn) of
+ {ok, AppVer} -> " - " ++ AppVer;
+ undefined -> ""
+ end,
+ ct:pal(?LOW_IMPORTANCE, "Number of VMs requested: ~b", [InstanceCount]),
+ Archive = ?config(upload_dirs_archive, Config),
+ [
+ {"-var=erlang_version=~s", [ErlangVersion]},
+ {"-var=erlang_git_ref=~s", [GitRef]},
+ {"-var=elixir_version=~s", [ElixirVersion]},
+ {"-var=erlang_cookie=~s", [erlang:get_cookie()]},
+ {"-var=erlang_nodename=~s", [?ERLANG_REMOTE_NODENAME]},
+ {"-var=ssh_key=~s", [SshKey]},
+ {"-var=instance_count=~b", [InstanceCount]},
+ {"-var=instance_name=~s", [InstanceName]},
+ {"-var=upload_dirs_archive=~s", [Archive]},
+ {"-var=vpc_cidr_block=~s", [CidrBlock]},
+ {"-var=files_suffix=~s", [Suffix]},
+ {"-var=aws_ec2_region=~s", [EC2Region]},
+ {"-var=instance_name_prefix=~s", [InstanceNamePrefix]},
+ {"-var=instance_name_suffix=~s", [InstanceNameSuffix]}
+ ].
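+
+%% Each {Format, Args} tuple above is presumably expanded by
+%% rabbit_ct_helpers:exec/2 into a single command-line argument, so a
+%% flag such as {"-var=instance_count=~b", [3]} would end up as
+%% "-var=instance_count=3" on the terraform command line.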
+
+instance_count(Config) ->
+ InstanceCount0 = rabbit_ct_helpers:get_config(
+ Config, terraform_instance_count),
+ case InstanceCount0 of
+ undefined -> 1;
+ N when is_integer(N) andalso N >= 1 -> N
+ end.
+
+query_terraform_uuid(Config) ->
+ Terraform = ?config(terraform_cmd, Config),
+ TfState = ?config(terraform_state, Config),
+ Cmd = [
+ Terraform,
+ "output",
+ "-no-color",
+ {"-state=~s", [TfState]},
+ "uuid"
+ ],
+ case rabbit_ct_helpers:exec(Cmd) of
+ {ok, Output} ->
+ Uuid = string:strip(string:strip(Output, right, $\n)),
+ rabbit_ct_helpers:set_config(Config, {terraform_uuid, Uuid});
+ _ ->
+ {skip, "Terraform failed to query unique ID"}
+ end.
+
+query_ct_peer_nodenames_and_ipaddrs(Config) ->
+ case query_terraform_map(Config, "ct_peer_nodenames") of
+ {ok, NodenamesMap} ->
+ case query_terraform_map(Config, "ct_peer_ipaddrs") of
+ {ok, IPAddrsMap} ->
+ initialize_ct_peers(Config, NodenamesMap, IPAddrsMap);
+ Error ->
+ Error
+ end;
+ Error ->
+ Error
+ end.
+
+query_terraform_map(Config, Var) ->
+ QueryMode = ?config(terraform_query_mode, Config),
+ case QueryMode of
+ direct ->
+ query_terraform_map_directly(Config, Var);
+ polling ->
+ poll_terraform_map(Config, Var)
+ end.
+
+query_terraform_map_directly(Config, Var) ->
+ TfState = ?config(terraform_state, Config),
+ case do_query_terraform_map(Config, TfState, Var) of
+ {skip, _} ->
+ Config1 = rabbit_ct_helpers:set_config(
+ Config, {terraform_query_mode, polling}),
+ query_terraform_map(Config1, Var);
+ Ret ->
+ Ret
+ end.
+
+poll_terraform_map(Config, Var) ->
+ case poll_vms(Config) of
+ {skip, _} = Error ->
+ Error;
+ Config1 ->
+ query_terraform_map_from_poll_state(Config1, Var)
+ end.
+
+poll_vms(Config) ->
+ case rabbit_ct_helpers:get_config(Config, terraform_poll_done) of
+ undefined ->
+ Timeout = 5 * 60 * 1000,
+ {ok, TRef} = timer:send_after(Timeout, terraform_poll_timeout),
+ do_poll_vms(Config, TRef);
+ true ->
+ Config
+ end.
+
+do_poll_vms(Config, TRef) ->
+ TfConfigDir = ?config(terraform_poll_config_dir, Config),
+ TfDataDir = ?config(terraform_poll_data_dir, Config),
+ TfState = ?config(terraform_poll_state, Config),
+ Uuid = ?config(terraform_uuid, Config),
+ Terraform = ?config(terraform_cmd, Config),
+ Env = [
+ {"TF_DATA_DIR", TfDataDir}
+ ],
+ Cmd = [
+ Terraform,
+ "apply",
+ "-auto-approve=true",
+ {"-state=~s", [TfState]},
+ {"-var=uuid=~s", [Uuid]},
+ {"-var=erlang_nodename=~s", [?ERLANG_REMOTE_NODENAME]},
+ TfConfigDir
+ ],
+ case rabbit_ct_helpers:exec(Cmd, [{env, Env}]) of
+ {ok, _} -> ensure_instance_count(Config, TRef);
+ _ -> {skip, "Terraform failed to query VMs"}
+ end.
+
+ensure_instance_count(Config, TRef) ->
+ Terraform = ?config(terraform_cmd, Config),
+ TfState = ?config(terraform_poll_state, Config),
+ Cmd = [
+ Terraform,
+ "output",
+ "-no-color",
+ {"-state=~s", [TfState]},
+ "instance_count"
+ ],
+ case rabbit_ct_helpers:exec(Cmd) of
+ {ok, Output} ->
+ CountStr = string:strip(string:strip(Output, right, $\n)),
+ Current = erlang:list_to_integer(CountStr),
+ Requested = instance_count(Config),
+ ct:pal(?LOW_IMPORTANCE,
+ "Number of VMs ready: ~b (at least ~b requested)",
+ [Current, Requested]),
+ if
+ Current < Requested ->
+ receive
+ terraform_poll_timeout ->
+ {skip, "Terraform failed to query VMs (timeout)"}
+ after 5000 ->
+ poll_vms(Config)
+ end;
+ true ->
+ timer:cancel(TRef),
+ rabbit_ct_helpers:set_config(Config,
+ {terraform_poll_done, true})
+ end;
+ _ ->
+ {skip, "Terraform failed to query VMs"}
+ end.
+
+query_terraform_map_from_poll_state(Config, Var) ->
+ TfState = ?config(terraform_poll_state, Config),
+ do_query_terraform_map(Config, TfState, Var).
+
+do_query_terraform_map(Config, TfState, Var) ->
+ Terraform = ?config(terraform_cmd, Config),
+ Cmd = [
+ Terraform,
+ "output",
+ "-no-color",
+ {"-state=~s", [TfState]},
+ Var
+ ],
+ case rabbit_ct_helpers:exec(Cmd) of
+ {ok, Output} ->
+ Map = parse_terraform_map(Output),
+ ct:pal(?LOW_IMPORTANCE, "Terraform map: ~p", [Map]),
+ {ok, Map};
+ _ ->
+ {skip, "Terraform failed to query VMs"}
+ end.
+
+parse_terraform_map(Output) ->
+ Lines = [string:strip(L, right, $,)
+ || L <- string:tokens(
+ string:strip(Output, right, $\n),
+ "\n"),
+ string:find(L, "=") =/= nomatch],
+ [begin
+ [K0, V0] = string:tokens(L, "="),
+ K = string:strip(string:strip(K0), both, $"),
+ V = string:strip(string:strip(V0), both, $"),
+ {K, V}
+ end || L <- Lines].
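+
+%% parse_terraform_map/1 assumes `terraform output <var>` renders a map
+%% roughly as (hostnames below are made up):
+%%
+%%   {
+%%     "ip-10-0-1-10" = "control@ip-10-0-1-10",
+%%     "ip-10-0-1-11" = "control@ip-10-0-1-11",
+%%   }
+%%
+%% Lines without an "=" are ignored and trailing commas and quotes are
+%% stripped, yielding e.g.
+%%   [{"ip-10-0-1-10", "control@ip-10-0-1-10"},
+%%    {"ip-10-0-1-11", "control@ip-10-0-1-11"}].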
+
+initialize_ct_peers(Config, NodenamesMap, IPAddrsMap) ->
+ CTPeers = lists:map(
+ fun({Hostname, NodenameStr}) ->
+ Nodename = list_to_atom(NodenameStr),
+ IPAddrStr = proplists:get_value(Hostname, IPAddrsMap),
+ {ok, IPAddr} = inet:parse_strict_address(IPAddrStr),
+ {Nodename,
+ [
+ {hostname, Hostname},
+ {ipaddr, IPAddr},
+ % FIXME: We assume some kind of Linux
+ % distribution here.
+ {make_cmd, "make"}
+ ]}
+ end, NodenamesMap),
+ ct:pal(?LOW_IMPORTANCE, "Remote Erlang nodes: ~p", [CTPeers]),
+ rabbit_ct_helpers:set_config(Config, {ct_peers, CTPeers}).
+
+set_inet_hosts(Config) ->
+ CTPeers = get_ct_peer_entries(Config),
+ inet_db:set_lookup([file, native]),
+ [begin
+ Hostname = ?config(hostname, CTPeerConfig),
+ IPAddr = ?config(ipaddr, CTPeerConfig),
+ inet_db:add_host(IPAddr, [Hostname]),
+ rabbit_misc:format("{host, ~p, [~p]}.~n",
+ [IPAddr, Hostname])
+ end || {_, CTPeerConfig} <- CTPeers],
+ Config.
+
+write_inetrc(Config) ->
+ PrivDir = ?config(priv_dir, Config),
+ Suffix = ?config(terraform_files_suffix, Config),
+ Filename = filename:join(
+ PrivDir,
+ rabbit_misc:format("inetrc-~s", [Suffix])),
+ LockId = {erlang_inetrc, self()},
+ LockNodes = [node()],
+ % We write an `inetrc` file per setup so there is no risk of
+ % conflict here. However, we want to set the `$ERL_INETRC`
+ % environment variable (i.e. something global) because it's exported
+ % later by rabbit_ct_helpers:exec() to sub-processes. The lock here
+ % ensures we query inetrc, write the file and set `$ERL_INETRC`
+ % atomically: we don't want to point the environment variable to an
+ % old copy of `inetrc`.
+ global:set_lock(LockId, LockNodes),
+ Inetrc = inet:get_rc(),
+ Lines0 = lists:filter(
+ fun
+ ({host, _, _}) -> true;
+ ({lookup, _}) -> true;
+ (_) -> false
+ end, Inetrc),
+ Lines = [io_lib:format("~p.~n", [Line]) || Line <- Lines0],
+ ct:pal(
+ ?LOW_IMPORTANCE,
+ "Erlang inetrc:~n~s",
+ [string:strip([" " ++ Line || Line <- Lines], right, $\n)]),
+ case file:write_file(Filename, Lines) of
+ ok ->
+ os:putenv("ERL_INETRC", Filename),
+ global:del_lock(LockId, LockNodes),
+ Config;
+ {error, Reason} ->
+ global:del_lock(LockId, LockNodes),
+ ct:pal(?LOW_IMPORTANCE, "Failed to write inetrc: ~p~n", [Reason]),
+ {skip, "Failed to write inetrc"}
+ end.
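+
+%% Together with set_inet_hosts/1 above, the generated inetrc file
+%% contains Erlang terms such as (example address):
+%%
+%%   {host,{10,0,1,10},["ip-10-0-1-10"]}.
+%%   {lookup,[file,native]}.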
+
+wait_for_ct_peers(Config) ->
+ CTPeers = get_ct_peers(Config),
+ Timeout = 40 * 60 * 1000,
+ {ok, TRef} = timer:send_after(Timeout, ct_peers_timeout),
+ wait_for_ct_peers(Config, CTPeers, TRef).
+
+wait_for_ct_peers(Config, [CTPeer | Rest] = CTPeers, TRef) ->
+ case net_adm:ping(CTPeer) of
+ pong ->
+ ct:pal(?LOW_IMPORTANCE, "Remote Erlang node ~p ready", [CTPeer]),
+ wait_for_ct_peers(Config, Rest, TRef);
+ pang ->
+ receive
+ ct_peers_timeout ->
+ ct:pal(?LOW_IMPORTANCE,
+ "Remote Erlang node ~p didn't respond to pings",
+ [CTPeer]),
+ {skip, "Failed to ping remote Erlang nodes (timeout)"}
+ after 5000 ->
+ wait_for_ct_peers(Config, CTPeers, TRef)
+ end
+ end;
+wait_for_ct_peers(Config, [], TRef) ->
+ timer:cancel(TRef),
+ Config.
+
+set_ct_peers_code_path(Config) ->
+ CodePath = ?config(erlang_code_path, Config),
+ rpc_all(Config, code, add_pathsa, [lists:reverse(CodePath)]),
+ Config.
+
+start_ct_logs_proxies(Config) ->
+ CTPeers = get_ct_peers(Config),
+ do_setup_ct_logs_proxies(CTPeers),
+ Config.
+
+configure_ct_peers_environment(Config) ->
+ Vars = ["DEPS_DIR"],
+ Values = [{Var, Value}
+ || Var <- Vars,
+ Value <- [os:getenv(Var)],
+ Value =/= false],
+ ct:pal(?LOW_IMPORTANCE,
+ "Env. variables to set on remote VMs: ~p~n", [Values]),
+ lists:foreach(
+ fun({Var, Value}) ->
+ rpc_all(Config, os, putenv, [Var, Value])
+ end, Values),
+ Config.
+
+download_dirs(Config) ->
+ ConfigsPerCTPeer = rpc_all(
+ Config,
+ ?MODULE,
+ prepare_dirs_to_download_archives,
+ [Config]),
+ inets:start(),
+ download_dirs(Config, ConfigsPerCTPeer).
+
+download_dirs(_, [{skip, _} = Error | _]) ->
+ Error;
+download_dirs(Config, [ConfigPerCTPeer | Rest]) ->
+ Urls = ?config(download_dirs_archive_urls, ConfigPerCTPeer),
+ case download_urls(Config, Urls) of
+ {skip, _} = Error -> Error;
+ Config1 -> download_dirs(Config1, Rest)
+ end;
+download_dirs(Config, []) ->
+ Config.
+
+download_urls(Config, [Url | Rest]) ->
+ PrivDir = ?config(priv_dir, Config),
+ Headers = [{"connection", "close"}],
+ Options = [{body_format, binary}],
+ ct:pal(?LOW_IMPORTANCE, "Fetching download dirs archive at `~s`", [Url]),
+ Ret = httpc:request(get, {Url, Headers}, [], Options),
+ case Ret of
+ {ok, {{_, 200, _}, _, Body}} ->
+ Archive = filename:join(PrivDir, filename:basename(Url)),
+ case file:write_file(Archive, Body) of
+ ok ->
+ download_urls(Config, Rest);
+ {error, Reason} ->
+ ct:pal(
+ ?LOW_IMPORTANCE,
+ "Failed to write download dirs archive `~s` "
+ "to `~s`: ~p",
+ [Url, Archive, Reason]),
+ {skip, "Failed to write download dirs archive"}
+ end;
+ _ ->
+ ct:pal(
+ ?LOW_IMPORTANCE,
+ "Failed to download dirs archive `~s`: ~p",
+ [Url, Ret]),
+ {skip, "Failed to download dirs archive"}
+ end;
+download_urls(Config, []) ->
+ Config.
+
+prepare_dirs_to_download_archives(Config) ->
+ CTPeer = node(),
+ Dirs = ?config(dirs_to_download, Config),
+ prepare_dirs_to_download_archives(Config, CTPeer, Dirs, 1).
+
+prepare_dirs_to_download_archives(Config, CTPeer, [Dir | Rest], I) ->
+ Config1 = case filelib:is_dir(Dir) of
+ true ->
+ prepare_dirs_to_download_archive(
+ Config, CTPeer, Dir, I);
+ false ->
+ Config
+ end,
+ case Config1 of
+ {skip, _} = Error ->
+ Error;
+ _ ->
+ prepare_dirs_to_download_archives(Config1, CTPeer, Rest, I + 1)
+ end;
+prepare_dirs_to_download_archives(Config, _, [], _) ->
+ start_http_server(Config).
+
+prepare_dirs_to_download_archive(Config, CTPeer, Dir, I) ->
+ PrivDir = ?config(priv_dir, Config),
+ Archive = rabbit_misc:format(
+ "~s-~b-~s.tar.gz", [CTPeer, I, filename:basename(Dir)]),
+ FilesList = [File || File <- filelib:wildcard("**", Dir),
+ not filelib:is_dir(filename:join(Dir, File))],
+ ct:pal(?LOW_IMPORTANCE, "Creating download dirs archive `~s`", [Archive]),
+ Ret = erl_tar:create(
+ filename:join(PrivDir, Archive),
+ [{File, filename:join(Dir, File)} || File <- FilesList],
+ [compressed]),
+ case Ret of
+ ok ->
+ add_archive_to_list(Config, Archive);
+ {error, Reason} ->
+ ct:pal(
+ ?LOW_IMPORTANCE,
+ "Failed to create download dirs archive `~s` for dir `~s`: ~p",
+ [Archive, Dir, Reason]),
+ {skip, "Failed to create download dirs archive"}
+ end.
+
+add_archive_to_list(Config, Archive) ->
+ AL = rabbit_ct_helpers:get_config(Config, download_dir_archives),
+ ArchivesList = case AL of
+ undefined -> [];
+ _ -> AL
+ end,
+ rabbit_ct_helpers:set_config(
+ Config, {download_dir_archives, [Archive | ArchivesList]}).
+
+start_http_server(Config) ->
+ PrivDir = ?config(priv_dir, Config),
+ {ok, Hostname} = inet:gethostname(),
+ inets:start(),
+ Options = [{port, 0},
+ {server_name, Hostname},
+ {server_root, PrivDir},
+ {document_root, PrivDir},
+ {keep_alive, false}],
+ case inets:start(httpd, Options) of
+ {ok, Pid} ->
+ HttpInfo = httpd:info(Pid),
+ ct:pal(
+ ?LOW_IMPORTANCE,
+ "Ready to serve download dirs archive at `~s`",
+ [archive_name_to_url(HttpInfo, "")]),
+ archive_names_to_urls(Config, HttpInfo);
+ {error, Reason} ->
+ ct:pal(
+ ?LOW_IMPORTANCE,
+ "Failed to start HTTP server to serve download dirs "
+ "archives: ~p",
+ [Reason]),
+            {skip, "Failed to start dirs archive HTTP server"}
+ end.
+
+archive_names_to_urls(Config, HttpInfo) ->
+ ArchivesList = ?config(download_dir_archives, Config),
+ UrlsList = [archive_name_to_url(HttpInfo, Archive)
+ || Archive <- ArchivesList],
+ rabbit_ct_helpers:set_config(
+ Config, {download_dirs_archive_urls, UrlsList}).
+
+archive_name_to_url(HttpInfo, Archive) ->
+ Hostname = proplists:get_value(server_name, HttpInfo),
+ Port = proplists:get_value(port, HttpInfo),
+ rabbit_misc:format("http://~s:~b/~s", [Hostname, Port, Archive]).
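+
+%% E.g. with a server_name of "myhost" and an ephemeral port of 45678
+%% (both made-up values), an archive named "control@host-1-priv.tar.gz"
+%% would be served at "http://myhost:45678/control@host-1-priv.tar.gz".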
+
+stop_ct_peers(Config) ->
+ CTPeers = get_ct_peers(Config),
+ stop_ct_peers(Config, CTPeers).
+
+stop_ct_peers(Config, [CTPeer | Rest]) ->
+ erlang:monitor_node(CTPeer, true),
+ rpc(Config, CTPeer, init, stop),
+ receive
+ {nodedown, CTPeer} -> ok
+ end,
+ stop_ct_peers(Config, Rest);
+stop_ct_peers(Config, []) ->
+ Config.
+
+%% -------------------------------------------------------------------
+%% CT logs + user I/O proxying.
+%% -------------------------------------------------------------------
+
+do_setup_ct_logs_proxies(Nodes) ->
+ [begin
+ user_io_proxy(Node),
+ ct_logs_proxy(Node)
+ end || Node <- Nodes].
+
+user_io_proxy(Node) ->
+ ok = setup_proxy(Node, user).
+
+ct_logs_proxy(Node) ->
+ ok = setup_proxy(Node, ct_logs).
+
+setup_proxy(Node, RegName) ->
+ case whereis(RegName) of
+ undefined ->
+ ok;
+ Pid ->
+ ok = rpc:call(Node, ?MODULE, do_setup_proxy, [RegName, Pid])
+ end.
+
+do_setup_proxy(RegName, Pid) ->
+ case whereis(RegName) of
+ undefined ->
+ ok;
+ OldProxy ->
+ erlang:unregister(RegName),
+            erlang:exit(OldProxy, kill)
+ end,
+ ProxyPid = erlang:spawn(?MODULE, proxy_loop, [Pid]),
+ true = erlang:register(RegName, ProxyPid),
+ ok.
+
+proxy_loop(UpstreamPid) ->
+ receive
+ Msg ->
+ UpstreamPid ! Msg,
+ proxy_loop(UpstreamPid)
+ end.
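+
+%% Net effect of the proxies: on a remote CT peer, anything sent to the
+%% locally registered `user' or `ct_logs' process is forwarded verbatim
+%% to the process registered under the same name on the CT master node,
+%% so I/O and CT log requests issued on the peer can show up in the
+%% local Common Test logs.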
+
+%% -------------------------------------------------------------------
+%% Other helpers.
+%% -------------------------------------------------------------------
+
+get_ct_peer_entries(Config) ->
+ case rabbit_ct_helpers:get_config(Config, ct_peers) of
+ undefined -> [];
+ CTPeers -> CTPeers
+ end.
+
+get_ct_peer_entry(Config, VM) when is_integer(VM) andalso VM >= 0 ->
+ CTPeers = get_ct_peer_entries(Config),
+ case VM < length(CTPeers) of
+ true -> lists:nth(VM + 1, CTPeers);
+ false -> throw({out_of_bound_ct_peer, VM, CTPeers})
+ end;
+get_ct_peer_entry(Config, VM) when is_atom(VM) ->
+ CTPeers = get_ct_peer_entries(Config),
+ case proplists:lookup(VM, CTPeers) of
+ none -> throw({unknown_ct_peer, VM, CTPeers});
+ CTPeer -> CTPeer
+ end.
+
+get_ct_peers(Config) ->
+ [CTPeer || {CTPeer, _} <- get_ct_peer_entries(Config)].
+
+get_ct_peer(Config, VM) ->
+ {CTPeer, _} = get_ct_peer_entry(Config, VM),
+ CTPeer.
+
+get_ct_peer_configs(Config, Key) ->
+ CTPeerEntries = get_ct_peer_entries(Config),
+ [?config(Key, CTPeerConfig) || {_, CTPeerConfig} <- CTPeerEntries].
+
+get_ct_peer_config(Config, VM) ->
+ {_, CTPeerConfig} = get_ct_peer_entry(Config, VM),
+ CTPeerConfig.
+
+get_ct_peer_config(Config, VM, Key) ->
+ CTPeerConfig = get_ct_peer_config(Config, VM),
+ ?config(Key, CTPeerConfig).
+
+get_current_vm_config(Config, Key) ->
+ try
+ CTPeerConfig = get_ct_peer_config(Config, node()),
+ case rabbit_ct_helpers:get_config(CTPeerConfig, Key) of
+ undefined ->
+ ?config(Key, Config);
+ Value ->
+ Value
+ end
+ catch
+ throw:{Reason, _, _} when
+ Reason =:= out_of_bound_ct_peer orelse
+ Reason =:= unknown_ct_peer ->
+ ?config(Key, Config)
+ end.
+
+rpc(Config, VM, Module, Function) ->
+ rpc(Config, VM, Module, Function, []).
+
+rpc(Config, VM, Module, Function, Args)
+ when is_integer(VM) orelse is_atom(VM) ->
+ CTPeer = get_ct_peer(Config, VM),
+ %% We add some directories to the remote node search path.
+ rabbit_ct_broker_helpers:add_code_path_to_node(CTPeer, Module),
+ Ret = rpc:call(CTPeer, Module, Function, Args),
+ case Ret of
+ {badrpc, {'EXIT', Reason}} -> exit(Reason);
+ {badrpc, Reason} -> exit(Reason);
+ Ret -> Ret
+ end;
+rpc(Config, VMs, Module, Function, Args)
+ when is_list(VMs) ->
+ [rpc(Config, VM, Module, Function, Args) || VM <- VMs].
+
+rpc_all(Config, Module, Function) ->
+ rpc_all(Config, Module, Function, []).
+
+rpc_all(Config, Module, Function, Args) ->
+ CTPeers = get_ct_peers(Config),
+ rpc(Config, CTPeers, Module, Function, Args).
diff --git a/deps/rabbitmq_ct_helpers/src/rabbit_mgmt_test_util.erl b/deps/rabbitmq_ct_helpers/src/rabbit_mgmt_test_util.erl
new file mode 100644
index 0000000000..b82bfe4651
--- /dev/null
+++ b/deps/rabbitmq_ct_helpers/src/rabbit_mgmt_test_util.erl
@@ -0,0 +1,323 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2010-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(rabbit_mgmt_test_util).
+
+-include("rabbit_mgmt_test.hrl").
+-include_lib("eunit/include/eunit.hrl").
+
+-compile([nowarn_export_all, export_all]).
+
+reset_management_settings(Config) ->
+ rabbit_ct_broker_helpers:rpc(Config, 0, application, set_env,
+ [rabbit, collect_statistics_interval, 5000]),
+ Config.
+
+merge_stats_app_env(Config, Interval, SampleInterval) ->
+ Config1 = rabbit_ct_helpers:merge_app_env(
+ Config, {rabbit, [{collect_statistics_interval, Interval}]}),
+ rabbit_ct_helpers:merge_app_env(
+ Config1, {rabbitmq_management_agent, [{sample_retention_policies,
+ [{global, [{605, SampleInterval}]},
+ {basic, [{605, SampleInterval}]},
+ {detailed, [{10, SampleInterval}]}] }]}).
+
+http_get_from_node(Config, Node, Path) ->
+ {ok, {{_HTTP, CodeAct, _}, Headers, ResBody}} =
+ req(Config, Node, get, Path, [auth_header("guest", "guest")]),
+ assert_code(?OK, CodeAct, "GET", Path, ResBody),
+ decode(?OK, Headers, ResBody).
+
+http_get(Config, Path) ->
+ http_get(Config, Path, ?OK).
+
+http_get(Config, Path, CodeExp) ->
+ http_get(Config, Path, "guest", "guest", CodeExp).
+
+http_get(Config, Path, User, Pass, CodeExp) ->
+ {ok, {{_HTTP, CodeAct, _}, Headers, ResBody}} =
+ req(Config, 0, get, Path, [auth_header(User, Pass)]),
+ assert_code(CodeExp, CodeAct, "GET", Path, ResBody),
+ decode(CodeExp, Headers, ResBody).
+
+http_get_as_proplist(Config, Path) ->
+ {ok, {{_HTTP, CodeAct, _}, _Headers, ResBody}} =
+ req(Config, get, Path, [auth_header("guest", "guest")]),
+ assert_code(?OK, CodeAct, "GET", Path, ResBody),
+ JSON = rabbit_data_coercion:to_binary(ResBody),
+ cleanup(rabbit_json:decode(JSON, [{return_maps, false}])).
+
+http_get_no_map(Config, Path) ->
+ http_get_as_proplist(Config, Path).
+
+http_get_no_auth(Config, Path, CodeExp) ->
+ {ok, {{_HTTP, CodeAct, _}, Headers, ResBody}} =
+ req(Config, 0, get, Path, []),
+ assert_code(CodeExp, CodeAct, "GET", Path, ResBody),
+ decode(CodeExp, Headers, ResBody).
+
+http_put(Config, Path, List, CodeExp) ->
+ http_put_raw(Config, Path, format_for_upload(List), CodeExp).
+
+http_put(Config, Path, List, User, Pass, CodeExp) ->
+ http_put_raw(Config, Path, format_for_upload(List), User, Pass, CodeExp).
+
+http_post(Config, Path, List, CodeExp) ->
+ http_post_raw(Config, Path, format_for_upload(List), CodeExp).
+
+http_post(Config, Path, List, User, Pass, CodeExp) ->
+ http_post_raw(Config, Path, format_for_upload(List), User, Pass, CodeExp).
+
+http_post_accept_json(Config, Path, List, CodeExp) ->
+ http_post_accept_json(Config, Path, List, "guest", "guest", CodeExp).
+
+http_post_accept_json(Config, Path, List, User, Pass, CodeExp) ->
+ http_post_raw(Config, Path, format_for_upload(List), User, Pass, CodeExp,
+ [{"Accept", "application/json"}]).
+
+assert_permanent_redirect(Config, Path, ExpectedLocation) ->
+ Node = 0,
+ Uri = uri_base_from(Config, Node, Path),
+ ExpectedResponseCode = 301,
+ {ok, {{_, ExpectedResponseCode, _}, Headers, _}} =
+ httpc:request(get, {Uri, []}, ?HTTPC_OPTS, []),
+ Prefix = get_uri_prefix(Config),
+ ?assertEqual(Prefix ++ ExpectedLocation,
+ proplists:get_value("location", Headers)).
+
+req(Config, Type, Path, Headers) ->
+ req(Config, 0, Type, Path, Headers).
+
+req(Config, Node, get_static, Path, Headers) ->
+ httpc:request(get, {uri_base_from(Config, Node, "") ++ Path, Headers}, ?HTTPC_OPTS, []);
+req(Config, Node, Type, Path, Headers) ->
+ httpc:request(Type, {uri_base_from(Config, Node) ++ Path, Headers}, ?HTTPC_OPTS, []).
+
+req(Config, Node, Type, Path, Headers, Body) ->
+ ContentType = case proplists:get_value("content-type", Headers) of
+ undefined ->
+ "application/json";
+ CT ->
+ CT
+ end,
+ httpc:request(Type, {uri_base_from(Config, Node) ++ Path, Headers, ContentType, Body}, ?HTTPC_OPTS, []).
+
+uri_base_from(Config, Node) ->
+ uri_base_from(Config, Node, "api").
+uri_base_from(Config, Node, Base) ->
+ Port = mgmt_port(Config, Node),
+ Prefix = get_uri_prefix(Config),
+ Uri = rabbit_mgmt_format:print("http://localhost:~w~s/~s", [Port, Prefix, Base]),
+ binary_to_list(Uri).
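+
+%% E.g. with a management listener on port 15672 and no path prefix
+%% configured, uri_base_from/2 yields "http://localhost:15672/api".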
+
+get_uri_prefix(Config) ->
+ ErlNodeCnf = proplists:get_value(erlang_node_config, Config, []),
+ MgmtCnf = proplists:get_value(rabbitmq_management, ErlNodeCnf, []),
+ proplists:get_value(path_prefix, MgmtCnf, "").
+
+auth_header(Username, Password) when is_binary(Username) ->
+ auth_header(binary_to_list(Username), Password);
+auth_header(Username, Password) when is_binary(Password) ->
+ auth_header(Username, binary_to_list(Password));
+auth_header(Username, Password) ->
+ {"Authorization",
+ "Basic " ++ binary_to_list(base64:encode(Username ++ ":" ++ Password))}.
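+
+%% For example, auth_header("guest", "guest") returns
+%% {"Authorization", "Basic Z3Vlc3Q6Z3Vlc3Q="}.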
+
+amqp_port(Config) ->
+ config_port(Config, tcp_port_amqp).
+
+mgmt_port(Config, Node) ->
+ config_port(Config, Node, tcp_port_mgmt).
+
+config_port(Config, PortKey) ->
+ config_port(Config, 0, PortKey).
+
+config_port(Config, Node, PortKey) ->
+ rabbit_ct_broker_helpers:get_node_config(Config, Node, PortKey).
+
+http_put_raw(Config, Path, Body, CodeExp) ->
+ http_upload_raw(Config, put, Path, Body, "guest", "guest", CodeExp, []).
+
+http_put_raw(Config, Path, Body, User, Pass, CodeExp) ->
+ http_upload_raw(Config, put, Path, Body, User, Pass, CodeExp, []).
+
+
+http_post_raw(Config, Path, Body, CodeExp) ->
+ http_upload_raw(Config, post, Path, Body, "guest", "guest", CodeExp, []).
+
+http_post_raw(Config, Path, Body, User, Pass, CodeExp) ->
+ http_upload_raw(Config, post, Path, Body, User, Pass, CodeExp, []).
+
+http_post_raw(Config, Path, Body, User, Pass, CodeExp, MoreHeaders) ->
+ http_upload_raw(Config, post, Path, Body, User, Pass, CodeExp, MoreHeaders).
+
+
+http_upload_raw(Config, Type, Path, Body, User, Pass, CodeExp, MoreHeaders) ->
+ {ok, {{_HTTP, CodeAct, _}, Headers, ResBody}} =
+ req(Config, 0, Type, Path, [auth_header(User, Pass)] ++ MoreHeaders, Body),
+ assert_code(CodeExp, CodeAct, Type, Path, ResBody),
+ decode(CodeExp, Headers, ResBody).
+
+http_delete(Config, Path, CodeExp) ->
+ http_delete(Config, Path, "guest", "guest", CodeExp).
+
+http_delete(Config, Path, CodeExp, Body) ->
+ http_delete(Config, Path, "guest", "guest", CodeExp, Body).
+
+http_delete(Config, Path, User, Pass, CodeExp, Body) ->
+ {ok, {{_HTTP, CodeAct, _}, Headers, ResBody}} =
+ req(Config, 0, delete, Path, [auth_header(User, Pass)], Body),
+ assert_code(CodeExp, CodeAct, "DELETE", Path, ResBody),
+ decode(CodeExp, Headers, ResBody).
+
+http_delete(Config, Path, User, Pass, CodeExp) ->
+ {ok, {{_HTTP, CodeAct, _}, Headers, ResBody}} =
+ req(Config, 0, delete, Path, [auth_header(User, Pass)]),
+ assert_code(CodeExp, CodeAct, "DELETE", Path, ResBody),
+ decode(CodeExp, Headers, ResBody).
+
+format_for_upload(none) ->
+ <<"">>;
+format_for_upload(List) ->
+ iolist_to_binary(rabbit_json:encode(List)).
+
+assert_code({one_of, CodesExpected}, CodeAct, Type, Path, Body) when is_list(CodesExpected) ->
+ case lists:member(CodeAct, CodesExpected) of
+ true ->
+ ok;
+ false ->
+ error({expected, CodesExpected, got, CodeAct, type, Type,
+ path, Path, body, Body})
+ end;
+assert_code({group, '2xx'} = CodeExp, CodeAct, Type, Path, Body) ->
+ case CodeAct of
+ 200 -> ok;
+ 201 -> ok;
+ 202 -> ok;
+ 203 -> ok;
+ 204 -> ok;
+ 205 -> ok;
+ 206 -> ok;
+ _ -> error({expected, CodeExp, got, CodeAct, type, Type,
+ path, Path, body, Body})
+ end;
+assert_code({group, '3xx'} = CodeExp, CodeAct, Type, Path, Body) ->
+ case CodeAct of
+ 300 -> ok;
+ 301 -> ok;
+ 302 -> ok;
+ 303 -> ok;
+ 304 -> ok;
+ 305 -> ok;
+ 306 -> ok;
+ 307 -> ok;
+ _ -> error({expected, CodeExp, got, CodeAct, type, Type,
+ path, Path, body, Body})
+ end;
+assert_code({group, '4xx'} = CodeExp, CodeAct, Type, Path, Body) ->
+ case CodeAct of
+ 400 -> ok;
+ 401 -> ok;
+ 402 -> ok;
+ 403 -> ok;
+ 404 -> ok;
+ 405 -> ok;
+ 406 -> ok;
+ 407 -> ok;
+ 408 -> ok;
+ 409 -> ok;
+ 410 -> ok;
+ 411 -> ok;
+ 412 -> ok;
+ 413 -> ok;
+ 414 -> ok;
+ 415 -> ok;
+ 416 -> ok;
+ 417 -> ok;
+ _ -> error({expected, CodeExp, got, CodeAct, type, Type,
+ path, Path, body, Body})
+ end;
+assert_code(CodeExp, CodeAct, Type, Path, Body) when is_list(CodeExp) ->
+ assert_code({one_of, CodeExp}, CodeAct, Type, Path, Body);
+assert_code(CodeExp, CodeAct, Type, Path, Body) ->
+ case CodeExp of
+ CodeAct -> ok;
+ _ -> error({expected, CodeExp, got, CodeAct, type, Type,
+ path, Path, body, Body})
+ end.
+
+decode(?OK, _Headers, ResBody) ->
+ JSON = rabbit_data_coercion:to_binary(ResBody),
+ cleanup(rabbit_json:decode(JSON));
+decode(_, Headers, _ResBody) -> Headers.
+
+cleanup(L) when is_list(L) ->
+ [cleanup(I) || I <- L];
+cleanup(M) when is_map(M) ->
+ maps:fold(fun(K, V, Acc) ->
+ Acc#{binary_to_atom(K, latin1) => cleanup(V)}
+ end, #{}, M);
+cleanup(I) ->
+ I.
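+
+%% cleanup/1 recursively turns the binary keys of decoded JSON maps into
+%% atoms, e.g.:
+%%   cleanup(#{<<"name">> => <<"q1">>, <<"durable">> => true})
+%%   =:= #{name => <<"q1">>, durable => true}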
+
+%% @todo There wasn't a specific order before; now there is; maybe we shouldn't have one?
+assert_list(Exp, Act) ->
+ case length(Exp) == length(Act) of
+ true -> ok;
+ false -> error({expected, Exp, actual, Act})
+ end,
+ [case length(lists:filter(fun(ActI) -> test_item(ExpI, ActI) end, Act)) of
+ 1 -> ok;
+ N -> error({found, N, ExpI, in, Act})
+ end || ExpI <- Exp].
+ %_ = [assert_item(ExpI, ActI) || {ExpI, ActI} <- lists:zip(Exp, Act)],
+
+assert_item(ExpI, [H | _] = ActI) when is_list(ActI) ->
+ %% just check first item of the list
+ assert_item(ExpI, H),
+ ok;
+assert_item(ExpI, ActI) ->
+ ?assertEqual(ExpI, maps:with(maps:keys(ExpI), ActI)),
+ ok.
+
+assert_item_kv(Exp, Act) when is_list(Exp) ->
+ case test_item0_kv(Exp, Act) of
+ [] -> ok;
+ Or -> error(Or)
+ end.
+
+test_item(Exp, Act) ->
+ case test_item0(Exp, Act) of
+ [] -> true;
+ _ -> false
+ end.
+
+test_item0(Exp, Act) ->
+ [{did_not_find, KeyExpI, in, Act} || KeyExpI <- maps:keys(Exp),
+ maps:get(KeyExpI, Exp) =/= maps:get(KeyExpI, Act, null)].
+
+test_item0_kv(Exp, Act) ->
+ [{did_not_find, ExpI, in, Act} || ExpI <- Exp,
+ not lists:member(ExpI, Act)].
+
+assert_keys(Exp, Act) ->
+ case test_key0(Exp, Act) of
+ [] -> ok;
+ Or -> error(Or)
+ end.
+
+test_key0(Exp, Act) ->
+ [{did_not_find, ExpI, in, Act} || ExpI <- Exp,
+ not maps:is_key(ExpI, Act)].
+
+assert_no_keys(NotExp, Act) ->
+ case test_no_key0(NotExp, Act) of
+ [] -> ok;
+ Or -> error(Or)
+ end.
+
+test_no_key0(Exp, Act) ->
+ [{invalid_key, ExpI, in, Act} || ExpI <- Exp,
+ maps:is_key(ExpI, Act)].
diff --git a/deps/rabbitmq_ct_helpers/test/terraform_SUITE.erl b/deps/rabbitmq_ct_helpers/test/terraform_SUITE.erl
new file mode 100644
index 0000000000..f2b7c7b93e
--- /dev/null
+++ b/deps/rabbitmq_ct_helpers/test/terraform_SUITE.erl
@@ -0,0 +1,166 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2018-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(terraform_SUITE).
+
+-include_lib("common_test/include/ct.hrl").
+-include_lib("eunit/include/eunit.hrl").
+
+-export([all/0,
+ groups/0,
+ init_per_suite/1, end_per_suite/1,
+ init_per_group/2, end_per_group/2,
+ init_per_testcase/2, end_per_testcase/2,
+
+ run_code_on_one_vm/1, do_run_code_on_one_vm/1,
+ run_code_on_three_vms/1, do_run_code_on_three_vms/1,
+ run_one_rabbitmq_node/1,
+ run_four_rabbitmq_nodes/1
+ ]).
+
+all() ->
+ [
+ {group, direct_vms},
+ {group, autoscaling_group}
+ ].
+
+groups() ->
+ [
+ {direct_vms, [parallel], [{group, run_code},
+ {group, run_rabbitmq}]},
+ {autoscaling_group, [parallel], [{group, run_code},
+ {group, run_rabbitmq}]},
+
+ {run_code, [parallel], [run_code_on_one_vm,
+ run_code_on_three_vms]},
+ {run_rabbitmq, [parallel], [run_one_rabbitmq_node,
+ run_four_rabbitmq_nodes]}
+ ].
+
+init_per_suite(Config) ->
+ rabbit_ct_helpers:log_environment(),
+ rabbit_ct_helpers:run_setup_steps(Config).
+
+end_per_suite(Config) ->
+ rabbit_ct_helpers:run_teardown_steps(Config).
+
+init_per_group(autoscaling_group, Config) ->
+ TfConfigDir = rabbit_ct_vm_helpers:aws_autoscaling_group_module(Config),
+ rabbit_ct_helpers:set_config(
+ Config, {terraform_config_dir, TfConfigDir});
+init_per_group(Group, Config) ->
+ rabbit_ct_helpers:set_config(
+ Config, {run_rabbitmq, Group =:= run_rabbitmq}).
+
+end_per_group(_Group, Config) ->
+ Config.
+
+init_per_testcase(Testcase, Config) ->
+ rabbit_ct_helpers:testcase_started(Config, Testcase),
+ RunRabbitMQ = ?config(run_rabbitmq, Config),
+ InstanceCount = case Testcase of
+ run_code_on_three_vms -> 3;
+ run_three_rabbitmq_nodes -> 3;
+ % We want more RabbitMQs than VMs.
+ run_four_rabbitmq_nodes -> 3;
+ _ -> 1
+ end,
+ InstanceName = rabbit_ct_helpers:testcase_absname(Config, Testcase),
+ ClusterSize = case Testcase of
+ run_one_rabbitmq_node -> 1;
+ run_three_rabbitmq_nodes -> 3;
+ % We want more RabbitMQs than VMs.
+ run_four_rabbitmq_nodes -> 4;
+ _ -> 0
+ end,
+ Config1 = rabbit_ct_helpers:set_config(
+ Config,
+ [{terraform_instance_count, InstanceCount},
+ {terraform_instance_name, InstanceName},
+ {rmq_nodename_suffix, Testcase},
+ {rmq_nodes_count, ClusterSize}]),
+ case RunRabbitMQ of
+ false ->
+ rabbit_ct_helpers:run_steps(
+ Config1,
+ rabbit_ct_vm_helpers:setup_steps());
+ true ->
+ rabbit_ct_helpers:run_steps(
+ Config1,
+ [fun rabbit_ct_broker_helpers:run_make_dist/1] ++
+ rabbit_ct_vm_helpers:setup_steps() ++
+ rabbit_ct_broker_helpers:setup_steps_for_vms())
+ end.
+
+end_per_testcase(Testcase, Config) ->
+ RunRabbitMQ = ?config(run_rabbitmq, Config),
+ Config1 = case RunRabbitMQ of
+ false ->
+ rabbit_ct_helpers:run_steps(
+ Config,
+ rabbit_ct_vm_helpers:teardown_steps());
+ true ->
+ rabbit_ct_helpers:run_steps(
+ Config,
+ rabbit_ct_broker_helpers:teardown_steps_for_vms() ++
+ rabbit_ct_vm_helpers:teardown_steps())
+ end,
+ rabbit_ct_helpers:testcase_finished(Config1, Testcase).
+
+%% -------------------------------------------------------------------
+%% Run arbitrary code.
+%% -------------------------------------------------------------------
+
+run_code_on_one_vm(Config) ->
+ rabbit_ct_vm_helpers:rpc_all(Config,
+ ?MODULE, do_run_code_on_one_vm, [node()]).
+
+do_run_code_on_one_vm(CTMaster) ->
+ CTPeer = node(),
+ ct:pal("Testcase running on ~s", [CTPeer]),
+ ?assertNotEqual(CTMaster, CTPeer),
+ ?assertEqual(pong, net_adm:ping(CTMaster)).
+
+run_code_on_three_vms(Config) ->
+ rabbit_ct_vm_helpers:rpc_all(Config,
+ ?MODULE, do_run_code_on_three_vms, [node()]).
+
+do_run_code_on_three_vms(CTMaster) ->
+ CTPeer = node(),
+ ct:pal("Testcase running on ~s", [CTPeer]),
+ ?assertNotEqual(CTMaster, CTPeer),
+ ?assertEqual(pong, net_adm:ping(CTMaster)).
+
+%% -------------------------------------------------------------------
+%% Run RabbitMQ node.
+%% -------------------------------------------------------------------
+
+run_one_rabbitmq_node(Config) ->
+ CTPeers = rabbit_ct_vm_helpers:get_ct_peers(Config),
+ ?assertEqual([false],
+ [rabbit:is_running(CTPeer) || CTPeer <- CTPeers]),
+ RabbitMQNodes = rabbit_ct_broker_helpers:get_node_configs(Config, nodename),
+ ?assertEqual([true],
+ [rabbit:is_running(RabbitMQNode) || RabbitMQNode <- RabbitMQNodes]).
+
+run_four_rabbitmq_nodes(Config) ->
+ CTPeers = rabbit_ct_vm_helpers:get_ct_peers(Config),
+ ?assertEqual([false, false, false],
+ [rabbit:is_running(CTPeer) || CTPeer <- CTPeers]),
+ RabbitMQNodes = lists:sort(
+ rabbit_ct_broker_helpers:get_node_configs(
+ Config, nodename)),
+ ?assertEqual([true, true, true, true],
+ [rabbit:is_running(Node) || Node <- RabbitMQNodes]),
+
+ ?assertEqual([true, true, true, true],
+ rabbit_ct_broker_helpers:rpc_all(
+ Config, rabbit_mnesia, is_clustered, [])),
+ ClusteredNodes = lists:sort(
+ rabbit_ct_broker_helpers:rpc(
+ Config, 0, rabbit_mnesia, cluster_nodes, [running])),
+ ?assertEqual(ClusteredNodes, RabbitMQNodes).
diff --git a/deps/rabbitmq_ct_helpers/tools/terraform/autoscaling-group/main.tf b/deps/rabbitmq_ct_helpers/tools/terraform/autoscaling-group/main.tf
new file mode 100644
index 0000000000..d9aad6bf6b
--- /dev/null
+++ b/deps/rabbitmq_ct_helpers/tools/terraform/autoscaling-group/main.tf
@@ -0,0 +1,78 @@
+# vim:sw=2:et:
+
+provider "aws" {
+ region = "eu-west-1"
+}
+
+module "direct_vms" {
+ source = "../direct-vms"
+
+ instance_count = 0
+
+ erlang_version = var.erlang_version
+ erlang_git_ref = var.erlang_git_ref
+ elixir_version = var.elixir_version
+ erlang_cookie = var.erlang_cookie
+ erlang_nodename = var.erlang_nodename
+ ssh_key = var.ssh_key
+ upload_dirs_archive = var.upload_dirs_archive
+ instance_name_prefix = var.instance_name_prefix
+ instance_name = var.instance_name
+ instance_name_suffix = var.instance_name_suffix
+ vpc_cidr_block = var.vpc_cidr_block
+ files_suffix = var.files_suffix
+ aws_ec2_region = var.aws_ec2_region
+}
+
+resource "aws_launch_configuration" "lc" {
+ name_prefix = module.direct_vms.resource_prefix
+
+ image_id = module.direct_vms.instance_ami
+ instance_type = module.direct_vms.instance_type
+ key_name = module.direct_vms.ssh_key_name
+
+ security_groups = module.direct_vms.security_groups
+
+ user_data = module.direct_vms.instance_user_data
+
+ lifecycle {
+ create_before_destroy = true
+ }
+}
+
+resource "aws_autoscaling_group" "asg" {
+ name_prefix = module.direct_vms.resource_prefix
+ launch_configuration = aws_launch_configuration.lc.name
+ min_size = var.instance_count
+ max_size = var.instance_count
+ desired_capacity = var.instance_count
+
+ vpc_zone_identifier = [module.direct_vms.subnet_id]
+
+ tags = [
+ {
+ key = "Name"
+ value = "${module.direct_vms.instance_name} (ASG)"
+ propagate_at_launch = true
+ },
+ {
+ key = "rabbitmq-testing"
+ value = true
+ propagate_at_launch = true
+ },
+ {
+ key = "rabbitmq-testing-id"
+ value = module.direct_vms.uuid
+ propagate_at_launch = true
+ },
+ {
+ key = "rabbitmq-testing-suffix"
+ value = var.files_suffix
+ propagate_at_launch = true
+ }
+ ]
+
+ lifecycle {
+ create_before_destroy = true
+ }
+}
diff --git a/deps/rabbitmq_ct_helpers/tools/terraform/autoscaling-group/outputs.tf b/deps/rabbitmq_ct_helpers/tools/terraform/autoscaling-group/outputs.tf
new file mode 100644
index 0000000000..375f5974f3
--- /dev/null
+++ b/deps/rabbitmq_ct_helpers/tools/terraform/autoscaling-group/outputs.tf
@@ -0,0 +1,5 @@
+# vim:sw=2:et:
+
+output "uuid" {
+ value = module.direct_vms.uuid
+}
diff --git a/deps/rabbitmq_ct_helpers/tools/terraform/autoscaling-group/setup-vms.sh b/deps/rabbitmq_ct_helpers/tools/terraform/autoscaling-group/setup-vms.sh
new file mode 100755
index 0000000000..6147c3f0f4
--- /dev/null
+++ b/deps/rabbitmq_ct_helpers/tools/terraform/autoscaling-group/setup-vms.sh
@@ -0,0 +1,178 @@
+#!/bin/sh
+# vim:sw=2:et:
+
+set -e
+
+usage() {
+ echo "Syntax: $(basename "$0") [-Dh] [-c <instance_count>] [-e <elixir_version>] [-s <ssh_key>] <erlang_version> [<erlang_app_dir> ...]"
+}
+
+instance_count=1
+
+while getopts "c:e:Dhs:" opt; do
+ case $opt in
+ h)
+ usage
+ exit
+ ;;
+ c)
+ instance_count=$OPTARG
+ ;;
+ e)
+ elixir_version=$OPTARG
+ ;;
+ D)
+ destroy=yes
+ ;;
+ s)
+ ssh_key=$OPTARG
+ ;;
+ \?)
+ echo "Invalid option: -$OPTARG" >&2
+ usage 1>&2
+ exit 64
+ ;;
+ :)
+ echo "Option -$OPTARG requires an argument." >&2
+ usage 1>&2
+ exit 64
+ ;;
+ esac
+done
+shift $((OPTIND - 1))
+
+erlang_version=$1
+if test -z "$erlang_version"; then
+ echo "Erlang version is required" 1>&2
+ echo 1>&2
+ usage
+ exit 64
+fi
+shift
+
+terraform_dir=$(cd "$(dirname "$0")" && pwd)
+
+erlang_nodename=control
+dirs_archive=dirs-archive.tar.xz
+instance_name_prefix="[$(basename "$0")/$USER] "
+
+canonicalize_erlang_version() {
+ version=$1
+
+ case "$version" in
+ R[0-9]*)
+      echo "$version" | sed -E 's/(R[0-9]+[AB][0-9]+).*/\1/'
+ ;;
+ [0-9]*)
+ echo "$version" | sed -E 's/([0-9]+\.[0-9]+).*/\1/'
+ ;;
+ esac
+}
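+
+# For example (made-up inputs): "24.2.1" becomes "24.2" and "R16B03-1"
+# becomes "R16B03".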
+
+find_ssh_key() {
+ for file in ~/.ssh/*terraform* ~/.ssh/id_rsa ~/.ssh/id_ed25519; do
+ if test -f "$file" && test -f "$file.pub"; then
+ echo "$file"
+ return
+ fi
+ done
+}
+
+list_dirs_to_upload() {
+ if test -z "$MAKE"; then
+ if gmake --version 2>&1 | grep -q "GNU Make"; then
+ MAKE='gmake'
+ elif make --version 2>&1 | grep -q "GNU Make"; then
+ MAKE='make'
+ fi
+ fi
+
+ template='dirs-to-upload.XXXX'
+ manifest=$(mktemp -t "$template")
+ for dir in "$@"; do
+ (cd "$dir" && pwd) >> "$manifest"
+ "$MAKE" --no-print-directory -C "$dir" fetch-test-deps >/dev/null
+ cat "$dir/.erlang.mk/recursive-test-deps-list.log" >> "$manifest"
+ done
+
+ sorted_manifest=$(mktemp -t "$template")
+ sort -u < "$manifest" > "$sorted_manifest"
+
+ # shellcheck disable=SC2094
+ while read -r dir; do
+ grep -q "^$dir/" "$sorted_manifest" || echo "$dir"
+ done < "$sorted_manifest" > "$manifest"
+
+ tar cf - -P \
+ --exclude '.terraform*' \
+ --exclude 'dirs-archive-*' \
+ --exclude "$erlang_nodename@*" \
+ -T "$manifest" \
+ | xz --threads=0 > "$dirs_archive"
+
+ rm "$manifest" "$sorted_manifest"
+}
+
+init_terraform() {
+ terraform init "$terraform_dir"
+}
+
+start_vms() {
+ terraform apply \
+ -auto-approve=true \
+ -var="erlang_version=$erlang_branch" \
+ -var="elixir_version=$elixir_version" \
+ -var="erlang_cookie=$erlang_cookie" \
+ -var="erlang_nodename=$erlang_nodename" \
+ -var="ssh_key=$ssh_key" \
+ -var="instance_count=$instance_count" \
+ -var="instance_name_prefix=\"$instance_name_prefix\"" \
+ -var="upload_dirs_archive=$dirs_archive" \
+ "$terraform_dir"
+}
+
+destroy_vms() {
+ terraform destroy \
+ -auto-approve=true \
+ -var="erlang_version=$erlang_branch" \
+ -var="elixir_version=$elixir_version" \
+ -var="erlang_cookie=$erlang_cookie" \
+ -var="erlang_nodename=$erlang_nodename" \
+ -var="ssh_key=$ssh_key" \
+ -var="instance_count=$instance_count" \
+ -var="instance_name_prefix=\"$instance_name_prefix\"" \
+ -var="upload_dirs_archive=$dirs_archive" \
+ "$terraform_dir"
+}
+
+erlang_branch=$(canonicalize_erlang_version "$erlang_version")
+if test -z "$erlang_branch"; then
+ echo "Erlang version '$erlang_version' malformed or unrecognized" 1>&2
+ echo 1>&2
+ usage
+ exit 65
+fi
+
+if test -z "$ssh_key"; then
+ ssh_key=$(find_ssh_key)
+fi
+if test -z "$ssh_key" || ! test -f "$ssh_key" || ! test -f "$ssh_key.pub"; then
+ echo "Please specify a private SSH key using '-s'" 1>&2
+ echo 1>&2
+ usage
+ exit 65
+fi
+
+erlang_cookie=$(cat ~/.erlang.cookie)
+
+list_dirs_to_upload "$@"
+init_terraform
+
+case "$destroy" in
+ yes)
+ destroy_vms
+ ;;
+ *)
+ start_vms
+ ;;
+esac
diff --git a/deps/rabbitmq_ct_helpers/tools/terraform/autoscaling-group/variables.tf b/deps/rabbitmq_ct_helpers/tools/terraform/autoscaling-group/variables.tf
new file mode 100644
index 0000000000..9ed1ee68fb
--- /dev/null
+++ b/deps/rabbitmq_ct_helpers/tools/terraform/autoscaling-group/variables.tf
@@ -0,0 +1,80 @@
+# vim:sw=2:et:
+
+variable "erlang_version" {
+ description = <<EOF
+Erlang version to deploy on VMs. This may also determine the version of
+the underlying OS.
+EOF
+}
+
+variable "erlang_git_ref" {
+ default = ""
+ description = <<EOF
+Git reference if building Erlang from Git. Specifying the Erlang
+version is still required.
+EOF
+}
+
+variable "elixir_version" {
+ default = ""
+ description = <<EOF
+Elixir version to deploy on VMs. Defaults to the latest available version.
+EOF
+}
+
+variable "erlang_cookie" {
+ description = <<EOF
+Erlang cookie to deploy on VMs.
+EOF
+}
+
+variable "erlang_nodename" {
+ description = <<EOF
+Name of the remote Erlang node.
+EOF
+}
+
+variable "ssh_key" {
+ description = <<EOF
+Path to the private SSH key to use to communicate with the VMs. The
+module then assumes that the public key is named "$ssh_key.pub".
+EOF
+}
+
+variable "instance_count" {
+ default = "1"
+ description = <<EOF
+Number of VMs to spawn.
+EOF
+}
+
+variable "upload_dirs_archive" {
+ description = <<EOF
+Archive of the directories to upload to the VMs. The directories are
+placed under / on the VM, so the remote paths can be identical to the
+local ones.
+EOF
+}
+
+variable "instance_name_prefix" {
+ default = "RabbitMQ testing: "
+}
+
+variable "instance_name_suffix" {
+ default = ""
+}
+
+variable "instance_name" {
+ default = "Unnamed"
+}
+
+variable "vpc_cidr_block" {
+ default = "10.0.0.0/16"
+}
+
+variable "files_suffix" {
+ default = ""
+}
+
+variable "aws_ec2_region" {
+ default = "eu-west-1"
+}
diff --git a/deps/rabbitmq_ct_helpers/tools/terraform/direct-vms/main.tf b/deps/rabbitmq_ct_helpers/tools/terraform/direct-vms/main.tf
new file mode 100644
index 0000000000..7d3130fd4e
--- /dev/null
+++ b/deps/rabbitmq_ct_helpers/tools/terraform/direct-vms/main.tf
@@ -0,0 +1,234 @@
+# vim:sw=2:et:
+
+provider "aws" {
+ region = var.aws_ec2_region
+}
+
+locals {
+ vm_name = "${var.instance_name_prefix}${var.instance_name}${var.instance_name_suffix} - Erlang ${var.erlang_version}"
+
+ resource_prefix = "rabbitmq-testing-"
+ distribution = lookup(var.erlang_version_to_system, var.erlang_version)
+ ec2_instance_type = lookup(var.ec2_instance_types, local.distribution, "m5.large")
+ ami = lookup(var.amis, local.distribution)
+ username = lookup(var.usernames, local.distribution, "ec2-user")
+}
+
+// The directories archive is uploaded to Amazon S3. We first create a
+// temporary bucket.
+//
+// Note that we use this unique bucket name as a unique ID elsewhere.
+resource "aws_s3_bucket" "dirs_archive" {
+ bucket_prefix = local.resource_prefix
+ acl = "private"
+}
+
+locals {
+ uuid = replace(aws_s3_bucket.dirs_archive.id, local.resource_prefix, "")
+ dirs_archive = var.upload_dirs_archive
+}
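+
+// For instance, if the provider generates a bucket id such as
+// "rabbitmq-testing-20211229abcdef" (made-up value), local.uuid becomes
+// "20211229abcdef".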
+
+// We configure a VPC and a bucket policy to allow us to make the
+// directories archive private on S3 but still access it from the VMs.
+resource "aws_vpc" "vpc" {
+ cidr_block = var.vpc_cidr_block
+
+ enable_dns_support = true
+ enable_dns_hostnames = true
+
+ tags = {
+ Name = local.vm_name
+ rabbitmq-testing = true
+ rabbitmq-testing-id = local.uuid
+ rabbitmq-testing-suffix = var.files_suffix
+ }
+}
+
+resource "aws_internet_gateway" "gw" {
+ vpc_id = aws_vpc.vpc.id
+}
+
+resource "aws_default_route_table" "rt" {
+ default_route_table_id = aws_vpc.vpc.default_route_table_id
+ route {
+ cidr_block = "0.0.0.0/0"
+ gateway_id = aws_internet_gateway.gw.id
+ }
+}
+
+resource "aws_vpc_endpoint" "vpc" {
+ vpc_id = aws_vpc.vpc.id
+ service_name = "com.amazonaws.${aws_s3_bucket.dirs_archive.region}.s3"
+ route_table_ids = [aws_default_route_table.rt.id]
+}
+
+resource "aws_subnet" "vpc" {
+ cidr_block = var.vpc_cidr_block
+ vpc_id = aws_vpc.vpc.id
+ map_public_ip_on_launch = true
+}
+
+resource "aws_s3_bucket_policy" "dirs_archive" {
+ bucket = aws_s3_bucket.dirs_archive.id
+
+ policy = <<EOF
+{
+ "Version": "2012-10-17",
+ "Id": "Policy",
+ "Statement": [
+ {
+ "Sid": "Access-to-specific-VPCE-only",
+ "Action": "s3:*",
+ "Effect": "Allow",
+ "Resource": ["arn:aws:s3:::${aws_s3_bucket.dirs_archive.id}",
+ "arn:aws:s3:::${aws_s3_bucket.dirs_archive.id}/*"],
+ "Condition": {
+ "StringEquals": {
+ "aws:sourceVpce": "${aws_vpc_endpoint.vpc.id}"
+ }
+ },
+ "Principal": "*"
+ }
+ ]
+}
+EOF
+}
+
+// We are now ready to actually upload the directories archive to S3.
+resource "aws_s3_bucket_object" "dirs_archive" {
+ bucket = aws_s3_bucket.dirs_archive.id
+ key = basename(local.dirs_archive)
+ source = local.dirs_archive
+}
+
+// SSH key to communicate with the VMs.
+resource "aws_key_pair" "ci_user" {
+ key_name_prefix = local.resource_prefix
+ public_key = file("${var.ssh_key}.pub")
+}
+
+// Security group to allow SSH connections.
+resource "aws_security_group" "allow_ssh" {
+ name_prefix = "${local.resource_prefix}ssh-"
+ description = "Allow incoming SSH connections"
+ vpc_id = aws_vpc.vpc.id
+
+ ingress {
+ from_port = 0
+ to_port = 22
+ protocol = "tcp"
+ cidr_blocks = ["0.0.0.0/0"]
+ }
+
+ egress {
+ from_port = 0
+ to_port = 0
+ protocol = "-1"
+ cidr_blocks = ["0.0.0.0/0"]
+ }
+}
+
+// We need a security group to allow Erlang distribution connections
+// between the VMs and with the local host.
+resource "aws_security_group" "allow_erlang_dist" {
+ name_prefix = "${local.resource_prefix}erlang-"
+ description = "Allow Erlang distribution connections"
+ vpc_id = aws_vpc.vpc.id
+
+ ingress {
+ from_port = 0
+ to_port = 4369
+ protocol = "tcp"
+ cidr_blocks = ["0.0.0.0/0"]
+ }
+
+ ingress {
+ from_port = 10240
+ to_port = 65535
+ protocol = "tcp"
+ cidr_blocks = ["0.0.0.0/0"]
+ }
+
+ egress {
+ from_port = 0
+ to_port = 0
+ protocol = "-1"
+ cidr_blocks = ["0.0.0.0/0"]
+ }
+}
+
+// Setup script executed on each VM at startup. Its main purpose is to
+// install and configure Erlang, then start an Erlang node that is later
+// used to control the VM.
+data "template_file" "user_data" {
+ template = file("${path.module}/templates/setup-erlang.sh")
+ vars = {
+ default_user = local.username
+ distribution = local.distribution
+
+ dirs_archive_url = "http://${aws_s3_bucket.dirs_archive.bucket_domain_name}/${aws_s3_bucket_object.dirs_archive.id}"
+ erlang_cookie = var.erlang_cookie
+ erlang_nodename = var.erlang_nodename
+ erlang_version = var.erlang_version
+ erlang_git_ref = var.erlang_git_ref
+ elixir_version = var.elixir_version
+ }
+}
+
+locals {
+ security_groups = [
+ aws_security_group.allow_ssh.id,
+ aws_security_group.allow_erlang_dist.id,
+ ]
+}
+
+// With the directories archive and the VPC in place, we can spawn the
+// VMs.
+resource "aws_instance" "vm" {
+ ami = local.ami
+ instance_type = local.ec2_instance_type
+ count = var.instance_count
+ key_name = aws_key_pair.ci_user.key_name
+
+ subnet_id = aws_subnet.vpc.id
+
+ vpc_security_group_ids = local.security_groups
+
+ user_data = data.template_file.user_data.rendered
+
+ // We need about 1.5 GiB of storage space, but apparently, 8 GiB is
+ // the minimum.
+ root_block_device {
+ volume_size = 8
+ delete_on_termination = true
+ }
+
+ tags = {
+ Name = "${local.vm_name} - #${count.index}"
+ rabbitmq-testing = true
+ rabbitmq-testing-id = local.uuid
+ }
+
+ connection {
+ type = "ssh"
+ user = local.username
+ private_key = file(var.ssh_key)
+ agent = false
+ }
+}
+
+data "template_file" "erlang_node_hostname" {
+ count = var.instance_count
+ template = "$${private_dns}"
+ vars = {
+ private_dns = element(split(".", aws_instance.vm.*.private_dns[count.index]), 0)
+ }
+}
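+
+// For example (illustrative), a private DNS name of
+// "ip-10-0-1-23.eu-west-1.compute.internal" yields "ip-10-0-1-23".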
+
+data "template_file" "erlang_node_nodename" {
+ count = var.instance_count
+ template = "${var.erlang_nodename}@$${private_dns}"
+ vars = {
+ private_dns = data.template_file.erlang_node_hostname.*.rendered[count.index]
+ }
+}
diff --git a/deps/rabbitmq_ct_helpers/tools/terraform/direct-vms/outputs.tf b/deps/rabbitmq_ct_helpers/tools/terraform/direct-vms/outputs.tf
new file mode 100644
index 0000000000..424c26c4f2
--- /dev/null
+++ b/deps/rabbitmq_ct_helpers/tools/terraform/direct-vms/outputs.tf
@@ -0,0 +1,59 @@
+# vim:sw=2:et:
+
+output "uuid" {
+ value = local.uuid
+}
+
+output "ct_peer_ipaddrs" {
+ value = zipmap(
+ data.template_file.erlang_node_hostname.*.rendered,
+ aws_instance.vm.*.public_ip)
+}
+
+output "ct_peer_nodenames" {
+ value = zipmap(
+ data.template_file.erlang_node_hostname.*.rendered,
+ data.template_file.erlang_node_nodename.*.rendered)
+}
+
+output "ssh_user_and_host" {
+ value = zipmap(
+ data.template_file.erlang_node_hostname.*.rendered,
+ formatlist("%s@%s", local.username, aws_instance.vm.*.public_dns))
+}
+
+// The following outputs are used by other modules (e.g.
+// `autoscaling-group`) that want to reuse the same resources
+// (besides the actual instances).
+
+output "resource_prefix" {
+ value = local.resource_prefix
+}
+
+output "instance_name" {
+ value = local.vm_name
+}
+
+output "instance_ami" {
+ value = local.ami
+}
+
+output "instance_type" {
+ value = local.ec2_instance_type
+}
+
+output "ssh_key_name" {
+ value = aws_key_pair.ci_user.key_name
+}
+
+output "security_groups" {
+ value = local.security_groups
+}
+
+output "instance_user_data" {
+ value = data.template_file.user_data.rendered
+}
+
+output "subnet_id" {
+ value = aws_subnet.vpc.id
+}
diff --git a/deps/rabbitmq_ct_helpers/tools/terraform/direct-vms/setup-vms.sh b/deps/rabbitmq_ct_helpers/tools/terraform/direct-vms/setup-vms.sh
new file mode 100755
index 0000000000..6aae723db0
--- /dev/null
+++ b/deps/rabbitmq_ct_helpers/tools/terraform/direct-vms/setup-vms.sh
@@ -0,0 +1,187 @@
+#!/bin/sh
+# vim:sw=2:et:
+
+set -e
+
+usage() {
+ echo "Syntax: $(basename "$0") [-Dh] [-c <instance_count>] [-e <elixir_version>] [-s <ssh_key>] <erlang_version> [<erlang_app_dir> ...]"
+}
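+
+# Example invocation (illustrative versions and paths):
+#   ./setup-vms.sh -c 3 -e 1.11.4 23.2 path/to/deps/rabbit
+# spawns 3 VMs running Erlang 23.2 with Elixir 1.11.4 and uploads the
+# given application directories (plus their test dependencies).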
+
+instance_count=1
+
+while getopts "c:e:Dhs:" opt; do
+ case $opt in
+ h)
+ usage
+ exit
+ ;;
+ c)
+ instance_count=$OPTARG
+ ;;
+ e)
+ elixir_version=$OPTARG
+ ;;
+ D)
+ destroy=yes
+ ;;
+ s)
+ ssh_key=$OPTARG
+ ;;
+ \?)
+ echo "Invalid option: -$OPTARG" >&2
+ usage 1>&2
+ exit 64
+ ;;
+ :)
+ echo "Option -$OPTARG requires an argument." >&2
+ usage 1>&2
+ exit 64
+ ;;
+ esac
+done
+shift $((OPTIND - 1))
+
+erlang_version=$1
+if test -z "$erlang_version"; then
+ echo "Erlang version is required" 1>&2
+ echo 1>&2
+ usage
+ exit 64
+fi
+shift
+
+terraform_dir=$(cd "$(dirname "$0")" && pwd)
+
+erlang_nodename=control
+dirs_archive=dirs-archive.tar.xz
+instance_name_prefix="[$(basename "$0")/$USER] "
+
+canonicalize_erlang_version() {
+ version=$1
+
+ case "$version" in
+ R[0-9]*)
+ echo "$version" | sed -E 's/(R[0-9]+(:?A|B)[0-9]+).*/\1/'
+ ;;
+ [0-9]*)
+ echo "$version" | sed -E 's/([0-9]+\.[0-9]+).*/\1/'
+ ;;
+ esac
+}
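+
+# For example (illustrative): "23.1.2" is canonicalized to "23.1",
+# and "R16B03-1" to "R16B03".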
+
+find_ssh_key() {
+ for file in ~/.ssh/*terraform* ~/.ssh/id_rsa ~/.ssh/id_ed25519; do
+ if test -f "$file" && test -f "$file.pub"; then
+ echo "$file"
+ return
+ fi
+ done
+}
+
+list_dirs_to_upload() {
+ if test -z "$MAKE"; then
+ if gmake --version 2>&1 | grep -q "GNU Make"; then
+ MAKE='gmake'
+ elif make --version 2>&1 | grep -q "GNU Make"; then
+ MAKE='make'
+ fi
+ fi
+
+ template='dirs-to-upload.XXXX'
+ manifest=$(mktemp -t "$template")
+ for dir in "$@"; do
+ (cd "$dir" && pwd) >> "$manifest"
+ "$MAKE" --no-print-directory -C "$dir" fetch-test-deps >/dev/null
+ cat "$dir/.erlang.mk/recursive-test-deps-list.log" >> "$manifest"
+ done
+
+ sorted_manifest=$(mktemp -t "$template")
+ sort -u < "$manifest" > "$sorted_manifest"
+
+ # shellcheck disable=SC2094
+ while read -r dir; do
+ grep -q "^$dir/" "$sorted_manifest" || echo "$dir"
+ done < "$sorted_manifest" > "$manifest"
+
+ tar cf - -P \
+ --exclude '.terraform*' \
+ --exclude 'dirs-archive-*' \
+ --exclude "$erlang_nodename@*" \
+ -T "$manifest" \
+ | xz --threads=0 > "$dirs_archive"
+
+ rm "$manifest" "$sorted_manifest"
+}
+
+init_terraform() {
+ terraform init "$terraform_dir"
+}
+
+start_vms() {
+ terraform apply \
+ -auto-approve=true \
+ -var="erlang_version=$erlang_branch" \
+ -var="elixir_version=$elixir_version" \
+ -var="erlang_git_ref=$erlang_git_ref" \
+ -var="erlang_cookie=$erlang_cookie" \
+ -var="erlang_nodename=$erlang_nodename" \
+ -var="ssh_key=$ssh_key" \
+ -var="instance_count=$instance_count" \
+ -var="instance_name_prefix=\"$instance_name_prefix\"" \
+ -var="upload_dirs_archive=$dirs_archive" \
+ "$terraform_dir"
+}
+
+destroy_vms() {
+ terraform destroy \
+ -auto-approve=true \
+ -var="erlang_version=$erlang_branch" \
+ -var="elixir_version=$elixir_version" \
+ -var="erlang_git_ref=$erlang_git_ref" \
+ -var="erlang_cookie=$erlang_cookie" \
+ -var="erlang_nodename=$erlang_nodename" \
+ -var="ssh_key=$ssh_key" \
+ -var="instance_count=$instance_count" \
+ -var="instance_name_prefix=\"$instance_name_prefix\"" \
+ -var="upload_dirs_archive=$dirs_archive" \
+ "$terraform_dir"
+}
+
+case "$erlang_version" in
+ *@*)
+ erlang_git_ref=${erlang_version#*@}
+ erlang_version=${erlang_version%@*}
+ ;;
+esac
+
+erlang_branch=$(canonicalize_erlang_version "$erlang_version")
+if test -z "$erlang_branch"; then
+ echo "Erlang version '$erlang_version' malformed or unrecognized" 1>&2
+ echo 1>&2
+ usage
+ exit 65
+fi
+
+if test -z "$ssh_key"; then
+ ssh_key=$(find_ssh_key)
+fi
+if test -z "$ssh_key" || ! test -f "$ssh_key" || ! test -f "$ssh_key.pub"; then
+ echo "Please specify a private SSH key using '-s'" 1>&2
+ echo 1>&2
+ usage
+ exit 65
+fi
+
+erlang_cookie=$(cat ~/.erlang.cookie)
+
+list_dirs_to_upload "$@"
+init_terraform
+
+case "$destroy" in
+ yes)
+ destroy_vms
+ ;;
+ *)
+ start_vms
+ ;;
+esac
diff --git a/deps/rabbitmq_ct_helpers/tools/terraform/direct-vms/templates/setup-erlang.sh b/deps/rabbitmq_ct_helpers/tools/terraform/direct-vms/templates/setup-erlang.sh
new file mode 100644
index 0000000000..8fb5a4d708
--- /dev/null
+++ b/deps/rabbitmq_ct_helpers/tools/terraform/direct-vms/templates/setup-erlang.sh
@@ -0,0 +1,264 @@
+#!/bin/sh
+# vim:sw=2:et:
+
+set -ex
+
+# Execute ourselves as root if we are an unprivileged user.
+if test "$(id -u)" != '0'; then
+ exec sudo -i "$0" "$@"
+fi
+
+HOME=/root
+export HOME
+
+DEBIAN_FRONTEND=noninteractive
+export DEBIAN_FRONTEND
+
+# shellcheck disable=SC2016
+readonly erlang_version='${erlang_version}'
+# shellcheck disable=SC2016
+erlang_git_ref='${erlang_git_ref}'
+# shellcheck disable=SC2016
+readonly elixir_version='${elixir_version}'
+# shellcheck disable=SC2016
+readonly erlang_nodename='${erlang_nodename}'
+# shellcheck disable=SC2016
+readonly default_user='${default_user}'
+# shellcheck disable=SC2016
+readonly dirs_archive_url='${dirs_archive_url}'
+# shellcheck disable=SC2016
+readonly distribution='${distribution}'
+# shellcheck disable=SC2016
+readonly erlang_cookie='${erlang_cookie}'
+
+readonly debian_codename="$${distribution#debian-*}"
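+# For example (illustrative), a distribution of "debian-buster" yields a
+# debian_codename of "buster".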
+
+case "$erlang_version" in
+ 24.*)
+ if test -z "$erlang_git_ref"; then
+ erlang_git_ref='master'
+ fi
+ ;;
+ 23.*|22.*|21.*|20.*|19.3)
+ readonly erlang_package_version="1:$erlang_version-1"
+ ;;
+ R16B03)
+ readonly erlang_package_version='1:16.b.3-3'
+ ;;
+ *)
+ echo "[ERROR] unknown erlang version: $erlang_version" 1>&2
+ exit 69 # EX_UNAVAILABLE; see sysexits(3)
+ ;;
+esac
+
+install_essentials() {
+ apt-get -qq update
+ apt-get -qq install wget curl gnupg
+}
+
+setup_backports() {
+ # Enable backports.
+ cat >/etc/apt/sources.list.d/backports.list << EOF
+deb http://cdn-fastly.deb.debian.org/debian $debian_codename-backports main
+EOF
+ apt-get -qq update
+}
+
+# --------------------------------------------------------------------
+# Functions to take Erlang and Elixir from Debian packages.
+# --------------------------------------------------------------------
+
+determine_version_to_pin() {
+ package=$1
+ min_version=$2
+
+ apt-cache policy "$package" | \
+ awk '
+BEGIN {
+ version_to_pin = "";
+}
+/^ ( |\*\*\*) [^ ]/ {
+ if ($1 == "***") {
+ version = $2;
+ } else {
+ version = $1;
+ }
+
+ if (version_to_pin) {
+ exit;
+ } else if (match(version, /^'$min_version'([-.]|$)/)) {
+ version_to_pin = version;
+ }
+}
+END {
+ if (version_to_pin) {
+ print version_to_pin;
+ exit;
+ } else {
+ exit 1;
+ }
+}'
+}
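+
+# Illustrative example: if "apt-cache policy elixir" lists candidate
+# versions such as "1.10.3-1" and "1.9.4-1", then
+# "determine_version_to_pin elixir 1.10" prints "1.10.3-1".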
+
+setup_erlang_deb_repository() {
+ # Setup repository to get Erlang.
+ wget -O- https://www.rabbitmq.com/rabbitmq-release-signing-key.asc | apt-key add -
+ wget -O- https://dl.cloudsmith.io/public/rabbitmq/rabbitmq-erlang/gpg.E495BB49CC4BBE5B.key | apt-key add -
+ cat >/etc/apt/sources.list.d/rabbitmq-erlang.list <<EOF
+deb https://dl.cloudsmith.io/public/rabbitmq/rabbitmq-erlang/deb/debian buster main
+EOF
+
+ # Configure Erlang version pinning.
+ cat >/etc/apt/preferences.d/erlang <<EOF
+Package: erlang*
+Pin: version $erlang_package_version
+Pin-Priority: 1000
+EOF
+
+ apt-get -qq install -y --no-install-recommends apt-transport-https
+ apt-get -qq update
+}
+
+apt_install_erlang() {
+ apt-get -qq install -y --no-install-recommends \
+ erlang-base erlang-nox erlang-dev erlang-src erlang-common-test
+}
+
+apt_install_elixir() {
+ if test "$elixir_version"; then
+ # Configure Elixir version pinning.
+ elixir_package_version=$(determine_version_to_pin elixir "$elixir_version")
+
+ cat >/etc/apt/preferences.d/elixir <<EOF
+Package: elixir
+Pin: version $elixir_package_version
+Pin-Priority: 1000
+EOF
+ fi
+
+ apt-get -qq install -y --no-install-recommends elixir
+}
+
+apt_install_extra() {
+ readonly extra_pkgs='git make rsync vim-nox xz-utils zip'
+ readonly extra_backports=''
+
+ # shellcheck disable=SC2086
+ test -z "$extra_pkgs" || \
+ apt-get -qq install -y --no-install-recommends \
+ $extra_pkgs
+
+ # shellcheck disable=SC2086
+ test -z "$extra_backports" || \
+ apt-get -qq install -y -V --fix-missing --no-install-recommends \
+ -t "$debian_codename"-backports \
+ $extra_backports
+}
+
+# --------------------------------------------------------------------
+# Functions to build Erlang and Elixir from sources.
+# --------------------------------------------------------------------
+
+install_kerl() {
+ apt-get -qq install -y --no-install-recommends \
+ curl
+
+ mkdir -p /usr/local/bin
+ cd /usr/local/bin
+ curl -O https://raw.githubusercontent.com/kerl/kerl/master/kerl
+ chmod a+x kerl
+}
+
+kerl_install_erlang() {
+ apt-get -qq install -y --no-install-recommends \
+ git \
+ build-essential \
+ autoconf automake libtool \
+ libssl-dev \
+ libncurses5-dev \
+ libsctp1 libsctp-dev
+
+ kerl build git https://github.com/erlang/otp.git "$erlang_git_ref" "$erlang_version"
+ kerl install "$erlang_version" /usr/local/erlang
+
+ . /usr/local/erlang/activate
+ echo '. /usr/local/erlang/activate' > /etc/profile.d/erlang.sh
+}
+
+install_kiex() {
+ curl -sSL https://raw.githubusercontent.com/taylor/kiex/master/install | bash -s
+
+ mv "$HOME/.kiex" /usr/local/kiex
+ sed -E \
+ -e 's,\\\$HOME/\.kiex,/usr/local/kiex,' \
+ -e 's,\$HOME/\.kiex,/usr/local/kiex,' \
+ < /usr/local/kiex/bin/kiex \
+ > /usr/local/kiex/bin/kiex.patched
+ mv /usr/local/kiex/bin/kiex.patched /usr/local/kiex/bin/kiex
+ chmod a+x /usr/local/kiex/bin/kiex
+}
+
+kiex_install_elixir() {
+ case "$erlang_version" in
+ 22.*|23.*|24.*)
+ url="https://github.com/elixir-lang/elixir/releases/download/v$elixir_version/Precompiled.zip"
+ wget -q -O/tmp/elixir.zip "$url"
+
+ apt-get -qq install -y --no-install-recommends unzip
+
+ mkdir -p /usr/local/elixir
+ (cd /usr/local/elixir && unzip -q /tmp/elixir.zip)
+ export PATH=/usr/local/elixir/bin:$PATH
+ ;;
+ *)
+ export PATH=/usr/local/kiex/bin:$PATH
+ latest_elixir_version=$(kiex list releases | tail -n 1 | awk '{print $1}')
+ kiex install $latest_elixir_version
+
+ . /usr/local/kiex/elixirs/elixir-$latest_elixir_version.env
+ cat >> /etc/profile.d/erlang.sh <<EOF
+
+. /usr/local/kiex/elixirs/elixir-$latest_elixir_version.env
+EOF
+ ;;
+ esac
+}
+
+# --------------------------------------------------------------------
+# Main.
+# --------------------------------------------------------------------
+
+install_essentials
+setup_backports
+
+# Install Erlang + various tools.
+if test "$erlang_package_version"; then
+ setup_erlang_deb_repository
+ apt_install_erlang
+ apt_install_elixir
+elif test "$erlang_git_ref"; then
+ install_kerl
+ kerl_install_erlang
+ install_kiex
+ kiex_install_elixir
+fi
+
+apt_install_extra
+
+# Store Erlang cookie file for both root and the default user.
+for file in ~/.erlang.cookie "/home/$default_user/.erlang.cookie"; do
+ echo "$erlang_cookie" > "$file"
+ chmod 400 "$file"
+done
+chown "$default_user" "/home/$default_user/.erlang.cookie"
+
+# Fetch and extract the dirs archive.
+dirs_archive="/tmp/$(basename "$dirs_archive_url")"
+wget -q -O"$dirs_archive" "$dirs_archive_url"
+if test -s "$dirs_archive"; then
+ xzcat "$dirs_archive" | tar xf - -P
+fi
+rm -f "$dirs_archive"
+
+# Start an Erlang node to control the VM from Erlang.
+erl -noinput -sname "$erlang_nodename" -hidden -detached
diff --git a/deps/rabbitmq_ct_helpers/tools/terraform/direct-vms/variables.tf b/deps/rabbitmq_ct_helpers/tools/terraform/direct-vms/variables.tf
new file mode 100644
index 0000000000..d77b67228b
--- /dev/null
+++ b/deps/rabbitmq_ct_helpers/tools/terraform/direct-vms/variables.tf
@@ -0,0 +1,147 @@
+# vim:sw=2:et:
+
+variable "erlang_version" {
+ description = <<EOF
+Erlang version to deploy on VMs. This may also determine the version of
+the underlying OS.
+EOF
+}
+
+variable "erlang_git_ref" {
+ default = ""
+ description = <<EOF
+Git reference if building Erlang from Git. Specifying the Erlang
+version is still required.
+EOF
+}
+
+variable "elixir_version" {
+ default = ""
+ description = <<EOF
+Elixir version to deploy on VMs. Defaults to the latest available.
+EOF
+}
+
+variable "erlang_cookie" {
+ description = <<EOF
+Erlang cookie to deploy on VMs.
+EOF
+}
+
+variable "erlang_nodename" {
+ default = "control"
+ description = <<EOF
+Name of the remote Erlang node.
+EOF
+}
+
+variable "ssh_key" {
+ description = <<EOF
+Path to the private SSH key to use to communicate with the VMs. The
+module then assumes that the public key is named "$ssh_key.pub".
+EOF
+}
+
+variable "instance_count" {
+ default = "1"
+ description = <<EOF
+Number of VMs to spawn.
+EOF
+}
+
+variable "upload_dirs_archive" {
+ description = <<EOF
+Archive of the directories to upload to the VMs. It is extracted at the
+root (/) of each VM with absolute paths preserved, so the paths can
+remain identical to those on the local host.
+EOF
+}
+
+variable "instance_name_prefix" {
+ default = "RabbitMQ testing: "
+}
+
+variable "instance_name_suffix" {
+ default = ""
+}
+
+variable "instance_name" {
+ default = "Unnamed"
+}
+
+variable "vpc_cidr_block" {
+ default = "10.0.0.0/16"
+}
+
+variable "files_suffix" {
+ default = ""
+}
+
+variable "aws_ec2_region" {
+ default = "eu-west-1"
+}
+
+variable "erlang_version_to_system" {
+ type = map
+ default = {
+ "22.3" = "debian-buster"
+ "23.0" = "debian-buster"
+ "23.1" = "debian-buster"
+ "23.2" = "debian-buster"
+ "23.3" = "debian-buster"
+ "24.0" = "debian-buster"
+ }
+}
+
+variable "ec2_instance_types" {
+ type = map
+ default = {
+ }
+}
+
+# AMIs for eu-west-1 (Ireland)
+variable "amis" {
+ type = map
+ default = {
+ "centos-7" = "ami-6e28b517"
+ "centos-8" = "ami-0645e7b5435a343a5" # Community-provided
+ "debian-buster" = "ami-02498d1ddb8cc6a86" # Community-provided
+ "fedora-30" = "ami-0c8df718af40abdae"
+ "fedora-31" = "ami-00d8194a6e394e1c5"
+ "fedora-32" = "ami-0f17c0eb4a2e08778"
+ "fedora-33" = "ami-0aa3a65f84cb982ca"
+ "freebsd-10" = "ami-76f82c0f"
+ "freebsd-11" = "ami-ab56bed2"
+ "opensuse-leap-15.1" = "ami-0f81506cab2b62029"
+ "opensuse-leap-15.2" = "ami-013f2b687f5a91567"
+ "rhel-7" = "ami-8b8c57f8"
+ "sles-11" = "ami-a2baf5d5"
+ "sles-12" = "ami-f4278487"
+ "ubuntu-16.04" = "ami-067b6923c66564bf6"
+ "ubuntu-18.04" = "ami-01cca82393e531118"
+ "ubuntu-20.04" = "ami-09376517f0f510ad9"
+ }
+}
+
+variable "usernames" {
+ type = map
+ default = {
+ "centos-7" = "centos"
+ "centos-8" = "centos"
+ "debian-buster" = "admin"
+ "debian-bullseye" = "admin"
+ "fedora-30" = "fedora"
+ "fedora-31" = "fedora"
+ "fedora-32" = "fedora"
+ "fedora-33" = "fedora"
+ "freebsd-10" = "ec2-user"
+ "freebsd-11" = "ec2-user"
+ "opensuse-leap-15.1" = "ec2-user"
+ "opensuse-leap-15.2" = "ec2-user"
+ "rhel-7" = "ec2-user"
+ "sles-11" = "ec2-user"
+ "sles-12" = "ec2-user"
+ "ubuntu-16.04" = "ubuntu"
+ "ubuntu-18.04" = "ubuntu"
+ "ubuntu-20.04" = "ubuntu"
+ }
+}
diff --git a/deps/rabbitmq_ct_helpers/tools/terraform/vms-query/main.tf b/deps/rabbitmq_ct_helpers/tools/terraform/vms-query/main.tf
new file mode 100644
index 0000000000..e2720db11e
--- /dev/null
+++ b/deps/rabbitmq_ct_helpers/tools/terraform/vms-query/main.tf
@@ -0,0 +1,31 @@
+# vim:sw=2:et:
+
+provider "aws" {
+ region = "eu-west-1"
+}
+
+data "aws_instances" "vms" {
+ instance_tags = {
+ rabbitmq-testing = true
+ rabbitmq-testing-id = var.uuid
+ }
+}
+
+data "template_file" "erlang_node_hostname" {
+ count = length(data.aws_instances.vms.ids)
+ template = "$${private_dns}"
+ vars = {
+ // FIXME: Here we hard-code how Amazon EC2 formats hostnames based
+ // on the private IP address.
+ private_dns = "ip-${
+ join("-", split(".", data.aws_instances.vms.private_ips[count.index]))}"
+ }
+}
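+
+// For example (illustrative), a private IP of 10.0.1.23 yields the
+// hostname "ip-10-0-1-23".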
+
+data "template_file" "erlang_node_nodename" {
+ count = length(data.aws_instances.vms.ids)
+ template = "${var.erlang_nodename}@$${private_dns}"
+ vars = {
+ private_dns = data.template_file.erlang_node_hostname.*.rendered[count.index]
+ }
+}
diff --git a/deps/rabbitmq_ct_helpers/tools/terraform/vms-query/outputs.tf b/deps/rabbitmq_ct_helpers/tools/terraform/vms-query/outputs.tf
new file mode 100644
index 0000000000..9a2b924476
--- /dev/null
+++ b/deps/rabbitmq_ct_helpers/tools/terraform/vms-query/outputs.tf
@@ -0,0 +1,21 @@
+# vim:sw=2:et:
+
+output "instance_count" {
+ value = length(data.aws_instances.vms.ids)
+}
+
+output "instance_ids" {
+ value = data.aws_instances.vms.ids
+}
+
+output "ct_peer_ipaddrs" {
+ value = zipmap(
+ data.template_file.erlang_node_hostname.*.rendered,
+ data.aws_instances.vms.public_ips)
+}
+
+output "ct_peer_nodenames" {
+ value = zipmap(
+ data.template_file.erlang_node_hostname.*.rendered,
+ data.template_file.erlang_node_nodename.*.rendered)
+}
diff --git a/deps/rabbitmq_ct_helpers/tools/terraform/vms-query/query-vms.sh b/deps/rabbitmq_ct_helpers/tools/terraform/vms-query/query-vms.sh
new file mode 100755
index 0000000000..67c2291b4f
--- /dev/null
+++ b/deps/rabbitmq_ct_helpers/tools/terraform/vms-query/query-vms.sh
@@ -0,0 +1,55 @@
+#!/bin/sh
+# vim:sw=2:et:
+
+set -e
+
+usage() {
+ echo "Syntax: $(basename "$0") [-h] <uuid>"
+}
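+
+# Example (illustrative): ./query-vms.sh 20210401abc0
+# where the unique ID is the "uuid" output of the direct-vms module.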
+
+while getopts "h" opt; do
+ case $opt in
+ h)
+ usage
+ exit
+ ;;
+ \?)
+ echo "Invalid option: -$OPTARG" >&2
+ usage 1>&2
+ exit 64
+ ;;
+ :)
+ echo "Option -$OPTARG requires an argument." >&2
+ usage 1>&2
+ exit 64
+ ;;
+ esac
+done
+shift $((OPTIND - 1))
+
+uuid=$1
+if test -z "$uuid"; then
+ echo "Unique ID is required" 1>&2
+ echo 1>&2
+ usage
+ exit 64
+fi
+shift
+
+terraform_dir=$(cd "$(dirname "$0")" && pwd)
+
+init_terraform() {
+ terraform init "$terraform_dir"
+}
+
+query_vms() {
+ terraform apply \
+ -auto-approve=true \
+ -var="uuid=$uuid" \
+ -var="erlang_nodename=control" \
+ "$terraform_dir"
+}
+
+init_terraform
+
+query_vms
diff --git a/deps/rabbitmq_ct_helpers/tools/terraform/vms-query/variables.tf b/deps/rabbitmq_ct_helpers/tools/terraform/vms-query/variables.tf
new file mode 100644
index 0000000000..422ce6d043
--- /dev/null
+++ b/deps/rabbitmq_ct_helpers/tools/terraform/vms-query/variables.tf
@@ -0,0 +1,14 @@
+# vim:sw=2:et:
+
+variable "uuid" {
+ description = <<EOF
+Unique ID of the deployment.
+EOF
+}
+
+variable "erlang_nodename" {
+ default = "unspecified"
+ description = <<EOF
+Name of the remote Erlang node.
+EOF
+}
diff --git a/deps/rabbitmq_ct_helpers/tools/tls-certs/.gitignore b/deps/rabbitmq_ct_helpers/tools/tls-certs/.gitignore
new file mode 100644
index 0000000000..636d68e744
--- /dev/null
+++ b/deps/rabbitmq_ct_helpers/tools/tls-certs/.gitignore
@@ -0,0 +1 @@
+openssl.cnf
diff --git a/deps/rabbitmq_ct_helpers/tools/tls-certs/Makefile b/deps/rabbitmq_ct_helpers/tools/tls-certs/Makefile
new file mode 100644
index 0000000000..6f6fec5fd6
--- /dev/null
+++ b/deps/rabbitmq_ct_helpers/tools/tls-certs/Makefile
@@ -0,0 +1,70 @@
+ifndef DIR
+$(error DIR must be specified)
+endif
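+
+# Example invocation (illustrative): make DIR=/tmp/tls-certs PASSWORD=secret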
+
+PASSWORD ?= changeme
+HOSTNAME := $(shell if [ "$$(uname)" = Darwin ]; then hostname -s; else hostname; fi)
+
+# Verbosity.
+
+V ?= 0
+
+verbose_0 = @
+verbose_2 = set -x;
+verbose = $(verbose_$(V))
+
+gen_verbose_0 = @echo " GEN " $@;
+gen_verbose_2 = set -x;
+gen_verbose = $(gen_verbose_$(V))
+
+openssl_output_0 = 2>/dev/null
+openssl_output = $(openssl_output_$(V))
+
+.PRECIOUS: %/testca/cacert.pem
+.PHONY: all testca server client clean
+
+all: server client
+ @:
+
+testca: $(DIR)/testca/cacert.pem
+
+server: TARGET = server
+server: $(DIR)/server/cert.pem
+ @:
+
+client: TARGET = client
+client: $(DIR)/client/cert.pem
+ @:
+
+$(DIR)/testca/cacert.pem:
+ $(gen_verbose) mkdir -p $(dir $@)
+ $(verbose) { ( cd $(dir $@) && \
+ mkdir -p certs private && \
+ chmod 700 private && \
+ echo 01 > serial && \
+ :> index.txt && \
+ sed -e 's/@HOSTNAME@/$(HOSTNAME)/g' $(CURDIR)/openssl.cnf.in > $(CURDIR)/openssl.cnf && \
+ openssl req -x509 -config $(CURDIR)/openssl.cnf -newkey rsa:2048 -days 365 \
+ -out cacert.pem -outform PEM -subj /CN=MyTestCA/L=$$$$/ -nodes && \
+ openssl x509 -in cacert.pem -out cacert.cer -outform DER ) $(openssl_output) \
+ || (rm -rf $(dir $@) && false); }
+
+$(DIR)/%/cert.pem: $(DIR)/testca/cacert.pem
+ $(gen_verbose) mkdir -p $(DIR)/$(TARGET)
+ $(verbose) { ( cd $(DIR)/$(TARGET) && \
+ openssl genrsa -out key.pem 2048 && \
+ openssl req -new -key key.pem -out req.pem -outform PEM \
+ -subj /C=UK/ST=England/CN=$(HOSTNAME)/O=$(TARGET)/L=$$$$/ -nodes && \
+ cd ../testca && \
+ sed -e 's/@HOSTNAME@/$(HOSTNAME)/g' $(CURDIR)/openssl.cnf.in > $(CURDIR)/openssl.cnf && \
+ openssl ca -config $(CURDIR)/openssl.cnf -in ../$(TARGET)/req.pem -out \
+ ../$(TARGET)/cert.pem -notext -batch -extensions \
+ $(TARGET)_ca_extensions && \
+ cd ../$(TARGET) && \
+ openssl pkcs12 -export -out keycert.p12 -in cert.pem -inkey key.pem \
+ -passout pass:$(PASSWORD) ) $(openssl_output) || (rm -rf $(DIR)/$(TARGET) && false); }
+
+clean:
+ rm -rf $(DIR)/testca
+ rm -rf $(DIR)/server
+ rm -rf $(DIR)/client
diff --git a/deps/rabbitmq_ct_helpers/tools/tls-certs/openssl.cnf.in b/deps/rabbitmq_ct_helpers/tools/tls-certs/openssl.cnf.in
new file mode 100644
index 0000000000..ecce78ca06
--- /dev/null
+++ b/deps/rabbitmq_ct_helpers/tools/tls-certs/openssl.cnf.in
@@ -0,0 +1,62 @@
+[ ca ]
+default_ca = testca
+
+[ testca ]
+dir = .
+certificate = $dir/cacert.pem
+database = $dir/index.txt
+new_certs_dir = $dir/certs
+private_key = $dir/private/cakey.pem
+serial = $dir/serial
+
+default_crl_days = 7
+default_days = 365
+default_md = sha256
+
+policy = testca_policy
+x509_extensions = certificate_extensions
+
+[ testca_policy ]
+commonName = supplied
+stateOrProvinceName = optional
+countryName = optional
+emailAddress = optional
+organizationName = optional
+organizationalUnitName = optional
+domainComponent = optional
+
+[ certificate_extensions ]
+basicConstraints = CA:false
+
+[ req ]
+default_bits = 2048
+default_keyfile = ./private/cakey.pem
+default_md = sha256
+prompt = yes
+distinguished_name = root_ca_distinguished_name
+x509_extensions = root_ca_extensions
+
+[ root_ca_distinguished_name ]
+commonName = hostname
+countryName_default = UK
+stateOrProvinceName_default = London
+organizationName_default = RabbitMQ
+
+[ root_ca_extensions ]
+basicConstraints = CA:true
+keyUsage = keyCertSign, cRLSign
+
+[ client_ca_extensions ]
+basicConstraints = CA:false
+keyUsage = digitalSignature,keyEncipherment
+extendedKeyUsage = 1.3.6.1.5.5.7.3.2
+
+[ server_ca_extensions ]
+basicConstraints = CA:false
+keyUsage = digitalSignature,keyEncipherment
+extendedKeyUsage = 1.3.6.1.5.5.7.3.1
+subjectAltName = @server_alt_names
+
+[ server_alt_names ]
+DNS.1 = @HOSTNAME@
+DNS.2 = localhost
diff --git a/deps/rabbitmq_event_exchange/BUILD.bazel b/deps/rabbitmq_event_exchange/BUILD.bazel
new file mode 100644
index 0000000000..893af482d0
--- /dev/null
+++ b/deps/rabbitmq_event_exchange/BUILD.bazel
@@ -0,0 +1,60 @@
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze")
+load(
+ "//:rabbitmq.bzl",
+ "RABBITMQ_DIALYZER_OPTS",
+ "assert_suites",
+ "broker_for_integration_suites",
+ "rabbitmq_integration_suite",
+ "rabbitmq_lib",
+ "rabbitmq_suite",
+)
+
+APP_NAME = "rabbitmq_event_exchange"
+
+APP_DESCRIPTION = "Event Exchange Type"
+
+DEPS = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+ "//deps/rabbit:bazel_erlang_lib",
+]
+
+rabbitmq_lib(
+ app_description = APP_DESCRIPTION,
+ app_name = APP_NAME,
+ deps = DEPS,
+)
+
+xref(tags = ["xref"])
+
+dialyze(
+ dialyzer_opts = RABBITMQ_DIALYZER_OPTS,
+ plt = "//:base_plt",
+ tags = ["dialyze"],
+)
+
+broker_for_integration_suites()
+
+PACKAGE = "deps/rabbitmq_event_exchange"
+
+suites = [
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "config_schema_SUITE",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "system_SUITE",
+ ),
+ rabbitmq_suite(
+ name = "unit_SUITE",
+ runtime_deps = [
+ "//deps/rabbitmq_ct_helpers:bazel_erlang_lib",
+ ],
+ ),
+]
+
+assert_suites(
+ suites,
+ glob(["test/**/*_SUITE.erl"]),
+)
diff --git a/deps/rabbitmq_event_exchange/Makefile b/deps/rabbitmq_event_exchange/Makefile
index 3e04c49c34..9058e29d26 100644
--- a/deps/rabbitmq_event_exchange/Makefile
+++ b/deps/rabbitmq_event_exchange/Makefile
@@ -16,5 +16,5 @@ DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
ERLANG_MK_COMMIT = rabbitmq-tmp
-include rabbitmq-components.mk
-include erlang.mk
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
diff --git a/deps/rabbitmq_event_exchange/erlang.mk b/deps/rabbitmq_event_exchange/erlang.mk
deleted file mode 100644
index fce4be0b0a..0000000000
--- a/deps/rabbitmq_event_exchange/erlang.mk
+++ /dev/null
@@ -1,7808 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
-ERLANG_MK_WITHOUT =
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
-
-# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
-
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
-
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of statc BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another a key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstact time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simplify writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of the Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating Github-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = erlang library for JSON Web Token
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple, non-intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is a pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's REST interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = redis erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += esh_mk
-pkg_esh_mk_name = esh_mk
-pkg_esh_mk_description = esh template engine plugin for erlang.mk
-pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
-pkg_esh_mk_fetch = git
-pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
-pkg_esh_mk_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = TOML language erlang parser
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = http://fixprotocol.org/ implementation.
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - an Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = The guards and for Erlang parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
-pkg_gun_homepage = http://ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang irc client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding and decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library, written in C, for efficiently decoding and encoding JSON.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = erlang driver for rethinkdb
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = library to handle mimetypes
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map an internal port to an external one using UPnP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang OAuth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transformer for using new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between record and proplist
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = automatic Erlang node discovery via redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = pipelined erlang redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang Rest Client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy as nif for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an ID generator for messaging services.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elastic Search
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX style broken HTML parser in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = transit format for erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU based collation Erlang module
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtension for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = Tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for Graphite logs.
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = a simple erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler svn repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired by rebar xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based yaml loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = ZAB protocol implementation in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
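-# For illustration, a couple of hypothetical invocations of the search target above:
-#   make search q=http    # list packages whose name or description contains "http"
-#   make search           # print the whole package index
-# The match is case-insensitive (both sides go through core_lc).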
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
-
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
-
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# They both use the core_dep_plugin macro.
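-# For illustration (my_dep is a hypothetical dependency name): setting
-#   DEP_EARLY_PLUGINS = my_dep
-# includes $(DEPS_DIR)/my_dep/early-plugins.mk, while a value with a slash,
-#   DEP_EARLY_PLUGINS = my_dep/mk/early-plugins.mk
-# includes that specific file from within the dependency.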
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
-
-# Query functions.
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
-
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
-
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly we don't want to include it here,
-# otherwise it'll be treated both as an app and as the top-level project.
-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
-
-export NO_AUTOPATCH
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
-
-# Optimization: don't recompile deps unless truly necessary.
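-# (ebin/dep_built acts as a build stamp: the deps:: recipe below skips any
-# dependency that already has it, unless FULL or FORCE_REBUILD says otherwise.)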
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create ebin directory for all apps to make sure Erlang recognizes them
-# as proper OTP applications when using -include_lib. This is a temporary
-# fix; a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
-
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies after they have been compiled
-# once. A developer working on the top-level project and some of its
-# dependencies at the same time may want to change this behavior.
-# There are two solutions:
-# 1. Set `FULL=1` so that all dependencies are visited and
-# recursively recompiled if necessary.
-# 2. Set `FORCE_REBUILD=` to the specific list of dependencies that
-# should be recompiled (instead of the whole set).
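-# For illustration (cowboy is a hypothetical dependency name here):
-#   make FULL=1                  # visit and recompile all dependencies if needed
-#   make FORCE_REBUILD=cowboy    # force only the cowboy dependency to be rebuilt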
-
-FORCE_REBUILD ?=
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
-
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
-
-# Deps related targets.
-
-# @todo rename GNUmakefile and makefile into Makefile first, if they exist
-# While the Makefile could also be named GNUmakefile or makefile,
-# in practice only Makefile is needed so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
-# if given. Do it for all 3 possible Makefile file names.
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
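-# For illustration: a dependency Makefile line such as
-#   include erlang.mk
-# ends up rewritten by the sed call above to
-#   include $(if $(ERLANG_MK_FILENAME),$(ERLANG_MK_FILENAME),erlang.mk)
-# so the parent project's Erlang.mk is reused when its location is known.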
-
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-# Hex only has a package version. No need to look in the Erlang.mk packages.
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
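-# For illustration (jsx and the version are hypothetical here): declaring
-#   DEPS += jsx
-#   dep_jsx = hex 2.9.0
-# makes the rule above download https://repo.hex.pm/tarballs/jsx-2.9.0.tar and
-# unpack its contents.tar.gz into $(DEPS_DIR)/jsx.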
-
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility purposes with older Erlang.mk configuration.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
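-# For illustration (hypothetical dependency): the deprecated form is a bare
-# repository URL plus an optional commit, e.g.
-#   dep_cowboy = https://github.com/ninenines/cowboy 2.9.0
-# with the commit defaulting to master when omitted.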
-
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
-
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- Output0 = [
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ],
- Output = case "é" of
- [233] -> unicode:characters_to_binary(Output0);
- _ -> Output0
- end,
- ok = file:write_file("$(1)", Output),
- halt()
-endef
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
- echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
-
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
- @:
-
-test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
-test_erlc_verbose_2 = set -x;
-test_erlc_verbose = $(test_erlc_verbose_$(V))
-
-define compile_test_erl
- $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(1)
-endef
-
-ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
-$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
- $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want to fail due to
-# warnings when used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
-
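For reference, a hedged sketch of what the compat_rebar_config template above renders once ERLC_OPTS has had -Werror stripped; the dependency name, tag and URL below are made up for illustration.

    %% rebar.config produced by 'make rebar.config' (illustrative sketch only)
    {deps, [
        {cowlib, ".*", {git, "https://github.com/ninenines/cowlib", "2.12.1"}}
    ]}.
    {erl_opts, [debug_info, warn_export_vars, warn_shadow_vars, warn_obsolete_guard]}.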
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
- " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
- " list-templates List available templates"
-
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
-
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
-
-list-templates:
- $(verbose) @echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code. The "gcc" MSYS2 package also doesn't.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
- "The CI_OTP variable must be defined with the Erlang versions" \
- "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
-
-# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifdef CONCUERROR_TESTS
-
-.PHONY: concuerror distclean-concuerror
-
-# Configuration
-
-CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
-CONCUERROR_OPTS ?=
-
-# Core targets.
-
-check:: concuerror
-
-ifndef KEEP_LOGS
-distclean:: distclean-concuerror
-endif
-
-# Plugin-specific targets.
-
-$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
- $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
- $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
-
-$(CONCUERROR_LOGS_DIR):
- $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
-
-define concuerror_html_report
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="utf-8">
-<title>Concuerror HTML report</title>
-</head>
-<body>
-<h1>Concuerror HTML report</h1>
-<p>Generated on $(concuerror_date)</p>
-<ul>
-$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
-</ul>
-</body>
-</html>
-endef
-
-concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
- $(eval concuerror_date := $(shell date))
- $(eval concuerror_targets := $^)
- $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
-
-define concuerror_target
-.PHONY: concuerror-$1-$2
-
-concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
- $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
- --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
- -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
- $$(CONCUERROR_OPTS) -m $1 -t $2
-endef
-
-$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
-
-distclean-concuerror:
- $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
-
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ct apps-ct distclean-ct
-
-# Configuration.
-
-CT_OPTS ?=
-
-ifneq ($(wildcard $(TEST_DIR)),)
-ifndef CT_SUITES
-CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
-endif
-endif
-CT_SUITES ?=
-CT_LOGS_DIR ?= $(CURDIR)/logs
-
-# Core targets.
-
-tests:: ct
-
-ifndef KEEP_LOGS
-distclean:: distclean-ct
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Common_test targets:" \
- " ct Run all the common_test suites for this project" \
- "" \
- "All your common_test suites have their associated targets." \
-"A suite named http_SUITE can be run using the ct-http target."
-
-# Plugin-specific targets.
-
-CT_RUN = ct_run \
- -no_auto_compile \
- -noinput \
- -pa $(CURDIR)/ebin $(TEST_DIR) \
- -dir $(TEST_DIR) \
- -logdir $(CT_LOGS_DIR)
-
-ifeq ($(CT_SUITES),)
-ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
-else
-# We do not run tests if we are in an apps/* with no test directory.
-ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
-ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
-endif
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-define ct_app_target
-apps-ct-$1: test-build
- $$(MAKE) -C $1 ct IS_APP=1
-endef
-
-$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
-
-apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
-endif
-
-ifdef t
-ifeq (,$(findstring :,$t))
-CT_EXTRA = -group $t
-else
-t_words = $(subst :, ,$t)
-CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
-endif
-else
-ifdef c
-CT_EXTRA = -case $c
-else
-CT_EXTRA =
-endif
-endif
-
-define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
-endef
-
-$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
-
-distclean-ct:
- $(gen_verbose) rm -rf $(CT_LOGS_DIR)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: plt distclean-plt dialyze
-
-# Configuration.
-
-DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
-export DIALYZER_PLT
-
-PLT_APPS ?=
-DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-DIALYZER_PLT_OPTS ?=
-
-# Core targets.
-
-check:: dialyze
-
-distclean:: distclean-plt
-
-help::
- $(verbose) printf "%s\n" "" \
- "Dialyzer targets:" \
- " plt Build a PLT file for this project" \
- " dialyze Analyze the project using Dialyzer"
-
-# Plugin-specific targets.
-
-define filter_opts.erl
- Opts = init:get_plain_arguments(),
- {Filtered, _} = lists:foldl(fun
- (O, {Os, true}) -> {[O|Os], false};
- (O = "-D", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-I", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-pa", {Os, _}) -> {[O|Os], true};
- (_, Acc) -> Acc
- end, {[], false}, Opts),
- io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
- halt().
-endef
-
-# DIALYZER_PLT is a variable understood directly by Dialyzer.
-#
-# We append the path to erts at the end of the PLT. This works
-# because the PLT file is in the external term format and the
-# function binary_to_term/1 ignores any trailing data.
-$(DIALYZER_PLT): deps app
- $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
- while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
- $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
- erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
- $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
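Below is a hedged Erlang sketch (module name and erts path are made up) of the trailing-data behaviour the comment above relies on: binary_to_term/1 is expected to decode the leading PLT term and ignore the path bytes the recipe appends.

    -module(plt_trailing_demo).
    -export([check/0]).

    %% term_to_binary/1 stands in for real PLT contents here.
    check() ->
        Plt = term_to_binary({fake_plt, [data]}),
        ErtsPath = <<"\n/usr/local/lib/erlang/lib/erts-12.2\n">>,
        %% Decoding still yields the original term despite the appended bytes.
        {fake_plt, [data]} = binary_to_term(<<Plt/binary, ErtsPath/binary>>),
        ok.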
-
-plt: $(DIALYZER_PLT)
-
-distclean-plt:
- $(gen_verbose) rm -f $(DIALYZER_PLT)
-
-ifneq ($(wildcard $(DIALYZER_PLT)),)
-dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
- $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
- grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
- rm $(DIALYZER_PLT); \
- $(MAKE) plt; \
- fi
-else
-dialyze: $(DIALYZER_PLT)
-endif
- $(verbose) dialyzer --no_native `$(ERL) \
- -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
- -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
- " escript Build an executable escript archive" \
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
- $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: eunit apps-eunit
-
-# Configuration
-
-EUNIT_OPTS ?=
-EUNIT_ERL_OPTS ?=
-
-# Core targets.
-
-tests:: eunit
-
-help::
- $(verbose) printf "%s\n" "" \
- "EUnit targets:" \
- " eunit Run all the EUnit tests for this project"
-
-# Plugin-specific targets.
-
-define eunit.erl
- $(call cover.erl)
- CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
- ok -> ok;
- error -> halt(2)
- end,
- CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
- halt()
-endef
-
-EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
-else
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
-endif
-else
-EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
-EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
-
-EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
- $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
-
-eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
-ifneq ($(wildcard src/ $(TEST_DIR)),)
- $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-apps-eunit: test-build
- $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
- [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
- exit $$eunit_retcode
-endif
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper)
-.PHONY: proper
-
-# Targets.
-
-tests:: proper
-
-define proper_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- Module = fun(M) ->
- [true] =:= lists:usort([
- case atom_to_list(F) of
- "prop_" ++ _ ->
- io:format("Testing ~p:~p/0~n", [M, F]),
- proper:quickcheck(M:F(), nocolors);
- _ ->
- true
- end
- || {F, 0} <- M:module_info(exports)])
- end,
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]);
- module -> Module($(2));
- function -> proper:quickcheck($(2), nocolors)
- end,
- CoverExport("$(COVER_DATA_DIR)/proper.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-proper: test-build cover-data-dir
- $(verbose) $(call erlang,$(call proper_check.erl,module,$(t)))
-else
-proper: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)()))
-endif
-else
-proper: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Verbosity.
-
-proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
-proto_verbose = $(proto_verbose_$(V))
-
-# Core targets.
-
-ifneq ($(wildcard src/),)
-ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
-PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
-ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
-
-ifeq ($(PROTO_FILES),)
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
- $(verbose) :
-else
-# Rebuild proto files when the Makefile changes.
-# We exclude $(PROJECT).d to avoid a circular dependency.
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(PROTO_FILES); \
- fi
- $(verbose) touch $@
-
-$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
-endif
-
-ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
-define compile_proto.erl
- [begin
- protobuffs_compile:generate_source(F, [
- {output_include_dir, "./include"},
- {output_src_dir, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-else
-define compile_proto.erl
- [begin
- gpb_compile:file(F, [
- {include_as_lib, true},
- {module_name_suffix, "_pb"},
- {o_hrl, "./include"},
- {o_erl, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-endif
-
-ifneq ($(PROTO_FILES),)
-$(PROJECT).d:: $(PROTO_FILES)
- $(verbose) mkdir -p ebin/ include/
- $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
-endif
-endif
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
-
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
- " shell Run an erlang shell with SHELL_OPTS or reasonable default"
-
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
-
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
-"ReST sources and 'conf.py' file are expected in the directory pointed to by" \
- "SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists formats to build (only" \
- "'html' format is generated by default); target directory can be specified by" \
- 'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
- "Additional Sphinx options can be set in SPHINX_OPTS."
-
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
-
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
-.PHONY: triq
-
-# Targets.
-
-tests:: triq
-
-define triq_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
- module -> triq:check($(2));
- function -> triq:check($(2))
- end,
- CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-triq: test-build cover-data-dir
- $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
-else
-triq: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
-endif
-else
-triq: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
- ' xref Run Xrefr using $$XREF_CONFIG as config file if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-COVER_REPORT_DIR ?= cover
-COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
-
-ifdef COVER
-COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
-COVER_DEPS ?=
-endif
-
-# Code coverage for Common Test.
-
-ifdef COVER
-ifdef CT_RUN
-ifneq ($(wildcard $(TEST_DIR)),)
-test-build:: $(TEST_DIR)/ct.cover.spec
-
-$(TEST_DIR)/ct.cover.spec: cover-data-dir
- $(gen_verbose) printf "%s\n" \
- "{incl_app, '$(PROJECT)', details}." \
- "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
- $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
- $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
-
-CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
-endif
-endif
-endif
-
-# Code coverage for other tools.
-
-ifdef COVER
-define cover.erl
- CoverSetup = fun() ->
- Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
- $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
- $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
- end
- end || Dir <- Dirs]
- end,
- CoverExport = fun(Filename) -> cover:export(Filename) end,
-endef
-else
-define cover.erl
- CoverSetup = fun() -> ok end,
- CoverExport = fun(_) -> ok end,
-endef
-endif
-
-# Core targets
-
-ifdef COVER
-ifneq ($(COVER_REPORT_DIR),)
-tests::
- $(verbose) $(MAKE) --no-print-directory cover-report
-endif
-
-cover-data-dir: | $(COVER_DATA_DIR)
-
-$(COVER_DATA_DIR):
- $(verbose) mkdir -p $(COVER_DATA_DIR)
-else
-cover-data-dir:
-endif
-
-clean:: coverdata-clean
-
-ifneq ($(COVER_REPORT_DIR),)
-distclean:: cover-report-clean
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Cover targets:" \
- " cover-report Generate a HTML coverage report from previously collected" \
- " cover data." \
- " all.coverdata Merge all coverdata files into all.coverdata." \
- "" \
- "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
- "target tests additionally generates a HTML coverage report from the combined" \
- "coverdata files from each of these testing tools. HTML reports can be disabled" \
- "by setting COVER_REPORT_DIR to empty."
-
-# Plugin specific targets
-
-COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
-
-.PHONY: coverdata-clean
-coverdata-clean:
- $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
-
-# Merge all coverdata files into one.
-define cover_export.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
-endef
-
-all.coverdata: $(COVERDATA) cover-data-dir
- $(gen_verbose) $(call erlang,$(cover_export.erl))
-
-# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
-# empty if you want the coverdata files but not the HTML report.
-ifneq ($(COVER_REPORT_DIR),)
-
-.PHONY: cover-report-clean cover-report
-
-cover-report-clean:
- $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
-ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
- $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
-endif
-
-ifeq ($(COVERDATA),)
-cover-report:
-else
-
-# Modules which include eunit.hrl always contain one line without coverage
-# because eunit defines test/0 which is never called. We compensate for this.
-EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
- grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
- | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
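The compensation described above is a simple subtraction; a hedged sketch with made-up numbers: a module that includes eunit.hrl and reports {Covered, NotCovered} = {40, 3} is counted as {40, 2}, because eunit's generated test/0 can never be executed. The helper below mirrors the Report1 comprehension in cover_report.erl.

    %% Illustrative only; not part of erlang.mk.
    adjust_for_eunit_hrl({Mod, {Covered, NotCovered}}, EunitHrlMods) ->
        case lists:member(Mod, EunitHrlMods) of
            true  -> {Mod, {Covered, NotCovered - 1}};
            false -> {Mod, {Covered, NotCovered}}
        end.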
-
-define cover_report.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- Ms = cover:imported_modules(),
- [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
- ++ ".COVER.html", [html]) || M <- Ms],
- Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
- EunitHrlMods = [$(EUNIT_HRL_MODS)],
- Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
- true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
- TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
- TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
- Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
- TotalPerc = Perc(TotalY, TotalN),
- {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
- io:format(F, "<!DOCTYPE html><html>~n"
- "<head><meta charset=\"UTF-8\">~n"
- "<title>Coverage report</title></head>~n"
- "<body>~n", []),
- io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
- io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
- [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
- "<td>~p%</td></tr>~n",
- [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
- How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
- Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
- io:format(F, "</table>~n"
- "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
- "</body></html>", [How, Date]),
- halt().
-endef
-
-cover-report:
- $(verbose) mkdir -p $(COVER_REPORT_DIR)
- $(gen_verbose) $(call erlang,$(cover_report.erl))
-
-endif
-endif # ifneq ($(COVER_REPORT_DIR),)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
-
-endif
-endif
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
-
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
-
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are also included
-# regardless of the type of dependencies requested.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
-
-# Allow the use of fetch-deps together with $(DEP_TYPES) to fetch multiple
-# types of dependencies with a single target.
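-# Illustrative usage (editor's note, not part of the original file): fetching
-# normal and test dependencies in one pass could presumably be done with:
-#
-#     make fetch-deps DEP_TYPES=test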
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
-
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
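-
-# Illustrative usage (editor's note, not part of the original file): listing
-# only the name and repository of each dependency could presumably be done
-# with:
-#
-#     make query-deps QUERY="name repo"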
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
diff --git a/deps/rabbitmq_event_exchange/rabbitmq-components.mk b/deps/rabbitmq_event_exchange/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/rabbitmq_event_exchange/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Define the default goal as `all` because this file defines some targets
-# before the inclusion of erlang.mk, leading to the wrong target becoming
-# the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
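-
-# Illustrative example (editor's note, not part of the original file): with
-# the sed pipeline above, a git-describe output such as "v3.8.9-12-gabc1234"
-# would presumably become the version "3.8.9+12.gabc1234".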
-
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to check out branches which match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch, or fall back to `stable` or `master`, whichever was the
-# base of the topic branch.
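-# Illustrative example (editor's note, not part of the original file): on a
-# topic branch named "my-feature", dep_rabbit below would presumably try
-# "my-feature" first, then the computed $(base_rmq_ref), and finally "master".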
-
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
-
-# Third-party dependencies version pinning.
-#
-# We do that in this file, which is copied in all projects, to ensure
-# all projects use the same versions. It avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# Erlang.mk does not rebuild dependencies by default once they have been
-# compiled, except for those listed in the `$(FORCE_REBUILD)`
-# variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this eases
-# the work on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project
-# repository URL, if it's a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name are replaced by the
-# target's properties:
-# eg. rabbitmq-common is replaced by rabbitmq-codegen
-# eg. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fallback to RabbitMQ
-# upstream which is GitHub.
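-# Illustrative example (editor's note, not part of the original file): if the
-# "origin" remote of this clone were git@example.com:mirrors/rabbitmq-event-exchange.git,
-# the rabbit_common dependency would presumably be tried first from
-# git@example.com:mirrors/rabbitmq-common.git and only then from GitHub.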
-
-# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following pattern:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
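-
-# Illustrative example (editor's note, not part of the original file):
-#
-#     $(call subst_repo_name,rabbitmq-common,rabbitmq-codegen,https://github.com/rabbitmq/rabbitmq-common.git)
-#
-# would presumably yield https://github.com/rabbitmq/rabbitmq-codegen.git.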
-
-# Macro to replace both the project's name (eg. "rabbit_common") and
-# repository name (eg. "rabbitmq-common") by the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespaces in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level's one.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is a coincidence that we found a
-# `rabbitmq-components.mk` in upper directories.
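-# Illustrative example (editor's note, not part of the original file): when
-# this plugin lives at <umbrella>/deps/rabbitmq_event_exchange, $(abspath ..)
-# is <umbrella>/deps, which is indeed named "deps" and sits next to the
-# umbrella's rabbitmq-components.mk, so DEPS_DIR is redirected there.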
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
diff --git a/deps/rabbitmq_event_exchange/src/rabbit_event_exchange_decorator.erl b/deps/rabbitmq_event_exchange/src/rabbit_event_exchange_decorator.erl
index ea7fffafcd..15d1a33fdf 100644
--- a/deps/rabbitmq_event_exchange/src/rabbit_event_exchange_decorator.erl
+++ b/deps/rabbitmq_event_exchange/src/rabbit_event_exchange_decorator.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2018-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2018-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_event_exchange_decorator).
diff --git a/deps/rabbitmq_event_exchange/src/rabbit_exchange_type_event.erl b/deps/rabbitmq_event_exchange/src/rabbit_exchange_type_event.erl
index eb511abaec..975e51390f 100644
--- a/deps/rabbitmq_event_exchange/src/rabbit_exchange_type_event.erl
+++ b/deps/rabbitmq_event_exchange/src/rabbit_exchange_type_event.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_exchange_type_event).
@@ -142,6 +142,10 @@ key(policy_set) ->
<<"policy.set">>;
key(policy_cleared) ->
<<"policy.cleared">>;
+key(queue_policy_updated) ->
+ <<"queue.policy.updated">>;
+key(queue_policy_cleared) ->
+ <<"queue.policy.cleared">>;
key(parameter_set) ->
<<"parameter.set">>;
key(parameter_cleared) ->
diff --git a/deps/rabbitmq_event_exchange/test/config_schema_SUITE.erl b/deps/rabbitmq_event_exchange/test/config_schema_SUITE.erl
index ac20e857ab..df6dd193ae 100644
--- a/deps/rabbitmq_event_exchange/test/config_schema_SUITE.erl
+++ b/deps/rabbitmq_event_exchange/test/config_schema_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2018-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2018-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(config_schema_SUITE).
diff --git a/deps/rabbitmq_event_exchange/test/system_SUITE.erl b/deps/rabbitmq_event_exchange/test/system_SUITE.erl
index 79b819b962..aa305d18d5 100644
--- a/deps/rabbitmq_event_exchange/test/system_SUITE.erl
+++ b/deps/rabbitmq_event_exchange/test/system_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(system_SUITE).
diff --git a/deps/rabbitmq_event_exchange/test/unit_SUITE.erl b/deps/rabbitmq_event_exchange/test/unit_SUITE.erl
index 02f0ab143c..d6ae77c08e 100644
--- a/deps/rabbitmq_event_exchange/test/unit_SUITE.erl
+++ b/deps/rabbitmq_event_exchange/test/unit_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(unit_SUITE).
diff --git a/deps/rabbitmq_federation/BUILD.bazel b/deps/rabbitmq_federation/BUILD.bazel
new file mode 100644
index 0000000000..0f84e743c1
--- /dev/null
+++ b/deps/rabbitmq_federation/BUILD.bazel
@@ -0,0 +1,135 @@
+load("@bazel-erlang//:bazel_erlang_lib.bzl", "erlc")
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze")
+load(
+ "//:rabbitmq.bzl",
+ "RABBITMQ_DIALYZER_OPTS",
+ "RABBITMQ_TEST_ERLC_OPTS",
+ "assert_suites",
+ "broker_for_integration_suites",
+ "rabbitmq_integration_suite",
+ "rabbitmq_lib",
+ "rabbitmq_suite",
+)
+
+APP_NAME = "rabbitmq_federation"
+
+APP_DESCRIPTION = "RabbitMQ Federation"
+
+APP_MODULE = "rabbit_federation_app"
+
+APP_ENV = """[
+ {pgroup_name_cluster_id, false},
+ {internal_exchange_check_interval, 90000}
+ ]"""
+
+BUILD_DEPS = [
+ "//deps/rabbitmq_cli:rabbitmqctl",
+]
+
+DEPS = [
+ "//deps/amqp_client:bazel_erlang_lib",
+ "//deps/rabbit_common:bazel_erlang_lib",
+ "//deps/rabbit:bazel_erlang_lib",
+]
+
+rabbitmq_lib(
+ app_description = APP_DESCRIPTION,
+ app_env = APP_ENV,
+ app_module = APP_MODULE,
+ app_name = APP_NAME,
+ build_deps = BUILD_DEPS,
+ deps = DEPS,
+)
+
+xref(tags = ["xref"])
+
+dialyze(
+ dialyzer_opts = RABBITMQ_DIALYZER_OPTS,
+ plt = "//:base_plt",
+ tags = ["dialyze"],
+)
+
+broker_for_integration_suites()
+
+erlc(
+ name = "rabbit_federation_test_util",
+ testonly = True,
+ srcs = [
+ "test/rabbit_federation_test_util.erl",
+ ],
+ hdrs = glob([
+ "include/**/*.hrl",
+ "src/**/*.hrl",
+ ]),
+ dest = "test",
+ erlc_opts = RABBITMQ_TEST_ERLC_OPTS,
+ deps = [
+ ":test_bazel_erlang_lib",
+ "//deps/amqp_client:bazel_erlang_lib",
+ "//deps/rabbit_common:bazel_erlang_lib",
+ ],
+)
+
+PACKAGE = "deps/rabbitmq_federation"
+
+suites = [
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "exchange_SUITE",
+ size = "large",
+ additional_beam = [
+ ":rabbit_federation_test_util",
+ ],
+ flaky = True,
+ shard_count = 6,
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "federation_status_command_SUITE",
+ additional_beam = [
+ ":rabbit_federation_test_util",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "queue_SUITE",
+ additional_beam = [
+ ":rabbit_federation_test_util",
+ ],
+ flaky = True,
+ shard_count = 6,
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "rabbit_federation_status_SUITE",
+ additional_beam = [
+ ":rabbit_federation_test_util",
+ ":exchange_SUITE_beam_files",
+ ":queue_SUITE_beam_files",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "restart_federation_link_command_SUITE",
+ additional_beam = [
+ ":rabbit_federation_test_util",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "unit_inbroker_SUITE",
+ ),
+ rabbitmq_suite(
+ name = "unit_SUITE",
+ deps = [
+ "//deps/amqp_client:bazel_erlang_lib",
+ "//deps/rabbit_common:bazel_erlang_lib",
+ ],
+ ),
+]
+
+assert_suites(
+ suites,
+ glob(["test/**/*_SUITE.erl"]),
+)
diff --git a/deps/rabbitmq_federation/Makefile b/deps/rabbitmq_federation/Makefile
index ebe2eea6ba..e9883aae9f 100644
--- a/deps/rabbitmq_federation/Makefile
+++ b/deps/rabbitmq_federation/Makefile
@@ -25,5 +25,5 @@ DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
ERLANG_MK_COMMIT = rabbitmq-tmp
-include rabbitmq-components.mk
-include erlang.mk
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
diff --git a/deps/rabbitmq_federation/erlang.mk b/deps/rabbitmq_federation/erlang.mk
deleted file mode 100644
index fce4be0b0a..0000000000
--- a/deps/rabbitmq_federation/erlang.mk
+++ /dev/null
@@ -1,7808 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
-ERLANG_MK_WITHOUT =
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
-
-# Adding erlang.mk to make Erlang scripts that call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
-
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
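-# Illustrative usage (editor's note, not part of the original file): pinning
-# a project to a kerl-built Erlang/OTP under $(KERL_INSTALL_DIR) could
-# presumably look like:
-#
-#     make ERLANG_OTP=OTP-23.2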
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_HIPE)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
-
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
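The pkg_* entries removed in this hunk make up erlang.mk's bundled package index: each PACKAGES name is paired with a description, homepage, fetch method, repository URL and default ref. As a minimal sketch of how such an index entry is normally consumed (assuming stock erlang.mk dependency handling; the project name below is illustrative and not part of this diff), a project Makefile only lists the package in DEPS, and erlang.mk falls back to the pkg_<name>_fetch/_repo/_commit values unless an explicit dep_<name> override is defined:

    # Illustrative project Makefile (not part of this diff)
    PROJECT = my_app

    # Fetched via the index defaults shown above:
    # pkg_cowboy_fetch = git, pkg_cowboy_repo = https://github.com/ninenines/cowboy,
    # pkg_cowboy_commit = 1.0.4
    DEPS = cowboy

    # A dep_* line would take precedence over the index entry, e.g.:
    # dep_cowboy = git https://github.com/ninenines/cowboy 1.0.4

    include erlang.mk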
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simplify writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating Github-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = erlang library for JSON Web Token
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple non intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = redis erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += esh_mk
-pkg_esh_mk_name = esh_mk
-pkg_esh_mk_description = esh template engine plugin for erlang.mk
-pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
-pkg_esh_mk_fetch = git
-pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
-pkg_esh_mk_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = TOML language erlang parser
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = http://fixprotocol.org/ implementation.
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - a Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = Guard helpers for Erlang, implemented as a parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
-pkg_gun_homepage = http://ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang irc client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding and decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library for efficiently decoding and encoding JSON, written in C.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-Erlang, open-source implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = erlang driver for rethinkdb
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = library to handle mimetypes
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang Oauth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transform for using new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between record and proplist
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = automatic Erlang node discovery via redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = pipelined erlang redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang Rest Client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy as a NIF for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an ID generator for message services.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elastic Search
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX style broken HTML parser in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = transit format for erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU based collation Erlang module
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtension for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = a simple erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler svn repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based yaml loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = ZAB protocol implemented in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
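-
-# A minimal usage sketch for the search target above; the query string
-# "cowboy" is only an illustration:
-#
-#   make search q=cowboy   # print the PACKAGES entries whose name or
-#                          # description matches "cowboy", case-insensitively
-#   make search            # print every package via pkg_print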
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
-
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
-
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# They both use the core_dep_plugin macro.
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
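-
-# A sketch of how DEP_EARLY_PLUGINS entries are resolved by the foreach
-# above (the dependency name "my_dep" is hypothetical): a bare name loads
-# that dependency's early-plugins.mk, while a path selects a specific file.
-#
-#   DEP_EARLY_PLUGINS = my_dep              # -include $(DEPS_DIR)/my_dep/early-plugins.mk
-#   DEP_EARLY_PLUGINS = my_dep/mk/early.mk  # -include $(DEPS_DIR)/my_dep/mk/early.mk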
-
-# Query functions.
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
-
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
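-
-# A worked sketch of the query functions above for a hypothetical entry
-# "dep_my_dep = git https://github.com/example/my_dep 1.0.0":
-#
-#   $(call query_fetch_method,my_dep)  -> git
-#   $(call query_repo,my_dep)          -> https://github.com/example/my_dep
-#   $(call query_version,my_dep)       -> 1.0.0
-#   $(call query_absolute_path,my_dep) -> $(DEPS_DIR)/my_dep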
-
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly we don't want to include it here,
-# otherwise it would be treated both as an app and as the top-level project.
-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
-
-export NO_AUTOPATCH
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
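-
-# Verbosity sketch: V is the switch consumed by every *_verbose macro in
-# this file and is assumed to default to 0 elsewhere in erlang.mk.
-#
-#   make deps        # V=0: terse " DEP    foo (commit)" lines
-#   make deps V=1    # no *_verbose_1 defined: make echoes the full recipes
-#   make deps V=2    # recipes additionally run under `set -x`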
-
-# Optimization: don't recompile deps unless truly necessary.
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create the ebin directory for all apps to make sure Erlang recognizes them
-# as proper OTP applications when using -include_lib. This is a temporary
-# fix; a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
-
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies once they have been compiled.
-# A developer working on the top-level project and some of its
-# dependencies at the same time may want to change this behavior.
-# There are two solutions:
-# 1. Set `FULL=1` so that all dependencies are visited and
-# recursively recompiled if necessary.
-# 2. Set `FORCE_REBUILD=` to the specific list of dependencies that
-# should be recompiled (instead of the whole set).
-
-FORCE_REBUILD ?=
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
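-
-# Usage sketch for the two options described above (the dependency names
-# are illustrative):
-#
-#   make FULL=1                        # visit every dependency and recompile as needed
-#   make FORCE_REBUILD="cowlib ranch"  # only these deps ignore their ebin/dep_built marker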
-
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
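-
-# Related switch, per the ifneq ($(SKIP_DEPS),) guard above:
-#
-#   make deps SKIP_DEPS=1    # deps becomes a no-op; distclean-deps further
-#                            # below is likewise skipped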
-
-# Deps related targets.
-
-# @todo rename GNUmakefile and makefile into Makefile first, if they exist
-# While the Makefile could also be named GNUmakefile or makefile,
-# in practice only Makefile has been needed so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
-# if given. Do it for all 3 possible Makefile file names.
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
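-
-# Effect sketch for dep_autopatch_erlang_mk: in the dependency's Makefile
-# the line
-#   include erlang.mk
-# becomes
-#   include $(if $(ERLANG_MK_FILENAME),$(ERLANG_MK_FILENAME),erlang.mk)
-# so the dep reuses the parent project's Erlang.mk when ERLANG_MK_FILENAME
-# is set; defining NO_AUTOPATCH_ERLANG_MK leaves the dep's own copy in place.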
-
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
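-
-# Output sketch for dep_autopatch_rebar.erl above (the dependency name is
-# illustrative): the generated $(DEPS_DIR)/<dep>/Makefile looks roughly like
-#
-#   ERLC_OPTS = +debug_info
-#   DEPS += some_dep
-#   dep_some_dep = git https://example.com/some_dep.git master
-#   rebar_dep: preprocess pre-deps deps pre-app app
-#   include $(if $(ERLANG_MK_FILENAME),$(ERLANG_MK_FILENAME),erlang.mk)
-#
-# plus a c_src/Makefile.erlang.mk hooked into pre-app when port_specs or a
-# c_src/ directory is present.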
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-# Hex only has a package version. No need to look in the Erlang.mk packages.
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
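-
-# Declaration sketch for the hex fetch method above (name and version are
-# illustrative):
-#
-#   DEPS += my_dep
-#   dep_my_dep = hex 1.2.3               # fetches my_dep-1.2.3.tar from repo.hex.pm
-#   dep_my_dep = hex 1.2.3 hex_pkg_name  # optional third word overrides the Hex package name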
-
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility with older Erlang.mk configurations.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
-
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
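-
-# Opt-out sketch for the autopatch step wired in above (the name is
-# illustrative): deps listed in NO_AUTOPATCH skip their autopatch-<dep>
-# target entirely, e.g. in the project Makefile:
-#
-#   NO_AUTOPATCH = my_nif_dep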
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
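-
-# Override sketch for the configuration block above (module names are
-# illustrative); a project sets these before including erlang.mk or on the
-# command line:
-#
-#   ERLC_OPTS = +debug_info +warn_export_vars  # replaces the -Werror default
-#   COMPILE_FIRST = my_behaviour               # compile src/my_behaviour.erl first
-#   ERLC_EXCLUDE = my_generated_mod            # do not compile src/my_generated_mod.erl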
-
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
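-
-# Input sketch for the two app_file templates above (values are
-# illustrative). When src/$(PROJECT).app.src is absent, ebin/$(PROJECT).app
-# is generated from variables such as:
-#
-#   PROJECT = my_app
-#   PROJECT_DESCRIPTION = My application
-#   PROJECT_VERSION = 0.1.0
-#   PROJECT_MOD = my_app_app     # src/$(PROJECT_MOD).erl existing selects the
-#                                # second template, which adds the {mod, ...} entry
-#   PROJECT_ENV = [{enabled, true}]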
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- Output0 = [
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ],
- Output = case "é" of
- [233] -> unicode:characters_to_binary(Output0);
- _ -> Output0
- end,
- ok = file:write_file("$(1)", Output),
- halt()
-endef
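-
-# Output sketch for makedep.erl above (file names are illustrative): the
-# generated $(PROJECT).d holds one rule per module that depends on headers,
-# behaviours or parse_transforms, plus a compile-order hint, e.g.
-#
-#   src/my_mod.erl:: include/my_hdr.hrl src/my_behaviour.erl; @touch $@
-#   COMPILE_FIRST += my_behaviour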
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
-		echo "No modules entry found in src/$(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
-
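For illustration only (application name, module list and `git describe` output below are invented), the sed pass above fills in the empty modules list and replaces the "git" id placeholder, so a src/myapp.app.src stub is rendered into an ebin/myapp.app along these lines:

    %% ebin/myapp.app as rendered from src/myapp.app.src, assuming two compiled
    %% modules and `git describe` printing "0.1.0-3-gabc1234":
    {application, myapp, [
        {description, ""},
        {vsn, "0.1.0"},
        {id, "0.1.0-3-gabc1234"},
        {modules, ['myapp_app','myapp_sup']},
        {registered, []},
        {applications, [kernel, stdlib]},
        {mod, {myapp_app, []}},
        {env, []}
    ]}.
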
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
- @:
-
-test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
-test_erlc_verbose_2 = set -x;
-test_erlc_verbose = $(test_erlc_verbose_$(V))
-
-define compile_test_erl
- $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(1)
-endef
-
-ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
-$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
- $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want to fail due to
-# warnings when used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
-
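As a sketch of what this renders (dependency name, URL and commit are made up), a project with DEPS = cowlib and ERLC_OPTS = -Werror +debug_info +warn_export_vars would end up with a rebar.config roughly like the following, with -Werror dropped as explained above and only the +options kept in erl_opts:

    {deps, [
        {cowlib,".*",{git,"https://github.com/ninenines/cowlib","2.11.0"}}
    ]}.
    {erl_opts, [debug_info,warn_export_vars]}.
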
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
- " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
- " list-templates List available templates"
-
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
-
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
-
-list-templates:
- $(verbose) @echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code. The "gcc" MSYS2 package also doesn't.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
- "The CI_OTP variable must be defined with the Erlang versions" \
- "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
-
-# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifdef CONCUERROR_TESTS
-
-.PHONY: concuerror distclean-concuerror
-
-# Configuration
-
-CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
-CONCUERROR_OPTS ?=
-
-# Core targets.
-
-check:: concuerror
-
-ifndef KEEP_LOGS
-distclean:: distclean-concuerror
-endif
-
-# Plugin-specific targets.
-
-$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
- $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
- $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
-
-$(CONCUERROR_LOGS_DIR):
- $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
-
-define concuerror_html_report
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="utf-8">
-<title>Concuerror HTML report</title>
-</head>
-<body>
-<h1>Concuerror HTML report</h1>
-<p>Generated on $(concuerror_date)</p>
-<ul>
-$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
-</ul>
-</body>
-</html>
-endef
-
-concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
- $(eval concuerror_date := $(shell date))
- $(eval concuerror_targets := $^)
- $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
-
-define concuerror_target
-.PHONY: concuerror-$1-$2
-
-concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
- $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
- --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
- -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
- $$(CONCUERROR_OPTS) -m $1 -t $2
-endef
-
-$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
-
-distclean-concuerror:
- $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
-
-endif
-
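Each CONCUERROR_TESTS entry takes the form module:function and is turned into a `concuerror -m module -t function` invocation by the target generated above. A hypothetical entry `race_example:two_workers`, with the module placed under $(TEST_DIR), would point at a zero-arity test such as this sketch:

    -module(race_example).
    -export([two_workers/0]).

    %% A toy entry point for Concuerror: two processes write to the same ETS
    %% key, and Concuerror explores every interleaving of the two writes.
    two_workers() ->
        Tab = ets:new(counter, [public]),
        Self = self(),
        _ = [spawn(fun() -> ets:insert(Tab, {key, N}), Self ! done end) || N <- [1, 2]],
        receive done -> ok end,
        receive done -> ok end,
        ok.
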
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ct apps-ct distclean-ct
-
-# Configuration.
-
-CT_OPTS ?=
-
-ifneq ($(wildcard $(TEST_DIR)),)
-ifndef CT_SUITES
-CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
-endif
-endif
-CT_SUITES ?=
-CT_LOGS_DIR ?= $(CURDIR)/logs
-
-# Core targets.
-
-tests:: ct
-
-ifndef KEEP_LOGS
-distclean:: distclean-ct
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Common_test targets:" \
- " ct Run all the common_test suites for this project" \
- "" \
- "All your common_test suites have their associated targets." \
-		"A suite named http_SUITE can be run using the ct-http target."
-
-# Plugin-specific targets.
-
-CT_RUN = ct_run \
- -no_auto_compile \
- -noinput \
- -pa $(CURDIR)/ebin $(TEST_DIR) \
- -dir $(TEST_DIR) \
- -logdir $(CT_LOGS_DIR)
-
-ifeq ($(CT_SUITES),)
-ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
-else
-# We do not run tests if we are in an apps/* with no test directory.
-ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
-ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
-endif
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-define ct_app_target
-apps-ct-$1: test-build
- $$(MAKE) -C $1 ct IS_APP=1
-endef
-
-$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
-
-apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
-endif
-
-ifdef t
-ifeq (,$(findstring :,$t))
-CT_EXTRA = -group $t
-else
-t_words = $(subst :, ,$t)
-CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
-endif
-else
-ifdef c
-CT_EXTRA = -case $c
-else
-CT_EXTRA =
-endif
-endif
-
-define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
-endef
-
-$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
-
-distclean-ct:
- $(gen_verbose) rm -rf $(CT_LOGS_DIR)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: plt distclean-plt dialyze
-
-# Configuration.
-
-DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
-export DIALYZER_PLT
-
-PLT_APPS ?=
-DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-DIALYZER_PLT_OPTS ?=
-
-# Core targets.
-
-check:: dialyze
-
-distclean:: distclean-plt
-
-help::
- $(verbose) printf "%s\n" "" \
- "Dialyzer targets:" \
- " plt Build a PLT file for this project" \
- " dialyze Analyze the project using Dialyzer"
-
-# Plugin-specific targets.
-
-define filter_opts.erl
- Opts = init:get_plain_arguments(),
- {Filtered, _} = lists:foldl(fun
- (O, {Os, true}) -> {[O|Os], false};
- (O = "-D", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-I", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-pa", {Os, _}) -> {[O|Os], true};
- (_, Acc) -> Acc
- end, {[], false}, Opts),
- io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
- halt().
-endef
-
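Unescaped and with invented option values, the same fold looks like this: it keeps -D, -I and -pa options (pulling in the following word when the value is a separate argument) and drops everything else, such as +debug_info:

    Opts = ["+debug_info", "-DTEST", "-I", "include", "-pa", "ebin"],
    {Filtered, _} = lists:foldl(fun
        (O, {Os, true}) -> {[O|Os], false};
        (O = "-D", {Os, _}) -> {[O|Os], true};
        (O = [$-, $D, _ | _], {Os, _}) -> {[O|Os], false};
        (O = "-I", {Os, _}) -> {[O|Os], true};
        (O = [$-, $I, _ | _], {Os, _}) -> {[O|Os], false};
        (O = "-pa", {Os, _}) -> {[O|Os], true};
        (_, Acc) -> Acc
    end, {[], false}, Opts),
    "-DTEST -I include -pa ebin" = string:join(lists:reverse(Filtered), " ").
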
-# DIALYZER_PLT is a variable understood directly by Dialyzer.
-#
-# We append the path to erts at the end of the PLT. This works
-# because the PLT file is in the external term format and the
-# function binary_to_term/1 ignores any trailing data.
-$(DIALYZER_PLT): deps app
- $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
- while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
- $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
- erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
- $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
-
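The trailing-data behaviour the comment above relies on can be sketched in a few lines of Erlang (the appended path is invented):

    %% binary_to_term/1 decodes the leading term and ignores whatever follows,
    %% which is why appending the erts path to the PLT does not confuse Dialyzer.
    Plt = term_to_binary({plt, example}),
    {plt, example} = binary_to_term(<<Plt/binary, "\n/usr/lib/erlang/lib/erts-12.2\n">>).
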
-plt: $(DIALYZER_PLT)
-
-distclean-plt:
- $(gen_verbose) rm -f $(DIALYZER_PLT)
-
-ifneq ($(wildcard $(DIALYZER_PLT)),)
-dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
- $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
- grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
- rm $(DIALYZER_PLT); \
- $(MAKE) plt; \
- fi
-else
-dialyze: $(DIALYZER_PLT)
-endif
- $(verbose) dialyzer --no_native `$(ERL) \
- -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
- -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
-		"  escript     Build an executable escript archive"
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
-	$(verbose) mkdir -p $(dir $(ESCRIPT_ZIP_FILE))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: eunit apps-eunit
-
-# Configuration
-
-EUNIT_OPTS ?=
-EUNIT_ERL_OPTS ?=
-
-# Core targets.
-
-tests:: eunit
-
-help::
- $(verbose) printf "%s\n" "" \
- "EUnit targets:" \
- " eunit Run all the EUnit tests for this project"
-
-# Plugin-specific targets.
-
-define eunit.erl
- $(call cover.erl)
- CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
- ok -> ok;
- error -> halt(2)
- end,
- CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
- halt()
-endef
-
-EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
-else
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
-endif
-else
-EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
-EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
-
-EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
- $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
-
-eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
-ifneq ($(wildcard src/ $(TEST_DIR)),)
- $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-apps-eunit: test-build
- $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
- [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
- exit $$eunit_retcode
-endif
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper)
-.PHONY: proper
-
-# Targets.
-
-tests:: proper
-
-define proper_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- Module = fun(M) ->
- [true] =:= lists:usort([
- case atom_to_list(F) of
- "prop_" ++ _ ->
- io:format("Testing ~p:~p/0~n", [M, F]),
- proper:quickcheck(M:F(), nocolors);
- _ ->
- true
- end
- || {F, 0} <- M:module_info(exports)])
- end,
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]);
- module -> Module($(2));
- function -> proper:quickcheck($(2), nocolors)
- end,
- CoverExport("$(COVER_DATA_DIR)/proper.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
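In the `all` mode above, every exported zero-arity prop_* function found in the beam files is run through proper:quickcheck/2. A minimal property module (module name and property invented) would look like:

    -module(prop_reverse).
    -include_lib("proper/include/proper.hrl").
    -export([prop_reverse_twice/0]).

    %% Reversing a list twice yields the original list; picked up automatically
    %% because the exported function is zero-arity and named prop_*.
    prop_reverse_twice() ->
        ?FORALL(L, list(integer()), lists:reverse(lists:reverse(L)) =:= L).
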
-ifdef t
-ifeq (,$(findstring :,$(t)))
-proper: test-build cover-data-dir
- $(verbose) $(call erlang,$(call proper_check.erl,module,$(t)))
-else
-proper: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)()))
-endif
-else
-proper: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Verbosity.
-
-proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
-proto_verbose = $(proto_verbose_$(V))
-
-# Core targets.
-
-ifneq ($(wildcard src/),)
-ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
-PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
-ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
-
-ifeq ($(PROTO_FILES),)
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
- $(verbose) :
-else
-# Rebuild proto files when the Makefile changes.
-# We exclude $(PROJECT).d to avoid a circular dependency.
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(PROTO_FILES); \
- fi
- $(verbose) touch $@
-
-$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
-endif
-
-ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
-define compile_proto.erl
- [begin
- protobuffs_compile:generate_source(F, [
- {output_include_dir, "./include"},
- {output_src_dir, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-else
-define compile_proto.erl
- [begin
- gpb_compile:file(F, [
- {include_as_lib, true},
- {module_name_suffix, "_pb"},
- {o_hrl, "./include"},
- {o_erl, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-endif
-
-ifneq ($(PROTO_FILES),)
-$(PROJECT).d:: $(PROTO_FILES)
- $(verbose) mkdir -p ebin/ include/
- $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
-endif
-endif
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
-
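For example, a relx.config in the shape produced by the bootstrap-rel template earlier in this file (release name and version are illustrative) yields the three fields used below:

    %% relx.config (input):
    {release, {myapp_release, "1"}, [myapp, sasl, runtime_tools]}.
    {extended_start_script, true}.
    {sys_config, "config/sys.config"}.
    {vm_args, "config/vm.args"}.
    %% get_relx_release.erl prints "myapp_release 1 1", so RELX_REL_NAME is
    %% myapp_release, RELX_REL_VSN is "1" and the run target appends "console".
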
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
- " shell Run an erlang shell with SHELL_OPTS or reasonable default"
-
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
-
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
-		"ReST sources and a 'conf.py' file are expected in the directory pointed to by" \
- "SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists formats to build (only" \
- "'html' format is generated by default); target directory can be specified by" \
- 'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
- "Additional Sphinx options can be set in SPHINX_OPTS."
-
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
-
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
-.PHONY: triq
-
-# Targets.
-
-tests:: triq
-
-define triq_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
- module -> triq:check($(2));
- function -> triq:check($(2))
- end,
- CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-triq: test-build cover-data-dir
- $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
-else
-triq: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
-endif
-else
-triq: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
- ' xref Run Xrefr using $$XREF_CONFIG as config file if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-COVER_REPORT_DIR ?= cover
-COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
-
-ifdef COVER
-COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
-COVER_DEPS ?=
-endif
-
-# Code coverage for Common Test.
-
-ifdef COVER
-ifdef CT_RUN
-ifneq ($(wildcard $(TEST_DIR)),)
-test-build:: $(TEST_DIR)/ct.cover.spec
-
-$(TEST_DIR)/ct.cover.spec: cover-data-dir
- $(gen_verbose) printf "%s\n" \
- "{incl_app, '$(PROJECT)', details}." \
- "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
- $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
- $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
-
-CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
-endif
-endif
-endif
-
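For reference, the rendered $(TEST_DIR)/ct.cover.spec ends up looking roughly like this for a hypothetical project myapp with one extra application and one covered dependency (paths shortened):

    {incl_app, 'myapp', details}.
    {incl_dirs, 'myapp', ["/home/user/myapp/ebin",
                          "/home/user/myapp/apps/myapp_web/ebin",
                          "/home/user/myapp/deps/cowboy/ebin"]}.
    {export, "/home/user/myapp/cover/ct.coverdata"}.
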
-# Code coverage for other tools.
-
-ifdef COVER
-define cover.erl
- CoverSetup = fun() ->
- Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
- $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
- $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
- end
- end || Dir <- Dirs]
- end,
- CoverExport = fun(Filename) -> cover:export(Filename) end,
-endef
-else
-define cover.erl
- CoverSetup = fun() -> ok end,
- CoverExport = fun(_) -> ok end,
-endef
-endif
-
-# Core targets
-
-ifdef COVER
-ifneq ($(COVER_REPORT_DIR),)
-tests::
- $(verbose) $(MAKE) --no-print-directory cover-report
-endif
-
-cover-data-dir: | $(COVER_DATA_DIR)
-
-$(COVER_DATA_DIR):
- $(verbose) mkdir -p $(COVER_DATA_DIR)
-else
-cover-data-dir:
-endif
-
-clean:: coverdata-clean
-
-ifneq ($(COVER_REPORT_DIR),)
-distclean:: cover-report-clean
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Cover targets:" \
-		"  cover-report  Generate an HTML coverage report from previously collected" \
- " cover data." \
- " all.coverdata Merge all coverdata files into all.coverdata." \
- "" \
- "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
-		"target tests additionally generates an HTML coverage report from the combined" \
- "coverdata files from each of these testing tools. HTML reports can be disabled" \
- "by setting COVER_REPORT_DIR to empty."
-
-# Plugin specific targets
-
-COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
-
-.PHONY: coverdata-clean
-coverdata-clean:
- $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
-
-# Merge all coverdata files into one.
-define cover_export.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
-endef
-
-all.coverdata: $(COVERDATA) cover-data-dir
- $(gen_verbose) $(call erlang,$(cover_export.erl))
-
-# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
-# empty if you want the coverdata files but not the HTML report.
-ifneq ($(COVER_REPORT_DIR),)
-
-.PHONY: cover-report-clean cover-report
-
-cover-report-clean:
- $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
-ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
- $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
-endif
-
-ifeq ($(COVERDATA),)
-cover-report:
-else
-
-# Modules which include eunit.hrl always contain one line without coverage
-# because eunit defines test/0 which is never called. We compensate for this.
-EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
- grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
- | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
-
-define cover_report.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- Ms = cover:imported_modules(),
- [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
- ++ ".COVER.html", [html]) || M <- Ms],
- Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
- EunitHrlMods = [$(EUNIT_HRL_MODS)],
- Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
- true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
- TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
- TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
- Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
- TotalPerc = Perc(TotalY, TotalN),
- {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
- io:format(F, "<!DOCTYPE html><html>~n"
- "<head><meta charset=\"UTF-8\">~n"
- "<title>Coverage report</title></head>~n"
- "<body>~n", []),
- io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
- io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
- [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
- "<td>~p%</td></tr>~n",
- [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
- How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
- Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
- io:format(F, "</table>~n"
- "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
- "</body></html>", [How, Date]),
- halt().
-endef
-
-cover-report:
- $(verbose) mkdir -p $(COVER_REPORT_DIR)
- $(gen_verbose) $(call erlang,$(cover_report.erl))
-
-endif
-endif # ifneq ($(COVER_REPORT_DIR),)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
-
-endif
-endif
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
-
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
-
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are also included no
-# matter the type of requested dependencies.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
-
-# Allow using fetch-deps and $(DEP_TYPES) to fetch multiple types of
-# dependencies with a single target.
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
-
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
diff --git a/deps/rabbitmq_federation/include/logging.hrl b/deps/rabbitmq_federation/include/logging.hrl
new file mode 100644
index 0000000000..019713e11b
--- /dev/null
+++ b/deps/rabbitmq_federation/include/logging.hrl
@@ -0,0 +1,3 @@
+-include_lib("rabbit_common/include/logging.hrl").
+
+-define(RMQLOG_DOMAIN_FEDERATION, ?DEFINE_RMQLOG_DOMAIN(federation)).
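The new header above only defines a logger domain macro for the plugin. As a hypothetical usage sketch (not part of this commit), and assuming ?DEFINE_RMQLOG_DOMAIN expands to a [rabbitmq, federation]-style domain list as in rabbit_common, the domain would be attached to log events through standard OTP logger metadata:

    %% Illustrative only: attach the federation logger domain to a log event
    %% so logger handlers and filters can select federation messages.
    -module(federation_logging_example).
    -include("logging.hrl").  %% the header added above
    -export([log_link_started/1]).

    log_link_started(UpstreamName) ->
        logger:info("federation link to upstream '~s' started", [UpstreamName],
                    #{domain => ?RMQLOG_DOMAIN_FEDERATION}).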
diff --git a/deps/rabbitmq_federation/include/rabbit_federation.hrl b/deps/rabbitmq_federation/include/rabbit_federation.hrl
index af92e1aa25..a8706da0d5 100644
--- a/deps/rabbitmq_federation/include/rabbit_federation.hrl
+++ b/deps/rabbitmq_federation/include/rabbit_federation.hrl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-record(upstream, {uris,
@@ -19,7 +19,9 @@
ha_policy,
name,
bind_nowait,
- resource_cleanup_mode}).
+ resource_cleanup_mode,
+ channel_use_mode
+ }).
-record(upstream_params,
{uri,
@@ -42,3 +44,5 @@
-define(DEF_PREFETCH, 1000).
-define(FEDERATION_GUIDE_URL, <<"https://rabbitmq.com/federation.html">>).
+
+-define(FEDERATION_PG_SCOPE, rabbitmq_federation_pg_scope).
\ No newline at end of file
diff --git a/deps/rabbitmq_federation/rabbitmq-components.mk b/deps/rabbitmq_federation/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/rabbitmq_federation/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Define default goal to `all` because this file defines some targets
-# before the inclusion of erlang.mk leading to the wrong target becoming
-# the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
-
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to checkout branches which match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch or fallback to `stable` or `master` whichever was the
-# base of the topic branch.
-
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
-
-# Third-party dependencies version pinning.
-#
-# We do that in this file, which is copied in all projects, to ensure
-# all projects use the same versions. It avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# Erlang.mk does not rebuild dependencies by default, once they were
-# compiled once, except for those listed in the `$(FORCE_REBUILD)`
-# variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this eases
-# the work on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project
-# repository URL, if it's a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name is replaced by the
-# target's properties:
-# eg. rabbitmq-common is replaced by rabbitmq-codegen
-# eg. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fallback to RabbitMQ
-# upstream which is GitHub.
-
-# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following pattern:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
-
-# Macro to replace both the project's name (eg. "rabbit_common") and
-# repository name (eg. "rabbitmq-common") by the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespaces in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level's one.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is a coincidence that we found a
-# `rabbitmq-components.mk` in upper directories.
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
diff --git a/deps/rabbitmq_federation/src/Elixir.RabbitMQ.CLI.Ctl.Commands.FederationStatusCommand.erl b/deps/rabbitmq_federation/src/Elixir.RabbitMQ.CLI.Ctl.Commands.FederationStatusCommand.erl
index bab4dddeec..e613ea65b6 100644
--- a/deps/rabbitmq_federation/src/Elixir.RabbitMQ.CLI.Ctl.Commands.FederationStatusCommand.erl
+++ b/deps/rabbitmq_federation/src/Elixir.RabbitMQ.CLI.Ctl.Commands.FederationStatusCommand.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module('Elixir.RabbitMQ.CLI.Ctl.Commands.FederationStatusCommand').
@@ -11,6 +11,8 @@
-behaviour('Elixir.RabbitMQ.CLI.CommandBehaviour').
+-ignore_xref({'Elixir.RabbitMQ.CLI.DefaultOutput', output, 1}).
+
-export([
usage/0,
usage_additional/0,
diff --git a/deps/rabbitmq_federation/src/Elixir.RabbitMQ.CLI.Ctl.Commands.RestartFederationLinkCommand.erl b/deps/rabbitmq_federation/src/Elixir.RabbitMQ.CLI.Ctl.Commands.RestartFederationLinkCommand.erl
index 8d062c692c..68e740ba1c 100644
--- a/deps/rabbitmq_federation/src/Elixir.RabbitMQ.CLI.Ctl.Commands.RestartFederationLinkCommand.erl
+++ b/deps/rabbitmq_federation/src/Elixir.RabbitMQ.CLI.Ctl.Commands.RestartFederationLinkCommand.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module('Elixir.RabbitMQ.CLI.Ctl.Commands.RestartFederationLinkCommand').
@@ -11,6 +11,8 @@
-behaviour('Elixir.RabbitMQ.CLI.CommandBehaviour').
+-ignore_xref({'Elixir.RabbitMQ.CLI.DefaultOutput', output, 1}).
+
-export([
usage/0,
usage_additional/0,
diff --git a/deps/rabbitmq_federation/src/rabbit_federation_app.erl b/deps/rabbitmq_federation/src/rabbit_federation_app.erl
index ee7ba91e5f..de03d67e6a 100644
--- a/deps/rabbitmq_federation/src/rabbit_federation_app.erl
+++ b/deps/rabbitmq_federation/src/rabbit_federation_app.erl
@@ -2,16 +2,18 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_federation_app).
+-include("rabbit_federation.hrl").
+
-behaviour(application).
-export([start/2, stop/1]).
%% Dummy supervisor - see Ulf Wiger's comment at
-%% http://erlang.2086793.n4.nabble.com/initializing-library-applications-without-processes-td2094473.html
+%% http://erlang.org/pipermail/erlang-questions/2010-April/050508.html
%% All of our actual server processes are supervised by
%% rabbit_federation_sup, which is started by a rabbit_boot_step
@@ -27,12 +29,18 @@
-export([init/1]).
start(_Type, _StartArgs) ->
- rabbit_federation_exchange_link:go(),
- rabbit_federation_queue_link:go(),
supervisor:start_link({local, ?MODULE}, ?MODULE, []).
stop(_State) ->
+ rabbit_federation_pg:stop_scope(),
ok.
+
%%----------------------------------------------------------------------------
-init([]) -> {ok, {{one_for_one, 3, 10}, []}}.
+init([]) ->
+ Flags = #{
+ strategy => one_for_one,
+ intensity => 3,
+ period => 10
+ },
+ {ok, {Flags, []}}.
diff --git a/deps/rabbitmq_federation/src/rabbit_federation_db.erl b/deps/rabbitmq_federation/src/rabbit_federation_db.erl
index e35e3646a8..30a6e6fa60 100644
--- a/deps/rabbitmq_federation/src/rabbit_federation_db.erl
+++ b/deps/rabbitmq_federation/src/rabbit_federation_db.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_federation_db).
diff --git a/deps/rabbitmq_federation/src/rabbit_federation_event.erl b/deps/rabbitmq_federation/src/rabbit_federation_event.erl
index 417b8ecba3..18aaacfa5d 100644
--- a/deps/rabbitmq_federation/src/rabbit_federation_event.erl
+++ b/deps/rabbitmq_federation/src/rabbit_federation_event.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_federation_event).
diff --git a/deps/rabbitmq_federation/src/rabbit_federation_exchange.erl b/deps/rabbitmq_federation/src/rabbit_federation_exchange.erl
index 6b85b6756b..a0fa137317 100644
--- a/deps/rabbitmq_federation/src/rabbit_federation_exchange.erl
+++ b/deps/rabbitmq_federation/src/rabbit_federation_exchange.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% TODO rename this
diff --git a/deps/rabbitmq_federation/src/rabbit_federation_exchange_link.erl b/deps/rabbitmq_federation/src/rabbit_federation_exchange_link.erl
index 869ab047ae..ed9f9d0df4 100644
--- a/deps/rabbitmq_federation/src/rabbit_federation_exchange_link.erl
+++ b/deps/rabbitmq_federation/src/rabbit_federation_exchange_link.erl
@@ -2,14 +2,11 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_federation_exchange_link).
-%% pg2 is deprecated in OTP 23.
--compile(nowarn_deprecated_function).
-
-include_lib("amqp_client/include/amqp_client.hrl").
-include("rabbit_federation.hrl").
@@ -51,7 +48,9 @@
%% start during exchange recovery, when rabbit is not fully started
%% and the Erlang client is not running. This then gets invoked when
%% the federation app is started.
-go() -> cast(go).
+go() ->
+ rabbit_federation_pg:start_scope(),
+ cast(go).
add_binding(S, XN, B) -> cast(XN, {enqueue, S, {add_binding, B}}).
remove_bindings(S, XN, Bs) -> cast(XN, {enqueue, S, {remove_bindings, Bs}}).
@@ -79,7 +78,7 @@ init({Upstream, XName}) ->
gen_server2:cast(self(), maybe_go),
{ok, {not_started, {Upstream, UParams, XName}}};
{error, not_found} ->
- rabbit_federation_link_util:log_warning(XName, "not found, stopping link~n", []),
+ rabbit_federation_link_util:log_warning(XName, "not found, stopping link", []),
{stop, gone}
end.
@@ -89,11 +88,8 @@ handle_call(list_routing_keys, _From, State = #state{bindings = Bindings}) ->
handle_call(Msg, _From, State) ->
{stop, {unexpected_call, Msg}, State}.
-handle_cast(maybe_go, S0 = {not_started, _Args}) ->
- case federation_up() of
- true -> go(S0);
- false -> {noreply, S0}
- end;
+handle_cast(maybe_go, State = {not_started, _Args}) ->
+ go(State);
handle_cast(go, S0 = {not_started, _Args}) ->
go(S0);
@@ -114,7 +110,7 @@ handle_cast({enqueue, Serial, Cmd},
{noreply, play_back_commands(State#state{waiting_cmds = Waiting1})}
catch exit:{{shutdown, {server_initiated_close, 404, Text}}, _} ->
rabbit_federation_link_util:log_warning(
- XName, "detected upstream changes, restarting link: ~p~n", [Text]),
+ XName, "detected upstream changes, restarting link: ~p", [Text]),
{stop, {shutdown, restart}, State}
end;
@@ -247,21 +243,16 @@ cast(Msg) -> [gen_server2:cast(Pid, Msg) || Pid <- all()].
cast(XName, Msg) -> [gen_server2:cast(Pid, Msg) || Pid <- x(XName)].
join(Name) ->
- pg2:create(pgname(Name)),
- ok = pg2:join(pgname(Name), self()).
+ ok = pg:join(?FEDERATION_PG_SCOPE, pgname(Name), self()).
all() ->
- pg2:create(pgname(rabbit_federation_exchanges)),
- pg2:get_members(pgname(rabbit_federation_exchanges)).
+ pg:get_members(?FEDERATION_PG_SCOPE, pgname(rabbit_federation_exchanges)).
x(XName) ->
- pg2:create(pgname({rabbit_federation_exchange, XName})),
- pg2:get_members(pgname({rabbit_federation_exchange, XName})).
+ pg:get_members(?FEDERATION_PG_SCOPE, pgname({rabbit_federation_exchange, XName})).
%%----------------------------------------------------------------------------
-federation_up() -> is_pid(whereis(rabbit_federation_app)).
-
handle_command({add_binding, Binding}, State) ->
add_binding(Binding, State);
@@ -430,11 +421,15 @@ key(#binding{key = Key, args = Args}) -> {Key, Args}.
go(S0 = {not_started, {Upstream, UParams, DownXName}}) ->
Unacked = rabbit_federation_link_util:unacked_new(),
-
log_link_startup_attempt(Upstream, DownXName),
rabbit_federation_link_util:start_conn_ch(
fun (Conn, Ch, DConn, DCh) ->
- {ok, CmdCh} = open_cmd_channel(Conn, Upstream, UParams, DownXName, S0),
+ {ok, CmdCh} =
+ case Upstream#upstream.channel_use_mode of
+ single -> reuse_command_channel(Ch, Upstream, DownXName);
+ multiple -> open_command_channel(Conn, Upstream, UParams, DownXName, S0);
+ _ -> open_command_channel(Conn, Upstream, UParams, DownXName, S0)
+ end,
erlang:monitor(process, CmdCh),
Props = pget(server_properties,
amqp_connection:info(Conn, [server_properties])),
@@ -480,11 +475,18 @@ go(S0 = {not_started, {Upstream, UParams, DownXName}}) ->
{noreply, State#state{internal_exchange_timer = TRef}}
end, Upstream, UParams, DownXName, S0).
-log_link_startup_attempt(OUpstream, DownXName) ->
- rabbit_log_federation:debug("Will try to start a federation link for ~s, upstream: '~s'",
- [rabbit_misc:rs(DownXName), OUpstream#upstream.name]).
+log_link_startup_attempt(#upstream{name = Name, channel_use_mode = ChMode}, DownXName) ->
+ rabbit_log_federation:debug("Will try to start a federation link for ~s, upstream: '~s', channel use mode: ~s",
+ [rabbit_misc:rs(DownXName), Name, ChMode]).
+
+%% If channel use mode is 'single', reuse the message transfer channel.
+%% Otherwise open a separate one.
+reuse_command_channel(MainCh, #upstream{name = UName}, DownXName) ->
+ rabbit_log_federation:debug("Will use a single channel for both schema operations and message transfer on links to upstream '~s' for downstream federated ~s",
+ [UName, rabbit_misc:rs(DownXName)]),
+ {ok, MainCh}.
-open_cmd_channel(Conn, Upstream = #upstream{name = UName}, UParams, DownXName, S0) ->
+open_command_channel(Conn, Upstream = #upstream{name = UName}, UParams, DownXName, S0) ->
rabbit_log_federation:debug("Will open a command channel to upstream '~s' for downstream federated ~s",
[UName, rabbit_misc:rs(DownXName)]),
case amqp_connection:open_channel(Conn) of
@@ -627,11 +629,11 @@ check_internal_exchange(IntXNameBin,
Params, XFU, fun(404, Text) ->
rabbit_federation_link_util:log_warning(
XName, "detected internal upstream exchange changes,"
- " restarting link: ~p~n", [Text]),
+ " restarting link: ~p", [Text]),
upstream_not_found;
(Code, Text) ->
rabbit_federation_link_util:log_warning(
- XName, "internal upstream exchange check failed: ~p ~p~n",
+ XName, "internal upstream exchange check failed: ~p ~p",
[Code, Text]),
error
end).
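The hunks above migrate the exchange link's group membership from the deprecated pg2 module to pg, with all groups living in the ?FEDERATION_PG_SCOPE scope started by rabbit_federation_pg. A minimal standalone sketch of that pattern, assuming OTP 23+ where pg is available (the scope, group and module names below are illustrative, not the plugin's):

    %% pg groups are created implicitly on join and tracked per named scope,
    %% so there is no equivalent of pg2:create/1.
    -module(pg_scope_example).
    -export([start/0, join/1, members/1]).

    -define(SCOPE, example_pg_scope).

    start() ->
        %% One scope process per node; it must be running before join/get_members.
        pg:start_link(?SCOPE).

    join(Group) ->
        ok = pg:join(?SCOPE, Group, self()).

    members(Group) ->
        %% Unknown groups simply yield [].
        pg:get_members(?SCOPE, Group).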
diff --git a/deps/rabbitmq_federation/src/rabbit_federation_exchange_link_sup_sup.erl b/deps/rabbitmq_federation/src/rabbit_federation_exchange_link_sup_sup.erl
index fda76a5070..0d44d31e12 100644
--- a/deps/rabbitmq_federation/src/rabbit_federation_exchange_link_sup_sup.erl
+++ b/deps/rabbitmq_federation/src/rabbit_federation_exchange_link_sup_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_federation_exchange_link_sup_sup).
@@ -21,6 +21,11 @@
%%----------------------------------------------------------------------------
start_link() ->
+ _ = pg:start_link(),
+ %% This scope is used by concurrently starting exchange and queue links,
+ %% and other places, so we have to start it very early outside of the supervision tree.
+ %% The scope is stopped in stop/1.
+ rabbit_federation_pg:start_scope(),
mirrored_supervisor:start_link({local, ?SUPERVISOR}, ?SUPERVISOR,
fun rabbit_misc:execute_mnesia_transaction/1,
?MODULE, []).
diff --git a/deps/rabbitmq_federation/src/rabbit_federation_link_sup.erl b/deps/rabbitmq_federation/src/rabbit_federation_link_sup.erl
index 27d1b50277..1d2ae2d991 100644
--- a/deps/rabbitmq_federation/src/rabbit_federation_link_sup.erl
+++ b/deps/rabbitmq_federation/src/rabbit_federation_link_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_federation_link_sup).
diff --git a/deps/rabbitmq_federation/src/rabbit_federation_link_util.erl b/deps/rabbitmq_federation/src/rabbit_federation_link_util.erl
index a5fd560f0b..74b4ec1d1f 100644
--- a/deps/rabbitmq_federation/src/rabbit_federation_link_util.erl
+++ b/deps/rabbitmq_federation/src/rabbit_federation_link_util.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_federation_link_util).
@@ -56,7 +56,7 @@ start_conn_ch(Fun, OUpstream, OUParams,
try
R = Fun(Conn, Ch, DConn, DCh),
log_info(
- XorQName, "connected to ~s~n",
+ XorQName, "connected to ~s",
[rabbit_federation_upstream:params_to_string(
UParams)]),
Name = pget(name, amqp_connection:info(DConn, [name])),
@@ -290,7 +290,7 @@ log_terminate(shutdown, Upstream, UParams, XorQName) ->
%% the link because configuration has changed. So try to shut down
%% nicely so that we do not cause unacked messages to be
%% redelivered.
- log_info(XorQName, "disconnecting from ~s~n",
+ log_info(XorQName, "disconnecting from ~s",
[rabbit_federation_upstream:params_to_string(UParams)]),
rabbit_federation_status:remove(Upstream, XorQName);
@@ -332,7 +332,7 @@ disposable_channel_call(Conn, Method, ErrFun) ->
end
catch
Exception:Reason ->
- rabbit_log_federation:error("Federation link could not create a disposable (one-off) channel due to an error ~p: ~p~n", [Exception, Reason])
+ rabbit_log_federation:error("Federation link could not create a disposable (one-off) channel due to an error ~p: ~p", [Exception, Reason])
end.
disposable_connection_call(Params, Method, ErrFun) ->
@@ -351,14 +351,14 @@ disposable_connection_call(Params, Method, ErrFun) ->
end;
{error, {auth_failure, Message}} ->
rabbit_log_federation:error("Federation link could not open a disposable (one-off) connection "
- "due to an authentication failure: ~s~n", [Message]);
+ "due to an authentication failure: ~s", [Message]);
Error ->
rabbit_log_federation:error("Federation link could not open a disposable (one-off) connection, "
- "reason: ~p~n", [Error]),
+ "reason: ~p", [Error]),
Error
end
catch
Exception:Reason ->
rabbit_log_federation:error("Federation link could not create a disposable (one-off) connection "
- "due to an error ~p: ~p~n", [Exception, Reason])
+ "due to an error ~p: ~p", [Exception, Reason])
end.
diff --git a/deps/rabbitmq_federation/src/rabbit_federation_parameters.erl b/deps/rabbitmq_federation/src/rabbit_federation_parameters.erl
index 928e41dc0f..c0751ae492 100644
--- a/deps/rabbitmq_federation/src/rabbit_federation_parameters.erl
+++ b/deps/rabbitmq_federation/src/rabbit_federation_parameters.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_federation_parameters).
@@ -87,9 +87,12 @@ shared_validation() ->
{<<"trust-user-id">>, fun rabbit_parameter_validation:boolean/2, optional},
{<<"ack-mode">>, rabbit_parameter_validation:enum(
['no-ack', 'on-publish', 'on-confirm']), optional},
- {<<"resource-cleanup-mode">>, rabbit_parameter_validation:enum(['default', 'never']), optional},
+ {<<"resource-cleanup-mode">>, rabbit_parameter_validation:enum(
+ ['default', 'never']), optional},
{<<"ha-policy">>, fun rabbit_parameter_validation:binary/2, optional},
- {<<"bind-nowait">>, fun rabbit_parameter_validation:boolean/2, optional}].
+ {<<"bind-nowait">>, fun rabbit_parameter_validation:boolean/2, optional},
+ {<<"channel-use-mode">>, rabbit_parameter_validation:enum(
+ ['multiple', 'single']), optional}].
validate_uri(Name, Term) when is_binary(Term) ->
case rabbit_parameter_validation:binary(Name, Term) of
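The validator above adds the 'channel-use-mode' upstream key, restricted to 'multiple' or 'single'; rabbit_federation_upstream further below maps it onto the new #upstream.channel_use_mode field with 'multiple' as the default, and the exchange link uses it to decide whether to open a dedicated command channel. An illustrative upstream definition showing where the key sits (the other keys and values are examples only, not required by this commit):

    %% Proplist of binaries as it would typically reach shared_validation/0
    %% after being parsed from the runtime parameter payload.
    UpstreamDefinition = [
        {<<"uri">>,              <<"amqp://target.example.local">>},
        {<<"ack-mode">>,         <<"on-confirm">>},
        %% new in this commit; <<"multiple">> (the default) keeps the old behaviour
        {<<"channel-use-mode">>, <<"single">>}
    ].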
diff --git a/deps/rabbitmq_federation/src/rabbit_federation_pg.erl b/deps/rabbitmq_federation/src/rabbit_federation_pg.erl
new file mode 100644
index 0000000000..b262f58abc
--- /dev/null
+++ b/deps/rabbitmq_federation/src/rabbit_federation_pg.erl
@@ -0,0 +1,25 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(rabbit_federation_pg).
+
+-include("rabbit_federation.hrl").
+
+-export([start_scope/0, stop_scope/0]).
+
+start_scope() ->
+ rabbit_log_federation:debug("Starting pg scope ~s", [?FEDERATION_PG_SCOPE]),
+ _ = pg:start_link(?FEDERATION_PG_SCOPE).
+
+stop_scope() ->
+ case whereis(?FEDERATION_PG_SCOPE) of
+ Pid when is_pid(Pid) ->
+ rabbit_log_federation:debug("Stopping pg scope ~s", [?FEDERATION_PG_SCOPE]),
+ exit(Pid, normal);
+ _ ->
+ ok
+ end.
\ No newline at end of file
diff --git a/deps/rabbitmq_federation/src/rabbit_federation_queue.erl b/deps/rabbitmq_federation/src/rabbit_federation_queue.erl
index 3117792589..ea996aa8de 100644
--- a/deps/rabbitmq_federation/src/rabbit_federation_queue.erl
+++ b/deps/rabbitmq_federation/src/rabbit_federation_queue.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_federation_queue).
@@ -48,8 +48,7 @@ policy_changed(Q1, Q2) when ?is_amqqueue(Q1) ->
QName = amqqueue:get_name(Q1),
case rabbit_amqqueue:lookup(QName) of
{ok, Q0} when ?is_amqqueue(Q0) ->
- QPid = amqqueue:get_pid(Q0),
- rpc:call(node(QPid), rabbit_federation_queue,
+ rpc:call(amqqueue:qnode(Q0), rabbit_federation_queue,
policy_changed_local, [Q1, Q2]);
{error, not_found} ->
ok
diff --git a/deps/rabbitmq_federation/src/rabbit_federation_queue_link.erl b/deps/rabbitmq_federation/src/rabbit_federation_queue_link.erl
index 97389cb8f6..b9b9ae718f 100644
--- a/deps/rabbitmq_federation/src/rabbit_federation_queue_link.erl
+++ b/deps/rabbitmq_federation/src/rabbit_federation_queue_link.erl
@@ -2,14 +2,11 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_federation_queue_link).
-%% pg2 is deprecated in OTP 23.
--compile(nowarn_deprecated_function).
-
-include_lib("rabbit/include/amqqueue.hrl").
-include_lib("amqp_client/include/amqp_client.hrl").
-include("rabbit_federation.hrl").
@@ -33,7 +30,9 @@ start_link(Args) ->
run(QName) -> cast(QName, run).
pause(QName) -> cast(QName, pause).
-go() -> cast(go).
+go() ->
+ rabbit_federation_pg:start_scope(),
+ cast(go).
%%----------------------------------------------------------------------------
%%call(QName, Msg) -> [gen_server2:call(Pid, Msg, infinity) || Pid <- q(QName)].
@@ -41,20 +40,18 @@ cast(Msg) -> [gen_server2:cast(Pid, Msg) || Pid <- all()].
cast(QName, Msg) -> [gen_server2:cast(Pid, Msg) || Pid <- q(QName)].
join(Name) ->
- pg2:create(pgname(Name)),
- ok = pg2:join(pgname(Name), self()).
+ ok = pg:join(?FEDERATION_PG_SCOPE, pgname(Name), self()).
all() ->
- pg2:create(pgname(rabbit_federation_queues)),
- pg2:get_members(pgname(rabbit_federation_queues)).
+ pg:get_members(?FEDERATION_PG_SCOPE, pgname(rabbit_federation_queues)).
q(QName) ->
- pg2:create(pgname({rabbit_federation_queue, QName})),
- pg2:get_members(pgname({rabbit_federation_queue, QName})).
-
-federation_up() ->
- proplists:is_defined(rabbitmq_federation,
- application:which_applications(infinity)).
+ case pg:get_members(?FEDERATION_PG_SCOPE, pgname({rabbit_federation_queue, QName})) of
+ {error, {no_such_group, _}} ->
+ [];
+ Members ->
+ Members
+ end.
%%----------------------------------------------------------------------------
@@ -75,7 +72,7 @@ init({Upstream, Queue}) when ?is_amqqueue(Queue) ->
upstream = Upstream,
upstream_params = UParams}};
{error, not_found} ->
- rabbit_federation_link_util:log_warning(QName, "not found, stopping link~n", []),
+ rabbit_federation_link_util:log_warning(QName, "not found, stopping link", []),
{stop, gone}
end.
@@ -83,10 +80,7 @@ handle_call(Msg, _From, State) ->
{stop, {unexpected_call, Msg}, State}.
handle_cast(maybe_go, State) ->
- case federation_up() of
- true -> go(State);
- false -> {noreply, State}
- end;
+ go(State);
handle_cast(go, State = #not_started{}) ->
go(State);
@@ -180,6 +174,7 @@ terminate(Reason, #not_started{upstream = Upstream,
queue = Q}) when ?is_amqqueue(Q) ->
QName = amqqueue:get_name(Q),
rabbit_federation_link_util:log_terminate(Reason, Upstream, UParams, QName),
+ _ = pg:leave(?FEDERATION_PG_SCOPE, pgname({rabbit_federation_queue, QName}), self()),
ok;
terminate(Reason, #state{dconn = DConn,
@@ -191,6 +186,7 @@ terminate(Reason, #state{dconn = DConn,
rabbit_federation_link_util:ensure_connection_closed(DConn),
rabbit_federation_link_util:ensure_connection_closed(Conn),
rabbit_federation_link_util:log_terminate(Reason, Upstream, UParams, QName),
+ _ = pg:leave(?FEDERATION_PG_SCOPE, pgname({rabbit_federation_queue, QName}), self()),
ok.
code_change(_OldVsn, State, _Extra) ->
@@ -212,10 +208,15 @@ go(S0 = #not_started{run = Run,
rabbit_federation_link_util:start_conn_ch(
fun (Conn, Ch, DConn, DCh) ->
check_upstream_suitable(Conn),
- amqp_channel:call(Ch, #'queue.declare'{queue = name(UQueue),
- durable = Durable,
- auto_delete = AutoDelete,
- arguments = Args}),
+ Declare = #'queue.declare'{queue = name(UQueue),
+ durable = Durable,
+ auto_delete = AutoDelete,
+ arguments = Args},
+ rabbit_federation_link_util:disposable_channel_call(
+ Conn, Declare#'queue.declare'{passive = true},
+ fun(?NOT_FOUND, _Text) ->
+ amqp_channel:call(Ch, Declare)
+ end),
case Upstream#upstream.ack_mode of
'no-ack' -> ok;
_ -> amqp_channel:call(
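The go/0 hunk above now probes the upstream queue with a passive queue.declare on a disposable channel and only issues the real declare when the probe fails with 404, instead of unconditionally re-declaring the queue. A standalone sketch of that probe-then-declare pattern with the Erlang AMQP client, assuming amqp_client.hrl is included for the method records (the helper name and error handling shape are illustrative; the plugin itself funnels the 404 through rabbit_federation_link_util:disposable_channel_call/3):

    %% Declare a queue only if it does not already exist. A passive declare of
    %% a missing queue closes the channel with 404, so the probe uses a
    %% throwaway channel and the main channel stays usable.
    ensure_queue(Conn, Ch, Declare = #'queue.declare'{}) ->
        {ok, ProbeCh} = amqp_connection:open_channel(Conn),
        try amqp_channel:call(ProbeCh, Declare#'queue.declare'{passive = true}) of
            #'queue.declare_ok'{} ->
                catch amqp_channel:close(ProbeCh),
                ok
        catch
            exit:{{shutdown, {server_initiated_close, 404, _Text}}, _} ->
                %% the probe channel is already gone; declare on the main channel
                #'queue.declare_ok'{} = amqp_channel:call(Ch, Declare),
                ok
        end.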
diff --git a/deps/rabbitmq_federation/src/rabbit_federation_queue_link_sup_sup.erl b/deps/rabbitmq_federation/src/rabbit_federation_queue_link_sup_sup.erl
index 1f6ec2b88f..0d879b2a3a 100644
--- a/deps/rabbitmq_federation/src/rabbit_federation_queue_link_sup_sup.erl
+++ b/deps/rabbitmq_federation/src/rabbit_federation_queue_link_sup_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_federation_queue_link_sup_sup).
@@ -22,6 +22,11 @@
%%----------------------------------------------------------------------------
start_link() ->
+ _ = pg:start_link(),
+ %% This scope is used by concurrently starting exchange and queue links,
+ %% and other places, so we have to start it very early outside of the supervision tree.
+ %% The scope is stopped in stop/1.
+ rabbit_federation_pg:start_scope(),
mirrored_supervisor:start_link({local, ?SUPERVISOR}, ?SUPERVISOR,
fun rabbit_misc:execute_mnesia_transaction/1,
?MODULE, []).
@@ -65,7 +70,7 @@ stop_child(Q) ->
[rabbit_misc:rs(QueueName), Err]),
ok
end,
- ok = mirrored_supervisor:delete_child(?SUPERVISOR, id(Q)).
+ _ = mirrored_supervisor:delete_child(?SUPERVISOR, id(Q)).
%%----------------------------------------------------------------------------
diff --git a/deps/rabbitmq_federation/src/rabbit_federation_status.erl b/deps/rabbitmq_federation/src/rabbit_federation_status.erl
index 04afec990d..ed14ba472d 100644
--- a/deps/rabbitmq_federation/src/rabbit_federation_status.erl
+++ b/deps/rabbitmq_federation/src/rabbit_federation_status.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_federation_status).
diff --git a/deps/rabbitmq_federation/src/rabbit_federation_sup.erl b/deps/rabbitmq_federation/src/rabbit_federation_sup.erl
index d3642b52c2..d0f20aee0b 100644
--- a/deps/rabbitmq_federation/src/rabbit_federation_sup.erl
+++ b/deps/rabbitmq_federation/src/rabbit_federation_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_federation_sup).
@@ -12,6 +12,7 @@
%% Supervises everything. There is just one of these.
-include_lib("rabbit_common/include/rabbit.hrl").
+
-define(SUPERVISOR, rabbit_federation_sup).
-export([start_link/0, stop/0]).
@@ -45,19 +46,38 @@ stop() ->
%%----------------------------------------------------------------------------
init([]) ->
- Status = {status, {rabbit_federation_status, start_link, []},
- transient, ?WORKER_WAIT, worker,
- [rabbit_federation_status]},
- XLinkSupSup = {x_links,
- {rabbit_federation_exchange_link_sup_sup, start_link, []},
- transient, ?SUPERVISOR_WAIT, supervisor,
- [rabbit_federation_exchange_link_sup_sup]},
- QLinkSupSup = {q_links,
- {rabbit_federation_queue_link_sup_sup, start_link, []},
- transient, ?SUPERVISOR_WAIT, supervisor,
- [rabbit_federation_queue_link_sup_sup]},
+ Status = #{
+ id => status,
+ start => {rabbit_federation_status, start_link, []},
+ restart => transient,
+ shutdown => ?WORKER_WAIT,
+ type => worker,
+ modules => [rabbit_federation_status]
+ },
+ XLinkSupSup = #{
+ id => x_links,
+ start => {rabbit_federation_exchange_link_sup_sup, start_link, []},
+ restart => transient,
+ shutdown => ?SUPERVISOR_WAIT,
+ type => supervisor,
+ modules =>[rabbit_federation_exchange_link_sup_sup]
+ },
+ QLinkSupSup = #{
+ id => q_links,
+ start => {rabbit_federation_queue_link_sup_sup, start_link, []},
+ restart => transient,
+ shutdown => ?SUPERVISOR_WAIT,
+ type => supervisor,
+ modules => [rabbit_federation_queue_link_sup_sup]
+ },
%% with default reconnect-delay of 5 second, this supports up to
%% 100 links constantly failing and being restarted a minute
%% (or 200 links if reconnect-delay is 10 seconds, 600 with 30 seconds,
%% etc: N * (60/reconnect-delay) <= 1200)
- {ok, {{one_for_one, 1200, 60}, [Status, XLinkSupSup, QLinkSupSup]}}.
+ Flags = #{
+ strategy => one_for_one,
+ intensity => 1200,
+ period => 60
+ },
+ Specs = [Status, XLinkSupSup, QLinkSupSup],
+ {ok, {Flags, Specs}}.
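init/1 above replaces the legacy tuple child specifications and the {Strategy, Intensity, Period} restart tuple with the equivalent map forms accepted by OTP supervisors. A minimal sketch of the correspondence for one child (the shutdown value is a placeholder; the real spec uses ?WORKER_WAIT):

    %% Legacy tuple form: {Id, StartMFA, Restart, Shutdown, Type, Modules}
    TupleSpec = {status,
                 {rabbit_federation_status, start_link, []},
                 transient, 5000, worker, [rabbit_federation_status]},

    %% Equivalent map form, as now returned by init/1
    MapSpec = #{id       => status,
                start    => {rabbit_federation_status, start_link, []},
                restart  => transient,
                shutdown => 5000,
                type     => worker,
                modules  => [rabbit_federation_status]},

    %% Supervisor flags follow the same tuple-to-map correspondence
    Flags = #{strategy => one_for_one, intensity => 1200, period => 60}.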
diff --git a/deps/rabbitmq_federation/src/rabbit_federation_upstream.erl b/deps/rabbitmq_federation/src/rabbit_federation_upstream.erl
index e079b850b5..ebb29067b0 100644
--- a/deps/rabbitmq_federation/src/rabbit_federation_upstream.erl
+++ b/deps/rabbitmq_federation/src/rabbit_federation_upstream.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_federation_upstream).
@@ -139,7 +139,9 @@ from_upstream_or_set(US, Name, U, XorQ) ->
ha_policy = bget('ha-policy', US, U, none),
name = Name,
bind_nowait = bget('bind-nowait', US, U, false),
- resource_cleanup_mode = to_atom(bget('resource-cleanup-mode', US, U, <<"default">>))}.
+ resource_cleanup_mode = to_atom(bget('resource-cleanup-mode', US, U, <<"default">>)),
+ channel_use_mode = to_atom(bget('channel-use-mode', US, U, multiple))
+ }.
%%----------------------------------------------------------------------------
diff --git a/deps/rabbitmq_federation/src/rabbit_federation_upstream_exchange.erl b/deps/rabbitmq_federation/src/rabbit_federation_upstream_exchange.erl
index 6018dd90a5..8f5e92d81b 100644
--- a/deps/rabbitmq_federation/src/rabbit_federation_upstream_exchange.erl
+++ b/deps/rabbitmq_federation/src/rabbit_federation_upstream_exchange.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_federation_upstream_exchange).
diff --git a/deps/rabbitmq_federation/src/rabbit_federation_util.erl b/deps/rabbitmq_federation/src/rabbit_federation_util.erl
index 160bac996e..e8f05654dd 100644
--- a/deps/rabbitmq_federation/src/rabbit_federation_util.erl
+++ b/deps/rabbitmq_federation/src/rabbit_federation_util.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_federation_util).
diff --git a/deps/rabbitmq_federation/src/rabbit_log_federation.erl b/deps/rabbitmq_federation/src/rabbit_log_federation.erl
new file mode 100644
index 0000000000..315f386eda
--- /dev/null
+++ b/deps/rabbitmq_federation/src/rabbit_log_federation.erl
@@ -0,0 +1,107 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+%% @doc Compatibility module for the old Lager-based logging API.
+-module(rabbit_log_federation).
+
+-export([debug/1, debug/2, debug/3,
+ info/1, info/2, info/3,
+ notice/1, notice/2, notice/3,
+ warning/1, warning/2, warning/3,
+ error/1, error/2, error/3,
+ critical/1, critical/2, critical/3,
+ alert/1, alert/2, alert/3,
+ emergency/1, emergency/2, emergency/3,
+ none/1, none/2, none/3]).
+
+-include("logging.hrl").
+
+-compile({no_auto_import, [error/2, error/3]}).
+
+%%----------------------------------------------------------------------------
+
+-spec debug(string()) -> 'ok'.
+-spec debug(string(), [any()]) -> 'ok'.
+-spec debug(pid() | [tuple()], string(), [any()]) -> 'ok'.
+-spec info(string()) -> 'ok'.
+-spec info(string(), [any()]) -> 'ok'.
+-spec info(pid() | [tuple()], string(), [any()]) -> 'ok'.
+-spec notice(string()) -> 'ok'.
+-spec notice(string(), [any()]) -> 'ok'.
+-spec notice(pid() | [tuple()], string(), [any()]) -> 'ok'.
+-spec warning(string()) -> 'ok'.
+-spec warning(string(), [any()]) -> 'ok'.
+-spec warning(pid() | [tuple()], string(), [any()]) -> 'ok'.
+-spec error(string()) -> 'ok'.
+-spec error(string(), [any()]) -> 'ok'.
+-spec error(pid() | [tuple()], string(), [any()]) -> 'ok'.
+-spec critical(string()) -> 'ok'.
+-spec critical(string(), [any()]) -> 'ok'.
+-spec critical(pid() | [tuple()], string(), [any()]) -> 'ok'.
+-spec alert(string()) -> 'ok'.
+-spec alert(string(), [any()]) -> 'ok'.
+-spec alert(pid() | [tuple()], string(), [any()]) -> 'ok'.
+-spec emergency(string()) -> 'ok'.
+-spec emergency(string(), [any()]) -> 'ok'.
+-spec emergency(pid() | [tuple()], string(), [any()]) -> 'ok'.
+-spec none(string()) -> 'ok'.
+-spec none(string(), [any()]) -> 'ok'.
+-spec none(pid() | [tuple()], string(), [any()]) -> 'ok'.
+
+%%----------------------------------------------------------------------------
+
+debug(Format) -> debug(Format, []).
+debug(Format, Args) -> debug(self(), Format, Args).
+debug(Pid, Format, Args) ->
+ logger:debug(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_FEDERATION}).
+
+info(Format) -> info(Format, []).
+info(Format, Args) -> info(self(), Format, Args).
+info(Pid, Format, Args) ->
+ logger:info(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_FEDERATION}).
+
+notice(Format) -> notice(Format, []).
+notice(Format, Args) -> notice(self(), Format, Args).
+notice(Pid, Format, Args) ->
+ logger:notice(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_FEDERATION}).
+
+warning(Format) -> warning(Format, []).
+warning(Format, Args) -> warning(self(), Format, Args).
+warning(Pid, Format, Args) ->
+ logger:warning(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_FEDERATION}).
+
+error(Format) -> error(Format, []).
+error(Format, Args) -> error(self(), Format, Args).
+error(Pid, Format, Args) ->
+ logger:error(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_FEDERATION}).
+
+critical(Format) -> critical(Format, []).
+critical(Format, Args) -> critical(self(), Format, Args).
+critical(Pid, Format, Args) ->
+ logger:critical(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_FEDERATION}).
+
+alert(Format) -> alert(Format, []).
+alert(Format, Args) -> alert(self(), Format, Args).
+alert(Pid, Format, Args) ->
+ logger:alert(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_FEDERATION}).
+
+emergency(Format) -> emergency(Format, []).
+emergency(Format, Args) -> emergency(self(), Format, Args).
+emergency(Pid, Format, Args) ->
+ logger:emergency(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_FEDERATION}).
+
+none(_Format) -> ok.
+none(_Format, _Args) -> ok.
+none(_Pid, _Format, _Args) -> ok.
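The module above is a thin shim: each function forwards to the standard OTP logger with the caller pid and the federation log domain attached as metadata. Assuming ?RMQLOG_DOMAIN_FEDERATION expands to [rabbitmq, federation] (it is defined in logging.hrl, not shown here), federation-only output could be routed to its own handler with the stock domain filter; a hedged sketch, with an illustrative handler id and file name:

    ok = logger:add_handler(fed_log, logger_std_h,
           #{config => #{file => "federation.log"},
             filter_default => stop,   %% drop anything the filter does not pass
             filters => [{fed_only,
                          {fun logger_filters:domain/2,
                           {log, sub, [rabbitmq, federation]}}}]}).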
diff --git a/deps/rabbitmq_federation/test/exchange_SUITE.erl b/deps/rabbitmq_federation/test/exchange_SUITE.erl
index a0cd51c7c9..d20613df1b 100644
--- a/deps/rabbitmq_federation/test/exchange_SUITE.erl
+++ b/deps/rabbitmq_federation/test/exchange_SUITE.erl
@@ -2,13 +2,14 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(exchange_SUITE).
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
+-include_lib("rabbitmq_ct_helpers/include/rabbit_assert.hrl").
-include_lib("amqp_client/include/amqp_client.hrl").
-include("rabbit_federation.hrl").
@@ -29,6 +30,7 @@
all() ->
[
{group, without_automatic_setup},
+ {group, channel_use_mode_single},
{group, without_disambiguate},
{group, with_disambiguate}
].
@@ -76,11 +78,24 @@ groups() ->
upstream_has_no_federation
]}
]}
+ ]},
+ {channel_use_mode_single, [], [
+ simple,
+ single_channel_mode,
+ multiple_upstreams,
+ multiple_upstreams_pattern,
+ multiple_uris,
+ multiple_downstreams,
+ e2e,
+ unbind_on_delete,
+ unbind_on_unbind,
+ unbind_gets_transmitted,
+ federate_unfederate
]}
].
suite() ->
- [{timetrap, {minutes, 5}}].
+ [{timetrap, {minutes, 3}}].
%% -------------------------------------------------------------------
%% Testsuite setup/teardown.
@@ -93,6 +108,24 @@ init_per_suite(Config) ->
end_per_suite(Config) ->
rabbit_ct_helpers:run_teardown_steps(Config).
+%% Some of the "regular" tests but in the single channel mode.
+init_per_group(channel_use_mode_single, Config) ->
+ SetupFederation = [
+ fun(Config1) ->
+ rabbit_federation_test_util:setup_federation_with_upstream_params(Config1, [
+ {<<"channel-use-mode">>, <<"single">>}
+ ])
+ end
+ ],
+ Suffix = rabbit_ct_helpers:testcase_absname(Config, "", "-"),
+ Config1 = rabbit_ct_helpers:set_config(Config, [
+ {rmq_nodename_suffix, Suffix},
+ {rmq_nodes_clustered, false}
+ ]),
+ rabbit_ct_helpers:run_steps(Config1,
+ rabbit_ct_broker_helpers:setup_steps() ++
+ rabbit_ct_client_helpers:setup_steps() ++
+ SetupFederation);
init_per_group(without_automatic_setup, Config) ->
Suffix = rabbit_ct_helpers:testcase_absname(Config, "", "-"),
Config1 = rabbit_ct_helpers:set_config(Config, [
@@ -175,6 +208,17 @@ simple(Config) ->
publish_expect(Ch, <<"upstream">>, <<"key">>, Q, <<"HELLO">>)
end, upstream_downstream()).
+single_channel_mode(Config) ->
+ with_conn_and_ch(Config,
+ fun (Conn, Ch) ->
+ Infos = amqp_connection:info(Conn, [num_channels]),
+ N = proplists:get_value(num_channels, Infos),
+ ?assertEqual(1, N),
+ Q = bind_queue(Ch, <<"fed.downstream">>, <<"key">>),
+ await_binding(Config, 0, <<"upstream">>, <<"key">>),
+ publish_expect(Ch, <<"upstream">>, <<"key">>, Q, <<"HELLO">>)
+ end, upstream_downstream()).
+
multiple_upstreams(Config) ->
with_ch(Config,
fun (Ch) ->
@@ -366,58 +410,50 @@ user_id(Config) ->
end
end,
- wait_for_federation(
- 90,
- fun() ->
- VHost = <<"/">>,
- X1s = rabbit_ct_broker_helpers:rpc(
- Config, Rabbit, rabbit_exchange, list, [VHost]),
- L1 =
- [X || X <- X1s,
- X#exchange.name =:= #resource{virtual_host = VHost,
- kind = exchange,
- name = <<"test">>},
- X#exchange.scratches =:= [{federation,
- [{{<<"upstream-2">>,
- <<"test">>},
- <<"B">>}]}]],
- X2s = rabbit_ct_broker_helpers:rpc(
- Config, Hare, rabbit_exchange, list, [VHost]),
- L2 =
- [X || X <- X2s,
- X#exchange.type =:= 'x-federation-upstream'],
- [] =/= L1 andalso [] =/= L2 andalso
- has_internal_federated_queue(Config, Hare, VHost)
- end),
+ ?awaitMatch({L1, L2, true} when L1 =/= [] andalso L2 =/= [],
+ begin
+ VHost = <<"/">>,
+ X1s = rabbit_ct_broker_helpers:rpc(
+ Config, Rabbit, rabbit_exchange, list, [VHost]),
+ L1 = [X || X <- X1s,
+ X#exchange.name =:= #resource{virtual_host = VHost,
+ kind = exchange,
+ name = <<"test">>},
+ X#exchange.scratches =:= [{federation,
+ [{{<<"upstream-2">>,
+ <<"test">>},
+ <<"B">>}]}]],
+ X2s = rabbit_ct_broker_helpers:rpc(
+ Config, Hare, rabbit_exchange, list, [VHost]),
+ L2 = [X || X <- X2s,
+ X#exchange.type =:= 'x-federation-upstream'],
+ {L1, L2, has_internal_federated_queue(Config, Hare, VHost)}
+ end, 90000),
publish(Ch2, <<"test">>, <<"key">>, Msg),
expect(Ch, Q, ExpectUser(undefined)),
set_policy_upstream(Config, Rabbit, <<"^test$">>,
rabbit_ct_broker_helpers:node_uri(Config, 1),
[{<<"trust-user-id">>, true}]),
- wait_for_federation(
- 90,
- fun() ->
- VHost = <<"/">>,
- X1s = rabbit_ct_broker_helpers:rpc(
- Config, Rabbit, rabbit_exchange, list, [VHost]),
- L1 =
- [X || X <- X1s,
- X#exchange.name =:= #resource{virtual_host = VHost,
- kind = exchange,
- name = <<"test">>},
- X#exchange.scratches =:= [{federation,
- [{{<<"upstream-2">>,
- <<"test">>},
- <<"A">>}]}]],
- X2s = rabbit_ct_broker_helpers:rpc(
- Config, Hare, rabbit_exchange, list, [VHost]),
- L2 =
- [X || X <- X2s,
- X#exchange.type =:= 'x-federation-upstream'],
- [] =/= L1 andalso [] =/= L2 andalso
- has_internal_federated_queue(Config, Hare, VHost)
- end),
+ ?awaitMatch({L1, L2, true} when L1 =/= [] andalso L2 =/= [],
+ begin
+ VHost = <<"/">>,
+ X1s = rabbit_ct_broker_helpers:rpc(
+ Config, Rabbit, rabbit_exchange, list, [VHost]),
+ L1 = [X || X <- X1s,
+ X#exchange.name =:= #resource{virtual_host = VHost,
+ kind = exchange,
+ name = <<"test">>},
+ X#exchange.scratches =:= [{federation,
+ [{{<<"upstream-2">>,
+ <<"test">>},
+ <<"A">>}]}]],
+ X2s = rabbit_ct_broker_helpers:rpc(
+ Config, Hare, rabbit_exchange, list, [VHost]),
+ L2 = [X || X <- X2s,
+ X#exchange.type =:= 'x-federation-upstream'],
+ {L1, L2, has_internal_federated_queue(Config, Hare, VHost)}
+ end, 90000),
publish(Ch2, <<"test">>, <<"key">>, Msg),
expect(Ch, Q, ExpectUser(<<"hare-user">>)),
@@ -490,6 +526,7 @@ restart_upstream(Config) ->
Qgoes = bind_queue(Downstream, <<"hare.downstream">>, <<"goes">>),
rabbit_ct_client_helpers:close_channels_and_connection(Config, Hare),
+ timer:sleep(3000),
rabbit_ct_broker_helpers:stop_node(Config, Hare),
Qcomes = bind_queue(Downstream, <<"hare.downstream">>, <<"comes">>),
@@ -933,15 +970,9 @@ delete_federated_queue_upstream(Config) ->
#'queue.declare'{queue = <<"federated.queue">>,
durable = true}),
-
- rabbit_ct_helpers:await_condition(
- fun () ->
- length(federation_links_in_vhost(Config, 0, VH1)) > 0 andalso
- length(federation_links_in_vhost(Config, 0, VH2)) > 0
- end),
-
- ?assertEqual(1, length(federation_links_in_vhost(Config, 0, VH1))),
- ?assertEqual(1, length(federation_links_in_vhost(Config, 0, VH2))),
+ ?awaitMatch({1, 1},
+ {length(federation_links_in_vhost(Config, 0, VH1)),
+ length(federation_links_in_vhost(Config, 0, VH2))}, 90000),
rabbit_ct_broker_helpers:clear_parameter(Config, 0, VH2,
<<"federation-upstream">>, <<"upstream">>),
@@ -1003,38 +1034,33 @@ dynamic_plugin_stop_start(Config) ->
rabbit_registry, lookup_module,
[exchange, 'x-federation-upstream']),
- wait_for_federation(
- 90,
- fun() ->
- VHost = <<"/">>,
- Xs = rabbit_ct_broker_helpers:rpc(
- Config, 0, rabbit_exchange, list, [VHost]),
- L1 =
- [X || X <- Xs,
- X#exchange.type =:= 'x-federation-upstream'],
- L2 =
- [X || X <- Xs,
- X#exchange.name =:= #resource{
- virtual_host = VHost,
- kind = exchange,
- name = X1},
- X#exchange.scratches =:= [{federation,
- [{{<<"localhost">>,
- X1},
- <<"A">>}]}]],
- L3 =
- [X || X <- Xs,
- X#exchange.name =:= #resource{
- virtual_host = VHost,
- kind = exchange,
- name = X2},
- X#exchange.scratches =:= [{federation,
- [{{<<"localhost">>,
- X2},
- <<"B">>}]}]],
- length(L1) =:= 2 andalso [] =/= L2 andalso [] =/= L3 andalso
- has_internal_federated_queue(Config, 0, VHost)
- end),
+ ?awaitMatch({L1, L2, L3, true} when length(L1) == 2; L2 =/= []; L3 =/= [],
+ begin
+ VHost = <<"/">>,
+ Xs = rabbit_ct_broker_helpers:rpc(
+ Config, 0, rabbit_exchange, list, [VHost]),
+ L1 = [X || X <- Xs,
+ X#exchange.type =:= 'x-federation-upstream'],
+ L2 = [X || X <- Xs,
+ X#exchange.name =:= #resource{
+ virtual_host = VHost,
+ kind = exchange,
+ name = X1},
+ X#exchange.scratches =:= [{federation,
+ [{{<<"localhost">>,
+ X1},
+ <<"A">>}]}]],
+ L3 = [X || X <- Xs,
+ X#exchange.name =:= #resource{
+ virtual_host = VHost,
+ kind = exchange,
+ name = X2},
+ X#exchange.scratches =:= [{federation,
+ [{{<<"localhost">>,
+ X2},
+ <<"B">>}]}]],
+ {L1, L2, L3, has_internal_federated_queue(Config, 0, VHost)}
+ end, 90000),
%% Test both exchanges work. They are just federated to
%% themselves so should duplicate messages.
@@ -1059,29 +1085,24 @@ dynamic_plugin_cleanup_stop_start(Config) ->
%% Declare a federated exchange, a link starts
assert_connections(Config, 0, [X1], [<<"localhost">>]),
- wait_for_federation(
- 90,
- fun() ->
- VHost = <<"/">>,
- Xs = rabbit_ct_broker_helpers:rpc(
- Config, 0, rabbit_exchange, list, [VHost]),
- L1 =
- [X || X <- Xs,
- X#exchange.type =:= 'x-federation-upstream'],
- L2 =
- [X || X <- Xs,
- X#exchange.name =:= #resource{
- virtual_host = VHost,
- kind = exchange,
- name = X1},
- X#exchange.scratches =:= [{federation,
- [{{<<"localhost">>,
- X1},
- <<"B">>}]}]],
- [] =/= L1 andalso [] =/= L2 andalso
- has_internal_federated_queue(Config, 0, VHost)
- end),
-
+ ?awaitMatch({L1, L2, true} when L1 =/= []; L2 =/= [],
+ begin
+ VHost = <<"/">>,
+ Xs = rabbit_ct_broker_helpers:rpc(
+ Config, 0, rabbit_exchange, list, [VHost]),
+ L1 = [X || X <- Xs,
+ X#exchange.type =:= 'x-federation-upstream'],
+ L2 = [X || X <- Xs,
+ X#exchange.name =:= #resource{
+ virtual_host = VHost,
+ kind = exchange,
+ name = X1},
+ X#exchange.scratches =:= [{federation,
+ [{{<<"localhost">>,
+ X1},
+ <<"B">>}]}]],
+ {L1, L2, has_internal_federated_queue(Config, 0, VHost)}
+ end, 90000),
?assert(has_internal_federated_exchange(Config, 0, <<"/">>)),
?assert(has_internal_federated_queue(Config, 0, <<"/">>)),
@@ -1107,38 +1128,34 @@ dynamic_policy_cleanup(Config) ->
%% Declare federated exchange - get link
assert_connections(Config, 0, [X1], [<<"localhost">>]),
- wait_for_federation(
- 90,
- fun() ->
- VHost = <<"/">>,
- Xs = rabbit_ct_broker_helpers:rpc(
- Config, 0, rabbit_exchange, list, [VHost]),
- L1 =
- [X || X <- Xs,
- X#exchange.type =:= 'x-federation-upstream'],
- L2 =
- [X || X <- Xs,
- X#exchange.name =:= #resource{
- virtual_host = VHost,
- kind = exchange,
- name = X1},
- X#exchange.scratches =:= [{federation,
- [{{<<"localhost">>,
- X1},
- <<"B">>}]}]],
- [] =/= L1 andalso [] =/= L2 andalso
- has_internal_federated_queue(Config, 0, VHost)
- end),
+ ?awaitMatch({L1, L2, true} when L1 =/= []; L2 =/= [],
+ begin
+ VHost = <<"/">>,
+ Xs = rabbit_ct_broker_helpers:rpc(
+ Config, 0, rabbit_exchange, list, [VHost]),
+ L1 = [X || X <- Xs,
+ X#exchange.type =:= 'x-federation-upstream'],
+ L2 = [X || X <- Xs,
+ X#exchange.name =:= #resource{
+ virtual_host = VHost,
+ kind = exchange,
+ name = X1},
+ X#exchange.scratches =:= [{federation,
+ [{{<<"localhost">>,
+ X1},
+ <<"B">>}]}]],
+ {L1, L2, has_internal_federated_queue(Config, 0, VHost)}
+ end, 90000),
?assert(has_internal_federated_exchange(Config, 0, <<"/">>)),
?assert(has_internal_federated_queue(Config, 0, <<"/">>)),
clear_policy(Config, 0, <<"dyn">>),
- timer:sleep(5000),
-
%% Internal exchanges and queues need cleanup
- ?assert(not has_internal_federated_exchange(Config, 0, <<"/">>)),
- ?assert(not has_internal_federated_queue(Config, 0, <<"/">>)),
+ ?awaitMatch({false, false},
+ {has_internal_federated_exchange(Config, 0, <<"/">>),
+ has_internal_federated_queue(Config, 0, <<"/">>)},
+ 10000),
clear_policy(Config, 0, <<"dyn">>),
assert_connections(Config, 0, [X1], [])
@@ -1171,6 +1188,17 @@ with_ch(Config, Fun, Xs) ->
cleanup(Config, 0),
ok.
+with_conn_and_ch(Config, Fun, Xs) ->
+ {Conn, Ch} = rabbit_ct_client_helpers:open_connection_and_channel(Config, 0),
+ declare_all(Ch, Xs),
+ rabbit_federation_test_util:assert_status(Config, 0,
+ Xs, {exchange, upstream_exchange}),
+ Fun(Conn, Ch),
+ delete_all(Ch, Xs),
+ rabbit_ct_client_helpers:close_channel(Ch),
+ cleanup(Config, 0),
+ ok.
+
cleanup(Config, Node) ->
[rabbit_ct_broker_helpers:rpc(
Config, Node, rabbit_amqqueue, delete, [Q, false, false,
@@ -1254,7 +1282,7 @@ await_binding(Config, Node, Vhost, X, Key, ExpectedBindingCount) when is_integer
await_binding(Config, Node, Vhost, X, Key, ExpectedBindingCount, Attempts).
await_binding(_Config, _Node, _Vhost, _X, _Key, ExpectedBindingCount, 0) ->
- {error, rabbit_misc:format("expected ~s bindings but they did not materialize in time", [ExpectedBindingCount])};
+ {error, rabbit_misc:format("expected ~b bindings but they did not materialize in time", [ExpectedBindingCount])};
await_binding(Config, Node, Vhost, X, Key, ExpectedBindingCount, AttemptsLeft) when is_integer(ExpectedBindingCount) ->
case bound_keys_from(Config, Node, Vhost, X, Key) of
Bs when length(Bs) < ExpectedBindingCount ->
diff --git a/deps/rabbitmq_federation/test/federation_status_command_SUITE.erl b/deps/rabbitmq_federation/test/federation_status_command_SUITE.erl
index b7702bcf02..188914cee4 100644
--- a/deps/rabbitmq_federation/test/federation_status_command_SUITE.erl
+++ b/deps/rabbitmq_federation/test/federation_status_command_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(federation_status_command_SUITE).
@@ -118,13 +118,15 @@ run_down_federated(Config) ->
rabbit_federation_test_util:with_ch(
Config,
fun(_) ->
- timer:sleep(3000),
- {stream, ManyProps} = ?CMD:run([], Opts#{only_down => false}),
- Links = [{proplists:get_value(upstream, Props),
- proplists:get_value(status, Props)}
- || Props <- ManyProps],
- [{<<"broken-bunny">>, error}, {<<"localhost">>, running}]
- = lists:sort(Links)
+ rabbit_ct_helpers:await_condition(
+ fun() ->
+ {stream, ManyProps} = ?CMD:run([], Opts#{only_down => false}),
+ Links = [{proplists:get_value(upstream, Props),
+ proplists:get_value(status, Props)}
+ || Props <- ManyProps],
+ [{<<"broken-bunny">>, error}, {<<"localhost">>, running}]
+ == lists:sort(Links)
+ end, 15000)
end,
[rabbit_federation_test_util:q(<<"upstream">>),
rabbit_federation_test_util:q(<<"fed.downstream">>)]),
@@ -132,10 +134,13 @@ run_down_federated(Config) ->
rabbit_federation_test_util:with_ch(
Config,
fun(_) ->
- timer:sleep(3000),
- {stream, [Props]} = ?CMD:run([], Opts#{only_down => true}),
- <<"broken-bunny">> = proplists:get_value(upstream, Props),
- error = proplists:get_value(status, Props)
+ rabbit_ct_helpers:await_condition(
+ fun() ->
+ {stream, Props} = ?CMD:run([], Opts#{only_down => true}),
+ (length(Props) == 1)
+ andalso (<<"broken-bunny">> == proplists:get_value(upstream, hd(Props)))
+ andalso (error == proplists:get_value(status, hd(Props)))
+ end, 15000)
end,
[rabbit_federation_test_util:q(<<"upstream">>),
rabbit_federation_test_util:q(<<"fed.downstream">>)]).
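This suite now polls with rabbit_ct_helpers:await_condition/2 instead of sleeping for a fixed 3 seconds: the helper re-evaluates the fun until it returns true or the timeout (in milliseconds) elapses, and fails the test otherwise. A stand-alone sketch of the same polling idea, not the helper's actual implementation:

    await(_Fun, TimeoutMs) when TimeoutMs =< 0 ->
        error(condition_not_met);
    await(Fun, TimeoutMs) ->
        case Fun() of
            true -> ok;
            _    -> timer:sleep(100),          %% poll interval chosen arbitrarily
                    await(Fun, TimeoutMs - 100)
        end.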
diff --git a/deps/rabbitmq_federation/test/queue_SUITE.erl b/deps/rabbitmq_federation/test/queue_SUITE.erl
index 5c3660fb64..65a08e462f 100644
--- a/deps/rabbitmq_federation/test/queue_SUITE.erl
+++ b/deps/rabbitmq_federation/test/queue_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(queue_SUITE).
@@ -10,13 +10,15 @@
-include_lib("common_test/include/ct.hrl").
-include_lib("amqp_client/include/amqp_client.hrl").
+-compile(nowarn_export_all).
-compile(export_all).
-import(rabbit_federation_test_util,
[wait_for_federation/2, expect/3, expect/4,
- set_upstream/4, set_upstream/5, clear_upstream/3, set_policy/5, clear_policy/3,
- set_policy_pattern/5, set_policy_upstream/5, q/1, with_ch/3,
- declare_queue/2, delete_queue/2,
+ set_upstream/4, set_upstream/5, clear_upstream/3, clear_upstream_set/3,
+ set_policy/5, clear_policy/3,
+ set_policy_pattern/5, set_policy_upstream/5, q/2, with_ch/3,
+ maybe_declare_queue/3, delete_queue/2,
federation_links_in_vhost/3]).
-define(INITIAL_WAIT, 6000).
@@ -24,29 +26,46 @@
all() ->
[
- {group, without_disambiguate},
- {group, with_disambiguate}
+ {group, classic_queue},
+ {group, quorum_queue},
+ {group, mixed}
].
groups() ->
- [
- {without_disambiguate, [], [
- {cluster_size_1, [], [
- simple,
- multiple_upstreams,
- multiple_upstreams_pattern,
- multiple_downstreams,
- bidirectional,
- dynamic_reconfiguration,
- federate_unfederate,
- dynamic_plugin_stop_start
- ]}
- ]},
- {with_disambiguate, [], [
- {cluster_size_2, [], [
- restart_upstream
- ]}
- ]}
+ ClusterSize1 = [simple,
+ multiple_upstreams,
+ multiple_upstreams_pattern,
+ multiple_downstreams,
+ message_flow,
+ dynamic_reconfiguration,
+ federate_unfederate,
+ dynamic_plugin_stop_start
+ ],
+ ClusterSize2 = [restart_upstream],
+ [{classic_queue, [], [
+ {without_disambiguate, [], [
+ {cluster_size_1, [], ClusterSize1}
+ ]},
+ {with_disambiguate, [], [
+ {cluster_size_2, [], ClusterSize2}
+ ]}
+ ]},
+ {quorum_queue, [], [
+ {without_disambiguate, [], [
+ {cluster_size_1, [], ClusterSize1}
+ ]},
+ {with_disambiguate, [], [
+ {cluster_size_2, [], ClusterSize2}
+ ]}
+ ]},
+ {mixed, [], [
+ {without_disambiguate, [], [
+ {cluster_size_1, [], ClusterSize1}
+ ]},
+ {with_disambiguate, [], [
+ {cluster_size_2, [], ClusterSize2}
+ ]}
+ ]}
].
%% -------------------------------------------------------------------
@@ -60,6 +79,33 @@ init_per_suite(Config) ->
end_per_suite(Config) ->
rabbit_ct_helpers:run_teardown_steps(Config).
+init_per_group(classic_queue, Config) ->
+ rabbit_ct_helpers:set_config(
+ Config,
+ [
+ {source_queue_type, classic},
+ {source_queue_args, [{<<"x-queue-type">>, longstr, <<"classic">>}]},
+ {target_queue_type, classic},
+ {target_queue_args, [{<<"x-queue-type">>, longstr, <<"classic">>}]}
+ ]);
+init_per_group(quorum_queue, Config) ->
+ rabbit_ct_helpers:set_config(
+ Config,
+ [
+ {source_queue_type, quorum},
+ {source_queue_args, [{<<"x-queue-type">>, longstr, <<"quorum">>}]},
+ {target_queue_type, quorum},
+ {target_queue_args, [{<<"x-queue-type">>, longstr, <<"quorum">>}]}
+ ]);
+init_per_group(mixed, Config) ->
+ rabbit_ct_helpers:set_config(
+ Config,
+ [
+ {source_queue_type, classic},
+ {source_queue_args, [{<<"x-queue-type">>, longstr, <<"classic">>}]},
+ {target_queue_type, quorum},
+ {target_queue_args, [{<<"x-queue-type">>, longstr, <<"quorum">>}]}
+ ]);
init_per_group(without_disambiguate, Config) ->
rabbit_ct_helpers:set_config(Config,
{disambiguate_step, []});
@@ -72,10 +118,15 @@ init_per_group(cluster_size_1 = Group, Config) ->
]),
init_per_group1(Group, Config1);
init_per_group(cluster_size_2 = Group, Config) ->
- Config1 = rabbit_ct_helpers:set_config(Config, [
- {rmq_nodes_count, 2}
- ]),
- init_per_group1(Group, Config1).
+ case rabbit_ct_helpers:is_mixed_versions() of
+ true ->
+ {skip, "not mixed versions compatible"};
+ _ ->
+ Config1 = rabbit_ct_helpers:set_config(Config, [
+ {rmq_nodes_count, 2}
+ ]),
+ init_per_group1(Group, Config1)
+ end.
init_per_group1(Group, Config) ->
SetupFederation = case Group of
@@ -88,15 +139,34 @@ init_per_group1(Group, Config) ->
{rmq_nodename_suffix, Suffix},
{rmq_nodes_clustered, false}
]),
- rabbit_ct_helpers:run_steps(Config1,
- rabbit_ct_broker_helpers:setup_steps() ++
- rabbit_ct_client_helpers:setup_steps() ++
- SetupFederation ++ Disambiguate).
+ Config2 = rabbit_ct_helpers:run_steps(Config1,
+ rabbit_ct_broker_helpers:setup_steps() ++
+ rabbit_ct_client_helpers:setup_steps() ++
+ SetupFederation ++ Disambiguate),
+ case ?config(target_queue_type, Config2) of
+ quorum ->
+ case rabbit_ct_broker_helpers:enable_feature_flag(Config2, quorum_queue) of
+ ok ->
+ Config2;
+ {skip, Skip} ->
+ Skip;
+ Other ->
+ {skip, Other}
+ end;
+ _ ->
+ Config2
+ end.
end_per_group(without_disambiguate, Config) ->
Config;
end_per_group(with_disambiguate, Config) ->
Config;
+end_per_group(classic_queue, Config) ->
+ Config;
+end_per_group(quorum_queue, Config) ->
+ Config;
+end_per_group(mixed, Config) ->
+ Config;
end_per_group(_, Config) ->
rabbit_ct_helpers:run_steps(Config,
rabbit_ct_client_helpers:teardown_steps() ++
@@ -116,16 +186,18 @@ simple(Config) ->
with_ch(Config,
fun (Ch) ->
expect_federation(Ch, <<"upstream">>, <<"fed.downstream">>)
- end, upstream_downstream()).
+ end, upstream_downstream(Config)).
multiple_upstreams(Config) ->
+ SourceArgs = ?config(source_queue_args, Config),
+ TargetArgs = ?config(target_queue_args, Config),
with_ch(Config,
fun (Ch) ->
expect_federation(Ch, <<"upstream">>, <<"fed12.downstream">>),
expect_federation(Ch, <<"upstream2">>, <<"fed12.downstream">>)
- end, [q(<<"upstream">>),
- q(<<"upstream2">>),
- q(<<"fed12.downstream">>)]).
+ end, [q(<<"upstream">>, SourceArgs),
+ q(<<"upstream2">>, SourceArgs),
+ q(<<"fed12.downstream">>, TargetArgs)]).
multiple_upstreams_pattern(Config) ->
set_upstream(Config, 0, <<"local453x">>,
@@ -145,41 +217,56 @@ multiple_upstreams_pattern(Config) ->
set_policy_pattern(Config, 0, <<"pattern">>, <<"^pattern\.">>, <<"local\\d+x">>),
+ SourceArgs = ?config(source_queue_args, Config),
+ TargetArgs = ?config(target_queue_args, Config),
with_ch(Config,
fun (Ch) ->
expect_federation(Ch, <<"upstream">>, <<"pattern.downstream">>, ?EXPECT_FEDERATION_TIMEOUT),
expect_federation(Ch, <<"upstream2">>, <<"pattern.downstream">>, ?EXPECT_FEDERATION_TIMEOUT)
- end, [q(<<"upstream">>),
- q(<<"upstream2">>),
- q(<<"pattern.downstream">>)]),
+ end, [q(<<"upstream">>, SourceArgs),
+ q(<<"upstream2">>, SourceArgs),
+ q(<<"pattern.downstream">>, TargetArgs)]),
clear_upstream(Config, 0, <<"local453x">>),
clear_upstream(Config, 0, <<"local3214x">>),
clear_policy(Config, 0, <<"pattern">>).
multiple_downstreams(Config) ->
+ Args = ?config(target_queue_args, Config),
with_ch(Config,
fun (Ch) ->
timer:sleep(?INITIAL_WAIT),
expect_federation(Ch, <<"upstream">>, <<"fed.downstream">>, ?EXPECT_FEDERATION_TIMEOUT),
expect_federation(Ch, <<"upstream">>, <<"fed.downstream2">>, ?EXPECT_FEDERATION_TIMEOUT)
- end, upstream_downstream() ++ [q(<<"fed.downstream2">>)]).
+ end, upstream_downstream(Config) ++ [q(<<"fed.downstream2">>, Args)]).
-bidirectional(Config) ->
+message_flow(Config) ->
+ %% TODO: specific source / target here
+ Args = ?config(source_queue_args, Config),
with_ch(Config,
fun (Ch) ->
timer:sleep(?INITIAL_WAIT),
publish_expect(Ch, <<>>, <<"one">>, <<"one">>, <<"first one">>, ?EXPECT_FEDERATION_TIMEOUT),
publish_expect(Ch, <<>>, <<"two">>, <<"two">>, <<"first two">>, ?EXPECT_FEDERATION_TIMEOUT),
- Seq = lists:seq(1, 100),
+ Seq = lists:seq(1, 50),
[publish(Ch, <<>>, <<"one">>, <<"bulk">>) || _ <- Seq],
[publish(Ch, <<>>, <<"two">>, <<"bulk">>) || _ <- Seq],
- expect(Ch, <<"one">>, repeat(150, <<"bulk">>)),
- expect(Ch, <<"two">>, repeat(50, <<"bulk">>)),
+ expect(Ch, <<"one">>, repeat(100, <<"bulk">>)),
expect_empty(Ch, <<"one">>),
- expect_empty(Ch, <<"two">>)
- end, [q(<<"one">>),
- q(<<"two">>)]).
+ expect_empty(Ch, <<"two">>),
+ [publish(Ch, <<>>, <<"one">>, <<"bulk">>) || _ <- Seq],
+ [publish(Ch, <<>>, <<"two">>, <<"bulk">>) || _ <- Seq],
+ expect(Ch, <<"two">>, repeat(100, <<"bulk">>)),
+ expect_empty(Ch, <<"one">>),
+ expect_empty(Ch, <<"two">>),
+ %% We clear the federation configuration to avoid a race condition
+ %% when deleting the queues in quorum mode. The federation link
+ %% would restart and lead to a state where nothing happened for
+ %% minutes.
+ clear_upstream_set(Config, 0, <<"one">>),
+ clear_upstream_set(Config, 0, <<"two">>)
+ end, [q(<<"one">>, Args),
+ q(<<"two">>, Args)]).
dynamic_reconfiguration(Config) ->
with_ch(Config,
@@ -199,9 +286,10 @@ dynamic_reconfiguration(Config) ->
set_upstream(Config, 0, <<"localhost">>, URI),
set_upstream(Config, 0, <<"localhost">>, URI),
expect_federation(Ch, <<"upstream">>, <<"fed.downstream">>)
- end, upstream_downstream()).
+ end, upstream_downstream(Config)).
federate_unfederate(Config) ->
+ Args = ?config(target_queue_args, Config),
with_ch(Config,
fun (Ch) ->
timer:sleep(?INITIAL_WAIT),
@@ -217,10 +305,11 @@ federate_unfederate(Config) ->
rabbit_ct_broker_helpers:set_policy(Config, 0,
<<"fed">>, <<"^fed\.">>, <<"all">>, [
{<<"federation-upstream-set">>, <<"upstream">>}])
- end, upstream_downstream() ++ [q(<<"fed.downstream2">>)]).
+ end, upstream_downstream(Config) ++ [q(<<"fed.downstream2">>, Args)]).
dynamic_plugin_stop_start(Config) ->
DownQ2 = <<"fed.downstream2">>,
+ Args = ?config(target_queue_args, Config),
with_ch(Config,
fun (Ch) ->
timer:sleep(?INITIAL_WAIT),
@@ -230,14 +319,17 @@ dynamic_plugin_stop_start(Config) ->
expect_federation(Ch, UpQ, DownQ2, ?EXPECT_FEDERATION_TIMEOUT),
%% Disable the plugin, the link disappears
+ ct:pal("Stopping rabbitmq_federation"),
ok = rabbit_ct_broker_helpers:disable_plugin(Config, 0, "rabbitmq_federation"),
expect_no_federation(Ch, UpQ, DownQ1),
expect_no_federation(Ch, UpQ, DownQ2),
- declare_queue(Ch, q(DownQ1)),
- declare_queue(Ch, q(DownQ2)),
+ maybe_declare_queue(Config, Ch, q(DownQ1, Args)),
+ maybe_declare_queue(Config, Ch, q(DownQ2, Args)),
+ ct:pal("Re-starting rabbitmq_federation"),
ok = rabbit_ct_broker_helpers:enable_plugin(Config, 0, "rabbitmq_federation"),
+ timer:sleep(?INITIAL_WAIT),
%% Declare a queue then re-enable the plugin, the links appear
wait_for_federation(
@@ -255,7 +347,7 @@ dynamic_plugin_stop_start(Config) ->
length(L) =:= 2
end),
expect_federation(Ch, UpQ, DownQ1, 120000)
- end, upstream_downstream() ++ [q(DownQ2)]).
+ end, upstream_downstream(Config) ++ [q(DownQ2, Args)]).
restart_upstream(Config) ->
[Rabbit, Hare] = rabbit_ct_broker_helpers:get_node_configs(Config,
@@ -266,8 +358,10 @@ restart_upstream(Config) ->
Downstream = rabbit_ct_client_helpers:open_channel(Config, Rabbit),
Upstream = rabbit_ct_client_helpers:open_channel(Config, Hare),
- declare_queue(Upstream, q(<<"test">>)),
- declare_queue(Downstream, q(<<"test">>)),
+ SourceArgs = ?config(source_queue_args, Config),
+ TargetArgs = ?config(target_queue_args, Config),
+ maybe_declare_queue(Config, Upstream, q(<<"test">>, SourceArgs)),
+ maybe_declare_queue(Config, Downstream, q(<<"test">>, TargetArgs)),
Seq = lists:seq(1, 100),
[publish(Upstream, <<>>, <<"test">>, <<"bulk">>) || _ <- Seq],
expect(Upstream, <<"test">>, repeat(25, <<"bulk">>)),
@@ -325,4 +419,9 @@ expect_no_federation(Ch, UpstreamQ, DownstreamQ) ->
expect(Ch, UpstreamQ, [<<"HELLO">>]).
upstream_downstream() ->
- [q(<<"upstream">>), q(<<"fed.downstream">>)].
+ upstream_downstream([]).
+
+upstream_downstream(Config) ->
+ SourceArgs = ?config(source_queue_args, Config),
+ TargetArgs = ?config(target_queue_args, Config),
+ [q(<<"upstream">>, SourceArgs), q(<<"fed.downstream">>, TargetArgs)].
diff --git a/deps/rabbitmq_federation/test/rabbit_federation_status_SUITE.erl b/deps/rabbitmq_federation/test/rabbit_federation_status_SUITE.erl
index 6b802a3f15..42dfe76198 100644
--- a/deps/rabbitmq_federation/test/rabbit_federation_status_SUITE.erl
+++ b/deps/rabbitmq_federation/test/rabbit_federation_status_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_federation_status_SUITE).
diff --git a/deps/rabbitmq_federation/test/rabbit_federation_test_util.erl b/deps/rabbitmq_federation/test/rabbit_federation_test_util.erl
index 534817a2a4..448a6b9cb8 100644
--- a/deps/rabbitmq_federation/test/rabbit_federation_test_util.erl
+++ b/deps/rabbitmq_federation/test/rabbit_federation_test_util.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_federation_test_util).
@@ -13,17 +13,24 @@
-compile(export_all).
+-deprecated({wait_for_federation,2,"Use rabbit_ct_helpers:await_condition or ?awaitMatch instead"}).
+
-import(rabbit_misc, [pget/2]).
setup_federation(Config) ->
+ setup_federation_with_upstream_params(Config, []).
+
+setup_federation_with_upstream_params(Config, ExtraParams) ->
rabbit_ct_broker_helpers:set_parameter(Config, 0,
<<"federation-upstream">>, <<"localhost">>, [
{<<"uri">>, rabbit_ct_broker_helpers:node_uri(Config, 0)},
- {<<"consumer-tag">>, <<"fed.tag">>}]),
+ {<<"consumer-tag">>, <<"fed.tag">>}
+ ] ++ ExtraParams),
rabbit_ct_broker_helpers:set_parameter(Config, 0,
<<"federation-upstream">>, <<"local5673">>, [
- {<<"uri">>, <<"amqp://localhost:1">>}]),
+ {<<"uri">>, <<"amqp://localhost:1">>}
+ ] ++ ExtraParams),
rabbit_ct_broker_helpers:set_parameter(Config, 0,
<<"federation-upstream-set">>, <<"upstream">>, [
@@ -176,10 +183,10 @@ expect([], _Timeout) ->
ok;
expect(Payloads, Timeout) ->
receive
- {#'basic.deliver'{}, #amqp_msg{payload = Payload}} ->
+ {#'basic.deliver'{delivery_tag = DTag}, #amqp_msg{payload = Payload}} ->
case lists:member(Payload, Payloads) of
true ->
- ct:pal("Consumed a message: ~p", [Payload]),
+ ct:pal("Consumed a message: ~p ~p left: ~p", [Payload, DTag, length(Payloads) - 1]),
expect(Payloads -- [Payload], Timeout);
false -> ?assert(false, rabbit_misc:format("received an unexpected payload ~p", [Payload]))
end
@@ -327,28 +334,41 @@ links(#'exchange.declare'{exchange = Name}) ->
xr(Name) -> rabbit_misc:r(<<"/">>, exchange, Name).
-with_ch(Config, Fun, Qs) ->
- Ch = rabbit_ct_client_helpers:open_channel(Config, 0),
- declare_all(Ch, Qs),
+with_ch(Config, Fun, Methods) ->
+ Ch = rabbit_ct_client_helpers:open_channel(Config),
+ declare_all(Config, Ch, Methods),
%% Clean up queues even after test failure.
try
Fun(Ch)
after
- delete_all(Ch, Qs),
+ delete_all(Ch, Methods),
rabbit_ct_client_helpers:close_channel(Ch)
end,
ok.
-declare_all(Ch, Qs) -> [declare_queue(Ch, Q) || Q <- Qs].
-delete_all(Ch, Qs) ->
- [delete_queue(Ch, Q) || #'queue.declare'{queue = Q} <- Qs].
+declare_all(Config, Ch, Methods) -> [maybe_declare_queue(Config, Ch, Op) || Op <- Methods].
+delete_all(Ch, Methods) ->
+ [delete_queue(Ch, Q) || #'queue.declare'{queue = Q} <- Methods].
-declare_queue(Ch, Q) ->
- amqp_channel:call(Ch, Q).
+maybe_declare_queue(Config, Ch, Method) ->
+ OneOffCh = rabbit_ct_client_helpers:open_channel(Config),
+ try
+ amqp_channel:call(OneOffCh, Method#'queue.declare'{passive = true})
+ catch exit:{{shutdown, {server_initiated_close, ?NOT_FOUND, _Message}}, _} ->
+ amqp_channel:call(Ch, Method)
+ after
+ catch rabbit_ct_client_helpers:close_channel(OneOffCh)
+ end.
delete_queue(Ch, Q) ->
amqp_channel:call(Ch, #'queue.delete'{queue = Q}).
q(Name) ->
+ q(Name, []).
+
+q(Name, undefined) ->
+ q(Name, []);
+q(Name, Args) ->
#'queue.declare'{queue = Name,
- durable = true}.
+ durable = true,
+ arguments = Args}.
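maybe_declare_queue/3 above uses a passive declare as an existence check: with passive = true the broker replies queue.declare_ok if the queue exists and closes the channel with a 404 (?NOT_FOUND) otherwise, which is why the probe runs on a throwaway channel. The same pattern in isolation, as a sketch (queue_exists/2 is illustrative and not part of the patch); note the probing channel is unusable after a 404 and must be discarded:

    queue_exists(Ch, Q) ->
        try
            #'queue.declare_ok'{} =
                amqp_channel:call(Ch, #'queue.declare'{queue = Q, passive = true}),
            true
        catch exit:{{shutdown, {server_initiated_close, 404, _}}, _} ->
            false
        end.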
diff --git a/deps/rabbitmq_federation/test/restart_federation_link_command_SUITE.erl b/deps/rabbitmq_federation/test/restart_federation_link_command_SUITE.erl
index 511bae6540..567cc081df 100644
--- a/deps/rabbitmq_federation/test/restart_federation_link_command_SUITE.erl
+++ b/deps/rabbitmq_federation/test/restart_federation_link_command_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(restart_federation_link_command_SUITE).
diff --git a/deps/rabbitmq_federation/test/unit_SUITE.erl b/deps/rabbitmq_federation/test/unit_SUITE.erl
index 7d0707f96c..14d072edb7 100644
--- a/deps/rabbitmq_federation/test/unit_SUITE.erl
+++ b/deps/rabbitmq_federation/test/unit_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2019-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2019-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(unit_SUITE).
diff --git a/deps/rabbitmq_federation/test/unit_inbroker_SUITE.erl b/deps/rabbitmq_federation/test/unit_inbroker_SUITE.erl
index f65dffbe8e..cd47ca459a 100644
--- a/deps/rabbitmq_federation/test/unit_inbroker_SUITE.erl
+++ b/deps/rabbitmq_federation/test/unit_inbroker_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(unit_inbroker_SUITE).
diff --git a/deps/rabbitmq_federation_management/BUILD.bazel b/deps/rabbitmq_federation_management/BUILD.bazel
new file mode 100644
index 0000000000..b818b1213f
--- /dev/null
+++ b/deps/rabbitmq_federation_management/BUILD.bazel
@@ -0,0 +1,64 @@
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze")
+load(
+ "//:rabbitmq.bzl",
+ "RABBITMQ_DIALYZER_OPTS",
+ "assert_suites",
+ "broker_for_integration_suites",
+ "rabbitmq_integration_suite",
+ "rabbitmq_lib",
+)
+
+APP_NAME = "rabbitmq_federation_management"
+
+APP_DESCRIPTION = "RabbitMQ Federation Management"
+
+APP_MODULE = "rabbit_federation_app"
+
+BUILD_DEPS = [
+ "//deps/rabbitmq_management_agent:bazel_erlang_lib",
+]
+
+DEPS = [
+ "//deps/rabbitmq_management:bazel_erlang_lib",
+]
+
+RUNTIME_DEPS = [
+ "//deps/amqp_client:bazel_erlang_lib",
+ "//deps/rabbit_common:bazel_erlang_lib",
+ "//deps/rabbit:bazel_erlang_lib",
+ "//deps/rabbitmq_federation:bazel_erlang_lib",
+]
+
+rabbitmq_lib(
+ app_description = APP_DESCRIPTION,
+ app_module = APP_MODULE,
+ app_name = APP_NAME,
+ build_deps = BUILD_DEPS,
+ runtime_deps = RUNTIME_DEPS,
+ deps = DEPS,
+)
+
+xref(tags = ["xref"])
+
+dialyze(
+ dialyzer_opts = RABBITMQ_DIALYZER_OPTS,
+ plt = "//:base_plt",
+ tags = ["dialyze"],
+)
+
+broker_for_integration_suites()
+
+PACKAGE = "deps/rabbitmq_federation_management"
+
+suites = [
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "federation_mgmt_SUITE",
+ ),
+]
+
+assert_suites(
+ suites,
+ glob(["test/**/*_SUITE.erl"]),
+)
diff --git a/deps/rabbitmq_federation_management/Makefile b/deps/rabbitmq_federation_management/Makefile
index 7f81abbf44..4442a79502 100644
--- a/deps/rabbitmq_federation_management/Makefile
+++ b/deps/rabbitmq_federation_management/Makefile
@@ -17,5 +17,5 @@ DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
ERLANG_MK_COMMIT = rabbitmq-tmp
-include rabbitmq-components.mk
-include erlang.mk
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
diff --git a/deps/rabbitmq_federation_management/erlang.mk b/deps/rabbitmq_federation_management/erlang.mk
deleted file mode 100644
index fce4be0b0a..0000000000
--- a/deps/rabbitmq_federation_management/erlang.mk
+++ /dev/null
@@ -1,7808 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
-ERLANG_MK_WITHOUT =
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
-
-# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
-
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
-
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simplify writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating Github-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = erlang library for JSON Web Token
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple non intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is a pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = redis erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += esh_mk
-pkg_esh_mk_name = esh_mk
-pkg_esh_mk_description = esh template engine plugin for erlang.mk
-pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
-pkg_esh_mk_fetch = git
-pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
-pkg_esh_mk_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = TOML language erlang parser
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = http://fixprotocol.org/ implementation.
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - an Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = The guards and for Erlang parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
-pkg_gun_homepage = http://ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang irc client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = Erlang driver for RethinkDB
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = library to handle mimetypes
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang Oauth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transformer for using new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between record and proplist
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = automatic Erlang node discovery via redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = pipelined erlang redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang Rest Client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy as nif for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an id generator for message service.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elastic Search
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX style broken HTML parser in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = transit format for erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU based collation Erlang module
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = a simple erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler svn repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based yaml loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = Zab protocol implementation in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
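-
-# Illustrative invocations of the search target above (the query string is
-# arbitrary); `q` is matched case-insensitively against both package names
-# and descriptions:
-#
-#   $ make search q=riak     # list Riak-related packages
-#   $ make search            # print every package in the index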
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
-
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
-
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# They both use the core_dep_plugin macro.
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
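-
-# Illustrative project-Makefile configuration (plugin and dependency names
-# are placeholders): a bare dependency name loads that dependency's
-# early-plugins.mk, while an entry containing a slash names the plugin
-# file explicitly.
-#
-#   DEP_EARLY_PLUGINS = some_build_dep other_dep/mk/early.mk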
-
-# Query functions.
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
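-
-# Worked example of the query functions above (dependency name and URL are
-# illustrative). Given a dependency declared as:
-#
-#   DEPS += cowboy
-#   dep_cowboy = git https://github.com/ninenines/cowboy master
-#
-# the queries resolve as follows:
-#
-#   $(call query_fetch_method,cowboy)  -> git
-#   $(call query_repo,cowboy)          -> https://github.com/ninenines/cowboy
-#   $(call query_version,cowboy)       -> master
-#   $(call query_absolute_path,cowboy) -> $(DEPS_DIR)/cowboy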
-
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
-
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly we don't want to include it here
-# otherwise it'll be treated both as an apps and a top-level project.
-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
-
-export NO_AUTOPATCH
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
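-
-# Illustrative: V selects among the *_verbose macros here and further below.
-# V=0 prints terse " DEP <name>" lines, V=1 lets make echo the raw recipe
-# commands, and V=2 additionally traces them with `set -x`, e.g.:
-#
-#   $ make deps V=2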
-
-# Optimization: don't recompile deps unless truly necessary.
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create ebin directory for all apps to make sure Erlang recognizes them
-# as proper OTP applications when using -include_lib. This is a temporary
-# fix, a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
-
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies once they have been compiled.
-# A developer working on the top-level project and some of its
-# dependencies at the same time may want to change this behavior.
-# There are two solutions:
-# 1. Set `FULL=1` so that all dependencies are visited and
-# recursively recompiled if necessary.
-# 2. Set `FORCE_REBUILD=` to the specific list of dependencies that
-# should be recompiled (instead of the whole set).
-
-FORCE_REBUILD ?=
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
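-
-# Illustrative invocations (dependency names are placeholders):
-#
-#   $ make FULL=1                        # revisit every dependency and rebuild as needed
-#   $ make FORCE_REBUILD="cowlib ranch"  # force a rebuild of just these dependencies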
-
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
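-
-# Illustrative: skip dependency handling entirely, e.g. for a quick
-# top-level rebuild when all deps are known to be up to date:
-#
-#   $ make SKIP_DEPS=1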
-
-# Deps related targets.
-
-# @todo rename GNUmakefile and makefile into Makefile first, if they exist
-# While the Makefile could also be named GNUmakefile or makefile,
-# in practice only Makefile is needed so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
-# if given. Do it for all 3 possible Makefile file names.
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
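-
-# Illustrative: to leave dependencies' own "include erlang.mk" lines
-# untouched, set this in the project Makefile before including erlang.mk:
-#
-#   NO_AUTOPATCH_ERLANG_MK = 1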
-
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-# Hex only has a package version. No need to look in the Erlang.mk packages.
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
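-
-# Illustrative hex dependency specifications (names and versions are
-# placeholders): word 2 is the package version used in the tarball name,
-# and an optional word 3 overrides the hex.pm package name when it differs
-# from the application name.
-#
-#   DEPS += cowlib
-#   dep_cowlib = hex 2.11.0
-#   dep_myapp = hex 1.0.0 published_pkg_name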
-
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility purposes with older Erlang.mk configuration.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
-
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
-
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- Output0 = [
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ],
- Output = case "é" of
- [233] -> unicode:characters_to_binary(Output0);
- _ -> Output0
- end,
- ok = file:write_file("$(1)", Output),
- halt()
-endef
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
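As a rough illustration, reconstructed from the Output0 term above, the generated $(PROJECT).d looks like this (file names are made up):

    # Generated by Erlang.mk. Edit at your own risk!

    src/my_mod.erl:: include/my_records.hrl src/my_transform.erl; @touch $@

    COMPILE_FIRST += my_transform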
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
- echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
-
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
- @:
-
-test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
-test_erlc_verbose_2 = set -x;
-test_erlc_verbose = $(test_erlc_verbose_$(V))
-
-define compile_test_erl
- $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(1)
-endef
-
-ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
-$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
- $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want to fail due to
-# warnings when used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
- " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
- " list-templates List available templates"
-
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
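A brief, hedged illustration of the SP knob handled above: it is recorded in the generated Makefile and, per the comment in bs_Makefile, selects that many spaces instead of a tab when files are created from templates. The project name is a placeholder.

    # Generated files should then use 4-space indentation instead of tabs.
    $ make -f erlang.mk bootstrap PROJECT=my_app SP=4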
-
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
-
-list-templates:
- $(verbose) @echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code. The "gcc" MSYS2 package also doesn't.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
- "The CI_OTP variable must be defined with the Erlang versions" \
- "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
-
-# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifdef CONCUERROR_TESTS
-
-.PHONY: concuerror distclean-concuerror
-
-# Configuration
-
-CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
-CONCUERROR_OPTS ?=
-
-# Core targets.
-
-check:: concuerror
-
-ifndef KEEP_LOGS
-distclean:: distclean-concuerror
-endif
-
-# Plugin-specific targets.
-
-$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
- $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
- $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
-
-$(CONCUERROR_LOGS_DIR):
- $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
-
-define concuerror_html_report
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="utf-8">
-<title>Concuerror HTML report</title>
-</head>
-<body>
-<h1>Concuerror HTML report</h1>
-<p>Generated on $(concuerror_date)</p>
-<ul>
-$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
-</ul>
-</body>
-</html>
-endef
-
-concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
- $(eval concuerror_date := $(shell date))
- $(eval concuerror_targets := $^)
- $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
-
-define concuerror_target
-.PHONY: concuerror-$1-$2
-
-concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
- $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
- --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
- -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
- $$(CONCUERROR_OPTS) -m $1 -t $2
-endef
-
-$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
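For illustration, a hedged sketch of how these generated targets are typically driven; the module and test names are placeholders.

    # Each entry is "module:test_function"; ":" becomes "-" in the target name.
    CONCUERROR_TESTS = my_mod:race_test my_mod:deadlock_test

    $ make concuerror-my_mod-race_test  # one test, log in logs/concuerror-my_mod-race_test.txt
    $ make concuerror                   # all tests plus the HTML index above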
-
-distclean-concuerror:
- $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
-
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ct apps-ct distclean-ct
-
-# Configuration.
-
-CT_OPTS ?=
-
-ifneq ($(wildcard $(TEST_DIR)),)
-ifndef CT_SUITES
-CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
-endif
-endif
-CT_SUITES ?=
-CT_LOGS_DIR ?= $(CURDIR)/logs
-
-# Core targets.
-
-tests:: ct
-
-ifndef KEEP_LOGS
-distclean:: distclean-ct
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Common_test targets:" \
- " ct Run all the common_test suites for this project" \
- "" \
- "All your common_test suites have their associated targets." \
- "A suite named http_SUITE can be ran using the ct-http target."
-
-# Plugin-specific targets.
-
-CT_RUN = ct_run \
- -no_auto_compile \
- -noinput \
- -pa $(CURDIR)/ebin $(TEST_DIR) \
- -dir $(TEST_DIR) \
- -logdir $(CT_LOGS_DIR)
-
-ifeq ($(CT_SUITES),)
-ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
-else
-# We do not run tests if we are in an apps/* with no test directory.
-ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
-ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
-endif
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-define ct_app_target
-apps-ct-$1: test-build
- $$(MAKE) -C $1 ct IS_APP=1
-endef
-
-$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
-
-apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
-endif
-
-ifdef t
-ifeq (,$(findstring :,$t))
-CT_EXTRA = -group $t
-else
-t_words = $(subst :, ,$t)
-CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
-endif
-else
-ifdef c
-CT_EXTRA = -case $c
-else
-CT_EXTRA =
-endif
-endif
-
-define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
-endef
-
-$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
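For illustration, how the generated per-suite targets combine with the t and c variables above; the suite, group and case names are placeholders.

    $ make ct                        # every suite in $(TEST_DIR)
    $ make ct-http                   # only http_SUITE
    $ make ct-http t=chunked         # one group
    $ make ct-http t=chunked:to_hex  # one case within a group
    $ make ct-http c=to_hex          # one case, any group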
-
-distclean-ct:
- $(gen_verbose) rm -rf $(CT_LOGS_DIR)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: plt distclean-plt dialyze
-
-# Configuration.
-
-DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
-export DIALYZER_PLT
-
-PLT_APPS ?=
-DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-DIALYZER_PLT_OPTS ?=
-
-# Core targets.
-
-check:: dialyze
-
-distclean:: distclean-plt
-
-help::
- $(verbose) printf "%s\n" "" \
- "Dialyzer targets:" \
- " plt Build a PLT file for this project" \
- " dialyze Analyze the project using Dialyzer"
-
-# Plugin-specific targets.
-
-define filter_opts.erl
- Opts = init:get_plain_arguments(),
- {Filtered, _} = lists:foldl(fun
- (O, {Os, true}) -> {[O|Os], false};
- (O = "-D", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-I", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-pa", {Os, _}) -> {[O|Os], true};
- (_, Acc) -> Acc
- end, {[], false}, Opts),
- io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
- halt().
-endef
-
-# DIALYZER_PLT is a variable understood directly by Dialyzer.
-#
-# We append the path to erts at the end of the PLT. This works
-# because the PLT file is in the external term format and the
-# function binary_to_term/1 ignores any trailing data.
-$(DIALYZER_PLT): deps app
- $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
- while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
- $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
- erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
- $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
-
-plt: $(DIALYZER_PLT)
-
-distclean-plt:
- $(gen_verbose) rm -f $(DIALYZER_PLT)
-
-ifneq ($(wildcard $(DIALYZER_PLT)),)
-dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
- $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
- grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
- rm $(DIALYZER_PLT); \
- $(MAKE) plt; \
- fi
-else
-dialyze: $(DIALYZER_PLT)
-endif
- $(verbose) dialyzer --no_native `$(ERL) \
- -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
- -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
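A small, hedged example of how a project typically tunes this plugin; the extra applications and warning options below are placeholders.

    # Hypothetical user Makefile fragment.
    # Added to erts/kernel/stdlib and the project deps when building the PLT.
    PLT_APPS = crypto public_key ssl
    DIALYZER_OPTS = -Werror_handling -Wunmatched_returns

    $ make plt      # build .$(PROJECT).plt
    $ make dialyze  # analyze; the PLT is rebuilt when the recorded erts path no longer matches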
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
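A hedged sketch of how the ErlyDTL hook above is typically configured, assuming erlydtl is available as a build dependency; the paths and names are placeholders. Per the naming rules above, templates/hello.dtl compiles to an ebin/hello_dtl.beam module.

    # Hypothetical user Makefile fragment.
    BUILD_DEPS += erlydtl
    DTL_PATH = templates/
    DTL_SUFFIX = _dtl
    # templates/hello.dtl -> module hello_dtl, rebuilt when the .dtl or the Makefile changes.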
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
- " escript Build an executable escript archive" \
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
- $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP_FILE))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: eunit apps-eunit
-
-# Configuration
-
-EUNIT_OPTS ?=
-EUNIT_ERL_OPTS ?=
-
-# Core targets.
-
-tests:: eunit
-
-help::
- $(verbose) printf "%s\n" "" \
- "EUnit targets:" \
- " eunit Run all the EUnit tests for this project"
-
-# Plugin-specific targets.
-
-define eunit.erl
- $(call cover.erl)
- CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
- ok -> ok;
- error -> halt(2)
- end,
- CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
- halt()
-endef
-
-EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
-else
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
-endif
-else
-EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
-EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
-
-EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
- $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
-
-eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
-ifneq ($(wildcard src/ $(TEST_DIR)),)
- $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-apps-eunit: test-build
- $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
- [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
- exit $$eunit_retcode
-endif
-endif
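For illustration, how the t variable above narrows what EUnit runs; the module and function names are placeholders.

    $ make eunit                        # all modules from ebin/ and $(TEST_DIR)
    $ make eunit t=my_mod               # one module
    $ make eunit t=my_mod:timeout_test  # one 0-arity test function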
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper)
-.PHONY: proper
-
-# Targets.
-
-tests:: proper
-
-define proper_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- Module = fun(M) ->
- [true] =:= lists:usort([
- case atom_to_list(F) of
- "prop_" ++ _ ->
- io:format("Testing ~p:~p/0~n", [M, F]),
- proper:quickcheck(M:F(), nocolors);
- _ ->
- true
- end
- || {F, 0} <- M:module_info(exports)])
- end,
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]);
- module -> Module($(2));
- function -> proper:quickcheck($(2), nocolors)
- end,
- CoverExport("$(COVER_DATA_DIR)/proper.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-proper: test-build cover-data-dir
- $(verbose) $(call erlang,$(call proper_check.erl,module,$(t)))
-else
-proper: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)()))
-endif
-else
-proper: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES)))
-endif
-endif
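Similarly for the PropEr hook: properties are the exported 0-arity prop_* functions collected above; the names below are placeholders.

    $ make proper                               # every prop_* in every compiled module
    $ make proper t=prop_my_mod                 # one module
    $ make proper t=prop_my_mod:prop_roundtrip  # one property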
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Verbosity.
-
-proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
-proto_verbose = $(proto_verbose_$(V))
-
-# Core targets.
-
-ifneq ($(wildcard src/),)
-ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
-PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
-ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
-
-ifeq ($(PROTO_FILES),)
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
- $(verbose) :
-else
-# Rebuild proto files when the Makefile changes.
-# We exclude $(PROJECT).d to avoid a circular dependency.
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(PROTO_FILES); \
- fi
- $(verbose) touch $@
-
-$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
-endif
-
-ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
-define compile_proto.erl
- [begin
- protobuffs_compile:generate_source(F, [
- {output_include_dir, "./include"},
- {output_src_dir, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-else
-define compile_proto.erl
- [begin
- gpb_compile:file(F, [
- {include_as_lib, true},
- {module_name_suffix, "_pb"},
- {o_hrl, "./include"},
- {o_erl, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-endif
-
-ifneq ($(PROTO_FILES),)
-$(PROJECT).d:: $(PROTO_FILES)
- $(verbose) mkdir -p ebin/ include/
- $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
-endif
-endif
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
-
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
-endif
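
As a usage sketch, assuming a relx.config at the project root (the overrides below are optional and the values illustrative):

    PROJECT = my_app
    RELX_OUTPUT_DIR = _rel
    RELX_TAR = 0        # skip the tarball step
    include erlang.mk

    #   make rel    # build the release (plus a tarball when RELX_TAR=1)
    #   make run    # start _rel/<name>/bin/<name>, in console mode when
    #               # relx.config enables extended_start_script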
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
-		"  shell              Run an Erlang shell with SHELL_OPTS or a reasonable default"
-
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
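
A sketch of typical settings for the shell plugin (project, node and app names are hypothetical):

    PROJECT = my_app
    SHELL_OPTS = -sname my_app_dev -s my_app
    include erlang.mk

    #   make shell   # builds SHELL_DEPS if any are listed, then runs:
    #                # erl -pa <CURDIR>/ebin <TEST_DIR> -sname my_app_dev -s my_app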
-
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
-		"ReST sources and the 'conf.py' file are expected in the directory pointed to" \
-		"by SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists the formats to build" \
-		"(only 'html' is generated by default); the target directory can be set by" \
- 'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
- "Additional Sphinx options can be set in SPHINX_OPTS."
-
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
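
For example, a project could build both HTML and man pages into custom directories (directory names are illustrative):

    SPHINX_SOURCE = doc
    SPHINX_FORMATS = html man
    sphinx_html_output = doc/output/html
    sphinx_man_output = doc/output/man

    #   make sphinx   # runs sphinx-build once per format listed above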
-
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
-.PHONY: triq
-
-# Targets.
-
-tests:: triq
-
-define triq_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
- module -> triq:check($(2));
- function -> triq:check($(2))
- end,
- CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-triq: test-build cover-data-dir
- $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
-else
-triq: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
-endif
-else
-triq: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
- ' xref Run Xrefr using $$XREF_CONFIG as config file if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
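
A usage sketch, assuming an xref_runner configuration file in the project root (the file name is an assumption; xrefr reads whatever XREF_CONFIG points to):

    XREF_CONFIG = $(CURDIR)/xref.config

    #   make xref   # fetches xrefr on first use, then runs: xrefr -c $(XREF_CONFIG)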
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-COVER_REPORT_DIR ?= cover
-COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
-
-ifdef COVER
-COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
-COVER_DEPS ?=
-endif
-
-# Code coverage for Common Test.
-
-ifdef COVER
-ifdef CT_RUN
-ifneq ($(wildcard $(TEST_DIR)),)
-test-build:: $(TEST_DIR)/ct.cover.spec
-
-$(TEST_DIR)/ct.cover.spec: cover-data-dir
- $(gen_verbose) printf "%s\n" \
- "{incl_app, '$(PROJECT)', details}." \
- "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
- $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
- $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
-
-CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
-endif
-endif
-endif
-
-# Code coverage for other tools.
-
-ifdef COVER
-define cover.erl
- CoverSetup = fun() ->
- Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
- $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
- $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
- end
- end || Dir <- Dirs]
- end,
- CoverExport = fun(Filename) -> cover:export(Filename) end,
-endef
-else
-define cover.erl
- CoverSetup = fun() -> ok end,
- CoverExport = fun(_) -> ok end,
-endef
-endif
-
-# Core targets
-
-ifdef COVER
-ifneq ($(COVER_REPORT_DIR),)
-tests::
- $(verbose) $(MAKE) --no-print-directory cover-report
-endif
-
-cover-data-dir: | $(COVER_DATA_DIR)
-
-$(COVER_DATA_DIR):
- $(verbose) mkdir -p $(COVER_DATA_DIR)
-else
-cover-data-dir:
-endif
-
-clean:: coverdata-clean
-
-ifneq ($(COVER_REPORT_DIR),)
-distclean:: cover-report-clean
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Cover targets:" \
-		"  cover-report  Generate an HTML coverage report from previously collected" \
- " cover data." \
- " all.coverdata Merge all coverdata files into all.coverdata." \
- "" \
-		"If COVER=1 is set, coverage data is generated by the eunit and ct targets. The" \
-		"tests target additionally generates an HTML coverage report from the combined" \
-		"coverdata files from each of these testing tools. HTML reports can be disabled" \
- "by setting COVER_REPORT_DIR to empty."
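
A sketch of the workflow the help text above describes (the dependency name passed to COVER_DEPS is a hypothetical choice):

    # Optionally widen what gets cover-compiled:
    COVER_DEPS = cowlib

    #   make tests COVER=1     # run the suites with cover enabled, then build
    #                          # $(COVER_REPORT_DIR)/index.html
    #   make all.coverdata     # merge every *.coverdata file into one
    #   make coverdata-clean   # drop collected coverdata between runs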
-
-# Plugin specific targets
-
-COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
-
-.PHONY: coverdata-clean
-coverdata-clean:
- $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
-
-# Merge all coverdata files into one.
-define cover_export.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
-endef
-
-all.coverdata: $(COVERDATA) cover-data-dir
- $(gen_verbose) $(call erlang,$(cover_export.erl))
-
-# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
-# empty if you want the coverdata files but not the HTML report.
-ifneq ($(COVER_REPORT_DIR),)
-
-.PHONY: cover-report-clean cover-report
-
-cover-report-clean:
- $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
-ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
- $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
-endif
-
-ifeq ($(COVERDATA),)
-cover-report:
-else
-
-# Modules which include eunit.hrl always contain one line without coverage
-# because eunit defines test/0 which is never called. We compensate for this.
-EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
- grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
- | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
-
-define cover_report.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- Ms = cover:imported_modules(),
- [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
- ++ ".COVER.html", [html]) || M <- Ms],
- Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
- EunitHrlMods = [$(EUNIT_HRL_MODS)],
- Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
- true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
- TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
- TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
- Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
- TotalPerc = Perc(TotalY, TotalN),
- {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
- io:format(F, "<!DOCTYPE html><html>~n"
- "<head><meta charset=\"UTF-8\">~n"
- "<title>Coverage report</title></head>~n"
- "<body>~n", []),
- io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
- io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
- [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
- "<td>~p%</td></tr>~n",
- [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
- How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
- Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
- io:format(F, "</table>~n"
- "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
- "</body></html>", [How, Date]),
- halt().
-endef
-
-cover-report:
- $(verbose) mkdir -p $(COVER_REPORT_DIR)
- $(gen_verbose) $(call erlang,$(cover_report.erl))
-
-endif
-endif # ifneq ($(COVER_REPORT_DIR),)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
-
-endif
-endif
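
A sketch of how the self-extracting archive is produced, assuming a relx.config is present so RELX_REL is defined:

    #   make rel SFX=1
    # builds the release and its tarball, then writes
    # $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run, a script that unpacks the
    # tarball into a temporary directory and starts the release in console mode.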
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
-
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
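
As a hedged sketch of the two spellings the loop above accepts (dependency and file names are hypothetical):

    # No slash: load plugins.mk from the named dependency.
    DEP_PLUGINS = my_dep

    # With a slash: load that exact file; the dependency name is taken
    # from the first path component.
    DEP_PLUGINS = my_dep/mk/extra-rules.mk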
-
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are always included,
-# regardless of the type of dependencies requested.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
-
-# Allow fetch-deps to be combined with $(DEP_TYPES) so that multiple types
-# of dependencies can be fetched with a single target.
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
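
For instance, the conditionals above allow the following invocations (a sketch of the encoded behaviour):

    #   make fetch-deps                        # fetch normal (and build) deps only
    #   make fetch-deps DEP_TYPES='doc test'   # also fetch DOC_DEPS and TEST_DEPS
    #   make fetch-test-deps                   # comparable, via the dedicated target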
-
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
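
A usage sketch for the query targets defined below (the field values come from query_* helpers defined elsewhere in erlang.mk, so treat the exact output shape as an assumption):

    #   make query-deps                     # one line per dependency:
    #                                       #   <project>: <name> <fetch_method> <repo> <version>
    #   make query-deps QUERY='name repo'   # restrict the reported fields
    #   make query-test-deps                # the same report for TEST_DEPS only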
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
diff --git a/deps/rabbitmq_federation_management/priv/www/js/tmpl/federation-upstreams.ejs b/deps/rabbitmq_federation_management/priv/www/js/tmpl/federation-upstreams.ejs
index 5b3e14d063..838eac1eb3 100644
--- a/deps/rabbitmq_federation_management/priv/www/js/tmpl/federation-upstreams.ejs
+++ b/deps/rabbitmq_federation_management/priv/www/js/tmpl/federation-upstreams.ejs
@@ -45,7 +45,7 @@
<td class="r"><%= fmt_time(upstream.value['message-ttl'], 'ms') %></td>
<td class="r"><%= fmt_string(upstream.value['ha-policy']) %></td>
<td class="r"><%= fmt_string(upstream.value['queue']) %></td>
- <td class="r"><%= upstream.value['consumer-tag'] %></td>
+ <td class="r"><%= fmt_string(upstream.value['consumer-tag']) %></td>
</tr>
<% } %>
</tbody>
diff --git a/deps/rabbitmq_federation_management/rabbitmq-components.mk b/deps/rabbitmq_federation_management/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/rabbitmq_federation_management/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Define default goal to `all` because this file defines some targets
-# before the inclusion of erlang.mk leading to the wrong target becoming
-# the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
-
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to check out branches that match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch, or fall back to `stable` or `master`, whichever was the
-# base of the topic branch.
-
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
-
-# Third-party dependencies version pinning.
-#
-# We do that in this file, which is copied in all projects, to ensure
-# all projects use the same versions. It avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# Erlang.mk does not rebuild dependencies by default once they have been
-# compiled, except for those listed in the `$(FORCE_REBUILD)` variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this eases
-# the work on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project
-# repository URL, if it's a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name are replaced by the
-#   target's properties:
-# eg. rabbitmq-common is replaced by rabbitmq-codegen
-# eg. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fallback to RabbitMQ
-# upstream which is GitHub.
-
-# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following pattern:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
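
For example, substituting the repository name inside a fetch URL (the URL is illustrative):

    #   $(call subst_repo_name,rabbitmq-server,rabbitmq-codegen,\
    #          https://github.com/rabbitmq/rabbitmq-server.git)
    #   => https://github.com/rabbitmq/rabbitmq-codegen.git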
-
-# Macro to replace both the project's name (eg. "rabbit_common") and
-# repository name (eg. "rabbitmq-common") by the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespace in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
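
For example, reading one of the dep_* entries above through these helpers (the values shown are what the macros expand to):

    #   dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
    #   $(call rmq_cmp_repo_name,rabbit_common)  => rabbitmq-common
    #   $(call dep_rmq_commits,rabbit_common)    => $(current_rmq_ref) $(base_rmq_ref) master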
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level project's one.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is a coincidence that we found a
-# `rabbitmq-components.mk` in upper directories.
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
diff --git a/deps/rabbitmq_federation_management/src/rabbit_federation_mgmt.erl b/deps/rabbitmq_federation_management/src/rabbit_federation_mgmt.erl
index bf3dbbf44a..15d90e68db 100644
--- a/deps/rabbitmq_federation_management/src/rabbit_federation_mgmt.erl
+++ b/deps/rabbitmq_federation_management/src/rabbit_federation_mgmt.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_federation_mgmt).
diff --git a/deps/rabbitmq_federation_management/test/federation_mgmt_SUITE.erl b/deps/rabbitmq_federation_management/test/federation_mgmt_SUITE.erl
index a5a8efcf77..f479763400 100644
--- a/deps/rabbitmq_federation_management/test/federation_mgmt_SUITE.erl
+++ b/deps/rabbitmq_federation_management/test/federation_mgmt_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(federation_mgmt_SUITE).
@@ -27,6 +27,11 @@ groups() ->
]}
].
+suite() ->
+ [
+ {timetrap, {minutes, 5}}
+ ].
+
%% -------------------------------------------------------------------
%% Testsuite setup/teardown.
%% -------------------------------------------------------------------
@@ -244,7 +249,8 @@ assert_code(CodeExp, CodeAct, Type, Path, Body) ->
end.
decode(?OK, _Headers, ResBody) ->
- cleanup(rabbit_json:decode(rabbit_data_coercion:to_binary(ResBody)));
+ JSON = rabbit_data_coercion:to_binary(ResBody),
+ cleanup(rabbit_json:decode(JSON));
decode(_, Headers, _ResBody) -> Headers.
cleanup(L) when is_list(L) ->
diff --git a/deps/rabbitmq_jms_topic_exchange/BUILD.bazel b/deps/rabbitmq_jms_topic_exchange/BUILD.bazel
new file mode 100644
index 0000000000..66fb87f83c
--- /dev/null
+++ b/deps/rabbitmq_jms_topic_exchange/BUILD.bazel
@@ -0,0 +1,69 @@
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze")
+load(
+ "//:rabbitmq.bzl",
+ "RABBITMQ_DIALYZER_OPTS",
+ "assert_suites",
+ "broker_for_integration_suites",
+ "rabbitmq_integration_suite",
+ "rabbitmq_lib",
+ "rabbitmq_suite",
+)
+
+APP_NAME = "rabbitmq_jms_topic_exchange"
+
+APP_DESCRIPTION = "RabbitMQ JMS topic selector exchange plugin"
+
+APP_MODULE = "rabbit_federation_app"
+
+DEPS = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+]
+
+RUNTIME_DEPS = [
+ "//deps/rabbit:bazel_erlang_lib",
+]
+
+rabbitmq_lib(
+ app_description = APP_DESCRIPTION,
+ app_module = APP_MODULE,
+ app_name = APP_NAME,
+ runtime_deps = RUNTIME_DEPS,
+ deps = DEPS,
+)
+
+xref(tags = ["xref"])
+
+dialyze(
+ dialyzer_opts = RABBITMQ_DIALYZER_OPTS,
+ plt = "//:base_plt",
+ tags = ["dialyze"],
+)
+
+broker_for_integration_suites()
+
+PACKAGE = "deps/rabbitmq_jms_topic_exchange"
+
+suites = [
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "rjms_topic_selector_SUITE",
+ ),
+ rabbitmq_suite(
+ name = "rjms_topic_selector_unit_SUITE",
+ size = "small",
+ deps = [
+ "//deps/amqp_client:bazel_erlang_lib",
+ "//deps/rabbit_common:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_suite(
+ name = "sjx_evaluation_SUITE",
+ size = "small",
+ ),
+]
+
+assert_suites(
+ suites,
+ glob(["test/**/*_SUITE.erl"]),
+)
diff --git a/deps/rabbitmq_jms_topic_exchange/CONTRIBUTING.md b/deps/rabbitmq_jms_topic_exchange/CONTRIBUTING.md
index 23a92fef9c..9722e973fb 100644
--- a/deps/rabbitmq_jms_topic_exchange/CONTRIBUTING.md
+++ b/deps/rabbitmq_jms_topic_exchange/CONTRIBUTING.md
@@ -13,7 +13,7 @@ The process is fairly standard:
* Create a branch with a descriptive name in the relevant repositories
* Make your changes, run tests, commit with a [descriptive message](https://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
* Submit pull requests with an explanation what has been changed and **why**
- * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Submit a filled out and signed [Contributor Agreement](https://cla.pivotal.io/) if needed (see below)
* Be patient. We will get to your pull request eventually
If what you are going to work on is a substantial change, please first ask the core team
@@ -28,7 +28,7 @@ See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
## Contributor Agreement
If you want to contribute a non-trivial change, please submit a signed copy of our
-[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+[Contributor Agreement](https://cla.pivotal.io/) around the time
you submit your pull request. This will make it much easier (in some cases, possible)
for the RabbitMQ team at Pivotal to merge your contribution.
diff --git a/deps/rabbitmq_jms_topic_exchange/LICENSES.txt b/deps/rabbitmq_jms_topic_exchange/LICENSES.txt
deleted file mode 100644
index aca032b7ad..0000000000
--- a/deps/rabbitmq_jms_topic_exchange/LICENSES.txt
+++ /dev/null
@@ -1,736 +0,0 @@
-open_source_license.txt
-
-JMS Client for Pivotal RabbitMQ 1.4.6 GA
-
-===========================================================================
-
-The following copyright statements and licenses apply to various open
-source software packages (or portions thereof) that are distributed with
-this Pivotal Software, Inc. Product.
-
-The Pivotal Product may also include other Pivotal components, which may
-contain additional open source software packages. One or more such
-open_source_licenses.txt files may therefore accompany this Pivotal
-Product.
-
-The Pivotal Product that includes this file does not necessarily use all
-the open source software packages referred to below and may also only
-use portions of a given package.
-
-
-=============== TABLE OF CONTENTS =============================
-
-
-The following is a listing of the open source components detailed in
-this document. This list is provided for your convenience; please read
-further if you wish to review the copyright notice(s) and the full text
-of the license associated with each component.
-
-
-
-
-SECTION 1: BSD-STYLE, MIT-STYLE, OR SIMILAR STYLE LICENSES
-
- >>> slf4j-api-1.7.5
-
-
-
-SECTION 2: Apache License, V2.0
-
- >>> geronimo-jms_1.1_spec-1.1.1
-
-
-
-SECTION 3: Mozilla Public License, v2.0
-
- >>> amqp-client
-
-
-
-APPENDIX. Standard License Files
-
- >>> Apache License, V2.0
-
- >>> Mozilla Public License, v2.0
-
-
-
---------------- SECTION 1: BSD-STYLE, MIT-STYLE, OR SIMILAR STYLE LICENSES ----------
-
-BSD-STYLE, MIT-STYLE, OR SIMILAR STYLE LICENSES are applicable to the following component(s).
-
-
->>> slf4j-api-1.7.5
-
-Copyright (c) 2004-2011 QOS.ch
-All rights reserved.
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-"Software"), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
-LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
-WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-
---------------- SECTION 2: Apache License, V2.0 ----------
-
-Apache License, V2.0 is applicable to the following component(s).
-
-
->>> geronimo-jms_1.1_spec-1.1.1
-
-Apache Geronimo
-Copyright 2003-2008 The Apache Software Foundation
-
-This product includes software developed by
-The Apache Software Foundation (https://www.apache.org/).
-
-
---------------- SECTION 3: Mozilla Public License, v2.0 ----------
-
-Mozilla Public License, v2.0 is applicable to the following component(s).
-
-
->>> amqp-client
-
-// The contents of this file are subject to the Mozilla Public License
-// Version 1.1 (the "License"); you may not use this file except in
-// compliance with the License. You may obtain a copy of the License
-// at https://www.mozilla.org/MPL/
-//
-// Software distributed under the License is distributed on an "AS IS"
-// basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-// the License for the specific language governing rights and
-// limitations under the License.
-//
-// The Original Code is RabbitMQ.
-//
-// The Initial Developer of the Original Code is GoPivotal, Inc.
-// Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
-
-ADDITIONAL LICENSE INFORMATION:
-
-> Apache 2.0
-
-amqp-client-3.5.6-sources.jar\com\rabbitmq\tools\json\JSONWriter.java
-
-/*
- Copyright (c) 2006-2007 Frank Carver
- Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All Rights Reserved
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
-
-
-> Public Domain
-
-amqp-client-sources.jar\com\rabbitmq\client\impl\VariableLinkedBlockingQueue.java
-
-/*
- * Modifications Copyright 2015 Pivotal Software, Inc and licenced as per
- * the rest of the RabbitMQ Java client.
- */
-* Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
- * https://creativecommons.org/licenses/publicdomain
- */
-
-
-=============== APPENDIX. Standard License Files ==============
-
-
-
---------------- SECTION 1: Apache License, V2.0 -----------
-
-Apache License
-
-Version 2.0, January 2004
-http://www.apache.org/licenses/
-
-TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-1. Definitions.
-
-"License" shall mean the terms and conditions for use, reproduction,
-and distribution as defined by Sections 1 through 9 of this document.
-
-"Licensor" shall mean the copyright owner or entity authorized by the
-copyright owner that is granting the License.
-
-"Legal Entity" shall mean the union of the acting entity and all other
-entities that control, are controlled by, or are under common control
-with that entity. For the purposes of this definition, "control" means
-(i) the power, direct or indirect, to cause the direction or management
-of such entity, whether by contract or otherwise, or (ii) ownership
-of fifty percent (50%) or more of the outstanding shares, or (iii)
-beneficial ownership of such entity.
-
-"You" (or "Your") shall mean an individual or Legal Entity exercising
-permissions granted by this License.
-
-"Source" form shall mean the preferred form for making modifications,
-including but not limited to software source code, documentation source,
-and configuration files.
-
-"Object" form shall mean any form resulting from mechanical transformation
-or translation of a Source form, including but not limited to compiled
-object code, generated documentation, and conversions to other media
-types.
-
-"Work" shall mean the work of authorship, whether in Source or
-Object form, made available under the License, as indicated by a copyright
-notice that is included in or attached to the work (an example is provided
-in the Appendix below).
-
-"Derivative Works" shall mean any work, whether in Source or Object form,
-that is based on (or derived from) the Work and for which the editorial
-revisions, annotations, elaborations, or other modifications represent,
-as a whole, an original work of authorship. For the purposes of this
-License, Derivative Works shall not include works that remain separable
-from, or merely link (or bind by name) to the interfaces of, the Work
-and Derivative Works thereof.
-
-"Contribution" shall mean any work of authorship, including the
-original version of the Work and any modifications or additions to
-that Work or Derivative Works thereof, that is intentionally submitted
-to Licensor for inclusion in the Work by the copyright owner or by an
-individual or Legal Entity authorized to submit on behalf of the copyright
-owner. For the purposes of this definition, "submitted" means any form of
-electronic, verbal, or written communication sent to the Licensor or its
-representatives, including but not limited to communication on electronic
-mailing lists, source code control systems, and issue tracking systems
-that are managed by, or on behalf of, the Licensor for the purpose of
-discussing and improving the Work, but excluding communication that is
-conspicuously marked or otherwise designated in writing by the copyright
-owner as "Not a Contribution."
-
-"Contributor" shall mean Licensor and any individual or Legal Entity
-on behalf of whom a Contribution has been received by Licensor and
-subsequently incorporated within the Work.
-
-2. Grant of Copyright License.
-Subject to the terms and conditions of this License, each Contributor
-hereby grants to You a perpetual, worldwide, non-exclusive, no-charge,
-royalty-free, irrevocable copyright license to reproduce, prepare
-Derivative Works of, publicly display, publicly perform, sublicense, and
-distribute the Work and such Derivative Works in Source or Object form.
-
-3. Grant of Patent License.
-Subject to the terms and conditions of this License, each Contributor
-hereby grants to You a perpetual, worldwide, non-exclusive, no-charge,
-royalty- free, irrevocable (except as stated in this section) patent
-license to make, have made, use, offer to sell, sell, import, and
-otherwise transfer the Work, where such license applies only to those
-patent claims licensable by such Contributor that are necessarily
-infringed by their Contribution(s) alone or by combination of
-their Contribution(s) with the Work to which such Contribution(s)
-was submitted. If You institute patent litigation against any entity
-(including a cross-claim or counterclaim in a lawsuit) alleging that the
-Work or a Contribution incorporated within the Work constitutes direct
-or contributory patent infringement, then any patent licenses granted
-to You under this License for that Work shall terminate as of the date
-such litigation is filed.
-
-4. Redistribution.
-You may reproduce and distribute copies of the Work or Derivative Works
-thereof in any medium, with or without modifications, and in Source or
-Object form, provided that You meet the following conditions:
-
- a. You must give any other recipients of the Work or Derivative Works
- a copy of this License; and
-
- b. You must cause any modified files to carry prominent notices stating
- that You changed the files; and
-
- c. You must retain, in the Source form of any Derivative Works that
- You distribute, all copyright, patent, trademark, and attribution
- notices from the Source form of the Work, excluding those notices
- that do not pertain to any part of the Derivative Works; and
-
- d. If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one of
- the following places: within a NOTICE text file distributed as part
- of the Derivative Works; within the Source form or documentation,
- if provided along with the Derivative Works; or, within a display
- generated by the Derivative Works, if and wherever such third-party
- notices normally appear. The contents of the NOTICE file are for
- informational purposes only and do not modify the License. You
- may add Your own attribution notices within Derivative Works that
- You distribute, alongside or as an addendum to the NOTICE text
- from the Work, provided that such additional attribution notices
- cannot be construed as modifying the License. You may add Your own
- copyright statement to Your modifications and may provide additional
- or different license terms and conditions for use, reproduction, or
- distribution of Your modifications, or for any such Derivative Works
- as a whole, provided Your use, reproduction, and distribution of the
- Work otherwise complies with the conditions stated in this License.
-
-5. Submission of Contributions.
-Unless You explicitly state otherwise, any Contribution intentionally
-submitted for inclusion in the Work by You to the Licensor shall be
-under the terms and conditions of this License, without any additional
-terms or conditions. Notwithstanding the above, nothing herein shall
-supersede or modify the terms of any separate license agreement you may
-have executed with Licensor regarding such Contributions.
-
-6. Trademarks.
-This License does not grant permission to use the trade names, trademarks,
-service marks, or product names of the Licensor, except as required for
-reasonable and customary use in describing the origin of the Work and
-reproducing the content of the NOTICE file.
-
-7. Disclaimer of Warranty.
-Unless required by applicable law or agreed to in writing, Licensor
-provides the Work (and each Contributor provides its Contributions) on
-an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
-express or implied, including, without limitation, any warranties or
-conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR
-A PARTICULAR PURPOSE. You are solely responsible for determining the
-appropriateness of using or redistributing the Work and assume any risks
-associated with Your exercise of permissions under this License.
-
-8. Limitation of Liability.
-In no event and under no legal theory, whether in tort (including
-negligence), contract, or otherwise, unless required by applicable law
-(such as deliberate and grossly negligent acts) or agreed to in writing,
-shall any Contributor be liable to You for damages, including any direct,
-indirect, special, incidental, or consequential damages of any character
-arising as a result of this License or out of the use or inability to
-use the Work (including but not limited to damages for loss of goodwill,
-work stoppage, computer failure or malfunction, or any and all other
-commercial damages or losses), even if such Contributor has been advised
-of the possibility of such damages.
-
-9. Accepting Warranty or Additional Liability.
-While redistributing the Work or Derivative Works thereof, You may
-choose to offer, and charge a fee for, acceptance of support, warranty,
-indemnity, or other liability obligations and/or rights consistent with
-this License. However, in accepting such obligations, You may act only
-on Your own behalf and on Your sole responsibility, not on behalf of
-any other Contributor, and only if You agree to indemnify, defend, and
-hold each Contributor harmless for any liability incurred by, or claims
-asserted against, such Contributor by reason of your accepting any such
-warranty or additional liability.
-
-END OF TERMS AND CONDITIONS
-
-
-
---------------- SECTION 2: Mozilla Public License, V2.0 -----------
-
-Mozilla Public License Version 2.0
-==================================
-
-1. Definitions
---------------
-
-1.1. "Contributor"
- means each individual or legal entity that creates, contributes to
- the creation of, or owns Covered Software.
-
-1.2. "Contributor Version"
- means the combination of the Contributions of others (if any) used
- by a Contributor and that particular Contributor's Contribution.
-
-1.3. "Contribution"
- means Covered Software of a particular Contributor.
-
-1.4. "Covered Software"
- means Source Code Form to which the initial Contributor has attached
- the notice in Exhibit A, the Executable Form of such Source Code
- Form, and Modifications of such Source Code Form, in each case
- including portions thereof.
-
-1.5. "Incompatible With Secondary Licenses"
- means
-
- (a) that the initial Contributor has attached the notice described
- in Exhibit B to the Covered Software; or
-
- (b) that the Covered Software was made available under the terms of
- version 1.1 or earlier of the License, but not also under the
- terms of a Secondary License.
-
-1.6. "Executable Form"
- means any form of the work other than Source Code Form.
-
-1.7. "Larger Work"
- means a work that combines Covered Software with other material, in
- a separate file or files, that is not Covered Software.
-
-1.8. "License"
- means this document.
-
-1.9. "Licensable"
- means having the right to grant, to the maximum extent possible,
- whether at the time of the initial grant or subsequently, any and
- all of the rights conveyed by this License.
-
-1.10. "Modifications"
- means any of the following:
-
- (a) any file in Source Code Form that results from an addition to,
- deletion from, or modification of the contents of Covered
- Software; or
-
- (b) any new file in Source Code Form that contains any Covered
- Software.
-
-1.11. "Patent Claims" of a Contributor
- means any patent claim(s), including without limitation, method,
- process, and apparatus claims, in any patent Licensable by such
- Contributor that would be infringed, but for the grant of the
- License, by the making, using, selling, offering for sale, having
- made, import, or transfer of either its Contributions or its
- Contributor Version.
-
-1.12. "Secondary License"
- means either the GNU General Public License, Version 2.0, the GNU
- Lesser General Public License, Version 2.1, the GNU Affero General
- Public License, Version 3.0, or any later versions of those
- licenses.
-
-1.13. "Source Code Form"
- means the form of the work preferred for making modifications.
-
-1.14. "You" (or "Your")
- means an individual or a legal entity exercising rights under this
- License. For legal entities, "You" includes any entity that
- controls, is controlled by, or is under common control with You. For
- purposes of this definition, "control" means (a) the power, direct
- or indirect, to cause the direction or management of such entity,
- whether by contract or otherwise, or (b) ownership of more than
- fifty percent (50%) of the outstanding shares or beneficial
- ownership of such entity.
-
-2. License Grants and Conditions
---------------------------------
-
-2.1. Grants
-
-Each Contributor hereby grants You a world-wide, royalty-free,
-non-exclusive license:
-
-(a) under intellectual property rights (other than patent or trademark)
- Licensable by such Contributor to use, reproduce, make available,
- modify, display, perform, distribute, and otherwise exploit its
- Contributions, either on an unmodified basis, with Modifications, or
- as part of a Larger Work; and
-
-(b) under Patent Claims of such Contributor to make, use, sell, offer
- for sale, have made, import, and otherwise transfer either its
- Contributions or its Contributor Version.
-
-2.2. Effective Date
-
-The licenses granted in Section 2.1 with respect to any Contribution
-become effective for each Contribution on the date the Contributor first
-distributes such Contribution.
-
-2.3. Limitations on Grant Scope
-
-The licenses granted in this Section 2 are the only rights granted under
-this License. No additional rights or licenses will be implied from the
-distribution or licensing of Covered Software under this License.
-Notwithstanding Section 2.1(b) above, no patent license is granted by a
-Contributor:
-
-(a) for any code that a Contributor has removed from Covered Software;
- or
-
-(b) for infringements caused by: (i) Your and any other third party's
- modifications of Covered Software, or (ii) the combination of its
- Contributions with other software (except as part of its Contributor
- Version); or
-
-(c) under Patent Claims infringed by Covered Software in the absence of
- its Contributions.
-
-This License does not grant any rights in the trademarks, service marks,
-or logos of any Contributor (except as may be necessary to comply with
-the notice requirements in Section 3.4).
-
-2.4. Subsequent Licenses
-
-No Contributor makes additional grants as a result of Your choice to
-distribute the Covered Software under a subsequent version of this
-License (see Section 10.2) or under the terms of a Secondary License (if
-permitted under the terms of Section 3.3).
-
-2.5. Representation
-
-Each Contributor represents that the Contributor believes its
-Contributions are its original creation(s) or it has sufficient rights
-to grant the rights to its Contributions conveyed by this License.
-
-2.6. Fair Use
-
-This License is not intended to limit any rights You have under
-applicable copyright doctrines of fair use, fair dealing, or other
-equivalents.
-
-2.7. Conditions
-
-Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
-in Section 2.1.
-
-3. Responsibilities
--------------------
-
-3.1. Distribution of Source Form
-
-All distribution of Covered Software in Source Code Form, including any
-Modifications that You create or to which You contribute, must be under
-the terms of this License. You must inform recipients that the Source
-Code Form of the Covered Software is governed by the terms of this
-License, and how they can obtain a copy of this License. You may not
-attempt to alter or restrict the recipients' rights in the Source Code
-Form.
-
-3.2. Distribution of Executable Form
-
-If You distribute Covered Software in Executable Form then:
-
-(a) such Covered Software must also be made available in Source Code
- Form, as described in Section 3.1, and You must inform recipients of
- the Executable Form how they can obtain a copy of such Source Code
- Form by reasonable means in a timely manner, at a charge no more
- than the cost of distribution to the recipient; and
-
-(b) You may distribute such Executable Form under the terms of this
- License, or sublicense it under different terms, provided that the
- license for the Executable Form does not attempt to limit or alter
- the recipients' rights in the Source Code Form under this License.
-
-3.3. Distribution of a Larger Work
-
-You may create and distribute a Larger Work under terms of Your choice,
-provided that You also comply with the requirements of this License for
-the Covered Software. If the Larger Work is a combination of Covered
-Software with a work governed by one or more Secondary Licenses, and the
-Covered Software is not Incompatible With Secondary Licenses, this
-License permits You to additionally distribute such Covered Software
-under the terms of such Secondary License(s), so that the recipient of
-the Larger Work may, at their option, further distribute the Covered
-Software under the terms of either this License or such Secondary
-License(s).
-
-3.4. Notices
-
-You may not remove or alter the substance of any license notices
-(including copyright notices, patent notices, disclaimers of warranty,
-or limitations of liability) contained within the Source Code Form of
-the Covered Software, except that You may alter any license notices to
-the extent required to remedy known factual inaccuracies.
-
-3.5. Application of Additional Terms
-
-You may choose to offer, and to charge a fee for, warranty, support,
-indemnity or liability obligations to one or more recipients of Covered
-Software. However, You may do so only on Your own behalf, and not on
-behalf of any Contributor. You must make it absolutely clear that any
-such warranty, support, indemnity, or liability obligation is offered by
-You alone, and You hereby agree to indemnify every Contributor for any
-liability incurred by such Contributor as a result of warranty, support,
-indemnity or liability terms You offer. You may include additional
-disclaimers of warranty and limitations of liability specific to any
-jurisdiction.
-
-4. Inability to Comply Due to Statute or Regulation
----------------------------------------------------
-
-If it is impossible for You to comply with any of the terms of this
-License with respect to some or all of the Covered Software due to
-statute, judicial order, or regulation then You must: (a) comply with
-the terms of this License to the maximum extent possible; and (b)
-describe the limitations and the code they affect. Such description must
-be placed in a text file included with all distributions of the Covered
-Software under this License. Except to the extent prohibited by statute
-or regulation, such description must be sufficiently detailed for a
-recipient of ordinary skill to be able to understand it.
-
-5. Termination
---------------
-
-5.1. The rights granted under this License will terminate automatically
-if You fail to comply with any of its terms. However, if You become
-compliant, then the rights granted under this License from a particular
-Contributor are reinstated (a) provisionally, unless and until such
-Contributor explicitly and finally terminates Your grants, and (b) on an
-ongoing basis, if such Contributor fails to notify You of the
-non-compliance by some reasonable means prior to 60 days after You have
-come back into compliance. Moreover, Your grants from a particular
-Contributor are reinstated on an ongoing basis if such Contributor
-notifies You of the non-compliance by some reasonable means, this is the
-first time You have received notice of non-compliance with this License
-from such Contributor, and You become compliant prior to 30 days after
-Your receipt of the notice.
-
-5.2. If You initiate litigation against any entity by asserting a patent
-infringement claim (excluding declaratory judgment actions,
-counter-claims, and cross-claims) alleging that a Contributor Version
-directly or indirectly infringes any patent, then the rights granted to
-You by any and all Contributors for the Covered Software under Section
-2.1 of this License shall terminate.
-
-5.3. In the event of termination under Sections 5.1 or 5.2 above, all
-end user license agreements (excluding distributors and resellers) which
-have been validly granted by You or Your distributors under this License
-prior to termination shall survive termination.
-
-************************************************************************
-* *
-* 6. Disclaimer of Warranty *
-* ------------------------- *
-* *
-* Covered Software is provided under this License on an "as is" *
-* basis, without warranty of any kind, either expressed, implied, or *
-* statutory, including, without limitation, warranties that the *
-* Covered Software is free of defects, merchantable, fit for a *
-* particular purpose or non-infringing. The entire risk as to the *
-* quality and performance of the Covered Software is with You. *
-* Should any Covered Software prove defective in any respect, You *
-* (not any Contributor) assume the cost of any necessary servicing, *
-* repair, or correction. This disclaimer of warranty constitutes an *
-* essential part of this License. No use of any Covered Software is *
-* authorized under this License except under this disclaimer. *
-* *
-************************************************************************
-
-************************************************************************
-* *
-* 7. Limitation of Liability *
-* -------------------------- *
-* *
-* Under no circumstances and under no legal theory, whether tort *
-* (including negligence), contract, or otherwise, shall any *
-* Contributor, or anyone who distributes Covered Software as *
-* permitted above, be liable to You for any direct, indirect, *
-* special, incidental, or consequential damages of any character *
-* including, without limitation, damages for lost profits, loss of *
-* goodwill, work stoppage, computer failure or malfunction, or any *
-* and all other commercial damages or losses, even if such party *
-* shall have been informed of the possibility of such damages. This *
-* limitation of liability shall not apply to liability for death or *
-* personal injury resulting from such party's negligence to the *
-* extent applicable law prohibits such limitation. Some *
-* jurisdictions do not allow the exclusion or limitation of *
-* incidental or consequential damages, so this exclusion and *
-* limitation may not apply to You. *
-* *
-************************************************************************
-
-8. Litigation
--------------
-
-Any litigation relating to this License may be brought only in the
-courts of a jurisdiction where the defendant maintains its principal
-place of business and such litigation shall be governed by laws of that
-jurisdiction, without reference to its conflict-of-law provisions.
-Nothing in this Section shall prevent a party's ability to bring
-cross-claims or counter-claims.
-
-9. Miscellaneous
-----------------
-
-This License represents the complete agreement concerning the subject
-matter hereof. If any provision of this License is held to be
-unenforceable, such provision shall be reformed only to the extent
-necessary to make it enforceable. Any law or regulation which provides
-that the language of a contract shall be construed against the drafter
-shall not be used to construe this License against a Contributor.
-
-10. Versions of the License
----------------------------
-
-10.1. New Versions
-
-Mozilla Foundation is the license steward. Except as provided in Section
-10.3, no one other than the license steward has the right to modify or
-publish new versions of this License. Each version will be given a
-distinguishing version number.
-
-10.2. Effect of New Versions
-
-You may distribute the Covered Software under the terms of the version
-of the License under which You originally received the Covered Software,
-or under the terms of any subsequent version published by the license
-steward.
-
-10.3. Modified Versions
-
-If you create software not governed by this License, and you want to
-create a new license for such software, you may create and use a
-modified version of this License if you rename the license and remove
-any references to the name of the license steward (except to note that
-such modified license differs from this License).
-
-10.4. Distributing Source Code Form that is Incompatible With Secondary
-Licenses
-
-If You choose to distribute Source Code Form that is Incompatible With
-Secondary Licenses under the terms of this version of the License, the
-notice described in Exhibit B of this License must be attached.
-
-Exhibit A - Source Code Form License Notice
--------------------------------------------
-
- This Source Code Form is subject to the terms of the Mozilla Public
- License, v. 2.0. If a copy of the MPL was not distributed with this
- file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-If it is not possible or desirable to put the notice in a particular
-file, then You may include the notice in a location (such as a LICENSE
-file in a relevant directory) where a recipient would be likely to look
-for such a notice.
-
-Copyright (c) 2007-2020 VMware, Inc. or its affiliates.
-
-
-
-===========================================================================
-
-To the extent any open source components are licensed under the
-GPL and/or LGPL, or other similar licenses that require the
-source code and/or modifications to source code to be made
-available (as would be noted above), you may obtain a copy of
-the source code corresponding to the binaries for such open
-source components and modifications thereto, if any, (the
-"Source Files"), by downloading the Source Files from Pivotal's website at
-https://tanzu.vmware.com/open-source, or by sending a request,
-with your name and address to: VMware, Inc., 3401 Hillview Ave,
-Palo Alto, CA 94304, Attention: General Counsel. All such requests should
-clearly specify: OPEN SOURCE FILES REQUEST, Attention General Counsel.
-VMware shall mail a copy of the Source Files to you on a CD or equivalent physical medium.
-This offer to obtain a copy of the Source Files is valid for three
-years from the date you acquired this Software product.
-Alternatively, the Source Files may accompany the VMware product.
-
-
-[RABBITJMS146GASS110315] \ No newline at end of file
diff --git a/deps/rabbitmq_jms_topic_exchange/Makefile b/deps/rabbitmq_jms_topic_exchange/Makefile
index c5b45c1b3c..c5d4a51afd 100644
--- a/deps/rabbitmq_jms_topic_exchange/Makefile
+++ b/deps/rabbitmq_jms_topic_exchange/Makefile
@@ -13,5 +13,5 @@ DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
ERLANG_MK_COMMIT = rabbitmq-tmp
-include rabbitmq-components.mk
-include erlang.mk
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
diff --git a/deps/rabbitmq_jms_topic_exchange/erlang.mk b/deps/rabbitmq_jms_topic_exchange/erlang.mk
deleted file mode 100644
index fce4be0b0a..0000000000
--- a/deps/rabbitmq_jms_topic_exchange/erlang.mk
+++ /dev/null
@@ -1,7808 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
-ERLANG_MK_WITHOUT =
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
-
-# Adding erlang.mk to keep Erlang scripts that call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
-
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_HIPE)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
-
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simplify writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating Github-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = erlang library for JSON Web Token
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple non intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is a pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's REST interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = redis erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += esh_mk
-pkg_esh_mk_name = esh_mk
-pkg_esh_mk_description = esh template engine plugin for erlang.mk
-pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
-pkg_esh_mk_fetch = git
-pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
-pkg_esh_mk_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = TOML language erlang parser
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = http://fixprotocol.org/ implementation.
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - an Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = The guards and for Erlang parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
-pkg_gun_homepage = http://ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang irc client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding and decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library for efficiently decoding and encoding JSON, written in C.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = erlang driver for rethinkdb
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = Library to handle MIME types
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's HTML parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with a focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = Automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang OAuth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure Erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transformer for using new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with a convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in Erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for Erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile Erlang record definitions into modules to convert them to/from JSON easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between records and proplists
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = Automatic Erlang node discovery via Redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = Pipelined Erlang Redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang REST client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors ASAP
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = Erlang Riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your Erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for the Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in Erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple Erlang OAuth2 client module for any HTTP server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal Slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy as a NIF for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (Erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool: stuff goes in, stuff goes out. There's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing stock exchange quotes in Erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = Unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in Erlang. Supports reverse proxy load balancing and forward proxy with HTTP (including CONNECT), SOCKS4, SOCKS5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an ID generator for message services.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elasticsearch
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX-style parser for broken HTML, in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = Transit format for Erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang Twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU based collation Erlang module
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtension for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = Tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for Graphite logs.
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = A simple Erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler svn repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired by rebar xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based yaml loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = Zab protocol implementation in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang.
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
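
For orientation, the search target above filters $(PACKAGES) by matching the lowercased value of q (via core_lc) against each package's name and description, and prints every match with pkg_print. A rough sketch of how it is driven, using packages from the index above:

# Print the whole package index:
#   make search
# Print only packages whose name or description mentions "yaml"
# (yamerl and yamler would match, for example):
#   make search q=yaml
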
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
-
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
-
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# They both use the core_dep_plugin macro.
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
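
A minimal sketch of how a project hooks into the early-plugin machinery above; the dependency and file names are hypothetical. A bare name pulls in that dependency's early-plugins.mk, while an explicit path pulls in exactly that file, in both cases only once the dependency has been fetched.

# In a project Makefile (illustrative names only):
DEP_EARLY_PLUGINS = my_plugin_dep other_dep/mk/early-hooks.mk

# The foreach above then effectively evaluates to:
#   -include $(DEPS_DIR)/my_plugin_dep/early-plugins.mk
#   -include $(DEPS_DIR)/other_dep/mk/early-hooks.mk
# with each include depending on the corresponding $(DEPS_DIR)/<dep> target.
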
-
-# Query functions.
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
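
To make the query functions concrete: a typical modern dependency specification is a three-word dep_* value, and the helpers above simply pick it apart (cowboy is a real project, but the version shown is only an example).

DEPS += cowboy
dep_cowboy = git https://github.com/ninenines/cowboy 2.9.0

# Resolution through the query functions:
#   $(call query_fetch_method,cowboy)  -> git (word 1)
#   $(call query_repo,cowboy)          -> https://github.com/ninenines/cowboy (word 2)
#   $(call query_version,cowboy)       -> 2.9.0 (word 3, unless dep_cowboy_commit overrides it)
#   $(call query_absolute_path,cowboy) -> $(DEPS_DIR)/cowboy
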
-
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
-
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly we don't want to include it here
-# otherwise it'll be treated both as an app and as a top-level project.

-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
-
-export NO_AUTOPATCH
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
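
This is the usual erlang.mk verbosity switch: the numeric V variable (defined earlier in erlang.mk, defaulting to 0) selects one of the dep_verbose_* prefixes, and the same pattern repeats for the other *_verbose variables further below. Roughly:

# make       : V=0, each fetch prints a one-line " DEP <name> (<commit>)" summary
# make V=1   : no prefix is defined, so make echoes the raw recipe commands
# make V=2   : "set -x;" is prepended and the shell traces every command as well
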
-
-# Optimization: don't recompile deps unless truly necessary.
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create ebin directory for all apps to make sure Erlang recognizes them
-# as proper OTP applications when using -include_lib. This is a temporary
-# fix; a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
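
A small sketch of the LOCAL_DEPS behaviour described in the comment above; the application names are hypothetical.

# Top-level Makefile of an umbrella project with apps/app_a, apps/app_b and apps/app_c:
LOCAL_DEPS = app_a app_b

# With LOCAL_DEPS set, only apps/app_a and apps/app_b are compiled at the top level;
# without it, every directory under $(APPS_DIR) is compiled.
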
-
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies once they have been compiled.
-# A developer working on the top-level project and some of its
-# dependencies at the same time may want to change this behavior.
-# There are two solutions:
-# 1. Set `FULL=1` so that all dependencies are visited and
-# recursively recompiled if necessary.
-# 2. Set `FORCE_REBUILD=` to the specific list of dependencies that
-# should be recompiled (instead of the whole set).
-
-FORCE_REBUILD ?=
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
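
Concretely, the two options described above look like this; the dependency names are only examples.

# Option 1: visit every dependency and recompile it if necessary, on each build:
#   make FULL=1

# Option 2: in the project Makefile, always rebuild just a chosen subset,
# bypassing the ebin/dep_built shortcut for those dependencies only:
FORCE_REBUILD = cowboy ranch
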
-
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
-
-# Deps related targets.
-
-# @todo rename GNUmakefile and makefile to Makefile first, if they exist
-# While the Makefile could also be named GNUmakefile or makefile,
-# in practice only Makefile has been needed so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
-# if given. Do it for all 3 possible Makefile file names.
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
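
In effect, the sed call above points a fetched dependency at the parent project's copy of Erlang.mk. For a hypothetical dependency the patched line changes roughly as follows:

# $(DEPS_DIR)/some_dep/Makefile, before autopatching:
include erlang.mk

# after autopatching:
include $(if $(ERLANG_MK_FILENAME),$(ERLANG_MK_FILENAME),erlang.mk)
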
-
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-# Hex only has a package version. No need to look in the Erlang.mk packages.
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
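
A hex dependency is therefore declared with the version as word 2 and, optionally, the hex package name as word 3 when it differs from the application name; the packages and versions below are only illustrative.

DEPS += jsx
dep_jsx = hex 3.1.0

# Application name differing from the hex package name (hypothetical names):
DEPS += my_app
dep_my_app = hex 1.0.0 actual_hex_package
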
-
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility with older Erlang.mk configurations.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
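
The deprecated form handled here has no fetch-method word at all: word 1 is the repository and word 2 the commit, branch or tag (defaulting to master when absent). A sketch, with placeholder names:

# Old two-word form still accepted by dep_fetch_legacy:
dep_my_dep = https://github.com/example/my_dep some_branch

# Modern equivalent:
#   dep_my_dep = git https://github.com/example/my_dep some_branch
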
-
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
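
All of these are ?= defaults, so a project Makefile can override them. A short sketch that keeps the default warnings and adds the optional ones mentioned in the comment above (module names are hypothetical):

ERLC_OPTS = -Werror +debug_info +warn_export_vars +warn_shadow_vars \
	+warn_obsolete_guard +warn_export_all +warn_missing_spec

# Compile behaviours and parse transforms before the modules that use them:
COMPILE_FIRST = my_behaviour my_parse_transform
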
-
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
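
The values interpolated into app_file come from the project Makefile. A plausible minimal set for a hypothetical project, which would use the second app_file form above because src/my_app_app.erl exists:

PROJECT = my_app
PROJECT_DESCRIPTION = Example application
PROJECT_VERSION = 0.1.0
# src/my_app_app.erl existing selects the variant with {mod, {my_app_app, []}}:
PROJECT_MOD = my_app_app
PROJECT_ENV = [{listen_port, 8080}]
PROJECT_REGISTERED = my_app_server
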
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- Output0 = [
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ],
- Output = case "é" of
- [233] -> unicode:characters_to_binary(Output0);
- _ -> Output0
- end,
- ok = file:write_file("$(1)", Output),
- halt()
-endef
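
The escript above writes the computed build order into $(PROJECT).d, which is included further below. For a hypothetical project the generated file looks roughly like this:

# Generated by Erlang.mk. Edit at your own risk!

src/my_server.erl:: src/my_behaviour.erl include/my_records.hrl; @touch $@
src/my_worker.erl:: src/my_behaviour.erl; @touch $@

COMPILE_FIRST += my_behaviour
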
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
- echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
-
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
- @:
-
-test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
-test_erlc_verbose_2 = set -x;
-test_erlc_verbose = $(test_erlc_verbose_$(V))
-
-define compile_test_erl
- $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(1)
-endef
-
-ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
-$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
- $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want to fail due to
-# warnings when used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
- " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
- " list-templates List available templates"
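
For example, with hypothetical names, the targets listed above are used as follows (the tpl_* templates they draw from are defined right below):

#   make bootstrap                                # full OTP application skeleton
#   make bootstrap-lib                            # OTP library skeleton
#   make new t=gen_server n=my_worker             # new module from the tpl_gen_server template
#   make new t=gen_server n=my_worker in=my_app   # same, but inside the local application my_app
#   make list-templates                           # list the available t= values
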
-
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
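
As the comment above notes, the actual include line is appended by the bootstrap step itself, so a freshly bootstrapped top-level Makefile ends up roughly as (the project name is a placeholder):

PROJECT = my_app
PROJECT_DESCRIPTION = New project
PROJECT_VERSION = 0.1.0

include erlang.mk
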
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
-
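The SP/WS logic above selects the whitespace used when rendering templates; a usage sketch (the value 4 is arbitrary):

    make bootstrap          # generated files are indented with tabs (the default)
    make bootstrap SP=4     # generated files are indented with 4 spaces; SP=4 is also recorded in the new Makefile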
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
-
-list-templates:
- $(verbose) @echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
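A configuration sketch based on the variables above (the output name is illustrative): with sources in c_src/ and no c_src/Makefile, the plugin links them into $(C_SRC_OUTPUT_FILE), and C_SRC_TYPE selects the kind of artifact:

    C_SRC_TYPE = executable                 # a port program in priv/ instead of a shared NIF library
    C_SRC_OUTPUT = $(CURDIR)/priv/my_port   # illustrative; the platform extension chosen above is appended automatically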
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code, and neither does the "gcc" MSYS2 package.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
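A usage sketch of the new-nif target (the NIF name is illustrative):

    make new-nif n=my_nif            # renders c_src/my_nif.c and src/my_nif.erl from the templates above
    make new-nif n=my_nif in=my_app  # same, but delegated to $(APPS_DIR)/my_app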
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
- "The CI_OTP variable must be defined with the Erlang versions" \
-	"to be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
-
-# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifdef CONCUERROR_TESTS
-
-.PHONY: concuerror distclean-concuerror
-
-# Configuration
-
-CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
-CONCUERROR_OPTS ?=
-
-# Core targets.
-
-check:: concuerror
-
-ifndef KEEP_LOGS
-distclean:: distclean-concuerror
-endif
-
-# Plugin-specific targets.
-
-$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
- $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
- $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
-
-$(CONCUERROR_LOGS_DIR):
- $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
-
-define concuerror_html_report
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="utf-8">
-<title>Concuerror HTML report</title>
-</head>
-<body>
-<h1>Concuerror HTML report</h1>
-<p>Generated on $(concuerror_date)</p>
-<ul>
-$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
-</ul>
-</body>
-</html>
-endef
-
-concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
- $(eval concuerror_date := $(shell date))
- $(eval concuerror_targets := $^)
- $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
-
-define concuerror_target
-.PHONY: concuerror-$1-$2
-
-concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
- $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
- --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
- -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
- $$(CONCUERROR_OPTS) -m $1 -t $2
-endef
-
-$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
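A usage sketch of the targets generated above (module and test names are illustrative): CONCUERROR_TESTS is a list of module:test pairs, and each pair gets its own log file plus an entry in the HTML report:

    CONCUERROR_TESTS = my_module:my_test other_module:another_test
    make concuerror     # writes one logs/concuerror-<module>-<test>.txt per pair plus logs/concuerror.html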
-
-distclean-concuerror:
- $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
-
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ct apps-ct distclean-ct
-
-# Configuration.
-
-CT_OPTS ?=
-
-ifneq ($(wildcard $(TEST_DIR)),)
-ifndef CT_SUITES
-CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
-endif
-endif
-CT_SUITES ?=
-CT_LOGS_DIR ?= $(CURDIR)/logs
-
-# Core targets.
-
-tests:: ct
-
-ifndef KEEP_LOGS
-distclean:: distclean-ct
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Common_test targets:" \
- " ct Run all the common_test suites for this project" \
- "" \
- "All your common_test suites have their associated targets." \
-	"A suite named http_SUITE can be run using the ct-http target."
-
-# Plugin-specific targets.
-
-CT_RUN = ct_run \
- -no_auto_compile \
- -noinput \
- -pa $(CURDIR)/ebin $(TEST_DIR) \
- -dir $(TEST_DIR) \
- -logdir $(CT_LOGS_DIR)
-
-ifeq ($(CT_SUITES),)
-ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
-else
-# We do not run tests if we are in an apps/* directory that has no test directory.
-ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
-ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
-endif
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-define ct_app_target
-apps-ct-$1: test-build
- $$(MAKE) -C $1 ct IS_APP=1
-endef
-
-$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
-
-apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
-endif
-
-ifdef t
-ifeq (,$(findstring :,$t))
-CT_EXTRA = -group $t
-else
-t_words = $(subst :, ,$t)
-CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
-endif
-else
-ifdef c
-CT_EXTRA = -case $c
-else
-CT_EXTRA =
-endif
-endif
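For example (the group and case names are illustrative), the t and c variables above translate into common_test group/case selection for the per-suite targets defined next:

    make ct-http                      # run http_SUITE in full
    make ct-http t=my_group           # only the group my_group
    make ct-http t=my_group:my_case   # a single case inside that group
    make ct-http c=my_case            # a single case, without naming a group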
-
-define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
-endef
-
-$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
-
-distclean-ct:
- $(gen_verbose) rm -rf $(CT_LOGS_DIR)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: plt distclean-plt dialyze
-
-# Configuration.
-
-DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
-export DIALYZER_PLT
-
-PLT_APPS ?=
-DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-DIALYZER_PLT_OPTS ?=
-
-# Core targets.
-
-check:: dialyze
-
-distclean:: distclean-plt
-
-help::
- $(verbose) printf "%s\n" "" \
- "Dialyzer targets:" \
- " plt Build a PLT file for this project" \
- " dialyze Analyze the project using Dialyzer"
-
-# Plugin-specific targets.
-
-define filter_opts.erl
- Opts = init:get_plain_arguments(),
- {Filtered, _} = lists:foldl(fun
- (O, {Os, true}) -> {[O|Os], false};
- (O = "-D", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-I", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-pa", {Os, _}) -> {[O|Os], true};
- (_, Acc) -> Acc
- end, {[], false}, Opts),
- io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
- halt().
-endef
-
-# DIALYZER_PLT is a variable understood directly by Dialyzer.
-#
-# We append the path to erts at the end of the PLT. This works
-# because the PLT file is in the external term format and the
-# function binary_to_term/1 ignores any trailing data.
-$(DIALYZER_PLT): deps app
- $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
- while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
- $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
- erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
- $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
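As a sketch of the staleness check performed by the dialyze target below (the project name and path are illustrative), the last line of the PLT is compared against the current erts location:

    tail -n1 .my_app.plt     # erts lib dir recorded when the PLT was built, e.g. /usr/lib/erlang/lib/erts-12.2
    erl -noshell -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().'   # erts lib dir of the current Erlang/OTP

When the two differ, the PLT is removed and rebuilt.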
-
-plt: $(DIALYZER_PLT)
-
-distclean-plt:
- $(gen_verbose) rm -f $(DIALYZER_PLT)
-
-ifneq ($(wildcard $(DIALYZER_PLT)),)
-dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
- $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
- grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
- rm $(DIALYZER_PLT); \
- $(MAKE) plt; \
- fi
-else
-dialyze: $(DIALYZER_PLT)
-endif
- $(verbose) dialyzer --no_native `$(ERL) \
- -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
- -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
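To illustrate the module naming performed above (template names are hypothetical), with the default DTL_PREFIX/DTL_SUFFIX:

    templates/hello.dtl          ->  hello_dtl         ->  ebin/hello_dtl.beam
    templates/mail/welcome.dtl   ->  welcome_dtl by default, or mail_welcome_dtl when DTL_FULL_PATH is set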
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
- " escript Build an executable escript archive" \
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
- $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: eunit apps-eunit
-
-# Configuration
-
-EUNIT_OPTS ?=
-EUNIT_ERL_OPTS ?=
-
-# Core targets.
-
-tests:: eunit
-
-help::
- $(verbose) printf "%s\n" "" \
- "EUnit targets:" \
- " eunit Run all the EUnit tests for this project"
-
-# Plugin-specific targets.
-
-define eunit.erl
- $(call cover.erl)
- CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
- ok -> ok;
- error -> halt(2)
- end,
- CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
- halt()
-endef
-
-EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
-else
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
-endif
-else
-EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
-EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
-
-EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
- $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
-
-eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
-ifneq ($(wildcard src/ $(TEST_DIR)),)
- $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-apps-eunit: test-build
- $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
- [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
- exit $$eunit_retcode
-endif
-endif
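A usage sketch of the t variable handling above (module and test names are illustrative):

    make eunit                       # all EUnit tests (ebin modules plus the TEST_DIR modules)
    make eunit t=my_module           # the EUnit tests of a single module
    make eunit t=my_module:my_test   # a single zero-arity test function, run as fun my_module:my_test/0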
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper)
-.PHONY: proper
-
-# Targets.
-
-tests:: proper
-
-define proper_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- Module = fun(M) ->
- [true] =:= lists:usort([
- case atom_to_list(F) of
- "prop_" ++ _ ->
- io:format("Testing ~p:~p/0~n", [M, F]),
- proper:quickcheck(M:F(), nocolors);
- _ ->
- true
- end
- || {F, 0} <- M:module_info(exports)])
- end,
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]);
- module -> Module($(2));
- function -> proper:quickcheck($(2), nocolors)
- end,
- CoverExport("$(COVER_DATA_DIR)/proper.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-proper: test-build cover-data-dir
- $(verbose) $(call erlang,$(call proper_check.erl,module,$(t)))
-else
-proper: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)()))
-endif
-else
-proper: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES)))
-endif
-endif
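A usage sketch of the proper plugin above (module and property names are illustrative):

    make proper                              # run every exported prop_* property of every compiled module
    make proper t=my_prop_module             # run all properties of one module
    make proper t=my_prop_module:prop_foo    # run a single property via proper:quickcheck(my_prop_module:prop_foo())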
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Verbosity.
-
-proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
-proto_verbose = $(proto_verbose_$(V))
-
-# Core targets.
-
-ifneq ($(wildcard src/),)
-ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
-PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
-ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
-
-ifeq ($(PROTO_FILES),)
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
- $(verbose) :
-else
-# Rebuild proto files when the Makefile changes.
-# We exclude $(PROJECT).d to avoid a circular dependency.
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(PROTO_FILES); \
- fi
- $(verbose) touch $@
-
-$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
-endif
-
-ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
-define compile_proto.erl
- [begin
- protobuffs_compile:generate_source(F, [
- {output_include_dir, "./include"},
- {output_src_dir, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-else
-define compile_proto.erl
- [begin
- gpb_compile:file(F, [
- {include_as_lib, true},
- {module_name_suffix, "_pb"},
- {o_hrl, "./include"},
- {o_erl, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-endif
-
-ifneq ($(PROTO_FILES),)
-$(PROJECT).d:: $(PROTO_FILES)
- $(verbose) mkdir -p ebin/ include/
- $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
-endif
-endif
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
-
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
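A usage sketch of the run target and the RELOAD hook above:

    make run           # build the project and the release, then start it (console mode with the extended start script)
    make rel RELOAD=1  # rebuild the release, ping the running node and hot-load changed modules via c:lm()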
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
- " shell Run an erlang shell with SHELL_OPTS or reasonable default"
-
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
-
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
-	"ReST sources and the 'conf.py' file are expected in the directory pointed to by" \
-	"SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists the formats to build (only" \
-	"the 'html' format is generated by default); the target directory can be specified by" \
- 'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
- "Additional Sphinx options can be set in SPHINX_OPTS."
-
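A configuration sketch for the variables described above (the output path is illustrative):

    SPHINX_FORMATS = html man     # build both the HTML and the man pages
    sphinx_man_output = doc/man   # override the default output directory (which is the format name)
    make sphinx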
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
-
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
-.PHONY: triq
-
-# Targets.
-
-tests:: triq
-
-define triq_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
- module -> triq:check($(2));
- function -> triq:check($(2))
- end,
- CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-triq: test-build cover-data-dir
- $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
-else
-triq: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
-endif
-else
-triq: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
- ' xref Run Xrefr using $$XREF_CONFIG as config file if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-COVER_REPORT_DIR ?= cover
-COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
-
-ifdef COVER
-COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
-COVER_DEPS ?=
-endif
-
-# Code coverage for Common Test.
-
-ifdef COVER
-ifdef CT_RUN
-ifneq ($(wildcard $(TEST_DIR)),)
-test-build:: $(TEST_DIR)/ct.cover.spec
-
-$(TEST_DIR)/ct.cover.spec: cover-data-dir
- $(gen_verbose) printf "%s\n" \
- "{incl_app, '$(PROJECT)', details}." \
- "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
- $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
- $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
-
-CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
-endif
-endif
-endif
-
-# Code coverage for other tools.
-
-ifdef COVER
-define cover.erl
- CoverSetup = fun() ->
- Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
- $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
- $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
- end
- end || Dir <- Dirs]
- end,
- CoverExport = fun(Filename) -> cover:export(Filename) end,
-endef
-else
-define cover.erl
- CoverSetup = fun() -> ok end,
- CoverExport = fun(_) -> ok end,
-endef
-endif
-
-# Core targets
-
-ifdef COVER
-ifneq ($(COVER_REPORT_DIR),)
-tests::
- $(verbose) $(MAKE) --no-print-directory cover-report
-endif
-
-cover-data-dir: | $(COVER_DATA_DIR)
-
-$(COVER_DATA_DIR):
- $(verbose) mkdir -p $(COVER_DATA_DIR)
-else
-cover-data-dir:
-endif
-
-clean:: coverdata-clean
-
-ifneq ($(COVER_REPORT_DIR),)
-distclean:: cover-report-clean
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Cover targets:" \
-	"  cover-report  Generate an HTML coverage report from previously collected" \
- " cover data." \
- " all.coverdata Merge all coverdata files into all.coverdata." \
- "" \
- "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
-	"target tests additionally generates an HTML coverage report from the combined" \
- "coverdata files from each of these testing tools. HTML reports can be disabled" \
- "by setting COVER_REPORT_DIR to empty."
-
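A usage sketch of the coverage workflow described above:

    make tests COVER=1     # run the test suites with coverage and build $(COVER_REPORT_DIR)/index.html
    make all.coverdata     # merge the collected *.coverdata files into all.coverdata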
-# Plugin specific targets
-
-COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
-
-.PHONY: coverdata-clean
-coverdata-clean:
- $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
-
-# Merge all coverdata files into one.
-define cover_export.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
-endef
-
-all.coverdata: $(COVERDATA) cover-data-dir
- $(gen_verbose) $(call erlang,$(cover_export.erl))
-
-# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
-# empty if you want the coverdata files but not the HTML report.
-ifneq ($(COVER_REPORT_DIR),)
-
-.PHONY: cover-report-clean cover-report
-
-cover-report-clean:
- $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
-ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
- $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
-endif
-
-ifeq ($(COVERDATA),)
-cover-report:
-else
-
-# Modules that include eunit.hrl always contain one line without coverage
-# because eunit defines test/0, which is never called. We compensate for this.
-EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
- grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
- | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
-
-define cover_report.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- Ms = cover:imported_modules(),
- [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
- ++ ".COVER.html", [html]) || M <- Ms],
- Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
- EunitHrlMods = [$(EUNIT_HRL_MODS)],
- Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
- true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
- TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
- TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
- Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
- TotalPerc = Perc(TotalY, TotalN),
- {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
- io:format(F, "<!DOCTYPE html><html>~n"
- "<head><meta charset=\"UTF-8\">~n"
- "<title>Coverage report</title></head>~n"
- "<body>~n", []),
- io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
- io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
- [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
- "<td>~p%</td></tr>~n",
- [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
- How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
- Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
- io:format(F, "</table>~n"
- "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
- "</body></html>", [How, Date]),
- halt().
-endef
-
-cover-report:
- $(verbose) mkdir -p $(COVER_REPORT_DIR)
- $(gen_verbose) $(call erlang,$(cover_report.erl))
-
-endif
-endif # ifneq ($(COVER_REPORT_DIR),)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
-
-endif
-endif
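A usage sketch of the self-extracting archive target above (the release name is illustrative):

    make rel SFX=1          # also produces _rel/<release name>.run from the relx tarball
    ./_rel/my_release.run   # unpacks itself into a temporary directory and starts the release console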
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
-
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
-
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are always included,
-# no matter which type of dependencies is requested.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
-
-# Allow fetch-deps to be combined with $(DEP_TYPES) to fetch multiple types
-# of dependencies with a single target.
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
-
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
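A usage sketch of the query targets defined above:

    make query-deps                       # prints one "project: name fetch_method repo version" line per dependency
    make query-deps QUERY="name version"  # restrict the reported fields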
diff --git a/deps/rabbitmq_jms_topic_exchange/rabbitmq-components.mk b/deps/rabbitmq_jms_topic_exchange/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/rabbitmq_jms_topic_exchange/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Define the default goal as `all` because this file defines some targets
-# before erlang.mk is included, which would otherwise cause the wrong
-# target to become the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
-
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to check out branches which match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch, or fall back to `stable` or `master`, whichever was the
-# base of the topic branch.
-
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
-
-# Third-party dependencies version pinning.
-#
-# We do that in this file, which is copied in all projects, to ensure
-# all projects use the same versions. It avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# Erlang.mk does not rebuild dependencies by default, once they were
-# compiled once, except for those listed in the `$(FORCE_REBUILD)`
-# variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this eases
-# the work on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project
-# repository URL, if it's a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name are replaced by the
-# target's properties:
-# eg. rabbitmq-common is replaced by rabbitmq-codegen
-# eg. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fallback to RabbitMQ
-# upstream which is GitHub.
-
-# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following pattern:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
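For illustration only, with a hypothetical remote URL, the macro rewrites the repository name embedded in that URL:

    # $(call subst_repo_name,rabbitmq-common,rabbitmq-codegen,https://example.com/team/rabbitmq-common.git)
    # expands to https://example.com/team/rabbitmq-codegen.git
    demo_url := $(call subst_repo_name,rabbitmq-common,rabbitmq-codegen,https://example.com/team/rabbitmq-common.git)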
-
-# Macro to replace both the project's name (eg. "rabbit_common") and
-# repository name (eg. "rabbitmq-common") by the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespaces in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
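As a purely illustrative example of how the refs are extracted from one of the `dep_*` definitions above:

    # With dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master,
    # $(call dep_rmq_commits,rabbit) drops the fetch method and repository name (words 1 and 2)
    # and returns the ordered list of refs to try, i.e. the expansion of:
    #   $(current_rmq_ref) $(base_rmq_ref) master
    demo_refs := $(call dep_rmq_commits,rabbit)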
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level's one.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is a coincidence that we found a
-# `rabbitmq-components.mk` in upper directories.
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
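A worked example of the override above, using a purely hypothetical checkout path:

    # CURDIR              = /src/rabbitmq-server/deps/rabbitmq_jms_topic_exchange
    # possible_deps_dir_1 = /src/rabbitmq-server/deps        (its notdir is "deps")
    # /src/rabbitmq-server/rabbitmq-components.mk exists, so:
    #   DEPS_DIR          = /src/rabbitmq-server/deps
    #   DISABLE_DISTCLEAN = 1
    # i.e. the plugin reuses the umbrella's deps directory instead of creating its own.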
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
diff --git a/deps/rabbitmq_jms_topic_exchange/src/rabbit_jms_topic_exchange.erl b/deps/rabbitmq_jms_topic_exchange/src/rabbit_jms_topic_exchange.erl
index f9addb8430..131351d36d 100644
--- a/deps/rabbitmq_jms_topic_exchange/src/rabbit_jms_topic_exchange.erl
+++ b/deps/rabbitmq_jms_topic_exchange/src/rabbit_jms_topic_exchange.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2012-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2012-2021 VMware, Inc. or its affiliates. All rights reserved.
%% -----------------------------------------------------------------------------
%% JMS on Rabbit Selector Exchange plugin
diff --git a/deps/rabbitmq_jms_topic_exchange/src/sjx_evaluator.erl b/deps/rabbitmq_jms_topic_exchange/src/sjx_evaluator.erl
index ec5f574291..19644f3b39 100644
--- a/deps/rabbitmq_jms_topic_exchange/src/sjx_evaluator.erl
+++ b/deps/rabbitmq_jms_topic_exchange/src/sjx_evaluator.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2012-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2012-2021 VMware, Inc. or its affiliates. All rights reserved.
%% -----------------------------------------------------------------------------
%% Derived from works which were:
%% Copyright (c) 2002, 2012 Tim Watson (watson.timothy@gmail.com)
diff --git a/deps/rabbitmq_jms_topic_exchange/test/rjms_topic_selector_SUITE.erl b/deps/rabbitmq_jms_topic_exchange/test/rjms_topic_selector_SUITE.erl
index 808e1b1db6..8765e9e3d0 100644
--- a/deps/rabbitmq_jms_topic_exchange/test/rjms_topic_selector_SUITE.erl
+++ b/deps/rabbitmq_jms_topic_exchange/test/rjms_topic_selector_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2013-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2013-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rjms_topic_selector_SUITE).
diff --git a/deps/rabbitmq_jms_topic_exchange/test/rjms_topic_selector_unit_SUITE.erl b/deps/rabbitmq_jms_topic_exchange/test/rjms_topic_selector_unit_SUITE.erl
index 63d442e973..47123f097a 100644
--- a/deps/rabbitmq_jms_topic_exchange/test/rjms_topic_selector_unit_SUITE.erl
+++ b/deps/rabbitmq_jms_topic_exchange/test/rjms_topic_selector_unit_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2012-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2012-2021 VMware, Inc. or its affiliates. All rights reserved.
%% -----------------------------------------------------------------------------
%% Unit test file for RJMS Topic Selector plugin
diff --git a/deps/rabbitmq_management/.gitignore b/deps/rabbitmq_management/.gitignore
index ab433eaff0..4d41f1368b 100644
--- a/deps/rabbitmq_management/.gitignore
+++ b/deps/rabbitmq_management/.gitignore
@@ -22,14 +22,6 @@ MnesiaCore.*
rabbitmq_management.d
.rabbitmq_management.plt
-# Common Test
-ct_run*
-all_runs.html
-index.html
-ct_default.css
-ct_log_cache
-variables-ct*
-
*.coverdata
test/config_schema_SUITE_data/schema/
diff --git a/deps/rabbitmq_management/BUILD.bazel b/deps/rabbitmq_management/BUILD.bazel
new file mode 100644
index 0000000000..742710002f
--- /dev/null
+++ b/deps/rabbitmq_management/BUILD.bazel
@@ -0,0 +1,203 @@
+load("@bazel-erlang//:bazel_erlang_lib.bzl", "erlc")
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze")
+load(
+ "//:rabbitmq.bzl",
+ "APP_VERSION",
+ "RABBITMQ_DIALYZER_OPTS",
+ "assert_suites",
+ "broker_for_integration_suites",
+ "rabbitmq_integration_suite",
+ "rabbitmq_lib",
+ "rabbitmq_suite",
+)
+
+APP_NAME = "rabbitmq_management"
+
+APP_DESCRIPTION = "RabbitMQ Management Console"
+
+APP_MODULE = "rabbit_mgmt_app"
+
+EXTRA_APPS = [
+ "mnesia",
+ "ranch",
+ "ssl",
+ "crypto",
+ "public_key",
+]
+
+APP_ENV = """[
+ {http_log_dir, none},
+ {load_definitions, none},
+ {management_db_cache_multiplier, 5},
+ {process_stats_gc_timeout, 300000},
+ {stats_event_max_backlog, 250},
+
+ {cors_allow_origins, []},
+ {cors_max_age, 1800},
+ {content_security_policy, "script-src 'self' 'unsafe-eval' 'unsafe-inline'; object-src 'self'"}
+ ]"""
+
+FIRST_SRCS = [
+ "src/rabbit_mgmt_extension.erl",
+]
+
+DEPS = [
+ "//deps/amqp_client:bazel_erlang_lib",
+ "//deps/rabbit_common:bazel_erlang_lib",
+ "//deps/rabbit:bazel_erlang_lib",
+ "//deps/rabbitmq_management_agent:bazel_erlang_lib",
+]
+
+RUNTIME_DEPS = [
+ "//deps/rabbitmq_web_dispatch:bazel_erlang_lib",
+ "@cowboy//:bazel_erlang_lib",
+ "@cowlib//:bazel_erlang_lib",
+]
+
+genrule(
+ name = "rabbitmqadmin",
+ srcs = ["bin/rabbitmqadmin"],
+ outs = ["priv/www/cli/rabbitmqadmin"],
+ cmd = """set -euxo pipefail
+
+sed 's/%%VSN%%/{}/' $< > $@
+""".format(APP_VERSION),
+)
+
+rabbitmq_lib(
+ app_description = APP_DESCRIPTION,
+ app_env = APP_ENV,
+ app_module = APP_MODULE,
+ app_name = APP_NAME,
+ extra_apps = EXTRA_APPS,
+ extra_priv = [":rabbitmqadmin"],
+ first_srcs = FIRST_SRCS,
+ runtime_deps = RUNTIME_DEPS,
+ deps = DEPS,
+)
+
+xref(tags = ["xref"])
+
+dialyze(
+ dialyzer_opts = RABBITMQ_DIALYZER_OPTS,
+ plt = "//:base_plt",
+ tags = ["dialyze"],
+)
+
+broker_for_integration_suites()
+
+erlc(
+ name = "rabbit_mgmt_runtime_parameters_util",
+ testonly = True,
+ srcs = [
+ "test/rabbit_mgmt_runtime_parameters_util.erl",
+ ],
+ dest = "test",
+ deps = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+ ],
+)
+
+PACKAGE = "deps/rabbitmq_management"
+
+suites = [
+ rabbitmq_suite(
+ name = "cache_SUITE",
+ size = "small",
+ runtime_deps = [
+ "//deps/rabbitmq_ct_helpers:bazel_erlang_lib",
+ ],
+ deps = [
+ "@proper//:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "clustering_prop_SUITE",
+ size = "large",
+ deps = [
+ "//deps/rabbitmq_management_agent:bazel_erlang_lib",
+ "@proper//:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "clustering_SUITE",
+ flaky = True,
+ deps = [
+ "//deps/rabbitmq_management_agent:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "config_schema_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_suite(
+ name = "listener_config_SUITE",
+ size = "small",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "rabbit_mgmt_http_health_checks_SUITE",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "rabbit_mgmt_http_SUITE",
+ size = "large",
+ additional_beam = [
+ ":rabbit_mgmt_runtime_parameters_util",
+ ],
+ shard_count = 3,
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "rabbit_mgmt_only_http_SUITE",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "rabbit_mgmt_rabbitmqadmin_SUITE",
+ additional_beam = [
+ ":rabbit_mgmt_runtime_parameters_util",
+ ],
+ data = [
+ ":bin/rabbitmqadmin",
+ ],
+ ),
+ rabbitmq_suite(
+ name = "rabbit_mgmt_stats_SUITE",
+ size = "small",
+ runtime_deps = [
+ "//deps/rabbitmq_ct_helpers:bazel_erlang_lib",
+ ],
+ deps = [
+ "//deps/rabbitmq_management_agent:bazel_erlang_lib",
+ "@proper//:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "rabbit_mgmt_test_db_SUITE",
+ deps = [
+ "//deps/rabbitmq_management_agent:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_suite(
+ name = "rabbit_mgmt_test_unit_SUITE",
+ size = "small",
+ ),
+ rabbitmq_suite(
+ name = "stats_SUITE",
+ size = "small",
+ deps = [
+ "//deps/rabbitmq_management_agent:bazel_erlang_lib",
+ "@proper//:bazel_erlang_lib",
+ ],
+ ),
+]
+
+assert_suites(
+ suites,
+ glob(["test/**/*_SUITE.erl"]),
+)
diff --git a/deps/rabbitmq_management/Makefile b/deps/rabbitmq_management/Makefile
index 2704f1f5d9..8168111e6a 100644
--- a/deps/rabbitmq_management/Makefile
+++ b/deps/rabbitmq_management/Makefile
@@ -37,8 +37,8 @@ DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
ERLANG_MK_COMMIT = rabbitmq-tmp
-include rabbitmq-components.mk
-include erlang.mk
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
# --------------------------------------------------------------------
# Distribution.
diff --git a/deps/rabbitmq_management/README.md b/deps/rabbitmq_management/README.md
index 8073b31f95..189b9a2b5c 100644
--- a/deps/rabbitmq_management/README.md
+++ b/deps/rabbitmq_management/README.md
@@ -14,7 +14,7 @@ it has to be [enabled](https://www.rabbitmq.com/plugins.html#basics) before it c
## Documentation
* [RabbitMQ management UI documentation](https://www.rabbitmq.com/management.html).
- * [HTTP API documentation](https://www.rabbitmq.com/management.html#http-api) and [reference](https://raw.githack.com/rabbitmq/rabbitmq-management/rabbitmq_v3_6_9/priv/www/api/index.html)
+ * [HTTP API documentation](https://www.rabbitmq.com/management.html#http-api) and [reference](https://rawcdn.githack.com/rabbitmq/rabbitmq-server/master/deps/rabbitmq_management/priv/www/api/index.html)
## Copyright
diff --git a/deps/rabbitmq_management/erlang.mk b/deps/rabbitmq_management/erlang.mk
deleted file mode 100644
index fce4be0b0a..0000000000
--- a/deps/rabbitmq_management/erlang.mk
+++ /dev/null
@@ -1,7808 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
-ERLANG_MK_WITHOUT =
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
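A usage sketch for the V knob defined above (any value other than 0, 2 or 3 leaves the prefixes empty, so commands are simply echoed):

    # make         -> V=0: recipes hidden behind "@", generated files announced as " GEN "
    # make V=1     -> verbose_1 is undefined, so the "@" prefix disappears and commands are echoed
    # make V=2     -> recipes run under "set -x;" for shell tracing
    # make V=3     -> commands are echoed and $(SHELL) itself is invoked with -x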
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
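For illustration, the two helpers above behave as follows on hypothetical inputs:

    # $(call comma_list,foo bar baz)    -> foo,bar,baz
    # $(call escape_dquotes,say "hi")   -> say \"hi\"
    demo_csv := $(call comma_list,foo bar baz)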
-
-# Adding erlang.mk to make Erlang scripts that call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
-
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
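A hedged usage sketch for the version selection above; the version strings are hypothetical:

    # make ERLANG_OTP=22.3       -> kerl builds/installs 22.3 under $(KERL_INSTALL_DIR) on
    #                               first use, then its bin/ directory is prepended to PATH
    # make ERLANG_HIPE=22.3      -> same, but as a "22.3-native" build with native libs enabled
    # make LATEST_ERLANG_OTP=1   -> selects the newest (by lexical sort) non-rc, non-master
    #                               build already present under $(KERL_INSTALL_DIR)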
-
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
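The pkg_* blocks above and below form erlang.mk's bundled package index: each entry records a name, description, homepage, fetch method, repository and default commit that erlang.mk resolves when a project lists the package as a dependency. A minimal sketch of a consuming Makefile, assuming a hypothetical application name (my_app is illustrative and not part of this index):

# Hypothetical project Makefile; cowboy is resolved through the
# pkg_cowboy_* entry above (git fetch of tag 1.0.4 by default).
PROJECT = my_app
DEPS = cowboy
include erlang.mk
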
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simplify writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating Github-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = erlang library for JSON Web Token
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple, non-intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is a pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elasticsearch's REST interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = redis erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += esh_mk
-pkg_esh_mk_name = esh_mk
-pkg_esh_mk_description = esh template engine plugin for erlang.mk
-pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
-pkg_esh_mk_fetch = git
-pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
-pkg_esh_mk_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = TOML language erlang parser
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = Implementation of the FIX protocol (http://fixprotocol.org/).
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - an Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = The guards and for Erlang parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
-pkg_gun_homepage = http://ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
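Most entries in this index track master, while a few (cowboy and cowlib above) pin a release tag. When a different ref is needed, erlang.mk lets the consuming Makefile override the resolved commit, or the whole fetch specification, without editing the index itself; a hedged sketch, with 1.3.3 used purely as an illustrative ref not taken from this index:

# Override only the ref of an index package:
DEPS = gun
dep_gun_commit = 1.3.3
# ...or bypass the index entry entirely with a full fetch spec:
# dep_gun = git https://github.com/ninenines/gun 1.3.3
include erlang.mk
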
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang IRC client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding and decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library for efficiently decoding and encoding JSON, written in C.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-Erlang, open-source implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = erlang driver for rethinkdb
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = library to handle mimetypes
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map an internal port to an external one using UPnP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang OAuth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transformer for using new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between record and proplist
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = automatic Erlang node discovery via redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = pipelined erlang redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang REST client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy as nif for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an id generator for message service.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elastic Search
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX style broken HTML parser in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = transit format for erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang Twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU-based collation module for Erlang
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtension for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = A tiny Erlang app that works in conjunction with statsderl to generate information about the Erlang VM for Graphite logs.
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = A simple Erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler SVN repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired by rebar xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based YAML loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = ZAB protocol implementation in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang.
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
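-
-# Example: with the index above loaded, a case-insensitive query string can
-# be passed in the q variable (the query "pool" below is only illustrative):
-#
-#   make search q=pool
-#
-# Each match is printed by pkg_print with its name, description, home page,
-# fetch method, repository and commit fields.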
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
-
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
-
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# They both use the core_dep_plugin macro.
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
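-
-# Example (hypothetical dependency names): an entry without a slash loads
-# early-plugins.mk from that dependency, while an entry containing a slash
-# points at a specific file inside it:
-#
-#   DEP_EARLY_PLUGINS = my_dep other_dep/mk/early.mk
-#
-# The first entry includes $(DEPS_DIR)/my_dep/early-plugins.mk, the second
-# includes $(DEPS_DIR)/other_dep/mk/early.mk.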
-
-# Query functions.
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
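-
-# Example (illustrative dependency specification): given
-#
-#   DEPS = cowboy
-#   dep_cowboy = git https://github.com/ninenines/cowboy 2.9.0
-#
-# query_fetch_method returns "git", query_repo the repository URL,
-# query_version "2.9.0" and query_absolute_path "$(DEPS_DIR)/cowboy".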
-
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
-
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly we don't want to include it here,
-# otherwise it'll be treated both as an app and as the top-level project.
-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
-
-export NO_AUTOPATCH
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
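-
-# Example: with the default V=0 a terse " DEP <name> (<commit>)" line is
-# printed per dependency, while running with verbosity level 2, for
-# instance "make deps V=2", enables shell tracing via set -x instead.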
-
-# Optimization: don't recompile deps unless truly necessary.
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create ebin directory for all apps to make sure Erlang recognizes them
-# as proper OTP applications when using -include_lib. This is a temporary
-# fix; a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
-
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies after they have been compiled
-# once. A developer working on the top-level project and on some of its
-# dependencies at the same time may want to change this behavior.
-# There are two solutions:
-# 1. Set `FULL=1` so that all dependencies are visited and
-# recursively recompiled if necessary.
-# 2. Set `FORCE_REBUILD=` to the specific list of dependencies that
-# should be recompiled (instead of the whole set).
-
-FORCE_REBUILD ?=
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
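-
-# Example (hypothetical dependency names): either revisit everything,
-#
-#   make FULL=1
-#
-# or force recompilation of a specific list of dependencies only:
-#
-#   make FORCE_REBUILD="cowlib ranch"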
-
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
-
-# Deps related targets.
-
-# @todo rename GNUmakefile and makefile into Makefile first, if they exist.
-# While the Makefile could also be named GNUmakefile or makefile,
-# in practice only Makefile is needed so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
-# if given. Do it for all 3 possible Makefile file names.
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
-
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
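-
-# Example dependency specifications for some of the fetch methods above
-# (names, URLs, paths and revisions are purely illustrative):
-#
-#   dep_foo = git https://example.com/foo.git v1.0.0
-#   dep_bar = git-subfolder https://example.com/mono.git master apps/bar
-#   dep_baz = hg https://example.com/baz 1.2.0
-#   dep_qux = cp /path/to/local/qux
-#   dep_quux = ln /path/to/local/quux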
-
-# Hex only has a package version. No need to look in the Erlang.mk packages.
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
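-
-# Example (illustrative names and version): a Hex dependency only needs a
-# version, plus an optional third word when the Hex package name differs
-# from the application name:
-#
-#   dep_foo = hex 1.2.3
-#   dep_bar = hex 0.5.0 bar_pkg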
-
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility with older Erlang.mk configurations.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
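-
-# Example of the deprecated format handled above: the repository comes
-# first, followed by an optional commit that defaults to master
-# (names are illustrative):
-#
-#   dep_foo = https://example.com/foo.git v0.9.0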
-
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
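-
-# Example: a project Makefile can set these before including erlang.mk to
-# replace the defaults (module names below are illustrative):
-#
-#   ERLC_OPTS = +debug_info +warn_export_vars +warn_missing_spec
-#   COMPILE_FIRST = my_behaviour
-#   ERLC_EXCLUDE = my_generated_module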
-
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- Output0 = [
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ],
- Output = case "é" of
- [233] -> unicode:characters_to_binary(Output0);
- _ -> Output0
- end,
- ok = file:write_file("$(1)", Output),
- halt()
-endef
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
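-
-# Example of the $(PROJECT).d content generated by makedep.erl (module and
-# header names are illustrative):
-#
-#   # Generated by Erlang.mk. Edit at your own risk!
-#
-#   src/my_mod.erl:: src/my_dep.erl include/my_defs.hrl; @touch $@
-#
-#   COMPILE_FIRST += my_dep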
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
- echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
-
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
- @:
-
-test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
-test_erlc_verbose_2 = set -x;
-test_erlc_verbose = $(test_erlc_verbose_$(V))
-
-define compile_test_erl
- $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(1)
-endef
-
-ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
-$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
- $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want to fail due to
-# warnings when used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
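-
-# Example: running "make rebar.config" renders the template above; for a
-# project with a single git dependency (names and versions illustrative)
-# and the default ERLC_OPTS, the result looks roughly like:
-#
-#   {deps, [
-#   {foo,".*",{git,"https://example.com/foo.git","v1.0.0"}}
-#   ]}.
-#   {erl_opts, [debug_info,warn_export_vars,warn_shadow_vars,warn_obsolete_guard]}.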
-
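-# Illustrative sketch (not part of erlang.mk): for a hypothetical project with
-# DEPS = cowlib and ERLC_OPTS = +debug_info +warn_export_vars, the generated
-# rebar.config would look roughly like this (repository URL and tag assumed):
-#
-#   {deps, [
-#   {cowlib,".*",{git,"https://github.com/ninenines/cowlib","2.12.1"}}
-#   ]}.
-#   {erl_opts, [debug_info,warn_export_vars]}.
-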
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
- " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
- " list-templates List available templates"
-
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
-
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
-
-list-templates:
- $(verbose) @echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
-
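-# Illustrative usage (not part of erlang.mk): creating a project and adding a
-# gen_server to it could look like:
-#
-#   $ make -f erlang.mk bootstrap bootstrap-rel
-#   $ make new t=gen_server n=my_server
-#
-# The second command renders tpl_gen_server into src/my_server.erl.
-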
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code. The "gcc" MSYS2 package also doesn't.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
- "The CI_OTP variable must be defined with the Erlang versions" \
- "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
-
-# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifdef CONCUERROR_TESTS
-
-.PHONY: concuerror distclean-concuerror
-
-# Configuration
-
-CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
-CONCUERROR_OPTS ?=
-
-# Core targets.
-
-check:: concuerror
-
-ifndef KEEP_LOGS
-distclean:: distclean-concuerror
-endif
-
-# Plugin-specific targets.
-
-$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
- $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
- $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
-
-$(CONCUERROR_LOGS_DIR):
- $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
-
-define concuerror_html_report
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="utf-8">
-<title>Concuerror HTML report</title>
-</head>
-<body>
-<h1>Concuerror HTML report</h1>
-<p>Generated on $(concuerror_date)</p>
-<ul>
-$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
-</ul>
-</body>
-</html>
-endef
-
-concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
- $(eval concuerror_date := $(shell date))
- $(eval concuerror_targets := $^)
- $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
-
-define concuerror_target
-.PHONY: concuerror-$1-$2
-
-concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
- $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
- --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
- -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
- $$(CONCUERROR_OPTS) -m $1 -t $2
-endef
-
-$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
-
-distclean-concuerror:
- $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
-
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ct apps-ct distclean-ct
-
-# Configuration.
-
-CT_OPTS ?=
-
-ifneq ($(wildcard $(TEST_DIR)),)
-ifndef CT_SUITES
-CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
-endif
-endif
-CT_SUITES ?=
-CT_LOGS_DIR ?= $(CURDIR)/logs
-
-# Core targets.
-
-tests:: ct
-
-ifndef KEEP_LOGS
-distclean:: distclean-ct
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Common_test targets:" \
- " ct Run all the common_test suites for this project" \
- "" \
- "All your common_test suites have their associated targets." \
- "A suite named http_SUITE can be ran using the ct-http target."
-
-# Plugin-specific targets.
-
-CT_RUN = ct_run \
- -no_auto_compile \
- -noinput \
- -pa $(CURDIR)/ebin $(TEST_DIR) \
- -dir $(TEST_DIR) \
- -logdir $(CT_LOGS_DIR)
-
-ifeq ($(CT_SUITES),)
-ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
-else
-# We do not run tests if we are in an apps/* with no test directory.
-ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
-ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
-endif
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-define ct_app_target
-apps-ct-$1: test-build
- $$(MAKE) -C $1 ct IS_APP=1
-endef
-
-$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
-
-apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
-endif
-
-ifdef t
-ifeq (,$(findstring :,$t))
-CT_EXTRA = -group $t
-else
-t_words = $(subst :, ,$t)
-CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
-endif
-else
-ifdef c
-CT_EXTRA = -case $c
-else
-CT_EXTRA =
-endif
-endif
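-
-# Illustrative usage (not part of erlang.mk), assuming a hypothetical
-# http_SUITE with an "admin" group containing a "basic_auth" case:
-#
-#   $ make ct-http t=admin                 # -group admin
-#   $ make ct-http t=admin:basic_auth      # -group admin -case basic_auth
-#   $ make ct-http c=basic_auth            # -case basic_auth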
-
-define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
-endef
-
-$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
-
-distclean-ct:
- $(gen_verbose) rm -rf $(CT_LOGS_DIR)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: plt distclean-plt dialyze
-
-# Configuration.
-
-DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
-export DIALYZER_PLT
-
-PLT_APPS ?=
-DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-DIALYZER_PLT_OPTS ?=
-
-# Core targets.
-
-check:: dialyze
-
-distclean:: distclean-plt
-
-help::
- $(verbose) printf "%s\n" "" \
- "Dialyzer targets:" \
- " plt Build a PLT file for this project" \
- " dialyze Analyze the project using Dialyzer"
-
-# Plugin-specific targets.
-
-define filter_opts.erl
- Opts = init:get_plain_arguments(),
- {Filtered, _} = lists:foldl(fun
- (O, {Os, true}) -> {[O|Os], false};
- (O = "-D", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-I", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-pa", {Os, _}) -> {[O|Os], true};
- (_, Acc) -> Acc
- end, {[], false}, Opts),
- io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
- halt().
-endef
-
-# DIALYZER_PLT is a variable understood directly by Dialyzer.
-#
-# We append the path to erts at the end of the PLT. This works
-# because the PLT file is in the external term format and the
-# function binary_to_term/1 ignores any trailing data.
-$(DIALYZER_PLT): deps app
- $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
- while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
- $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
- erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
- $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
-
-plt: $(DIALYZER_PLT)
-
-distclean-plt:
- $(gen_verbose) rm -f $(DIALYZER_PLT)
-
-ifneq ($(wildcard $(DIALYZER_PLT)),)
-dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
- $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
- grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
- rm $(DIALYZER_PLT); \
- $(MAKE) plt; \
- fi
-else
-dialyze: $(DIALYZER_PLT)
-endif
- $(verbose) dialyzer --no_native `$(ERL) \
- -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
- -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
-
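-# Illustrative usage (not part of erlang.mk): a project whose code calls into
-# crypto and ssl could extend the PLT and run the analysis with:
-#
-#   $ make plt PLT_APPS="crypto ssl"
-#   $ make dialyze
-#
-# The PLT is rebuilt automatically when the erts path recorded in its last
-# line no longer matches the current Erlang/OTP installation.
-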
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
- " escript Build an executable escript archive" \
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
- $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: eunit apps-eunit
-
-# Configuration
-
-EUNIT_OPTS ?=
-EUNIT_ERL_OPTS ?=
-
-# Core targets.
-
-tests:: eunit
-
-help::
- $(verbose) printf "%s\n" "" \
- "EUnit targets:" \
- " eunit Run all the EUnit tests for this project"
-
-# Plugin-specific targets.
-
-define eunit.erl
- $(call cover.erl)
- CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
- ok -> ok;
- error -> halt(2)
- end,
- CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
- halt()
-endef
-
-EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
-else
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
-endif
-else
-EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
-EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
-
-EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
- $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
-
-eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
-ifneq ($(wildcard src/ $(TEST_DIR)),)
- $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-apps-eunit: test-build
- $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
- [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
- exit $$eunit_retcode
-endif
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper)
-.PHONY: proper
-
-# Targets.
-
-tests:: proper
-
-define proper_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- Module = fun(M) ->
- [true] =:= lists:usort([
- case atom_to_list(F) of
- "prop_" ++ _ ->
- io:format("Testing ~p:~p/0~n", [M, F]),
- proper:quickcheck(M:F(), nocolors);
- _ ->
- true
- end
- || {F, 0} <- M:module_info(exports)])
- end,
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]);
- module -> Module($(2));
- function -> proper:quickcheck($(2), nocolors)
- end,
- CoverExport("$(COVER_DATA_DIR)/proper.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-proper: test-build cover-data-dir
- $(verbose) $(call erlang,$(call proper_check.erl,module,$(t)))
-else
-proper: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)()))
-endif
-else
-proper: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Verbosity.
-
-proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
-proto_verbose = $(proto_verbose_$(V))
-
-# Core targets.
-
-ifneq ($(wildcard src/),)
-ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
-PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
-ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
-
-ifeq ($(PROTO_FILES),)
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
- $(verbose) :
-else
-# Rebuild proto files when the Makefile changes.
-# We exclude $(PROJECT).d to avoid a circular dependency.
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(PROTO_FILES); \
- fi
- $(verbose) touch $@
-
-$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
-endif
-
-ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
-define compile_proto.erl
- [begin
- protobuffs_compile:generate_source(F, [
- {output_include_dir, "./include"},
- {output_src_dir, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-else
-define compile_proto.erl
- [begin
- gpb_compile:file(F, [
- {include_as_lib, true},
- {module_name_suffix, "_pb"},
- {o_hrl, "./include"},
- {o_erl, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-endif
-
-ifneq ($(PROTO_FILES),)
-$(PROJECT).d:: $(PROTO_FILES)
- $(verbose) mkdir -p ebin/ include/
- $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
-endif
-endif
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
-
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
- " shell Run an erlang shell with SHELL_OPTS or reasonable default"
-
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
-
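-# Illustrative configuration (not part of erlang.mk); the values below are
-# assumptions for a hypothetical project Makefile:
-#
-#   SHELL_DEPS = tddreloader
-#   SHELL_OPTS = -eval 'application:ensure_all_started(my_app)'
-#
-# "make shell" then builds those deps and starts erl with ebin/ and the test
-# directory in the code path.
-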
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
- "ReST sources and 'conf.py' file are expected in directory pointed by" \
- "SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists formats to build (only" \
- "'html' format is generated by default); target directory can be specified by" \
- 'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
- "Additional Sphinx options can be set in SPHINX_OPTS."
-
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
-
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
-.PHONY: triq
-
-# Targets.
-
-tests:: triq
-
-define triq_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
- module -> triq:check($(2));
- function -> triq:check($(2))
- end,
- CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-triq: test-build cover-data-dir
- $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
-else
-triq: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
-endif
-else
-triq: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
-		'  xref        Run Xrefr using $$XREF_CONFIG as the config file, if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-COVER_REPORT_DIR ?= cover
-COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
-
-ifdef COVER
-COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
-COVER_DEPS ?=
-endif
-
-# Code coverage for Common Test.
-
-ifdef COVER
-ifdef CT_RUN
-ifneq ($(wildcard $(TEST_DIR)),)
-test-build:: $(TEST_DIR)/ct.cover.spec
-
-$(TEST_DIR)/ct.cover.spec: cover-data-dir
- $(gen_verbose) printf "%s\n" \
- "{incl_app, '$(PROJECT)', details}." \
- "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
- $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
- $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
-
-CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
-endif
-endif
-endif
-
-# Code coverage for other tools.
-
-ifdef COVER
-define cover.erl
- CoverSetup = fun() ->
- Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
- $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
- $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
- end
- end || Dir <- Dirs]
- end,
- CoverExport = fun(Filename) -> cover:export(Filename) end,
-endef
-else
-define cover.erl
- CoverSetup = fun() -> ok end,
- CoverExport = fun(_) -> ok end,
-endef
-endif
-
-# Core targets
-
-ifdef COVER
-ifneq ($(COVER_REPORT_DIR),)
-tests::
- $(verbose) $(MAKE) --no-print-directory cover-report
-endif
-
-cover-data-dir: | $(COVER_DATA_DIR)
-
-$(COVER_DATA_DIR):
- $(verbose) mkdir -p $(COVER_DATA_DIR)
-else
-cover-data-dir:
-endif
-
-clean:: coverdata-clean
-
-ifneq ($(COVER_REPORT_DIR),)
-distclean:: cover-report-clean
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Cover targets:" \
- " cover-report Generate a HTML coverage report from previously collected" \
- " cover data." \
- " all.coverdata Merge all coverdata files into all.coverdata." \
- "" \
- "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
- "target tests additionally generates a HTML coverage report from the combined" \
- "coverdata files from each of these testing tools. HTML reports can be disabled" \
- "by setting COVER_REPORT_DIR to empty."
-
-# Plugin specific targets
-
-COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
-
-.PHONY: coverdata-clean
-coverdata-clean:
- $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
-
-# Merge all coverdata files into one.
-define cover_export.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
-endef
-
-all.coverdata: $(COVERDATA) cover-data-dir
- $(gen_verbose) $(call erlang,$(cover_export.erl))
-
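-# Illustrative usage (not part of erlang.mk): assuming eunit and ct were run
-# with COVER=1 and produced cover/eunit.coverdata and cover/ct.coverdata,
-# "make all.coverdata" merges them into cover/all.coverdata (with the default
-# COVER_DATA_DIR).
-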
-# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
-# empty if you want the coverdata files but not the HTML report.
-ifneq ($(COVER_REPORT_DIR),)
-
-.PHONY: cover-report-clean cover-report
-
-cover-report-clean:
- $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
-ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
- $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
-endif
-
-ifeq ($(COVERDATA),)
-cover-report:
-else
-
-# Modules which include eunit.hrl always contain one line without coverage
-# because eunit defines test/0 which is never called. We compensate for this.
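-# For example (hypothetical numbers): if cover reports 40 covered and 4
-# uncovered lines for such a module, the generated report counts it as 40/43
-# rather than 40/44, discounting the generated test/0.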
-EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
- grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
- | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
-
-define cover_report.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- Ms = cover:imported_modules(),
- [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
- ++ ".COVER.html", [html]) || M <- Ms],
- Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
- EunitHrlMods = [$(EUNIT_HRL_MODS)],
- Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
- true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
- TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
- TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
- Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
- TotalPerc = Perc(TotalY, TotalN),
- {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
- io:format(F, "<!DOCTYPE html><html>~n"
- "<head><meta charset=\"UTF-8\">~n"
- "<title>Coverage report</title></head>~n"
- "<body>~n", []),
- io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
- io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
- [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
- "<td>~p%</td></tr>~n",
- [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
- How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
- Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
- io:format(F, "</table>~n"
- "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
- "</body></html>", [How, Date]),
- halt().
-endef
-
-cover-report:
- $(verbose) mkdir -p $(COVER_REPORT_DIR)
- $(gen_verbose) $(call erlang,$(cover_report.erl))
-
-endif
-endif # ifneq ($(COVER_REPORT_DIR),)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
-
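-# Illustrative usage (not part of erlang.mk): with a relx.config present,
-# "make SFX=1" builds the release, appends its tarball to the stub above and
-# produces a self-contained _rel/<release name>.run executable.
-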
-endif
-endif
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
-
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
-
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are also included no
-# matter the type of requested dependencies.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
-
-# Allow using fetch-deps together with $(DEP_TYPES) to fetch multiple types of
-# dependencies with a single target.
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
-
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
diff --git a/deps/rabbitmq_management/include/rabbit_mgmt.hrl b/deps/rabbitmq_management/include/rabbit_mgmt.hrl
index fc07271dbe..7372f113ac 100644
--- a/deps/rabbitmq_management/include/rabbit_mgmt.hrl
+++ b/deps/rabbitmq_management/include/rabbit_mgmt.hrl
@@ -1,19 +1,13 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License at
-%% https://www.mozilla.org/MPL/
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
-%% License for the specific language governing rights and limitations
-%% under the License.
-%%
-%% The Original Code is RabbitMQ Management Console.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-define(AUTH_REALM, "Basic realm=\"RabbitMQ Management\"").
-define(HEALTH_CHECK_FAILURE_STATUS, 503).
+
+-define(MANAGEMENT_PG_SCOPE, rabbitmq_management).
+-define(MANAGEMENT_PG_GROUP, management_db).
diff --git a/deps/rabbitmq_management/priv/www/api/index.html b/deps/rabbitmq_management/priv/www/api/index.html
index a6e4921544..52b450ff25 100644
--- a/deps/rabbitmq_management/priv/www/api/index.html
+++ b/deps/rabbitmq_management/priv/www/api/index.html
@@ -309,7 +309,7 @@ vary: accept, accept-encoding, origin</pre>
<td></td>
<td></td>
<td class="path">/api/connections</td>
- <td>A list of all open connections.</td>
+ <td>A list of all open connections. Use <a href="#pagination">pagination parameters</a> to filter connections.</td>
</tr>
<tr>
<td>X</td>
@@ -317,7 +317,7 @@ vary: accept, accept-encoding, origin</pre>
<td></td>
<td></td>
<td class="path">/api/vhosts/<i>vhost</i>/connections</td>
- <td>A list of all open connections in a specific vhost.</td>
+ <td>A list of all open connections in a specific virtual host. Use <a href="#pagination">pagination parameters</a> to filter connections.</td>
</tr>
<tr>
<td>X</td>
@@ -347,7 +347,7 @@ vary: accept, accept-encoding, origin</pre>
<td></td>
<td></td>
<td class="path">/api/channels</td>
- <td>A list of all open channels.</td>
+ <td>A list of all open channels. Use <a href="#pagination">pagination parameters</a> to filter channels.</td>
</tr>
<tr>
<td>X</td>
@@ -355,7 +355,7 @@ vary: accept, accept-encoding, origin</pre>
<td></td>
<td></td>
<td class="path">/api/vhosts/<i>vhost</i>/channels</td>
- <td>A list of all open channels in a specific vhost.</td>
+ <td>A list of all open channels in a specific virtual host. Use <a href="#pagination">pagination parameters</a> to filter channels.</td>
</tr>
<tr>
<td>X</td>
@@ -387,7 +387,7 @@ vary: accept, accept-encoding, origin</pre>
<td></td>
<td></td>
<td class="path">/api/exchanges</td>
- <td>A list of all exchanges.</td>
+ <td>A list of all exchanges. Use <a href="#pagination">pagination parameters</a> to filter exchanges.</td>
</tr>
<tr>
<td>X</td>
@@ -395,7 +395,7 @@ vary: accept, accept-encoding, origin</pre>
<td></td>
<td></td>
<td class="path">/api/exchanges/<i>vhost</i></td>
- <td>A list of all exchanges in a given virtual host.</td>
+ <td>A list of all exchanges in a given virtual host. Use <a href="#pagination">pagination parameters</a> to filter exchanges.</td>
</tr>
<tr>
<td>X</td>
@@ -466,7 +466,7 @@ vary: accept, accept-encoding, origin</pre>
<td></td>
<td></td>
<td class="path">/api/queues</td>
- <td>A list of all queues.</td>
+ <td>A list of all queues. Use <a href="#pagination">pagination parameters</a> to filter queues.</td>
</tr>
<tr>
<td>X</td>
@@ -474,7 +474,7 @@ vary: accept, accept-encoding, origin</pre>
<td></td>
<td></td>
<td class="path">/api/queues/<i>vhost</i></td>
- <td>A list of all queues in a given virtual host.</td>
+ <td>A list of all queues in a given virtual host. Use <a href="#pagination">pagination parameters</a> to filter queues.</td>
</tr>
<tr>
<td>X</td>
@@ -2089,5 +2089,90 @@ or:
</td>
</tr>
</table>
+
+ <section id="pagination">
+ <h2>Pagination Parameters</h2>
+
+ Pagination can be applied to the endpoints that list:
+
+ <ul>
+ <li>
+ queues
+ </li>
+ <li>
+ exchanges
+ </li>
+ <li>
+ connections
+ </li>
+ <li>
+ channels
+ </li>
+ </ul>
+
+ <p>
+ Below are the query parameters that can be used.
+
+ <table>
+ <thead>
+ <tr>
+ <th>Parameter Name</th>
+ <th>Data Type</th>
+ <th>Description</th>
+ </tr>
+ </thead>
+ <tr>
+ <td><code>page</code></td>
+ <td>Positive integer</td>
+ <td>
+ Page number
+ </td>
+ </tr>
+ <tr>
+ <td><code>page_size</code></td>
+ <td>Positive integer</td>
+ <td>
+ Number of elements per page (default value: 100)
+ </td>
+ </tr>
+ <tr>
+ <td><code>name</code></td>
+ <td>String</td>
+ <td>
+ Filter by name, for example queue name, exchange name, etc.
+ </td>
+ </tr>
+ <tr>
+ <td><code>use_regex</code></td>
+ <td>Boolean</td>
+ <td>
+ Enables regular expression matching for the name parameter
+ </td>
+ </tr>
+ </table>
+ </p>
+
+ <p>
+ Examples:
+ <table>
+ <tr>
+ <td><code>http://localhost:15672/api/queues?page=1&page_size=50</code></td>
+ <td>
+ Fetches the first page of queues, with 50 elements per page
+ </td>
+ </tr>
+ <tr>
+ <td><code>http://localhost:15672/api/queues/my-vhost?page=1&page_size=100&name=&use_regex=false&pagination=true</code></td>
+ <td>
+ Filters the first page of queues in the virtual host "my-vhost"
+ </td>
+ </tr>
+ <tr>
+ <td><code>http://localhost:15672/api/exchanges?page=1&page_size=100&name=%5Eamq&use_regex=true&pagination=true</code></td>
+ <td>
+ Filters the first page of exchanges, 100 elements per page, with names filtered using the regular expression "^amq"
+ </td>
+ </tr>
+ </table>
+ </p>
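+
+ <p>
+ For instance, the following request (an illustrative example combining the
+ parameters documented above) fetches the second page of queues, 25 elements
+ per page:
+ </p>
+ <pre>GET /api/queues?page=2&page_size=25 HTTP/1.1</pre>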
+ </section>
</body>
</html>
diff --git a/deps/rabbitmq_management/priv/www/js/dispatcher.js b/deps/rabbitmq_management/priv/www/js/dispatcher.js
index d2842c2da8..e0047540e7 100644
--- a/deps/rabbitmq_management/priv/www/js/dispatcher.js
+++ b/deps/rabbitmq_management/priv/www/js/dispatcher.js
@@ -98,10 +98,22 @@ dispatcher_add(function(sammy) {
});
sammy.get('#/queues/:vhost/:name', function() {
- var path = '/queues/' + esc(this.params['vhost']) + '/' + esc(this.params['name']);
- render({'queue': {path: path,
- options: {ranges:['lengths-q', 'msg-rates-q', 'data-rates-q']}},
- 'bindings': path + '/bindings'}, 'queue', '#/queues');
+ var vhost = this.params['vhost'];
+ var queue = this.params['name'];
+ var path = '/queues/' + esc(vhost) + '/' + esc(queue);
+ var requests = {'queue': {path: path,
+ options: {ranges:['lengths-q', 'msg-rates-q', 'data-rates-q']}},
+ 'bindings': path + '/bindings'};
+ // add any extra requests contributed by code plugged into this extension point
+ for (var i = 0; i < QUEUE_EXTRA_CONTENT_REQUESTS.length; i++) {
+ var extra = QUEUE_EXTRA_CONTENT_REQUESTS[i](vhost, queue);
+ for (var key in extra) {
+ if(extra.hasOwnProperty(key)){
+ requests[key] = extra[key];
+ }
+ }
+ }
+ render(requests, 'queue', '#/queues');
});
sammy.put('#/queues', function() {
if (sync_put(this, '/queues/:vhost/:name'))
@@ -178,18 +190,26 @@ dispatcher_add(function(sammy) {
'permissions': '/permissions'}, 'users');
sammy.get('#/users/:id', function() {
var vhosts = JSON.parse(sync_get('/vhosts'));
- render({'user': '/users/' + esc(this.params['id']),
+ const current_vhost = get_pref('vhost');
+ var index_vhost = 0;
+ for (var i = 0; i < vhosts.length; i++) {
+ if (vhosts[i].name === current_vhost) {
+ index_vhost = i;
+ break;
+ }
+ }
+ render({'user': '/users/' + esc(this.params['id']),
'permissions': '/users/' + esc(this.params['id']) + '/permissions',
'topic_permissions': '/users/' + esc(this.params['id']) + '/topic-permissions',
'vhosts': '/vhosts/',
- 'exchanges': '/exchanges/' + esc(vhosts[0].name)}, 'user',
- '#/users');
+ 'exchanges': '/exchanges/' + esc(vhosts[index_vhost].name)},
+ 'user','#/users');
});
sammy.put('#/users-add', function() {
res = sync_put(this, '/users/:username');
if (res) {
if (res.http_status === 204) {
- username = res.req_params.username;
+ username = fmt_escape_html(res.req_params.username);
show_popup('warn', "Updated an existing user: '" + username + "'");
}
update();
diff --git a/deps/rabbitmq_management/priv/www/js/formatters.js b/deps/rabbitmq_management/priv/www/js/formatters.js
index 8c0f55ed93..cbd3010287 100644
--- a/deps/rabbitmq_management/priv/www/js/formatters.js
+++ b/deps/rabbitmq_management/priv/www/js/formatters.js
@@ -9,6 +9,23 @@ const PROCESS_THRESHOLDS = [[0.75, 'red'],
const TAB_HIGHLIGHTER = "\u2192";
const WHITESPACE_HIGHLIGHTER = "\u23B5";
+const CONSUMER_OWNER_FORMATTERS = [
+ {order: 10, formatter: function(consumer) {
+ if (consumer.consumer_tag.startsWith('stream.subid-')) {
+ return link_conn(consumer.channel_details.connection_name);
+ } else {
+ return undefined;
+ }
+ }},
+ {order: 100, formatter: function(consumer) {
+ return link_channel(consumer.channel_details.name);
+ }}
+];
+
+const CONSUMER_OWNER_FORMATTERS_COMPARATOR = function(formatter1, formatter2) {
+ return formatter1.order - formatter2.order;
+}
+
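+// A minimal sketch (illustrative only, not part of this change set) of how an
+// additional owner formatter could be registered; the 'my-tag-prefix.' check
+// is a hypothetical example. A formatter returns undefined to fall through to
+// the next entry, and CONSUMER_OWNER_FORMATTERS_COMPARATOR can be used to keep
+// the list sorted by 'order' before it is evaluated:
+//
+//   CONSUMER_OWNER_FORMATTERS.push({order: 50, formatter: function(consumer) {
+//       return consumer.consumer_tag.startsWith('my-tag-prefix.')
+//           ? link_conn(consumer.channel_details.connection_name)
+//           : undefined;
+//   }});
+//   CONSUMER_OWNER_FORMATTERS.sort(CONSUMER_OWNER_FORMATTERS_COMPARATOR);
+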
function fmt_string(str, unknown) {
if (unknown == undefined) {
unknown = UNKNOWN_REPR;
@@ -729,6 +746,15 @@ function highlight_extra_whitespace(str) {
}, str.replace(/\t/g, TAB_HIGHLIGHTER));
}
+function link_consumer_owner(consumer) {
+ for (var i = 0; i < CONSUMER_OWNER_FORMATTERS.length; i++) {
+ var result = CONSUMER_OWNER_FORMATTERS[i].formatter(consumer);
+ if (result != undefined) {
+ return result;
+ }
+ }
+}
+
function link_conn(name, desc) {
if (desc == undefined) {
return _link_to(short_conn(name), '#/connections/' + esc(name));
@@ -866,15 +892,18 @@ function filter_ui(items) {
}
-function paginate_header_ui(pages, context){
+function paginate_header_ui(pages, context, label){
+ if (label == undefined) {
+ label = context;
+ }
var res = '<h2 class="updatable">';
- res += ' All ' + context +' (' + pages.total_count + ((pages.filtered_count != pages.total_count) ? ', filtered down to ' + pages.filtered_count : '') + ')';
+ res += ' All ' + label +' (' + pages.total_count + ((pages.filtered_count != pages.total_count) ? ', filtered down to ' + pages.filtered_count : '') + ')';
res += '</h2>';
return res;
}
-function paginate_ui(pages, context){
- var res = paginate_header_ui(pages, context);
+function paginate_ui(pages, context, label){
+ var res = paginate_header_ui(pages, context, label);
res += '<div class="hider">';
res += '<h3>Pagination</h3>';
res += '<div class="filter">';
@@ -896,7 +925,7 @@ function paginate_ui(pages, context){
res += '</select> </th>';
res += '<th><label for="' + context +'-pageof">of </label> ' + pages.page_count +'</th>';
res += '<th><span><label for="'+ context +'-name"> - Filter: </label> <input id="'+ context +'-name" data-page-start="1" class="pagination_class pagination_class_input" type="text"';
- res += 'value = ' + fmt_filter_name_request(context, "") + '>';
+ res += 'value="' + fmt_filter_name_request(context, "") + '">';
res += '</input></th></span>';
res += '<th> <input type="checkbox" data-page-start="1" class="pagination_class pagination_class_checkbox" id="'+ context +'-filter-regex-mode"' ;
diff --git a/deps/rabbitmq_management/priv/www/js/global.js b/deps/rabbitmq_management/priv/www/js/global.js
index 730b003d65..335c3607ce 100644
--- a/deps/rabbitmq_management/priv/www/js/global.js
+++ b/deps/rabbitmq_management/priv/www/js/global.js
@@ -71,7 +71,7 @@ var ALL_COLUMNS =
['features_no_policy', 'Features (no policy)', false],
['policy', 'Policy', false],
['consumers', 'Consumer count', false],
- ['consumer_utilisation', 'Consumer utilisation', false],
+ ['consumer_capacity', 'Consumer capacity', false],
['state', 'State', true]],
'Messages': [['msgs-ready', 'Ready', true],
['msgs-unacked', 'Unacknowledged', true],
@@ -166,10 +166,15 @@ var DISABLED_STATS_COLUMNS =
var COLUMNS;
+var RENDER_CALLBACKS = {};
+
+const QUEUE_EXTRA_CONTENT = [];
+const QUEUE_EXTRA_CONTENT_REQUESTS = [];
+
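+// A minimal sketch (illustrative only, not part of this change set) of how a
+// management plugin could use these extension points; 'extra_stream_info' and
+// the '/stream-info/...' path are hypothetical names. Keys contributed via
+// QUEUE_EXTRA_CONTENT_REQUESTS must start with "extra_" so main.js routes the
+// response into the 'extra_content' model (see with_reqs), and each
+// QUEUE_EXTRA_CONTENT callback returns an HTML fragment rendered on the queue
+// page by maybe_format_extra_queue_content:
+//
+//   QUEUE_EXTRA_CONTENT_REQUESTS.push(function(vhost, queue) {
+//       return {'extra_stream_info': '/stream-info/' + esc(vhost) + '/' + esc(queue)};
+//   });
+//   QUEUE_EXTRA_CONTENT.push(function(queue, extraContent) {
+//       var info = extraContent['extra_stream_info'];
+//       return info ? '<div class="section"><h2>Stream info</h2></div>' : '';
+//   });
+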
// All help ? popups
var HELP = {
'delivery-limit':
- 'The number of allowed unsuccessful delivery attempts. Once a message has been delivered unsucessfully this many times it will be dropped or dead-lettered, depending on the queue configuration.',
+ 'The number of allowed unsuccessful delivery attempts. Once a message has been delivered unsuccessfully this many times it will be dropped or dead-lettered, depending on the queue configuration.',
'exchange-auto-delete':
'If yes, the exchange will delete itself after at least one queue or exchange has been bound to this one, and then all queues or exchanges have been unbound.',
@@ -201,8 +206,8 @@ var HELP = {
'queue-max-age':
'How long a message published to a stream queue can live before it is discarded.',
- 'queue-max-segment-size':
- 'Total segment size for stream segments on disk.<br/>(Sets the x-max-segment-size argument.)',
+ 'queue-stream-max-segment-size-bytes':
+ 'Total segment size for stream segments on disk.<br/>(Sets the x-stream-max-segment-size-bytes argument.)',
'queue-auto-delete':
'If yes, the queue will delete itself after at least one consumer has connected, and then all consumers have disconnected.',
@@ -220,7 +225,7 @@ var HELP = {
'Maximum number of priority levels for the queue to support; if not set, the queue will not support message priorities.<br/>(Sets the "<a target="_blank" href="https://rabbitmq.com/priority.html">x-max-priority</a>" argument.)',
'queue-max-age':
- 'Retention policy for stream queues.<br/>(Sets the x-max-age argument.)',
+ 'Sets the data retention for stream queues in time units <br/>(Y=Years, M=Months, D=Days, h=hours, m=minutes, s=seconds).<br/>E.g. "1h" configures the stream to only keep the last 1 hour of received messages.<br/><br/>(Sets the x-max-age argument.)',
'queue-lazy':
'Set the queue into lazy mode, keeping as many messages as possible on disk to reduce RAM usage; if not set, the queue will keep an in-memory cache to deliver messages as fast as possible.<br/>(Sets the "<a target="_blank" href="https://www.rabbitmq.com/lazy-queues.html">x-queue-mode</a>" argument.)',
@@ -249,12 +254,12 @@ var HELP = {
'queue-process-memory':
'Total memory used by this queue process. This does not include in-memory message bodies (which may be shared between queues and will appear in the global "binaries" memory) but does include everything else.',
- 'queue-consumer-utilisation':
- 'Fraction of the time that the queue is able to immediately deliver messages to consumers. If this number is less than 100% you may be able to deliver messages faster if: \
+ 'queue-consumer-capacity':
+ 'Fraction of the time that the queue is able to immediately deliver messages to consumers. Will be 0 for queues that have no consumers. If this number is less than 100% you may be able to deliver messages faster if: \
<ul> \
<li>There were more consumers or</li> \
<li>The consumers were faster or</li> \
- <li>The consumers had a higher prefetch count</li> \
+ <li>The consumer channels used a higher prefetch count</li> \
</ul>',
'internal-users-only':
@@ -491,6 +496,10 @@ var HELP = {
For a quorum queue, a consumer can be inactive because its owning node is suspected down. <br/><br/> \
(<a href="https://www.rabbitmq.com/consumers.html#active-consumer" target="_blank">Documentation</a>)',
+ 'consumer-owner' :
+ '<a href="https://www.rabbitmq.com/consumers.html">AMQP consumers</a> belong to an AMQP channel, \
+ and <a href="https://www.rabbitmq.com/stream.html">stream consumers</a> belong to a stream connection.',
+
'plugins' :
'Note that only plugins which are both explicitly enabled and running are shown here.',
@@ -630,7 +639,7 @@ var user;
function setup_global_vars() {
var overview = JSON.parse(sync_get('/overview'));
rates_mode = overview.rates_mode;
- user_tags = expand_user_tags(user.tags.split(","));
+ user_tags = expand_user_tags(user.tags);
user_administrator = jQuery.inArray("administrator", user_tags) != -1;
is_user_policymaker = jQuery.inArray("policymaker", user_tags) != -1;
user_monitor = jQuery.inArray("monitoring", user_tags) != -1;
diff --git a/deps/rabbitmq_management/priv/www/js/main.js b/deps/rabbitmq_management/priv/www/js/main.js
index 46da9698d9..ffbf6e9a82 100644
--- a/deps/rabbitmq_management/priv/www/js/main.js
+++ b/deps/rabbitmq_management/priv/www/js/main.js
@@ -480,7 +480,9 @@ function with_update(fun) {
if(outstanding_reqs.length > 0){
return false;
}
- with_reqs(apply_state(current_reqs), [], function(json) {
+ var model = [];
+ model['extra_content'] = []; // magic key for extension point
+ with_reqs(apply_state(current_reqs), model, function(json) {
var html = format(current_template, json);
fun(html);
update_status('ok');
@@ -739,11 +741,11 @@ function postprocess() {
update_multifields();
}
-function url_pagination_template(template, defaultPage, defaultPageSize){
- var page_number_request = fmt_page_number_request(template, defaultPage);
- var page_size = fmt_page_size_request(template, defaultPageSize);
- var name_request = fmt_filter_name_request(template, "");
- var use_regex = fmt_regex_request(template, "") == "checked";
+function url_pagination_template_context(template, context, defaultPage, defaultPageSize){
+ var page_number_request = fmt_page_number_request(context, defaultPage);
+ var page_size = fmt_page_size_request(context, defaultPageSize);
+ var name_request = fmt_filter_name_request(context, "");
+ var use_regex = fmt_regex_request(context, "") == "checked";
if (use_regex) {
name_request = esc(name_request);
}
@@ -754,6 +756,10 @@ function url_pagination_template(template, defaultPage, defaultPageSize){
'&use_regex=' + use_regex;
}
+function url_pagination_template(template, defaultPage, defaultPageSize){
+ return url_pagination_template_context(template, template, defaultPage, defaultPageSize);
+}
+
function stored_page_info(template, page_start){
var pageSize = fmt_strip_tags($('#' + template+'-pagesize').val());
var filterName = fmt_strip_tags($('#' + template+'-name').val());
@@ -780,6 +786,12 @@ function update_pages(template, page_start){
case 'exchanges' : renderExchanges(); break;
case 'connections' : renderConnections(); break;
case 'channels' : renderChannels(); break;
+ default:
+ renderCallback = RENDER_CALLBACKS[template];
+ if (renderCallback != undefined) {
+ renderCallback();
+ }
+ break;
}
}
@@ -1106,7 +1118,13 @@ function with_reqs(reqs, acc, fun) {
if (keys(reqs).length > 0) {
var key = keys(reqs)[0];
with_req('GET', reqs[key], null, function(resp) {
- acc[key] = JSON.parse(resp.responseText);
+ if (key.startsWith("extra_")) {
+ var extraContent = acc["extra_content"];
+ extraContent[key] = JSON.parse(resp.responseText);
+ acc["extra_content"] = extraContent;
+ } else {
+ acc[key] = JSON.parse(resp.responseText);
+ }
var remainder = {};
for (var k in reqs) {
if (k != key) remainder[k] = reqs[k];
@@ -1142,6 +1160,14 @@ function format(template, json) {
}
}
+function maybe_format_extra_queue_content(queue, extraContent) {
+ var content = '';
+ for (var i = 0; i < QUEUE_EXTRA_CONTENT.length; i++) {
+ content += QUEUE_EXTRA_CONTENT[i](queue, extraContent);
+ }
+ return content;
+}
+
function update_status(status) {
var text;
if (status == 'ok')
diff --git a/deps/rabbitmq_management/priv/www/js/tmpl/channel.ejs b/deps/rabbitmq_management/priv/www/js/tmpl/channel.ejs
index 5ecd7f801e..0eae88802f 100644
--- a/deps/rabbitmq_management/priv/www/js/tmpl/channel.ejs
+++ b/deps/rabbitmq_management/priv/www/js/tmpl/channel.ejs
@@ -74,7 +74,7 @@
</div>
<div class="section">
- <h2>Consumers</h2>
+ <h2 class="updatable" >Consumers (<%=(channel.consumer_details.length)%>) </h2>
<div class="hider updatable">
<%= format('consumers', {'mode': 'channel', 'consumers': channel.consumer_details}) %>
</div>
diff --git a/deps/rabbitmq_management/priv/www/js/tmpl/connection.ejs b/deps/rabbitmq_management/priv/www/js/tmpl/connection.ejs
index 9a09f89b72..f107a3981c 100644
--- a/deps/rabbitmq_management/priv/www/js/tmpl/connection.ejs
+++ b/deps/rabbitmq_management/priv/www/js/tmpl/connection.ejs
@@ -1,4 +1,4 @@
-<h1>Connection <%= fmt_string(connection.name) %> <%= fmt_maybe_vhost(connection.vhost) %></h1>
+<h2>Connection <%= fmt_string(connection.name) %> <%= fmt_maybe_vhost(connection.vhost) %></h2>
<% if (!disable_stats) { %>
<div class="section">
@@ -76,7 +76,7 @@
</div>
<div class="section">
- <h2>Channels</h2>
+ <h2 class="updatable" >Channels (<%=(channels.length)%>) </h2>
<div class="hider updatable">
<%= format('channels-list', {'channels': channels, 'mode': 'connection'}) %>
</div>
diff --git a/deps/rabbitmq_management/priv/www/js/tmpl/consumers.ejs b/deps/rabbitmq_management/priv/www/js/tmpl/consumers.ejs
index ffd7b7b603..a0b6ceb66a 100644
--- a/deps/rabbitmq_management/priv/www/js/tmpl/consumers.ejs
+++ b/deps/rabbitmq_management/priv/www/js/tmpl/consumers.ejs
@@ -3,7 +3,7 @@
<thead>
<tr>
<% if (mode == 'queue') { %>
- <th>Channel</th>
+ <th>Channel <span class="help" id="consumer-owner"></th>
<th>Consumer tag</th>
<% } else { %>
<th>Consumer tag</th>
@@ -23,7 +23,7 @@
%>
<tr<%= alt_rows(i) %>>
<% if (mode == 'queue') { %>
- <td><%= link_channel(consumer.channel_details.name) %></td>
+ <td><%= link_consumer_owner(consumer) %></td>
<td><%= fmt_string(consumer.consumer_tag) %></td>
<% } else { %>
<td><%= fmt_string(consumer.consumer_tag) %></td>
diff --git a/deps/rabbitmq_management/priv/www/js/tmpl/policies.ejs b/deps/rabbitmq_management/priv/www/js/tmpl/policies.ejs
index 59c922ad46..91aaefb80b 100644
--- a/deps/rabbitmq_management/priv/www/js/tmpl/policies.ejs
+++ b/deps/rabbitmq_management/priv/www/js/tmpl/policies.ejs
@@ -100,9 +100,8 @@
<td>
<span class="argument-link" field="definition" key="max-length" type="number">Max length</span> |
<span class="argument-link" field="definition" key="max-length-bytes" type="number">Max length bytes</span> |
- <span class="argument-link" field="definition" key="overflow" type="string">Overflow behaviour</span>
- <span class="argument-link" field="definition" key="expires" type="number">Auto expire</span> |
- <span class="help" id="queue-overflow"></span> </br>
+ <span class="argument-link" field="definition" key="overflow" type="string">Overflow behaviour</span> <span class="help" id="queue-overflow"></span> |
+ <span class="argument-link" field="definition" key="expires" type="number">Auto expire</span> </br>
<span class="argument-link" field="definition" key="dead-letter-exchange" type="string">Dead letter exchange</span> |
<span class="argument-link" field="definition" key="dead-letter-routing-key" type="string">Dead letter routing key</span><br />
</td>
@@ -136,8 +135,8 @@
<td>
<span class="argument-link" field="definition" key="max-age" type="string">Max age</span>
<span class="help" id="queue-max-age"></span> |
- <span class="argument-link" field="definition" key="max-segment-size" type="number">Max segment size</span>
- <span class="help" id="queue-max-segment-size"></span>
+ <span class="argument-link" field="definition" key="stream-max-segment-size-bytes" type="number">Max segment size in bytes</span>
+ <span class="help" id="queue-stream-max-segment-size-bytes"></span>
</td>
</tr>
<tr>
diff --git a/deps/rabbitmq_management/priv/www/js/tmpl/queue.ejs b/deps/rabbitmq_management/priv/www/js/tmpl/queue.ejs
index 27297b3757..c2022f73bb 100644
--- a/deps/rabbitmq_management/priv/www/js/tmpl/queue.ejs
+++ b/deps/rabbitmq_management/priv/www/js/tmpl/queue.ejs
@@ -128,20 +128,35 @@
<th>State</th>
<td><%= fmt_object_state(queue) %></td>
</tr>
+ <% if(queue.consumers) { %>
<tr>
<th>Consumers</th>
<td><%= fmt_string(queue.consumers) %></td>
</tr>
+ <% } else if(queue.hasOwnProperty('consumer_details')) { %>
<tr>
- <th>Consumer utilisation <span class="help" id="queue-consumer-utilisation"></th>
- <td><%= fmt_percent(queue.consumer_utilisation) %></td>
+ <th>Consumers</th>
+ <td><%= fmt_string(queue.consumer_details.length) %></td>
</tr>
- <% if (is_quorum(queue) || is_stream(queue)) { %>
+ <% } %>
+ <% if (!is_stream(queue)) { %>
+ <tr>
+ <th>Consumer capacity <span class="help" id="queue-consumer-capacity"></th>
+ <td><%= fmt_percent(queue.consumer_capacity) %></td>
+ </tr>
+ <% } %>
+ <% if (is_quorum(queue)) { %>
<tr>
<th>Open files</th>
<td><%= fmt_table_short(queue.open_files) %></td>
</tr>
<% } %>
+ <% if (is_stream(queue)) { %>
+ <tr>
+ <th>Readers</th>
+ <td><%= fmt_table_short(queue.readers) %></td>
+ </tr>
+ <% } %>
</table>
<table class="facts">
@@ -173,11 +188,16 @@
<td class="r">
<%= fmt_num_thousands(queue.messages_unacknowledged) %>
</td>
+ <% if (is_quorum(queue)) { %>
<td class="r">
<%= fmt_num_thousands(queue.messages_ram) %>
</td>
+ <% } %>
<% if (is_classic(queue)) { %>
<td class="r">
+ <%= fmt_num_thousands(queue.messages_ram) %>
+ </td>
+ <td class="r">
<%= fmt_num_thousands(queue.messages_persistent) %>
</td>
<td class="r">
@@ -186,6 +206,7 @@
<% } %>
</tr>
<tr>
+ <% if (is_classic(queue) || is_quorum(queue)) { %>
<th>
Message body bytes
<span class="help" id="queue-message-body-bytes"></span>
@@ -202,6 +223,7 @@
<td class="r">
<%= fmt_bytes(queue.message_bytes_ram) %>
</td>
+ <% } %>
<% if (is_classic(queue)) { %>
<td class="r">
<%= fmt_bytes(queue.message_bytes_persistent) %>
@@ -248,10 +270,13 @@
<% } %>
+<% if(!disable_stats) { %>
+<%= maybe_format_extra_queue_content(queue, extra_content) %>
+<% } %>
<% if(!disable_stats) { %>
<div class="section-hidden">
- <h2>Consumers</h2>
+ <h2 class="updatable">Consumers (<%=(queue.consumer_details.length)%>) </h2>
<div class="hider updatable">
<%= format('consumers', {'mode': 'queue', 'consumers': queue.consumer_details}) %>
</div>
@@ -259,7 +284,7 @@
<% } %>
<div class="section-hidden">
- <h2>Bindings</h2>
+ <h2 class="updatable">Bindings (<%=(bindings.length)%>) </h2>
<div class="hider">
<div class="bindings-wrapper">
<%= format('bindings', {'mode': 'queue', 'bindings': bindings}) %>
@@ -291,7 +316,7 @@
<td>
<select name="ackmode">
<option value="ack_requeue_true" selected>Nack message requeue true</option>
- <option value="ack_requeue_false">Ack message requeue false</option>
+ <option value="ack_requeue_false">Automatic ack</option>
<option value="reject_requeue_true">Reject requeue true</option>
<option value="reject_requeue_false">Reject requeue false</option>
</select>
@@ -350,6 +375,9 @@
<input type="hidden" name="dest-uri" value="amqp:///<%= esc(queue.vhost) %>"/>
<input type="hidden" name="dest-add-forward-headers" value="false"/>
<input type="hidden" name="ack-mode" value="on-confirm"/>
+ <% if (is_stream(queue)) { %>
+ <input type="hidden" name="src-consumer-args-stream-offset" value="first"/>
+ <% } %>
<input type="hidden" name="redirect" value="#/queues"/>
<table class="form">
diff --git a/deps/rabbitmq_management/priv/www/js/tmpl/queues.ejs b/deps/rabbitmq_management/priv/www/js/tmpl/queues.ejs
index 6e13f84361..cc489eece7 100644
--- a/deps/rabbitmq_management/priv/www/js/tmpl/queues.ejs
+++ b/deps/rabbitmq_management/priv/www/js/tmpl/queues.ejs
@@ -45,8 +45,8 @@
<% if (show_column('queues', 'consumers')) { %>
<th><%= fmt_sort('Consumers', 'consumers') %></th>
<% } %>
-<% if (show_column('queues', 'consumer_utilisation')) { %>
- <th><%= fmt_sort('Consumer utilisation', 'consumer_utilisation') %></th>
+<% if (show_column('queues', 'consumer_capacity')) { %>
+ <th><%= fmt_sort('Consumer capacity', 'consumer_capacity') %></th>
<% } %>
<% if (show_column('queues', 'state')) { %>
<th><%= fmt_sort('State', 'state') %></th>
@@ -157,8 +157,8 @@
<% if (show_column('queues', 'consumers')) { %>
<td class="c"><%= fmt_string(queue.consumers) %></td>
<% } %>
-<% if (show_column('queues', 'consumer_utilisation')) { %>
- <td class="c"><%= fmt_percent(queue.consumer_utilisation) %></td>
+<% if (show_column('queues', 'consumer_capacity')) { %>
+ <td class="c"><%= fmt_percent(queue.consumer_capacity) %></td>
<% } %>
<% if (show_column('queues', 'state')) { %>
<td class="c"><%= fmt_object_state(queue) %></td>
@@ -337,7 +337,7 @@
<% } %>
<% if (queue_type == "stream") { %>
<span class="argument-link" field="arguments" key="x-max-age" type="string">Max time retention</span><span class="help" id="queue-max-age"></span>
- | <span class="argument-link" field="arguments" key="x-max-segment-size" type="number">Max segment size</span><span class="help" id="queue-max-segment-size"></span>
+ | <span class="argument-link" field="arguments" key="x-stream-max-segment-size-bytes" type="number">Max segment size in bytes</span><span class="help" id="queue-stream-max-segment-size-bytes"></span>
| <span class="argument-link" field="arguments" key="x-initial-cluster-size" type="number">Initial cluster size</span><span class="help" id="queue-initial-cluster-size"></span>
| <span class="argument-link" field="arguments" key="x-queue-leader-locator" type="string">Leader locator</span><span class="help" id="queue-leader-locator"></span>
<% } %>
diff --git a/deps/rabbitmq_management/rabbitmq-components.mk b/deps/rabbitmq_management/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/rabbitmq_management/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Define default goal to `all` because this file defines some targets
-# before the inclusion of erlang.mk leading to the wrong target becoming
-# the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
-
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to checkout branches which match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch or fallback to `stable` or `master` whichever was the
-# base of the topic branch.
-
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
-
-# Third-party dependencies version pinning.
-#
-# We do that in this file, which is copied in all projects, to ensure
-# all projects use the same versions. It avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# Erlang.mk does not rebuild dependencies by default, once they were
-# compiled once, except for those listed in the `$(FORCE_REBUILD)`
-# variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this eases
-# the work on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project
-# repository URL, if it's a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name is replaced by the
-# target's properties:
-# eg. rabbitmq-common is replaced by rabbitmq-codegen
-# eg. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fallback to RabbitMQ
-# upstream which is GitHub.
-
-# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following pattern:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
-
-# Macro to replace both the project's name (eg. "rabbit_common") and
-# repository name (eg. "rabbitmq-common") by the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespaces in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level's one.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is a coincidence that we found a
-# `rabbitmq-components.mk` up upper directories.
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_app.erl b/deps/rabbitmq_management/src/rabbit_mgmt_app.erl
index 256ae56cc6..4d8ec326ff 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_app.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_app.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_app).
@@ -24,8 +24,7 @@
-rabbit_boot_step({rabbit_management_load_definitions,
[{description, "Imports definition file at management.load_definitions"},
- {mfa, {rabbit_mgmt_load_definitions, boot, []}},
- {enables, empty_db_check}]}).
+ {mfa, {rabbit_mgmt_load_definitions, boot, []}}]}).
start(_Type, _StartArgs) ->
case application:get_env(rabbitmq_management_agent, disable_metrics_collector, false) of
@@ -134,11 +133,11 @@ get_tls_listener() ->
{ssl_opts, Listener0}
];
CowboyOpts ->
- Listener1 = lists:keydelete(cowboy_opts, 1, Listener0),
+ WithoutCowboyOpts = lists:keydelete(cowboy_opts, 1, Listener0),
[
{port, Port},
{ssl, true},
- {ssl_opts, Listener1},
+ {ssl_opts, WithoutCowboyOpts},
{cowboy_opts, CowboyOpts}
]
end.
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_cors.erl b/deps/rabbitmq_management/src/rabbit_mgmt_cors.erl
index 3871c1f8e2..bf746c5e52 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_cors.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_cors.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% Useful documentation about CORS:
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_csp.erl b/deps/rabbitmq_management/src/rabbit_mgmt_csp.erl
index 80bba64fc2..002faef8cf 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_csp.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_csp.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% Sets CSP header(s) on the response if configured,
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_db.erl b/deps/rabbitmq_management/src/rabbit_mgmt_db.erl
index c45e7b86f6..09ee723516 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_db.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_db.erl
@@ -2,19 +2,18 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_db).
-%% pg2 is deprecated in OTP 23.
--compile(nowarn_deprecated_function).
-
-include_lib("rabbitmq_management_agent/include/rabbit_mgmt_records.hrl").
-include_lib("rabbitmq_management_agent/include/rabbit_mgmt_metrics.hrl").
-include_lib("rabbit_common/include/rabbit.hrl").
-include_lib("rabbit_common/include/rabbit_core_metrics.hrl").
+-include("rabbit_mgmt.hrl").
+
-behaviour(gen_server2).
-export([start_link/0]).
@@ -30,6 +29,8 @@
code_change/3, handle_pre_hibernate/1,
format_message_queue/2]).
+-export([submit/1, get_data_from_nodes/1]).
+
-import(rabbit_misc, [pget/3]).
-type maybe_slide() :: exometer_slide:slide() | not_found.
@@ -369,7 +370,6 @@ detail_queue_stats(Ranges, Objs, Interval) ->
Ids = [id_lookup(queue_stats, Obj) || Obj <- Objs],
DataLookup = get_data_from_nodes({rabbit_mgmt_data, all_detail_queue_data,
[Ids, Ranges]}),
-
QueueStats = adjust_hibernated_memory_use(
[begin
Id = id_lookup(queue_stats, Obj),
@@ -695,7 +695,7 @@ merge_data(_, D1, D2) -> % we assume if we get here both values a maps
maps_merge(fun merge_data/3, D1, D2)
catch
error:Err ->
- rabbit_log:debug("merge_data err ~p got: ~p ~p ~n", [Err, D1, D2]),
+ rabbit_log:debug("merge_data err ~p got: ~p ~p", [Err, D1, D2]),
case is_map(D1) of
true -> D1;
false -> D2
@@ -732,7 +732,7 @@ created_stats_delegated(Type) ->
-spec delegate_invoke(mfargs()) -> [any()].
delegate_invoke(FunOrMFA) ->
- MemberPids = [P || P <- pg2:get_members(management_db)],
+ MemberPids = [P || P <- pg:get_members(?MANAGEMENT_PG_SCOPE, ?MANAGEMENT_PG_GROUP)],
{Results, Errors} = delegate:invoke(MemberPids, ?DELEGATE_PREFIX, FunOrMFA),
case Errors of
[] -> ok;
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_db_cache.erl b/deps/rabbitmq_management/src/rabbit_mgmt_db_cache.erl
index b4d92dd543..8634453b88 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_db_cache.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_db_cache.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
-module(rabbit_mgmt_db_cache).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_db_cache_sup.erl b/deps/rabbitmq_management/src/rabbit_mgmt_db_cache_sup.erl
index 1805f25872..542309069f 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_db_cache_sup.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_db_cache_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
-module(rabbit_mgmt_db_cache_sup).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_dispatcher.erl b/deps/rabbitmq_management/src/rabbit_mgmt_dispatcher.erl
index e0de5c8bce..66886ec0cc 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_dispatcher.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_dispatcher.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_dispatcher).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_extension.erl b/deps/rabbitmq_management/src/rabbit_mgmt_extension.erl
index 1ee72aaf36..4093981fc7 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_extension.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_extension.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_extension).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_headers.erl b/deps/rabbitmq_management/src/rabbit_mgmt_headers.erl
index 89bb464c01..e4af081870 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_headers.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_headers.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% This module contains helper functions that control
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_hsts.erl b/deps/rabbitmq_management/src/rabbit_mgmt_hsts.erl
index 004f3d4e7e..03e0f96b44 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_hsts.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_hsts.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% Sets HSTS header(s) on the response if configured,
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_load_definitions.erl b/deps/rabbitmq_management/src/rabbit_mgmt_load_definitions.erl
index ba9962d6cf..7a867dfe0e 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_load_definitions.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_load_definitions.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_load_definitions).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_reset_handler.erl b/deps/rabbitmq_management/src/rabbit_mgmt_reset_handler.erl
index 4eb0219e46..a2e9f2d3c4 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_reset_handler.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_reset_handler.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% When management extensions are enabled and/or disabled at runtime, the
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_stats.erl b/deps/rabbitmq_management/src/rabbit_mgmt_stats.erl
index 14b3432790..8629ab32f7 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_stats.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_stats.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2010-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2010-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_stats).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_sup.erl b/deps/rabbitmq_management/src/rabbit_mgmt_sup.erl
index 5f34f0d160..d18dae18c6 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_sup.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_sup).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_sup_sup.erl b/deps/rabbitmq_management/src/rabbit_mgmt_sup_sup.erl
index e8dd164869..4d976e8452 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_sup_sup.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_sup_sup.erl
@@ -2,27 +2,52 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_sup_sup).
--behaviour(supervisor2).
+-behaviour(supervisor).
-export([init/1]).
-export([start_link/0, start_child/0]).
-include_lib("rabbit_common/include/rabbit.hrl").
+-include("rabbit_mgmt.hrl").
start_child() ->
- supervisor2:start_child(?MODULE, sup()).
+ supervisor:start_child(?MODULE, sup()).
sup() ->
- {rabbit_mgmt_sup, {rabbit_mgmt_sup, start_link, []},
- temporary, ?SUPERVISOR_WAIT, supervisor, [rabbit_mgmt_sup]}.
+ #{
+ id => rabbit_mgmt_sup,
+ start => {rabbit_mgmt_sup, start_link, []},
+ restart => temporary,
+ shutdown => ?SUPERVISOR_WAIT,
+ type => supervisor,
+ modules => [rabbit_mgmt_sup]
+ }.
init([]) ->
- {ok, {{one_for_one, 0, 1}, [sup()]}}.
+ %% This scope is used in the child process, so start it
+ %% early. We don't attach it to the supervision tree because
+ %%
+ %% * rabbitmq_management and rabbitmq_management_agent share a scope
+ %% * start an already running scope results in an "already started" error returned
+ %% * such errors wreck supervision tree startup
+ %%
+ %% So we expect management agent to start the scope as part of its
+ %% supervision tree and only start it here for environments
+ %% such as tests that may be testing parts of this plugin in isolation.
+ _ = pg:start_link(?MANAGEMENT_PG_SCOPE),
+
+ Flags = #{
+ strategy => one_for_one,
+ intensity => 0,
+ period => 1
+ },
+ Specs = [sup()],
+ {ok, {Flags, Specs}}.
start_link() ->
- supervisor2:start_link({local, ?MODULE}, ?MODULE, []).
+ supervisor:start_link({local, ?MODULE}, ?MODULE, []).
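
Editor's note: the hunk above swaps supervisor2 tuple-based child specs for the stock OTP supervisor with map-based specs. A minimal, self-contained sketch of that shape is shown below; the module and child names (example_sup, example_worker) are hypothetical stand-ins, not part of this patch.

    %% Hedged sketch of map-based supervisor flags and a child spec,
    %% mirroring the shape adopted in the hunk above.
    -module(example_sup).
    -behaviour(supervisor).
    -export([start_link/0, init/1]).

    start_link() ->
        supervisor:start_link({local, ?MODULE}, ?MODULE, []).

    init([]) ->
        Flags = #{strategy => one_for_one, intensity => 0, period => 1},
        Child = #{id       => example_worker,
                  start    => {example_worker, start_link, []},
                  restart  => temporary,
                  shutdown => 5000,
                  type     => worker,
                  modules  => [example_worker]},
        {ok, {Flags, [Child]}}.
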
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_util.erl b/deps/rabbitmq_management/src/rabbit_mgmt_util.erl
index 93f167e480..97f7a01b7b 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_util.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_util.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_util).
@@ -15,6 +15,7 @@
is_authorized_user/4,
is_authorized_monitor/2, is_authorized_policies/2,
is_authorized_vhost_visible/2,
+ is_authorized_vhost_visible_for_monitoring/2,
is_authorized_global_parameters/2]).
-export([bad_request/3, bad_request_exception/4, internal_server_error/4,
@@ -117,6 +118,15 @@ is_authorized_vhost_visible(ReqData, Context) ->
is_admin(Tags) orelse user_matches_vhost_visible(ReqData, User)
end).
+is_authorized_vhost_visible_for_monitoring(ReqData, Context) ->
+ is_authorized(ReqData, Context,
+ <<"User not authorised to access virtual host">>,
+ fun(#user{tags = Tags} = User) ->
+ is_admin(Tags)
+ orelse is_monitor(Tags)
+ orelse user_matches_vhost_visible(ReqData, User)
+ end).
+
disable_stats(ReqData) ->
MgmtOnly = case qs_val(<<"disable_stats">>, ReqData) of
<<"true">> -> true;
@@ -280,7 +290,7 @@ is_authorized(ReqData, Context, Username, Password, ErrorMsg, Fun) ->
rabbit_core_metrics:auth_attempt_failed(RemoteAddress, Username, http),
rabbit_log:warning("HTTP access denied: ~s",
[rabbit_misc:format(Msg, Args)]),
- not_authorised(<<"Login failed">>, ReqData, Context)
+ not_authenticated(<<"Login failed">>, ReqData, Context)
end.
vhost_from_headers(ReqData) ->
@@ -726,7 +736,12 @@ a2b(B) -> B.
bad_request(Reason, ReqData, Context) ->
halt_response(400, bad_request, Reason, ReqData, Context).
+not_authenticated(Reason, ReqData, Context) ->
+ ReqData1 = cowboy_req:set_resp_header(<<"www-authenticate">>, ?AUTH_REALM, ReqData),
+ halt_response(401, not_authorized, Reason, ReqData1, Context).
+
not_authorised(Reason, ReqData, Context) ->
+ %% TODO: consider changing to 403 in 4.0
halt_response(401, not_authorised, Reason, ReqData, Context).
not_found(Reason, ReqData, Context) ->
@@ -953,6 +968,7 @@ with_channel(VHost, ReqData,
virtual_host = VHost},
case amqp_connection:start(Params) of
{ok, Conn} ->
+ _ = erlang:link(Conn),
{ok, Ch} = amqp_connection:open_channel(Conn),
try
Fun(Ch)
@@ -973,6 +989,7 @@ with_channel(VHost, ReqData,
ServerClose =:= server_initiated_hard_close ->
bad_request_exception(Code, Reason, ReqData, Context)
after
+ erlang:unlink(Conn),
catch amqp_channel:close(Ch),
catch amqp_connection:close(Conn)
end;
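
Editor's note: the with_channel change above links the request handler to the temporary AMQP connection so the connection cannot outlive a crashed handler, then unlinks before the orderly close. A hedged, generic sketch of that pattern (Open/Close are stand-in funs, not the plugin's API):

    %% Hedged sketch of the link/unlink pattern applied in the hunk above:
    %% the resource is linked while Fun runs, unlinked before normal close.
    with_linked_resource(Open, Close, Fun) ->
        {ok, Pid} = Open(),
        _ = erlang:link(Pid),
        try
            Fun(Pid)
        after
            erlang:unlink(Pid),
            catch Close(Pid)
        end.
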
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_aliveness_test.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_aliveness_test.erl
index 992ec954e2..d3f1370ec5 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_aliveness_test.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_aliveness_test.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_aliveness_test).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_auth.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_auth.erl
index 6899fc54ee..6cc43a23f4 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_auth.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_auth.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_auth).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_auth_attempts.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_auth_attempts.erl
index cdd11826ea..04b6aafd0e 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_auth_attempts.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_auth_attempts.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_auth_attempts).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_binding.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_binding.erl
index 4396edc53b..b1cd8cb17d 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_binding.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_binding.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_binding).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_bindings.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_bindings.erl
index 5ed0a82f8d..da526488fc 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_bindings.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_bindings.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_bindings).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_channel.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_channel.erl
index 0b4aae6c13..76311d84eb 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_channel.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_channel.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_channel).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_channels.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_channels.erl
index a064b2f9f9..343421cc08 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_channels.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_channels.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_channels).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_channels_vhost.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_channels_vhost.erl
index 454a29e0bb..fd5b88b482 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_channels_vhost.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_channels_vhost.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2010-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2010-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_channels_vhost).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_cluster_name.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_cluster_name.erl
index 132da93ad2..11c5aa67d7 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_cluster_name.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_cluster_name.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_cluster_name).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_connection.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_connection.erl
index 7de605e6a5..5d44743df3 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_connection.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_connection.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_connection).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_connection_channels.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_connection_channels.erl
index dfbcfb5ba1..41e6a763dc 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_connection_channels.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_connection_channels.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_connection_channels).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_connections.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_connections.erl
index 0e1345abca..585dc7557a 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_connections.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_connections.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_connections).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_connections_vhost.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_connections_vhost.erl
index 1840a7ae45..aafcca9dd1 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_connections_vhost.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_connections_vhost.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2010-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2010-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_connections_vhost).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_consumers.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_consumers.erl
index 56945d3a5d..6f74849cb7 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_consumers.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_consumers.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
-module(rabbit_mgmt_wm_consumers).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_definitions.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_definitions.erl
index c0687993a9..8314926a4a 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_definitions.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_definitions.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_definitions).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_exchange.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_exchange.erl
index 2d5930d1d0..e5c36b51b8 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_exchange.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_exchange.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_exchange).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_exchange_publish.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_exchange_publish.erl
index 7e90b54ba1..b5fbf44e8a 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_exchange_publish.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_exchange_publish.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_exchange_publish).
@@ -93,6 +93,9 @@ bad({shutdown, {server_initiated_close, Code, Reason}}, ReqData, Context) ->
rabbit_mgmt_util:bad_request_exception(Code, Reason, ReqData, Context);
bad(rejected, ReqData, Context) ->
Msg = "Unable to publish message. Check queue limits.",
+ rabbit_mgmt_util:bad_request_exception(rejected, Msg, ReqData, Context);
+bad({{coordinator_unavailable, _}, _}, ReqData, Context) ->
+ Msg = "Unable to publish message. Coordinator unavailable.",
rabbit_mgmt_util:bad_request_exception(rejected, Msg, ReqData, Context).
is_authorized(ReqData, Context) ->
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_exchanges.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_exchanges.erl
index 9f7abab20f..a0b008d239 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_exchanges.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_exchanges.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_exchanges).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_extensions.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_extensions.erl
index 26c3ccbee6..4e7f43a424 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_extensions.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_extensions.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_extensions).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_feature_flag_enable.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_feature_flag_enable.erl
index fafb993951..436f2cecdc 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_feature_flag_enable.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_feature_flag_enable.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2019-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2019-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_feature_flag_enable).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_feature_flags.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_feature_flags.erl
index 3f25045a36..5f125c0bbc 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_feature_flags.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_feature_flags.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2019-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2019-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_feature_flags).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_global_parameter.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_global_parameter.erl
index a81c5be8fa..8a87f3ca41 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_global_parameter.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_global_parameter.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_global_parameter).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_global_parameters.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_global_parameters.erl
index fa23283ad5..420f58872d 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_global_parameters.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_global_parameters.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_global_parameters).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_alarms.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_alarms.erl
index 06db3bd79e..b8791e5b00 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_alarms.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_alarms.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% An HTTP API counterpart of 'rabbitmq-diagnostics check_alarms'
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_certificate_expiration.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_certificate_expiration.erl
index f0fd466b2a..3d6dcd1062 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_certificate_expiration.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_certificate_expiration.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% An HTTP API counterpart of 'rabbitmq-diagnostics check_certificate_expiration'
@@ -80,7 +80,7 @@ convert(Time, Unit) ->
error:badarg ->
{error, "Invalid expiration value."};
invalid_unit ->
- {error, "Time unit not recognised. Use: days, seconds, months, years."}
+ {error, "Time unit not recognised. Supported units: days, weeks, months, years."}
end.
do_convert(Time, "days") ->
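
Editor's note: the message change above lists the time units accepted by the certificate-expiration health check. As a hedged illustration only (the week/month/year factors of 7/30/365 days are assumptions for the sketch, not taken from this patch), a unit-to-seconds conversion in that spirit could look like:

    %% Hedged sketch: convert {Time, Unit} to seconds; throws invalid_unit
    %% for unsupported units, matching the catch clause shown above.
    to_seconds(Time, "days")   when is_integer(Time) -> Time * 86400;
    to_seconds(Time, "weeks")  when is_integer(Time) -> Time * 7 * 86400;
    to_seconds(Time, "months") when is_integer(Time) -> Time * 30 * 86400;
    to_seconds(Time, "years")  when is_integer(Time) -> Time * 365 * 86400;
    to_seconds(_Time, _Unit)                         -> throw(invalid_unit).
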
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_local_alarms.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_local_alarms.erl
index 4553efa3e2..aff61132ad 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_local_alarms.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_local_alarms.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% An HTTP API counterpart of 'rabbitmq-diagnostics check_local_alarms'
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_node_is_mirror_sync_critical.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_node_is_mirror_sync_critical.erl
index cef5512551..96f6684668 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_node_is_mirror_sync_critical.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_node_is_mirror_sync_critical.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% An HTTP API counterpart of 'rabbitmq-diagnostics check_if_node_is_mirror_sync_critical'
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_node_is_quorum_critical.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_node_is_quorum_critical.erl
index 857c72b30b..783a39adec 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_node_is_quorum_critical.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_node_is_quorum_critical.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% An HTTP API counterpart of 'rabbitmq-diagnostics check_if_node_is_quorum_critical'
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_port_listener.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_port_listener.erl
index 1fcc00f613..f7d8e15c48 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_port_listener.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_port_listener.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% An HTTP API counterpart of 'rabbitmq-diagnostics check_port_listener'
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_protocol_listener.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_protocol_listener.erl
index a0b7e4e6dd..17a04e259a 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_protocol_listener.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_protocol_listener.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% An HTTP API counterpart of 'rabbitmq-diagnostics check_protocol_listener'
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_virtual_hosts.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_virtual_hosts.erl
index 0bc9680adc..980e117821 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_virtual_hosts.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_health_check_virtual_hosts.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% An HTTP API counterpart of 'rabbitmq-diagnostics check_virtual_hosts'
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_healthchecks.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_healthchecks.erl
index 4fc61bd0a7..1cf011d444 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_healthchecks.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_healthchecks.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% This original One True Health Check™ has been deprecated as too coarse-grained,
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_limit.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_limit.erl
index 5f0e27c4ba..5c9dc2a15b 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_limit.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_limit.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_limit).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_limits.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_limits.erl
index 96fbf64100..cd83201666 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_limits.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_limits.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_limits).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_login.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_login.erl
index fec7c99643..2a275fa7f4 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_login.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_login.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_login).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_node.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_node.erl
index 52c1d4d9bb..d7ee8a9ccb 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_node.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_node.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_node).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_node_memory.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_node_memory.erl
index 19e4237b44..99b9a34530 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_node_memory.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_node_memory.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_node_memory).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_node_memory_ets.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_node_memory_ets.erl
index 2bc40d22ce..5b027cf62f 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_node_memory_ets.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_node_memory_ets.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_node_memory_ets).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_nodes.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_nodes.erl
index c3de2e0bd2..6727f67cf4 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_nodes.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_nodes.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_nodes).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_operator_policies.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_operator_policies.erl
index 7490c427dd..b4886e3ae7 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_operator_policies.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_operator_policies.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_operator_policies).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_operator_policy.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_operator_policy.erl
index 85bec3631d..9a3a5e81d6 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_operator_policy.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_operator_policy.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_operator_policy).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_overview.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_overview.erl
index 3a7e821b7d..2347c64777 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_overview.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_overview.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_overview).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_parameter.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_parameter.erl
index 17a97ca4bf..972b549610 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_parameter.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_parameter.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_parameter).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_parameters.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_parameters.erl
index 6acd7a608f..07ea20b6a6 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_parameters.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_parameters.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_parameters).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_permission.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_permission.erl
index 74df5ab960..4942e649a6 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_permission.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_permission.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_permission).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_permissions.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_permissions.erl
index ad1799b84b..17cc175ddf 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_permissions.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_permissions.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_permissions).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_permissions_user.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_permissions_user.erl
index ad1a2e5f96..c1c2858656 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_permissions_user.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_permissions_user.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_permissions_user).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_permissions_vhost.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_permissions_vhost.erl
index 435c15faa4..ee33cdf44c 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_permissions_vhost.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_permissions_vhost.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_permissions_vhost).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_policies.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_policies.erl
index c2b1bf1a1a..cff7b92d55 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_policies.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_policies.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_policies).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_policy.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_policy.erl
index 5ab8033925..93f2b5e862 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_policy.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_policy.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_policy).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_queue.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_queue.erl
index 6560be1524..bb129e4882 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_queue.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_queue.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_queue).
@@ -60,10 +60,16 @@ to_json(ReqData, Context) ->
accept_content(ReqData, Context) ->
Name = rabbit_mgmt_util:id(queue, ReqData),
- rabbit_mgmt_util:direct_request(
- 'queue.declare',
- fun rabbit_mgmt_format:format_accept_content/1,
- [{queue, Name}], "Declare queue error: ~s", ReqData, Context).
+ %% NOTE: ?FRAMING currently defined as 0.9.1 hence validating length
+ case rabbit_parameter_validation:amqp091_queue_name(queue, Name) of
+ ok ->
+ rabbit_mgmt_util:direct_request(
+ 'queue.declare',
+ fun rabbit_mgmt_format:format_accept_content/1,
+ [{queue, Name}], "Declare queue error: ~s", ReqData, Context);
+ {error, F, A} ->
+ rabbit_mgmt_util:bad_request(iolist_to_binary(io_lib:format(F ++ "~n", A)), ReqData, Context)
+ end.
delete_resource(ReqData, Context) ->
%% We need to retrieve manually if-unused and if-empty, as the HTTP API uses '-'
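
Editor's note: the accept_content change above validates the queue name before issuing queue.declare, because AMQP 0-9-1 short strings are limited to 255 bytes. A hedged, standalone sketch of such a length check (function name and wording are illustrative, not the validator the patch calls), returning the same {error, Format, Args} shape consumed above:

    %% Hedged sketch: reject names that cannot be carried in an AMQP 0-9-1
    %% short string (at most 255 bytes).
    validate_queue_name(_Name, Bin) when is_binary(Bin), byte_size(Bin) =< 255 ->
        ok;
    validate_queue_name(Name, Bin) when is_binary(Bin) ->
        {error, "~s name is too long (~p bytes, maximum is 255)",
         [Name, byte_size(Bin)]}.
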
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_queue_actions.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_queue_actions.erl
index c3b13f40c2..cea3a54e34 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_queue_actions.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_queue_actions.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_queue_actions).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_queue_get.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_queue_get.erl
index 88ad9a0034..8faf48ab03 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_queue_get.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_queue_get.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_queue_get).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_queue_purge.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_queue_purge.erl
index b250984d11..836b57b2ac 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_queue_purge.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_queue_purge.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_queue_purge).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_queues.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_queues.erl
index 7df50f61d9..d06717da0f 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_queues.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_queues.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_queues).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_rebalance_queues.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_rebalance_queues.erl
index 982655493c..27d3903a4c 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_rebalance_queues.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_rebalance_queues.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_rebalance_queues).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_redirect.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_redirect.erl
index c5b3808d5a..4481bdd0c7 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_redirect.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_redirect.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2010-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2010-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_redirect).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_reset.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_reset.erl
index ea3d698ece..142431accf 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_reset.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_reset.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_reset).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_static.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_static.erl
index 4a7f608a8a..e679150319 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_static.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_static.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2010-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2010-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% Alias for cowboy_static that accepts a list of directories
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_topic_permission.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_topic_permission.erl
index 99f25bb69e..e87ee128e3 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_topic_permission.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_topic_permission.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_topic_permission).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_topic_permissions.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_topic_permissions.erl
index c1406043cd..0ac0a1e359 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_topic_permissions.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_topic_permissions.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_topic_permissions).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_topic_permissions_user.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_topic_permissions_user.erl
index 7d9c2d58fc..b7fb35de06 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_topic_permissions_user.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_topic_permissions_user.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_topic_permissions_user).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_topic_permissions_vhost.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_topic_permissions_vhost.erl
index 8540ee9a77..021cc2e61b 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_topic_permissions_vhost.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_topic_permissions_vhost.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_topic_permissions_vhost).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_user.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_user.erl
index 4c014565f1..70f95108ac 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_user.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_user.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_user).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_user_limit.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_user_limit.erl
index 8e59c55371..fa60238ec4 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_user_limit.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_user_limit.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_user_limit).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_user_limits.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_user_limits.erl
index 7e4d541f32..41212609d1 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_user_limits.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_user_limits.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_user_limits).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_users.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_users.erl
index 67b8345ab9..fcf82685c8 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_users.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_users.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_users).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_users_bulk_delete.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_users_bulk_delete.erl
index 6f6a7863a2..bdd77b8590 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_users_bulk_delete.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_users_bulk_delete.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_users_bulk_delete).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_vhost.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_vhost.erl
index e62eecac97..dad0a862ba 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_vhost.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_vhost.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_vhost).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_vhost_restart.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_vhost_restart.erl
index e22ff058c7..17f10cc8d9 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_vhost_restart.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_vhost_restart.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2011-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2011-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_vhost_restart).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_vhosts.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_vhosts.erl
index 7380faa4f0..83a9ae2493 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_vhosts.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_vhosts.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_vhosts).
diff --git a/deps/rabbitmq_management/src/rabbit_mgmt_wm_whoami.erl b/deps/rabbitmq_management/src/rabbit_mgmt_wm_whoami.erl
index 22a1063443..477c881dc8 100644
--- a/deps/rabbitmq_management/src/rabbit_mgmt_wm_whoami.erl
+++ b/deps/rabbitmq_management/src/rabbit_mgmt_wm_whoami.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_wm_whoami).
diff --git a/deps/rabbitmq_management/test/cache_SUITE.erl b/deps/rabbitmq_management/test/cache_SUITE.erl
index 00f9cd56c8..62779bffad 100644
--- a/deps/rabbitmq_management/test/cache_SUITE.erl
+++ b/deps/rabbitmq_management/test/cache_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(cache_SUITE).
diff --git a/deps/rabbitmq_management/test/clustering_SUITE.erl b/deps/rabbitmq_management/test/clustering_SUITE.erl
index fc096962af..30a42ebc9c 100644
--- a/deps/rabbitmq_management/test/clustering_SUITE.erl
+++ b/deps/rabbitmq_management/test/clustering_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(clustering_SUITE).
diff --git a/deps/rabbitmq_management/test/clustering_prop_SUITE.erl b/deps/rabbitmq_management/test/clustering_prop_SUITE.erl
index 98790745db..f2f132d4c6 100644
--- a/deps/rabbitmq_management/test/clustering_prop_SUITE.erl
+++ b/deps/rabbitmq_management/test/clustering_prop_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(clustering_prop_SUITE).
diff --git a/deps/rabbitmq_management/test/config_schema_SUITE.erl b/deps/rabbitmq_management/test/config_schema_SUITE.erl
index e40337c60a..60f2ae429f 100644
--- a/deps/rabbitmq_management/test/config_schema_SUITE.erl
+++ b/deps/rabbitmq_management/test/config_schema_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(config_schema_SUITE).
diff --git a/deps/rabbitmq_management/test/listener_config_SUITE.erl b/deps/rabbitmq_management/test/listener_config_SUITE.erl
index 46d65d2be3..e069f0f912 100644
--- a/deps/rabbitmq_management/test/listener_config_SUITE.erl
+++ b/deps/rabbitmq_management/test/listener_config_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(listener_config_SUITE).
diff --git a/deps/rabbitmq_management/test/rabbit_mgmt_http_SUITE.erl b/deps/rabbitmq_management/test/rabbit_mgmt_http_SUITE.erl
index 85ae582503..6624ffba12 100644
--- a/deps/rabbitmq_management/test/rabbit_mgmt_http_SUITE.erl
+++ b/deps/rabbitmq_management/test/rabbit_mgmt_http_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_http_SUITE).
@@ -18,7 +18,7 @@
assert_keys/2, assert_no_keys/2,
http_get/2, http_get/3, http_get/5,
http_get_no_auth/3,
- http_get_no_map/2,
+ http_get_as_proplist/2,
http_put/4, http_put/6,
http_post/4, http_post/6,
http_upload_raw/8,
@@ -257,6 +257,11 @@ end_per_testcase0(T, Config)
[rabbit_ct_broker_helpers:delete_vhost(Config, Name)
|| #{name := Name} <- Vhosts],
Config;
+end_per_testcase0(definitions_password_test, Config) ->
+ rabbit_ct_broker_helpers:rpc(Config, 0,
+ application, unset_env,
+ [rabbit, password_hashing_module]),
+ Config;
end_per_testcase0(queues_test, Config) ->
rabbit_ct_broker_helpers:delete_vhost(Config, <<"downvhost">>),
Config;
@@ -279,7 +284,7 @@ end_per_testcase0(user_limits_list_test, Config) ->
Config;
end_per_testcase0(user_limit_set_test, Config) ->
rabbit_ct_broker_helpers:delete_vhost(Config, <<"limit_test_vhost_1">>),
- rabbit_ct_broker_helpers:delete_user(Config, <<"limit_test_user_1_user">>),
+ rabbit_ct_broker_helpers:delete_user(Config, <<"limit_test_user_1">>),
rabbit_ct_broker_helpers:delete_user(Config, <<"limit_test_vhost_1_user">>),
Config;
end_per_testcase0(permissions_vhost_test, Config) ->
@@ -410,9 +415,13 @@ assert_percentage(Breakdown0, ExtraMargin) ->
auth_test(Config) ->
http_put(Config, "/users/user", [{password, <<"user">>},
{tags, <<"">>}], {group, '2xx'}),
- test_auth(Config, ?NOT_AUTHORISED, []),
+ EmptyAuthResponseHeaders = test_auth(Config, ?NOT_AUTHORISED, []),
+ ?assertEqual(true, lists:keymember("www-authenticate", 1, EmptyAuthResponseHeaders)),
+ %% NOTE: this one won't have www-authenticate in the response,
+ %% because user/password are ok, tags are not
test_auth(Config, ?NOT_AUTHORISED, [auth_header("user", "user")]),
- test_auth(Config, ?NOT_AUTHORISED, [auth_header("guest", "gust")]),
+ WrongAuthResponseHeaders = test_auth(Config, ?NOT_AUTHORISED, [auth_header("guest", "gust")]),
+ ?assertEqual(true, lists:keymember("www-authenticate", 1, WrongAuthResponseHeaders)),
test_auth(Config, ?OK, [auth_header("guest", "guest")]),
http_delete(Config, "/users/user", {group, '2xx'}),
passed.
@@ -491,25 +500,25 @@ vhosts_trace_test(Config) ->
passed.
users_test(Config) ->
- assert_item(#{name => <<"guest">>, tags => <<"administrator">>},
+ assert_item(#{name => <<"guest">>, tags => [<<"administrator">>]},
http_get(Config, "/whoami")),
rabbit_ct_broker_helpers:rpc(Config, 0, application, set_env,
[rabbitmq_management, login_session_timeout, 100]),
assert_item(#{name => <<"guest">>,
- tags => <<"administrator">>,
+ tags => [<<"administrator">>],
login_session_timeout => 100},
http_get(Config, "/whoami")),
http_get(Config, "/users/myuser", ?NOT_FOUND),
http_put_raw(Config, "/users/myuser", "Something not JSON", ?BAD_REQUEST),
http_put(Config, "/users/myuser", [{flim, <<"flam">>}], ?BAD_REQUEST),
- http_put(Config, "/users/myuser", [{tags, <<"management">>},
+ http_put(Config, "/users/myuser", [{tags, [<<"management">>]},
{password, <<"myuser">>}],
{group, '2xx'}),
http_put(Config, "/users/myuser", [{password_hash, <<"not_hash">>}], ?BAD_REQUEST),
http_put(Config, "/users/myuser", [{password_hash,
<<"IECV6PZI/Invh0DL187KFpkO5Jc=">>},
{tags, <<"management">>}], {group, '2xx'}),
- assert_item(#{name => <<"myuser">>, tags => <<"management">>,
+ assert_item(#{name => <<"myuser">>, tags => [<<"management">>],
password_hash => <<"IECV6PZI/Invh0DL187KFpkO5Jc=">>,
hashing_algorithm => <<"rabbit_password_hashing_sha256">>},
http_get(Config, "/users/myuser")),
@@ -517,17 +526,17 @@ users_test(Config) ->
http_put(Config, "/users/myuser", [{password_hash,
<<"IECV6PZI/Invh0DL187KFpkO5Jc=">>},
{hashing_algorithm, <<"rabbit_password_hashing_md5">>},
- {tags, <<"management">>}], {group, '2xx'}),
- assert_item(#{name => <<"myuser">>, tags => <<"management">>,
+ {tags, [<<"management">>]}], {group, '2xx'}),
+ assert_item(#{name => <<"myuser">>, tags => [<<"management">>],
password_hash => <<"IECV6PZI/Invh0DL187KFpkO5Jc=">>,
hashing_algorithm => <<"rabbit_password_hashing_md5">>},
http_get(Config, "/users/myuser")),
http_put(Config, "/users/myuser", [{password, <<"password">>},
- {tags, <<"administrator, foo">>}], {group, '2xx'}),
- assert_item(#{name => <<"myuser">>, tags => <<"administrator,foo">>},
+ {tags, [<<"administrator">>, <<"foo">>]}], {group, '2xx'}),
+ assert_item(#{name => <<"myuser">>, tags => [<<"administrator">>, <<"foo">>]},
http_get(Config, "/users/myuser")),
- assert_list(lists:sort([#{name => <<"myuser">>, tags => <<"administrator,foo">>},
- #{name => <<"guest">>, tags => <<"administrator">>}]),
+ assert_list(lists:sort([#{name => <<"myuser">>, tags => [<<"administrator">>, <<"foo">>]},
+ #{name => <<"guest">>, tags => [<<"administrator">>]}]),
lists:sort(http_get(Config, "/users"))),
test_auth(Config, ?OK, [auth_header("myuser", "password")]),
http_delete(Config, "/users/myuser", {group, '2xx'}),
@@ -536,7 +545,7 @@ users_test(Config) ->
passed.
without_permissions_users_test(Config) ->
- assert_item(#{name => <<"guest">>, tags => <<"administrator">>},
+ assert_item(#{name => <<"guest">>, tags => [<<"administrator">>]},
http_get(Config, "/whoami")),
http_put(Config, "/users/myuser", [{password_hash,
<<"IECV6PZI/Invh0DL187KFpkO5Jc=">>},
@@ -546,7 +555,7 @@ without_permissions_users_test(Config) ->
http_put(Config, "/users/myuserwithoutpermissions", [{password_hash,
<<"IECV6PZI/Invh0DL187KFpkO5Jc=">>},
{tags, <<"management">>}], {group, '2xx'}),
- assert_list([#{name => <<"myuserwithoutpermissions">>, tags => <<"management">>,
+ assert_list([#{name => <<"myuserwithoutpermissions">>, tags => [<<"management">>],
hashing_algorithm => <<"rabbit_password_hashing_sha256">>,
password_hash => <<"IECV6PZI/Invh0DL187KFpkO5Jc=">>}],
http_get(Config, "/users/without-permissions")),
@@ -555,7 +564,7 @@ without_permissions_users_test(Config) ->
passed.
users_bulk_delete_test(Config) ->
- assert_item(#{name => <<"guest">>, tags => <<"administrator">>},
+ assert_item(#{name => <<"guest">>, tags => [<<"administrator">>]},
http_get(Config, "/whoami")),
http_put(Config, "/users/myuser1", [{tags, <<"management">>}, {password, <<"myuser">>}],
{group, '2xx'}),
@@ -584,9 +593,9 @@ users_legacy_administrator_test(Config) ->
http_put(Config, "/users/myuser2", [{administrator, <<"false">>},
{password, <<"myuser2">>}],
{group, '2xx'}),
- assert_item(#{name => <<"myuser1">>, tags => <<"administrator">>},
+ assert_item(#{name => <<"myuser1">>, tags => [<<"administrator">>]},
http_get(Config, "/users/myuser1")),
- assert_item(#{name => <<"myuser2">>, tags => <<"">>},
+ assert_item(#{name => <<"myuser2">>, tags => []},
http_get(Config, "/users/myuser2")),
http_delete(Config, "/users/myuser1", {group, '2xx'}),
http_delete(Config, "/users/myuser2", {group, '2xx'}),
@@ -685,7 +694,7 @@ updating_a_user_without_password_or_hash_clears_password_test(Config) ->
%% clear users' credentials
http_put(Config, "/users/myuser", [{tags, <<"management">>}], [?CREATED, ?NO_CONTENT]),
assert_item(#{name => <<"myuser">>,
- tags => <<"management">>,
+ tags => [<<"management">>],
password_hash => <<>>,
hashing_algorithm => <<"rabbit_password_hashing_sha256">>},
http_get(Config, "/users/myuser")),
@@ -723,21 +732,21 @@ updating_tags_of_a_passwordless_user_test(Config) ->
%% clear user's password
http_put(Config, "/users/abc", [{tags, <<"management">>}], [?CREATED, ?NO_CONTENT]),
assert_item(#{name => ?NON_GUEST_USERNAME,
- tags => <<"management">>,
+ tags => [<<"management">>],
password_hash => <<>>,
hashing_algorithm => <<"rabbit_password_hashing_sha256">>},
http_get(Config, "/users/abc")),
http_put(Config, "/users/abc", [{tags, <<"impersonator">>}], [?CREATED, ?NO_CONTENT]),
assert_item(#{name => ?NON_GUEST_USERNAME,
- tags => <<"impersonator">>,
+ tags => [<<"impersonator">>],
password_hash => <<>>,
hashing_algorithm => <<"rabbit_password_hashing_sha256">>},
http_get(Config, "/users/abc")),
http_put(Config, "/users/abc", [{tags, <<"">>}], [?CREATED, ?NO_CONTENT]),
assert_item(#{name => ?NON_GUEST_USERNAME,
- tags => <<"">>,
+ tags => [],
password_hash => <<>>,
hashing_algorithm => <<"rabbit_password_hashing_sha256">>},
http_get(Config, "/users/abc")),
@@ -930,8 +939,8 @@ multiple_invalid_connections_test(Config) ->
passed.
test_auth(Config, Code, Headers) ->
- {ok, {{_, Code, _}, _, _}} = req(Config, get, "/overview", Headers),
- passed.
+ {ok, {{_, Code, _}, RespHeaders, _}} = req(Config, get, "/overview", Headers),
+ RespHeaders.
exchanges_test(Config) ->
%% Can list exchanges
@@ -1092,12 +1101,12 @@ queues_well_formed_json_test(Config) ->
http_put(Config, "/queues/%2F/foo", Good, {group, '2xx'}),
http_put(Config, "/queues/%2F/baz", Good, {group, '2xx'}),
- Queues = http_get_no_map(Config, "/queues/%2F"),
+ Queues = http_get_as_proplist(Config, "/queues/%2F"),
%% Ensure keys are unique
[begin
- Sorted = lists:sort(Q),
- Sorted = lists:usort(Q)
- end || Q <- Queues],
+ Q = rabbit_data_coercion:to_proplist(Q0),
+ ?assertEqual(lists:sort(Q), lists:usort(Q))
+ end || Q0 <- Queues],
http_delete(Config, "/queues/%2F/foo", {group, '2xx'}),
http_delete(Config, "/queues/%2F/baz", {group, '2xx'}),
@@ -1613,7 +1622,7 @@ definitions_test(Config) ->
#{name => <<"myuser">>,
password_hash => <<"WAbU0ZIcvjTpxM3Q3SbJhEAM2tQ=">>,
hashing_algorithm => <<"rabbit_password_hashing_sha256">>,
- tags => <<"management">>}),
+ tags => [<<"management">>]}),
defs(Config, vhosts, "/vhosts/myvhost", put,
#{name => <<"myvhost">>}),
defs(Config, permissions, "/permissions/%2F/guest", put,
@@ -1788,7 +1797,7 @@ definitions_password_test(Config) ->
Expected35 = #{name => <<"myuser">>,
password_hash => <<"WAbU0ZIcvjTpxM3Q3SbJhEAM2tQ=">>,
hashing_algorithm => <<"rabbit_password_hashing_md5">>,
- tags => <<"management">>},
+ tags => [<<"management">>]},
http_post(Config, "/definitions", Config35, {group, '2xx'}),
Definitions35 = http_get(Config, "/definitions", ?OK),
ct:pal("Definitions35: ~p", [Definitions35]),
@@ -1804,7 +1813,7 @@ definitions_password_test(Config) ->
Expected36 = #{name => <<"myuser">>,
password_hash => <<"WAbU0ZIcvjTpxM3Q3SbJhEAM2tQ=">>,
hashing_algorithm => <<"rabbit_password_hashing_sha256">>,
- tags => <<"management">>},
+ tags => [<<"management">>]},
http_post(Config, "/definitions", Config36, {group, '2xx'}),
Definitions36 = http_get(Config, "/definitions", ?OK),
@@ -1824,7 +1833,7 @@ definitions_password_test(Config) ->
ExpectedDefault = #{name => <<"myuser">>,
password_hash => <<"WAbU0ZIcvjTpxM3Q3SbJhEAM2tQ=">>,
hashing_algorithm => <<"rabbit_password_hashing_sha512">>,
- tags => <<"management">>},
+ tags => [<<"management">>]},
http_post(Config, "/definitions", ConfigDefault, {group, '2xx'}),
DefinitionsDefault = http_get(Config, "/definitions", ?OK),
@@ -2462,7 +2471,8 @@ format_output_test(Config) ->
http_put(Config, "/queues/%2F/test0", QArgs, {group, '2xx'}),
timer:sleep(2000),
assert_list([#{name => <<"test0">>,
- consumer_utilisation => null,
+ consumer_capacity => 0,
+ consumer_utilisation => 0,
exclusive_consumer_tag => null,
recoverable_slaves => null}], http_get(Config, "/queues", ?OK)),
http_delete(Config, "/queues/%2F/test0", {group, '2xx'}),
diff --git a/deps/rabbitmq_management/test/rabbit_mgmt_http_health_checks_SUITE.erl b/deps/rabbitmq_management/test/rabbit_mgmt_http_health_checks_SUITE.erl
index 8ef17ed29c..fafac8641a 100644
--- a/deps/rabbitmq_management/test/rabbit_mgmt_http_health_checks_SUITE.erl
+++ b/deps/rabbitmq_management/test/rabbit_mgmt_http_health_checks_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_http_health_checks_SUITE).
@@ -91,6 +91,15 @@ end_per_group(_, Config) ->
Steps = Teardown0 ++ Teardown1,
rabbit_ct_helpers:run_teardown_steps(Config, Steps).
+init_per_testcase(Testcase, Config)
+ when Testcase == is_quorum_critical_test
+ orelse Testcase == is_mirror_sync_critical_test ->
+ case rabbit_ct_helpers:is_mixed_versions() of
+ true ->
+ {skip, "not mixed versions compatible"};
+ _ ->
+ rabbit_ct_helpers:testcase_started(Config, Testcase)
+ end;
init_per_testcase(Testcase, Config) ->
rabbit_ct_helpers:testcase_started(Config, Testcase).
diff --git a/deps/rabbitmq_management/test/rabbit_mgmt_only_http_SUITE.erl b/deps/rabbitmq_management/test/rabbit_mgmt_only_http_SUITE.erl
index 38bb2bac1a..f760e1609c 100644
--- a/deps/rabbitmq_management/test/rabbit_mgmt_only_http_SUITE.erl
+++ b/deps/rabbitmq_management/test/rabbit_mgmt_only_http_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_only_http_SUITE).
@@ -17,7 +17,7 @@
-import(rabbit_mgmt_test_util, [assert_list/2, assert_item/2, test_item/2,
assert_keys/2, assert_no_keys/2,
http_get/2, http_get/3, http_get/5,
- http_get_no_map/2,
+ http_get_as_proplist/2,
http_put/4, http_put/6,
http_post/4, http_post/6,
http_upload_raw/8,
@@ -261,6 +261,33 @@ vhosts_test(Config) ->
?assert(not maps:is_key(recv_oct, GetFirst)),
?assert(maps:is_key(cluster_state, GetFirst)),
+ case rabbit_ct_helpers:is_mixed_versions() of
+ true ->
+ %% these won't pass for older 3.8 nodes
+ ok;
+ false ->
+ %% PUT can update metadata (description, tags)
+ Desc0 = "desc 0",
+ Meta0 = [
+ {description, Desc0},
+ {tags, "tag1,tag2"}
+ ],
+ http_put(Config, "/vhosts/myvhost", Meta0, {group, '2xx'}),
+ #{description := Desc1, tags := Tags1} = http_get(Config, "/vhosts/myvhost", ?OK),
+ ?assertEqual(Desc0, Desc1),
+ ?assertEqual([<<"tag1">>, <<"tag2">>], Tags1),
+
+ Desc2 = "desc 2",
+ Meta2 = [
+ {description, Desc2},
+ {tags, "tag3"}
+ ],
+ http_put(Config, "/vhosts/myvhost", Meta2, {group, '2xx'}),
+ #{description := Desc3, tags := Tags3} = http_get(Config, "/vhosts/myvhost", ?OK),
+ ?assertEqual(Desc2, Desc3),
+ ?assertEqual([<<"tag3">>], Tags3)
+ end,
+
%% Check individually
Get = http_get(Config, "/vhosts/%2F", ?OK),
assert_item(#{name => <<"/">>}, Get),
@@ -612,7 +639,7 @@ queues_well_formed_json_test(Config) ->
http_put(Config, "/queues/%2F/foo", Good, {group, '2xx'}),
http_put(Config, "/queues/%2F/baz", Good, {group, '2xx'}),
- Queues = http_get_no_map(Config, "/queues/%2F"),
+ Queues = http_get_as_proplist(Config, "/queues/%2F"),
%% Ensure keys are unique
[begin
Sorted = lists:sort(Q),
diff --git a/deps/rabbitmq_management/test/rabbit_mgmt_rabbitmqadmin_SUITE.erl b/deps/rabbitmq_management/test/rabbit_mgmt_rabbitmqadmin_SUITE.erl
index 7a192f225a..f077b61b07 100644
--- a/deps/rabbitmq_management/test/rabbit_mgmt_rabbitmqadmin_SUITE.erl
+++ b/deps/rabbitmq_management/test/rabbit_mgmt_rabbitmqadmin_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_rabbitmqadmin_SUITE).
@@ -108,6 +108,7 @@ host(Config) ->
{ok, _} = run(Config, ["show", "overview"]),
{ok, _} = run(Config, ["--host", "localhost", "show", "overview"]),
{error, _, _} = run(Config, ["--host", "some-host-that-does-not-exist",
+ "--request-timeout", "5",
"show", "overview"]).
base_uri(Config) ->
@@ -116,6 +117,7 @@ base_uri(Config) ->
{ok, _} = run(Config, ["--base-uri", "http://localhost", "--vhost", "/", "list", "exchanges"]),
{ok, _} = run(Config, ["--base-uri", "http://localhost/", "--vhost", "/", "list", "exchanges"]),
{error, _, _} = run(Config, ["--base-uri", "https://some-host-that-does-not-exist:15672/",
+ "--request-timeout", "5",
"list", "exchanges"]),
{error, _, _} = run(Config, ["--base-uri", "http://localhost:15672/", "--vhost", "some-vhost-that-does-not-exist",
"list", "exchanges"]).
diff --git a/deps/rabbitmq_management/test/rabbit_mgmt_runtime_parameters_util.erl b/deps/rabbitmq_management/test/rabbit_mgmt_runtime_parameters_util.erl
index 0ac911b7c0..8d81231c2a 100644
--- a/deps/rabbitmq_management/test/rabbit_mgmt_runtime_parameters_util.erl
+++ b/deps/rabbitmq_management/test/rabbit_mgmt_runtime_parameters_util.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_runtime_parameters_util).
diff --git a/deps/rabbitmq_management/test/rabbit_mgmt_stats_SUITE.erl b/deps/rabbitmq_management/test/rabbit_mgmt_stats_SUITE.erl
index 7ebb04da69..138ed07270 100644
--- a/deps/rabbitmq_management/test/rabbit_mgmt_stats_SUITE.erl
+++ b/deps/rabbitmq_management/test/rabbit_mgmt_stats_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_stats_SUITE).
diff --git a/deps/rabbitmq_management/test/rabbit_mgmt_test_db_SUITE.erl b/deps/rabbitmq_management/test/rabbit_mgmt_test_db_SUITE.erl
index 03a36ee138..f5aec756a6 100644
--- a/deps/rabbitmq_management/test/rabbit_mgmt_test_db_SUITE.erl
+++ b/deps/rabbitmq_management/test/rabbit_mgmt_test_db_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_test_db_SUITE).
diff --git a/deps/rabbitmq_management/test/rabbit_mgmt_test_unit_SUITE.erl b/deps/rabbitmq_management/test/rabbit_mgmt_test_unit_SUITE.erl
index 32194bd5c8..2ae8d94f39 100644
--- a/deps/rabbitmq_management/test/rabbit_mgmt_test_unit_SUITE.erl
+++ b/deps/rabbitmq_management/test/rabbit_mgmt_test_unit_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_test_unit_SUITE).
diff --git a/deps/rabbitmq_management/test/stats_SUITE.erl b/deps/rabbitmq_management/test/stats_SUITE.erl
index 99de1a532e..da39114ef8 100644
--- a/deps/rabbitmq_management/test/stats_SUITE.erl
+++ b/deps/rabbitmq_management/test/stats_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(stats_SUITE).
diff --git a/deps/rabbitmq_management_agent/BUILD.bazel b/deps/rabbitmq_management_agent/BUILD.bazel
new file mode 100644
index 0000000000..2ced79729d
--- /dev/null
+++ b/deps/rabbitmq_management_agent/BUILD.bazel
@@ -0,0 +1,111 @@
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze", "plt")
+load(
+ "//:rabbitmq.bzl",
+ "RABBITMQ_DIALYZER_OPTS",
+ "assert_suites",
+ "broker_for_integration_suites",
+ "rabbitmq_integration_suite",
+ "rabbitmq_lib",
+ "rabbitmq_suite",
+)
+
+APP_ENV = """[
+ {rates_mode, basic},
+ {sample_retention_policies,
+ %% List of {MaxAgeInSeconds, SampleEveryNSeconds}
+ [{global, [{605, 5}, {3660, 60}, {29400, 600}, {86400, 1800}]},
+ {basic, [{605, 5}, {3600, 60}]},
+ {detailed, [{605, 5}]}]}
+ ]"""
+
+APP_NAME = "rabbitmq_management_agent"
+
+APP_DESCRIPTION = "RabbitMQ Management Agent"
+
+APP_MODULE = "rabbit_mgmt_agent_app"
+
+EXTRA_APPS = [
+ "xmerl",
+ "mnesia",
+ "ranch",
+ "ssl",
+ "crypto",
+ "public_key",
+]
+
+BUILD_DEPS = [
+ "//deps/rabbitmq_cli:rabbitmqctl",
+]
+
+DEPS = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+ "//deps/rabbit:bazel_erlang_lib",
+]
+
+rabbitmq_lib(
+ app_description = APP_DESCRIPTION,
+ app_env = APP_ENV,
+ app_module = APP_MODULE,
+ app_name = APP_NAME,
+ build_deps = BUILD_DEPS,
+ extra_apps = EXTRA_APPS,
+ deps = DEPS,
+)
+
+xref(tags = ["xref"])
+
+plt_apps = list(EXTRA_APPS)
+
+plt_apps.remove("ranch")
+
+plt(
+ name = "base_plt",
+ apps = plt_apps,
+ plt = "//:base_plt",
+)
+
+dialyze(
+ dialyzer_opts = RABBITMQ_DIALYZER_OPTS,
+ plt = ":base_plt",
+ tags = ["dialyze"],
+)
+
+broker_for_integration_suites()
+
+PACKAGE = "deps/rabbitmq_management_agent"
+
+suites = [
+ rabbitmq_suite(
+ name = "exometer_slide_SUITE",
+ size = "medium",
+ deps = [
+ "@proper//:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "metrics_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "rabbit_mgmt_gc_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_suite(
+ name = "rabbit_mgmt_slide_SUITE",
+ size = "small",
+ runtime_deps = [
+ "//deps/rabbitmq_ct_helpers:bazel_erlang_lib",
+ ],
+ deps = [
+ "@proper//:bazel_erlang_lib",
+ ],
+ ),
+]
+
+assert_suites(
+ suites,
+ glob(["test/**/*_SUITE.erl"]),
+)
diff --git a/deps/rabbitmq_management_agent/CONTRIBUTING.md b/deps/rabbitmq_management_agent/CONTRIBUTING.md
index 23a92fef9c..9722e973fb 100644
--- a/deps/rabbitmq_management_agent/CONTRIBUTING.md
+++ b/deps/rabbitmq_management_agent/CONTRIBUTING.md
@@ -13,7 +13,7 @@ The process is fairly standard:
* Create a branch with a descriptive name in the relevant repositories
* Make your changes, run tests, commit with a [descriptive message](https://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
* Submit pull requests with an explanation what has been changed and **why**
- * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Submit a filled out and signed [Contributor Agreement](https://cla.pivotal.io/) if needed (see below)
* Be patient. We will get to your pull request eventually
If what you are going to work on is a substantial change, please first ask the core team
@@ -28,7 +28,7 @@ See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
## Contributor Agreement
If you want to contribute a non-trivial change, please submit a signed copy of our
-[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+[Contributor Agreement](https://cla.pivotal.io/) around the time
you submit your pull request. This will make it much easier (in some cases, possible)
for the RabbitMQ team at Pivotal to merge your contribution.
diff --git a/deps/rabbitmq_management_agent/Makefile b/deps/rabbitmq_management_agent/Makefile
index 4b3ebf8f66..4df0236560 100644
--- a/deps/rabbitmq_management_agent/Makefile
+++ b/deps/rabbitmq_management_agent/Makefile
@@ -29,6 +29,6 @@ DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
ERLANG_MK_COMMIT = rabbitmq-tmp
-include rabbitmq-components.mk
+include ../../rabbitmq-components.mk
TEST_DEPS := $(filter-out rabbitmq_test,$(TEST_DEPS))
-include erlang.mk
+include ../../erlang.mk
diff --git a/deps/rabbitmq_management_agent/erlang.mk b/deps/rabbitmq_management_agent/erlang.mk
deleted file mode 100644
index fce4be0b0a..0000000000
--- a/deps/rabbitmq_management_agent/erlang.mk
+++ /dev/null
@@ -1,7808 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
-ERLANG_MK_WITHOUT =
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
-
-# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
-
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
-
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of statc BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another a key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstact time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simplify writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating Github-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = erlang library for JSON Web Token
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple, non-intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is a pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = redis erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += esh_mk
-pkg_esh_mk_name = esh_mk
-pkg_esh_mk_description = esh template engine plugin for erlang.mk
-pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
-pkg_esh_mk_fetch = git
-pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
-pkg_esh_mk_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = TOML language erlang parser
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = http://fixprotocol.org/ implementation.
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - an Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = The guards and for Erlang parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
-pkg_gun_homepage = http://ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang irc client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding and decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library for efficiently decoding and encoding JSON, written in C.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-erlang, open-source implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = erlang driver for rethinkdb
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = library to handle mimetypes
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang Oauth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transformer for using new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid simple, internal pub/sub event bus written in and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between record and proplist
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = automatic Erlang node discovery via redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = pipelined erlang redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang Rest Client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy as nif for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an ID generator for messaging services.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elasticsearch
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX style broken HTML parser in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = Transit format for Erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU based collation Erlang module
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtension for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = Tiny Erlang app that works in conjunction with statsderl to generate information on the Erlang VM for Graphite logs.
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = a simple erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler svn repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired by rebar xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based yaml loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = ZAB protocol implemented in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
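-
-# Usage sketch (the query string "cowboy" is only an example): the package
-# index above can be searched from a project using this file with
-#   make search q=cowboy
-# which prints the name, description, home page, fetch method, repository
-# and commit of every matching package; without q, all packages are listed.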
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
-
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
-
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# They both use the core_dep_plugin macro.
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
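-
-# Illustrative configuration (dependency and path names are hypothetical):
-# an early plugin can be given as a dependency name, which includes
-# $(DEPS_DIR)/<dep>/early-plugins.mk, or as an explicit file path inside a
-# dependency:
-#   DEP_EARLY_PLUGINS = rabbit_common
-#   DEP_EARLY_PLUGINS = my_dep/mk/early-plugins.mk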
-
-# Query functions.
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
-
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
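-
-# For illustration, these helpers parse dependency specifications of the
-# form dep_<name> = <fetch method> <repository> <version or commit>, with
-# hex using dep_<name> = hex <version> [<hex package name>]. Hypothetical
-# examples:
-#   dep_cowboy = git https://github.com/ninenines/cowboy 2.9.0
-#   dep_jsx = hex 3.1.0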
-
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly, we don't want to include it here;
-# otherwise it would be treated both as an app and as the top-level project.
-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
-
-export NO_AUTOPATCH
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
-
-# Optimization: don't recompile deps unless truly necessary.
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create the ebin directory for all apps to make sure Erlang recognizes them
-# as proper OTP applications when using -include_lib. This is a temporary
-# fix; a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
-
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies after they have been compiled
-# once. Developers working on the top-level project and on some of its
-# dependencies at the same time may want to change this behavior.
-# There are two solutions (see the usage example below):
-# 1. Set `FULL=1` so that all dependencies are visited and
-# recursively recompiled if necessary.
-# 2. Set `FORCE_REBUILD=` to the specific list of dependencies that
-# should be recompiled (instead of the whole set).
-
-FORCE_REBUILD ?=
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
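-
-# Illustrative usage of the two options above (dependency names are
-# hypothetical):
-#   make FULL=1                        # revisit and rebuild all dependencies
-#   make FORCE_REBUILD="cowlib ranch"  # rebuild only the listed dependencies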
-
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
-
-# Deps related targets.
-
-# @todo Rename GNUmakefile and makefile into Makefile first, if they exist.
-# While the Makefile could also be named GNUmakefile or makefile,
-# in practice only the name Makefile has been needed so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
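-
-# For illustration (the dependency name is hypothetical), autopatching can
-# be skipped for selected dependencies from a project Makefile:
-#   NO_AUTOPATCH = cowboy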
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
-# if given. Do it for all 3 possible Makefile file names.
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
-
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
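-
-# Illustrative git-subfolder specification (repository, branch and subfolder
-# are hypothetical); the fourth word selects the subfolder that is linked
-# into $(DEPS_DIR):
-#   dep_my_app = git-subfolder https://github.com/example/monorepo main apps/my_app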
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-# Hex only has a package version. No need to look in the Erlang.mk packages.
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
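-
-# Illustrative hex specifications matching the tarball URL built above; the
-# optional third word overrides the hex package name (versions are examples):
-#   dep_jsx = hex 3.1.0
-#   dep_cowlib = hex 2.11.0 cowlib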
-
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility with older Erlang.mk configurations.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
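-
-# For reference, the deprecated legacy form puts the repository first and the
-# commit second (names are hypothetical):
-#   dep_my_dep = https://github.com/example/my_dep master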
-
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
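-
-# A project Makefile may append to these defaults, e.g. enabling one of the
-# optional flags listed in the comment above (illustrative):
-#   ERLC_OPTS += +bin_opt_info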
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
-
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
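-
-# Illustrative project variables consumed by app_file above (all values are
-# hypothetical):
-#   PROJECT = my_app
-#   PROJECT_DESCRIPTION = Example application
-#   PROJECT_VERSION = 0.1.0
-#   PROJECT_MOD = my_app_app
-# When src/$(PROJECT_MOD).erl does not exist, the first form above (without
-# a {mod, ...} entry) is used.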
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- Output0 = [
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ],
- Output = case "é" of
- [233] -> unicode:characters_to_binary(Output0);
- _ -> Output0
- end,
- ok = file:write_file("$(1)", Output),
- halt()
-endef
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
- echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
-
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
- @:
-
-test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
-test_erlc_verbose_2 = set -x;
-test_erlc_verbose = $(test_erlc_verbose_$(V))
-
-define compile_test_erl
- $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(1)
-endef
-
-ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
-$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
- $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want to fail due to
-# warnings when used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
-
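-# As a rough sketch, for a hypothetical DEPS = cowlib entry fetched from git,
-# the generated rebar.config would look something like:
-#
-#   {deps, [
-#       {cowlib, ".*", {git, "https://github.com/ninenines/cowlib", "2.11.0"}}
-#   ]}.
-#   {erl_opts, [debug_info,warn_export_vars,warn_shadow_vars,warn_obsolete_guard]}.
-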
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
- " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
- " list-templates List available templates"
-
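-# Usage sketch for the targets above (project, application and module names
-# below are examples, not part of erlang.mk itself):
-#
-#   make -f erlang.mk bootstrap bootstrap-rel     # new OTP application plus release files
-#   make new-app in=webchat                       # add apps/webchat to the project
-#   make new t=gen_server n=my_server in=webchat  # render a template into that app
-#   make list-templates
-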
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
-
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
-
-list-templates:
- $(verbose) @echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code. The "gcc" MSYS2 package also doesn't.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
- "The CI_OTP variable must be defined with the Erlang versions" \
- "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
-
-# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifdef CONCUERROR_TESTS
-
-.PHONY: concuerror distclean-concuerror
-
-# Configuration
-
-CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
-CONCUERROR_OPTS ?=
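-
-# Configuration sketch (module and test names below are placeholders): each
-# module:test pair listed in CONCUERROR_TESTS gets its own
-# concuerror-<module>-<test> target below, with its output written to
-# $(CONCUERROR_LOGS_DIR).
-#
-#   CONCUERROR_TESTS = my_queue_tests:basic my_queue_tests:concurrent_pop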
-
-# Core targets.
-
-check:: concuerror
-
-ifndef KEEP_LOGS
-distclean:: distclean-concuerror
-endif
-
-# Plugin-specific targets.
-
-$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
- $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
- $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
-
-$(CONCUERROR_LOGS_DIR):
- $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
-
-define concuerror_html_report
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="utf-8">
-<title>Concuerror HTML report</title>
-</head>
-<body>
-<h1>Concuerror HTML report</h1>
-<p>Generated on $(concuerror_date)</p>
-<ul>
-$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
-</ul>
-</body>
-</html>
-endef
-
-concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
- $(eval concuerror_date := $(shell date))
- $(eval concuerror_targets := $^)
- $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
-
-define concuerror_target
-.PHONY: concuerror-$1-$2
-
-concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
- $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
- --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
- -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
- $$(CONCUERROR_OPTS) -m $1 -t $2
-endef
-
-$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
-
-distclean-concuerror:
- $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
-
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ct apps-ct distclean-ct
-
-# Configuration.
-
-CT_OPTS ?=
-
-ifneq ($(wildcard $(TEST_DIR)),)
-ifndef CT_SUITES
-CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
-endif
-endif
-CT_SUITES ?=
-CT_LOGS_DIR ?= $(CURDIR)/logs
-
-# Core targets.
-
-tests:: ct
-
-ifndef KEEP_LOGS
-distclean:: distclean-ct
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Common_test targets:" \
- " ct Run all the common_test suites for this project" \
- "" \
- "All your common_test suites have their associated targets." \
-		"A suite named http_SUITE can be run using the ct-http target."
-
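-# Usage sketch (suite, group and case names below are examples):
-#
-#   make ct                       # run every *_SUITE found in $(TEST_DIR)
-#   make ct-http                  # run http_SUITE only
-#   make ct-http t=chunked        # only the 'chunked' group of http_SUITE
-#   make ct-http t=chunked:crlf   # only the 'crlf' case of that group
-#   make ct-http c=empty_body     # only the 'empty_body' case, in any group
-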
-# Plugin-specific targets.
-
-CT_RUN = ct_run \
- -no_auto_compile \
- -noinput \
- -pa $(CURDIR)/ebin $(TEST_DIR) \
- -dir $(TEST_DIR) \
- -logdir $(CT_LOGS_DIR)
-
-ifeq ($(CT_SUITES),)
-ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
-else
-# We do not run tests if we are in an apps/* with no test directory.
-ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
-ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
-endif
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-define ct_app_target
-apps-ct-$1: test-build
- $$(MAKE) -C $1 ct IS_APP=1
-endef
-
-$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
-
-apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
-endif
-
-ifdef t
-ifeq (,$(findstring :,$t))
-CT_EXTRA = -group $t
-else
-t_words = $(subst :, ,$t)
-CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
-endif
-else
-ifdef c
-CT_EXTRA = -case $c
-else
-CT_EXTRA =
-endif
-endif
-
-define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
-endef
-
-$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
-
-distclean-ct:
- $(gen_verbose) rm -rf $(CT_LOGS_DIR)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: plt distclean-plt dialyze
-
-# Configuration.
-
-DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
-export DIALYZER_PLT
-
-PLT_APPS ?=
-DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-DIALYZER_PLT_OPTS ?=
-
-# Core targets.
-
-check:: dialyze
-
-distclean:: distclean-plt
-
-help::
- $(verbose) printf "%s\n" "" \
- "Dialyzer targets:" \
- " plt Build a PLT file for this project" \
- " dialyze Analyze the project using Dialyzer"
-
-# Plugin-specific targets.
-
-define filter_opts.erl
- Opts = init:get_plain_arguments(),
- {Filtered, _} = lists:foldl(fun
- (O, {Os, true}) -> {[O|Os], false};
- (O = "-D", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-I", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-pa", {Os, _}) -> {[O|Os], true};
- (_, Acc) -> Acc
- end, {[], false}, Opts),
- io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
- halt().
-endef
-
-# DIALYZER_PLT is a variable understood directly by Dialyzer.
-#
-# We append the path to erts at the end of the PLT. This works
-# because the PLT file is in the external term format and the
-# function binary_to_term/1 ignores any trailing data.
-$(DIALYZER_PLT): deps app
- $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
- while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
- $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
- erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
- $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
-
-plt: $(DIALYZER_PLT)
-
-distclean-plt:
- $(gen_verbose) rm -f $(DIALYZER_PLT)
-
-ifneq ($(wildcard $(DIALYZER_PLT)),)
-dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
- $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
- grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
- rm $(DIALYZER_PLT); \
- $(MAKE) plt; \
- fi
-else
-dialyze: $(DIALYZER_PLT)
-endif
- $(verbose) dialyzer --no_native `$(ERL) \
- -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
- -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
-
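-# Configuration sketch (the extra applications and flags are examples): add
-# more OTP applications to the PLT and adjust the warning set.
-#
-#   PLT_APPS = mnesia ssl
-#   DIALYZER_OPTS = -Werror_handling -Wunmatched_returns
-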
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
- " escript Build an executable escript archive" \
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
- $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
-
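-# Configuration sketch (names and emulator flags are examples): by default the
-# archive is written to ./$(PROJECT); the values below rename it and tweak the
-# emulator arguments embedded in the escript header.
-#
-#   ESCRIPT_NAME = mytool
-#   ESCRIPT_EMU_ARGS = -escript main mytool +A0
-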
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: eunit apps-eunit
-
-# Configuration
-
-EUNIT_OPTS ?=
-EUNIT_ERL_OPTS ?=
-
-# Core targets.
-
-tests:: eunit
-
-help::
- $(verbose) printf "%s\n" "" \
- "EUnit targets:" \
- " eunit Run all the EUnit tests for this project"
-
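-# Usage sketch (module and test names below are examples):
-#
-#   make eunit                       # all EUnit tests for the project
-#   make eunit t=my_module           # only the tests of one module
-#   make eunit t=my_module:my_test   # a single zero-arity test function
-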
-# Plugin-specific targets.
-
-define eunit.erl
- $(call cover.erl)
- CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
- ok -> ok;
- error -> halt(2)
- end,
- CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
- halt()
-endef
-
-EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
-else
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
-endif
-else
-EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
-EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
-
-EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
- $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
-
-eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
-ifneq ($(wildcard src/ $(TEST_DIR)),)
- $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-apps-eunit: test-build
- $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
- [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
- exit $$eunit_retcode
-endif
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper)
-.PHONY: proper
-
-# Targets.
-
-tests:: proper
-
-define proper_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- Module = fun(M) ->
- [true] =:= lists:usort([
- case atom_to_list(F) of
- "prop_" ++ _ ->
- io:format("Testing ~p:~p/0~n", [M, F]),
- proper:quickcheck(M:F(), nocolors);
- _ ->
- true
- end
- || {F, 0} <- M:module_info(exports)])
- end,
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]);
- module -> Module($(2));
- function -> proper:quickcheck($(2), nocolors)
- end,
- CoverExport("$(COVER_DATA_DIR)/proper.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-proper: test-build cover-data-dir
- $(verbose) $(call erlang,$(call proper_check.erl,module,$(t)))
-else
-proper: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)()))
-endif
-else
-proper: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Verbosity.
-
-proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
-proto_verbose = $(proto_verbose_$(V))
-
-# Core targets.
-
-ifneq ($(wildcard src/),)
-ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
-PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
-ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
-
-ifeq ($(PROTO_FILES),)
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
- $(verbose) :
-else
-# Rebuild proto files when the Makefile changes.
-# We exclude $(PROJECT).d to avoid a circular dependency.
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(PROTO_FILES); \
- fi
- $(verbose) touch $@
-
-$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
-endif
-
-ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
-define compile_proto.erl
- [begin
- protobuffs_compile:generate_source(F, [
- {output_include_dir, "./include"},
- {output_src_dir, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-else
-define compile_proto.erl
- [begin
- gpb_compile:file(F, [
- {include_as_lib, true},
- {module_name_suffix, "_pb"},
- {o_hrl, "./include"},
- {o_erl, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-endif
-
-ifneq ($(PROTO_FILES),)
-$(PROJECT).d:: $(PROTO_FILES)
- $(verbose) mkdir -p ebin/ include/
- $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
-endif
-endif
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
-
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
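-# Usage sketch: 'make rel' builds the release described in relx.config into
-# $(RELX_OUTPUT_DIR) (set RELX_TAR=0 to skip the tarball step); 'make run'
-# rebuilds and then starts that release, using the 'console' command when the
-# extended start script is enabled.
-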
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
- " shell Run an erlang shell with SHELL_OPTS or reasonable default"
-
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
-
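-# Configuration sketch (the dependency and options below are examples): fetch
-# an extra tool for the shell and start the current application on boot.
-#
-#   SHELL_DEPS = tddreloader
-#   SHELL_OPTS = -eval 'application:ensure_all_started($(PROJECT))'
-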
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
-		"ReST sources and the 'conf.py' file are expected in the directory pointed to by" \
- "SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists formats to build (only" \
- "'html' format is generated by default); target directory can be specified by" \
- 'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
- "Additional Sphinx options can be set in SPHINX_OPTS."
-
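-# Configuration sketch (values below are examples): build HTML and man pages
-# from the ReST sources in doc/, with the man output going to a custom
-# directory.
-#
-#   SPHINX_FORMATS = html man
-#   sphinx_man_output = doc/man
-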
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
-
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
-.PHONY: triq
-
-# Targets.
-
-tests:: triq
-
-define triq_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
- module -> triq:check($(2));
- function -> triq:check($(2))
- end,
- CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-triq: test-build cover-data-dir
- $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
-else
-triq: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
-endif
-else
-triq: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
- ' xref Run Xrefr using $$XREF_CONFIG as config file if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
-
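-# Usage sketch: with XREF_CONFIG unset, 'make xref' downloads xrefr and runs
-# it with its default checks; a custom configuration file can be used instead
-# (the path below is an example):
-#
-#   XREF_CONFIG = $(CURDIR)/xref.config
-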
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-COVER_REPORT_DIR ?= cover
-COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
-
-ifdef COVER
-COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
-COVER_DEPS ?=
-endif
-
-# Code coverage for Common Test.
-
-ifdef COVER
-ifdef CT_RUN
-ifneq ($(wildcard $(TEST_DIR)),)
-test-build:: $(TEST_DIR)/ct.cover.spec
-
-$(TEST_DIR)/ct.cover.spec: cover-data-dir
- $(gen_verbose) printf "%s\n" \
- "{incl_app, '$(PROJECT)', details}." \
- "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
- $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
- $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
-
-CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
-endif
-endif
-endif
-
-# Code coverage for other tools.
-
-ifdef COVER
-define cover.erl
- CoverSetup = fun() ->
- Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
- $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
- $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
- end
- end || Dir <- Dirs]
- end,
- CoverExport = fun(Filename) -> cover:export(Filename) end,
-endef
-else
-define cover.erl
- CoverSetup = fun() -> ok end,
- CoverExport = fun(_) -> ok end,
-endef
-endif
-
-# Core targets
-
-ifdef COVER
-ifneq ($(COVER_REPORT_DIR),)
-tests::
- $(verbose) $(MAKE) --no-print-directory cover-report
-endif
-
-cover-data-dir: | $(COVER_DATA_DIR)
-
-$(COVER_DATA_DIR):
- $(verbose) mkdir -p $(COVER_DATA_DIR)
-else
-cover-data-dir:
-endif
-
-clean:: coverdata-clean
-
-ifneq ($(COVER_REPORT_DIR),)
-distclean:: cover-report-clean
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Cover targets:" \
- " cover-report Generate a HTML coverage report from previously collected" \
- " cover data." \
- " all.coverdata Merge all coverdata files into all.coverdata." \
- "" \
-		"If COVER=1 is set, coverage data is generated by the eunit and ct targets. The" \
-		"tests target additionally generates an HTML coverage report from the combined" \
- "coverdata files from each of these testing tools. HTML reports can be disabled" \
- "by setting COVER_REPORT_DIR to empty."
-
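-# Usage sketch:
-#
-#   make tests COVER=1    # run eunit/ct with coverage and build the HTML report
-#   make all.coverdata    # merge the collected .coverdata files into one file
-#   make cover-report     # rebuild the HTML report from existing coverdata
-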
-# Plugin-specific targets.
-
-COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
-
-.PHONY: coverdata-clean
-coverdata-clean:
- $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
-
-# Merge all coverdata files into one.
-define cover_export.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
-endef
-
-all.coverdata: $(COVERDATA) cover-data-dir
- $(gen_verbose) $(call erlang,$(cover_export.erl))
-
-# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
-# empty if you want the coverdata files but not the HTML report.
-ifneq ($(COVER_REPORT_DIR),)
-
-.PHONY: cover-report-clean cover-report
-
-cover-report-clean:
- $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
-ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
- $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
-endif
-
-ifeq ($(COVERDATA),)
-cover-report:
-else
-
-# Modules which include eunit.hrl always contain one line without coverage
-# because eunit defines test/0 which is never called. We compensate for this.
-EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
- grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
- | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
-
-define cover_report.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- Ms = cover:imported_modules(),
- [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
- ++ ".COVER.html", [html]) || M <- Ms],
- Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
- EunitHrlMods = [$(EUNIT_HRL_MODS)],
- Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
- true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
- TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
- TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
- Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
- TotalPerc = Perc(TotalY, TotalN),
- {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
- io:format(F, "<!DOCTYPE html><html>~n"
- "<head><meta charset=\"UTF-8\">~n"
- "<title>Coverage report</title></head>~n"
- "<body>~n", []),
- io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
- io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
- [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
- "<td>~p%</td></tr>~n",
- [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
- How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
- Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
- io:format(F, "</table>~n"
- "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
- "</body></html>", [How, Date]),
- halt().
-endef
-
-cover-report:
- $(verbose) mkdir -p $(COVER_REPORT_DIR)
- $(gen_verbose) $(call erlang,$(cover_report.erl))
-
-endif
-endif # ifneq ($(COVER_REPORT_DIR),)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
-
-endif
-endif
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
-
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
-
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are also included no
-# matter the type of requested dependencies.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
-
-# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of
-# dependencies with a single target.
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
-
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
diff --git a/deps/rabbitmq_management_agent/include/rabbit_mgmt_agent.hrl b/deps/rabbitmq_management_agent/include/rabbit_mgmt_agent.hrl
new file mode 100644
index 0000000000..08883763f3
--- /dev/null
+++ b/deps/rabbitmq_management_agent/include/rabbit_mgmt_agent.hrl
@@ -0,0 +1,9 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-define(MANAGEMENT_PG_SCOPE, rabbitmq_management).
+-define(MANAGEMENT_PG_GROUP, management_db).
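
This new header names a pg scope and group; later hunks in this diff start the scope under rabbit_mgmt_agent_sup_sup and make rabbit_mgmt_agent_sup join the group, replacing the removed pg2 calls. A minimal sketch of the OTP pg API involved (assumes OTP 23+; the module name is hypothetical):

    %% Sketch only; in the patch the scope is started by a supervisor child spec.
    -module(mgmt_pg_sketch).
    -export([demo/0]).

    demo() ->
        {ok, _Scope} = pg:start_link(rabbitmq_management),
        ok = pg:join(rabbitmq_management, management_db, self()),
        Members = pg:get_members(rabbitmq_management, management_db),
        true = lists:member(self(), Members),
        ok.
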
diff --git a/deps/rabbitmq_management_agent/include/rabbit_mgmt_metrics.hrl b/deps/rabbitmq_management_agent/include/rabbit_mgmt_metrics.hrl
index cb2d20db91..4e1fbc0a4a 100644
--- a/deps/rabbitmq_management_agent/include/rabbit_mgmt_metrics.hrl
+++ b/deps/rabbitmq_management_agent/include/rabbit_mgmt_metrics.hrl
@@ -1,17 +1,8 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License at
-%% https://www.mozilla.org/MPL/
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
-%% License for the specific language governing rights and limitations
-%% under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is Pivotal Software, Inc.
-%% Copyright (c) 2010-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-type(event_type() :: queue_stats | queue_exchange_stats | vhost_stats
diff --git a/deps/rabbitmq_management_agent/include/rabbit_mgmt_records.hrl b/deps/rabbitmq_management_agent/include/rabbit_mgmt_records.hrl
index 92e26b5357..0745c9dba1 100644
--- a/deps/rabbitmq_management_agent/include/rabbit_mgmt_records.hrl
+++ b/deps/rabbitmq_management_agent/include/rabbit_mgmt_records.hrl
@@ -1,17 +1,8 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License at
-%% https://www.mozilla.org/MPL/
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
-%% License for the specific language governing rights and limitations
-%% under the License.
-%%
-%% The Original Code is RabbitMQ Management Console.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-record(context, {user,
diff --git a/deps/rabbitmq_management_agent/priv/schema/rabbitmq_management_agent.schema b/deps/rabbitmq_management_agent/priv/schema/rabbitmq_management_agent.schema
index fa8a76725a..3d66d9aaee 100644
--- a/deps/rabbitmq_management_agent/priv/schema/rabbitmq_management_agent.schema
+++ b/deps/rabbitmq_management_agent/priv/schema/rabbitmq_management_agent.schema
@@ -2,3 +2,4 @@
%% Also the management application will refuse to start if metrics collection is disabled
{mapping, "management_agent.disable_metrics_collector", "rabbitmq_management_agent.disable_metrics_collector",
[{datatype, {enum, [true, false]}}]}.
+{mapping, "management_agent.filter_aggregated_queue_metrics_pattern", "rabbitmq_management_agent.filter_aggregated_queue_metrics_pattern", [{datatype, string}]}. \ No newline at end of file
diff --git a/deps/rabbitmq_management_agent/rabbitmq-components.mk b/deps/rabbitmq_management_agent/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/rabbitmq_management_agent/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Define default goal to `all` because this file defines some targets
-# before the inclusion of erlang.mk leading to the wrong target becoming
-# the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
-
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to checkout branches which match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch or fallback to `stable` or `master` whichever was the
-# base of the topic branch.
-
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
-
-# Third-party dependencies version pinning.
-#
-# We do that in this file, which is copied in all projects, to ensure
-# all projects use the same versions. It avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# Erlang.mk does not rebuild dependencies by default, once they were
-# compiled once, except for those listed in the `$(FORCE_REBUILD)`
-# variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this eases
-# the work on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project
-# repository URL, if it's a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name is replaced by the
-# target's properties:
-# eg. rabbitmq-common is replaced by rabbitmq-codegen
-# eg. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fallback to RabbitMQ
-# upstream which is GitHub.
-
-# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following pattern:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
-
-# Macro to replace both the project's name (eg. "rabbit_common") and
-# repository name (eg. "rabbitmq-common") by the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespaces in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level's one.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is a coincidence that we found a
-# `rabbitmq-components.mk` up upper directories.
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
diff --git a/deps/rabbitmq_management_agent/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ResetStatsDbCommand.erl b/deps/rabbitmq_management_agent/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ResetStatsDbCommand.erl
index bc6bdbdc25..ef6ac55b66 100644
--- a/deps/rabbitmq_management_agent/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ResetStatsDbCommand.erl
+++ b/deps/rabbitmq_management_agent/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ResetStatsDbCommand.erl
@@ -2,13 +2,15 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module('Elixir.RabbitMQ.CLI.Ctl.Commands.ResetStatsDbCommand').
-behaviour('Elixir.RabbitMQ.CLI.CommandBehaviour').
+-ignore_xref({'Elixir.RabbitMQ.CLI.DefaultOutput', output, 1}).
+
-export([
usage/0,
validate/2,
diff --git a/deps/rabbitmq_management_agent/src/exometer_slide.erl b/deps/rabbitmq_management_agent/src/exometer_slide.erl
index 2c4e4c6d35..d140bea4cf 100644
--- a/deps/rabbitmq_management_agent/src/exometer_slide.erl
+++ b/deps/rabbitmq_management_agent/src/exometer_slide.erl
@@ -44,7 +44,7 @@
%% @end
%%
%%
-%% All modifications are (C) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% All modifications are (C) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%% The Initial Developer of the Original Code is Basho Technologies, Inc.
-module(exometer_slide).
diff --git a/deps/rabbitmq_management_agent/src/rabbit_mgmt_agent_app.erl b/deps/rabbitmq_management_agent/src/rabbit_mgmt_agent_app.erl
index e889815c2f..63da294f1e 100644
--- a/deps/rabbitmq_management_agent/src/rabbit_mgmt_agent_app.erl
+++ b/deps/rabbitmq_management_agent/src/rabbit_mgmt_agent_app.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_agent_app).
diff --git a/deps/rabbitmq_management_agent/src/rabbit_mgmt_agent_config.erl b/deps/rabbitmq_management_agent/src/rabbit_mgmt_agent_config.erl
index e8d074e891..893d602106 100644
--- a/deps/rabbitmq_management_agent/src/rabbit_mgmt_agent_config.erl
+++ b/deps/rabbitmq_management_agent/src/rabbit_mgmt_agent_config.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_agent_config).
diff --git a/deps/rabbitmq_management_agent/src/rabbit_mgmt_agent_sup.erl b/deps/rabbitmq_management_agent/src/rabbit_mgmt_agent_sup.erl
index 0c4a5465e9..e7ea315edd 100644
--- a/deps/rabbitmq_management_agent/src/rabbit_mgmt_agent_sup.erl
+++ b/deps/rabbitmq_management_agent/src/rabbit_mgmt_agent_sup.erl
@@ -2,19 +2,17 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_agent_sup).
-%% pg2 is deprecated in OTP 23.
--compile(nowarn_deprecated_function).
-
-behaviour(supervisor).
-include_lib("rabbit_common/include/rabbit.hrl").
-include_lib("rabbit_common/include/rabbit_core_metrics.hrl").
-include("rabbit_mgmt_metrics.hrl").
+-include("rabbit_mgmt_agent.hrl").
-export([init/1]).
-export([start_link/0]).
@@ -24,7 +22,12 @@ init([]) ->
ExternalStats = {rabbit_mgmt_external_stats,
{rabbit_mgmt_external_stats, start_link, []},
permanent, 5000, worker, [rabbit_mgmt_external_stats]},
- {ok, {{one_for_one, 100, 10}, [ExternalStats] ++ MCs}}.
+ Flags = #{
+ strategy => one_for_one,
+ intensity => 100,
+ period => 50
+ },
+ {ok, {Flags, [ExternalStats] ++ MCs}}.
start_link() ->
supervisor:start_link({local, ?MODULE}, ?MODULE, []).
@@ -33,8 +36,7 @@ start_link() ->
maybe_enable_metrics_collector() ->
case application:get_env(rabbitmq_management_agent, disable_metrics_collector, false) of
false ->
- pg2:create(management_db),
- ok = pg2:join(management_db, self()),
+ ok = pg:join(?MANAGEMENT_PG_SCOPE, ?MANAGEMENT_PG_GROUP, self()),
ST = {rabbit_mgmt_storage, {rabbit_mgmt_storage, start_link, []},
permanent, ?WORKER_WAIT, worker, [rabbit_mgmt_storage]},
MD = {delegate_management_sup, {delegate_sup, start_link, [5, ?DELEGATE_PREFIX]},
diff --git a/deps/rabbitmq_management_agent/src/rabbit_mgmt_agent_sup_sup.erl b/deps/rabbitmq_management_agent/src/rabbit_mgmt_agent_sup_sup.erl
index 17ffa35307..29770b76f3 100644
--- a/deps/rabbitmq_management_agent/src/rabbit_mgmt_agent_sup_sup.erl
+++ b/deps/rabbitmq_management_agent/src/rabbit_mgmt_agent_sup_sup.erl
@@ -2,27 +2,50 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_agent_sup_sup).
--behaviour(supervisor2).
+-behaviour(supervisor).
-export([init/1]).
-export([start_link/0, start_child/0]).
-include_lib("rabbit_common/include/rabbit.hrl").
+-include("rabbit_mgmt_agent.hrl").
start_child() ->
- supervisor2:start_child(?MODULE, sup()).
+ supervisor:start_child(?MODULE, sup()).
sup() ->
- {rabbit_mgmt_agent_sup, {rabbit_mgmt_agent_sup, start_link, []},
- temporary, ?SUPERVISOR_WAIT, supervisor, [rabbit_mgmt_agent_sup]}.
+ #{
+ id => rabbit_mgmt_agent_sup,
+ start => {rabbit_mgmt_agent_sup, start_link, []},
+ restart => temporary,
+ shutdown => ?SUPERVISOR_WAIT,
+ type => supervisor,
+ modules => [rabbit_mgmt_agent_sup]
+ }.
init([]) ->
- {ok, {{one_for_one, 0, 1}, [sup()]}}.
+ Flags = #{
+ strategy => one_for_one,
+ intensity => 0,
+ period => 1
+ },
+ PgScope = #{
+ id => ?MANAGEMENT_PG_SCOPE,
+ start => {pg, start_link, [?MANAGEMENT_PG_SCOPE]},
+ restart => temporary,
+ shutdown => ?SUPERVISOR_WAIT,
+ modules => []
+ },
+ Specs = [
+ PgScope,
+ sup()
+ ],
+ {ok, {Flags, Specs}}.
start_link() ->
- supervisor2:start_link({local, ?MODULE}, ?MODULE, []).
+ supervisor:start_link({local, ?MODULE}, ?MODULE, []).
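
The hunks above drop supervisor2 in favour of stock OTP supervisor, switching to map-based flags and child specs and adding a child that owns the new pg scope. For comparison, a minimal self-contained supervisor in the same map style (the example_worker module is hypothetical):

    %% Sketch of map-style flags and child specs; example_worker is hypothetical.
    -module(map_spec_sup_sketch).
    -behaviour(supervisor).
    -export([start_link/0, init/1]).

    start_link() ->
        supervisor:start_link({local, ?MODULE}, ?MODULE, []).

    init([]) ->
        Flags = #{strategy => one_for_one, intensity => 0, period => 1},
        Child = #{id       => example_worker,
                  start    => {example_worker, start_link, []},
                  restart  => temporary,
                  shutdown => 5000,
                  type     => worker,
                  modules  => [example_worker]},
        {ok, {Flags, [Child]}}.
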
diff --git a/deps/rabbitmq_management_agent/src/rabbit_mgmt_data.erl b/deps/rabbitmq_management_agent/src/rabbit_mgmt_data.erl
index d73c8a3819..5ae2418268 100644
--- a/deps/rabbitmq_management_agent/src/rabbit_mgmt_data.erl
+++ b/deps/rabbitmq_management_agent/src/rabbit_mgmt_data.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_data).
@@ -282,7 +282,7 @@ augment_channel_pids(_Pid, ChPids) ->
lists:map(fun (ChPid) -> augment_channel_pid(ChPid) end, ChPids).
augment_channel_pid(Pid) ->
- Ch = lookup_element(channel_created_stats, Pid, 3),
+ Ch = lookup_channel_with_fallback_to_connection(Pid),
Conn = lookup_element(connection_created_stats, pget(connection, Ch), 3),
case Conn of
[] -> %% If the connection has just been opened, we might not yet have the data
@@ -297,6 +297,26 @@ augment_channel_pid(Pid) ->
{peer_host, pget(peer_host, Conn)}]
end.
+lookup_channel_with_fallback_to_connection(ChannelOrConnectionPid) ->
+ % stream consumers report a stream connection PID for their channel PID,
+ % so we adapt to this here
+ case lookup_element(channel_created_stats, ChannelOrConnectionPid, 3) of
+ [] ->
+ case lookup_element(connection_created_stats, ChannelOrConnectionPid, 3) of
+ [] ->
+ % not a channel and not a connection, not much we can do here
+ [{pid, ChannelOrConnectionPid}];
+ Conn ->
+ [{name, <<"">>},
+ {pid, ChannelOrConnectionPid},
+ {number, 0},
+ {user, pget(user, Conn)},
+ {connection, ChannelOrConnectionPid}]
+ end;
+ Ch ->
+ Ch
+ end.
+
augment_connection_pid(Pid) ->
Conn = lookup_element(connection_created_stats, Pid, 3),
case Conn of
diff --git a/deps/rabbitmq_management_agent/src/rabbit_mgmt_data_compat.erl b/deps/rabbitmq_management_agent/src/rabbit_mgmt_data_compat.erl
index 9fd127aff5..c7f9649046 100644
--- a/deps/rabbitmq_management_agent/src/rabbit_mgmt_data_compat.erl
+++ b/deps/rabbitmq_management_agent/src/rabbit_mgmt_data_compat.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2018-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2018-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_data_compat).
diff --git a/deps/rabbitmq_management_agent/src/rabbit_mgmt_db_handler.erl b/deps/rabbitmq_management_agent/src/rabbit_mgmt_db_handler.erl
index c1e43223d7..1eaad7ff53 100644
--- a/deps/rabbitmq_management_agent/src/rabbit_mgmt_db_handler.erl
+++ b/deps/rabbitmq_management_agent/src/rabbit_mgmt_db_handler.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_db_handler).
@@ -49,7 +49,7 @@ handle_force_fine_statistics() ->
rabbit_log:warning(
"force_fine_statistics set to ~p; ignored.~n"
"Replaced by {rates_mode, none} in the rabbitmq_management "
- "application.~n", [X])
+ "application.", [X])
end.
%%----------------------------------------------------------------------------
@@ -58,7 +58,7 @@ ensure_statistics_enabled() ->
ForceStats = rates_mode() =/= none,
handle_force_fine_statistics(),
{ok, StatsLevel} = application:get_env(rabbit, collect_statistics),
- rabbit_log:info("Management plugin: using rates mode '~p'~n", [rates_mode()]),
+ rabbit_log:info("Management plugin: using rates mode '~p'", [rates_mode()]),
case {ForceStats, StatsLevel} of
{true, fine} ->
ok;
@@ -93,7 +93,15 @@ handle_info(_Info, State) ->
{ok, State}.
terminate(_Arg, _State) ->
+ ensure_statistics_disabled(),
ok.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
+
+ensure_statistics_disabled() ->
+ %% Reset the default values, see Makefile
+ _ = rabbit_log:info("Management plugin: to stop collect_statistics."),
+ application:set_env(rabbit, collect_statistics, none),
+ application:set_env(rabbit, collect_statistics_interval, 5000),
+ ok = rabbit:force_event_refresh(erlang:make_ref()).
diff --git a/deps/rabbitmq_management_agent/src/rabbit_mgmt_external_stats.erl b/deps/rabbitmq_management_agent/src/rabbit_mgmt_external_stats.erl
index 5e92d8394c..6f3846de61 100644
--- a/deps/rabbitmq_management_agent/src/rabbit_mgmt_external_stats.erl
+++ b/deps/rabbitmq_management_agent/src/rabbit_mgmt_external_stats.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_external_stats).
@@ -66,7 +66,7 @@ get_used_fd(State0) ->
end
catch
_:Error ->
- State2 = log_fd_error("Could not infer the number of file handles used: ~p~n", [Error], State0),
+ State2 = log_fd_error("Could not infer the number of file handles used: ~p", [Error], State0),
{State2, 0}
end.
@@ -89,7 +89,7 @@ get_used_fd({unix, BSD}, State0)
UsedFd = length(lists:filter(F, string:tokens(Output, "\n"))),
{State0, UsedFd}
catch _:Error:Stacktrace ->
- State1 = log_fd_error("Could not parse fstat output:~n~s~n~p~n",
+ State1 = log_fd_error("Could not parse fstat output:~n~s~n~p",
[Output, {Error, Stacktrace}], State0),
{State1, 0}
end;
@@ -100,7 +100,7 @@ get_used_fd({unix, _}, State0) ->
Res = os:cmd(Cmd),
case string:right(Res, 7) of
"failed\n" ->
- State1 = log_fd_error("Could not obtain lsof output~n", [], State0),
+ State1 = log_fd_error("Could not obtain lsof output", [], State0),
{State1, 0};
_ ->
UsedFd = string:words(Res, $\n) - 1,
@@ -170,7 +170,7 @@ get_used_fd({win32, _}, State0) ->
"handle.exe /accepteula -s -p " ++ os:getpid() ++ " 2> nul"),
case Handle of
[] ->
- State1 = log_fd_error("Could not find handle.exe, please install from sysinternals~n", [], State0),
+ State1 = log_fd_error("Could not find handle.exe, please install from sysinternals", [], State0),
{State1, 0};
_ ->
case find_files_line(string:tokens(Handle, "\r\n")) of
@@ -178,7 +178,7 @@ get_used_fd({win32, _}, State0) ->
State1 = log_fd_error("handle.exe output did not contain "
"a line beginning with ' File ', unable "
"to determine used file descriptor "
- "count: ~p~n", [Handle], State0),
+ "count: ~p", [Handle], State0),
{State1, 0};
UsedFd ->
{State0, UsedFd}
diff --git a/deps/rabbitmq_management_agent/src/rabbit_mgmt_ff.erl b/deps/rabbitmq_management_agent/src/rabbit_mgmt_ff.erl
index c8173c1244..1afaa52f79 100644
--- a/deps/rabbitmq_management_agent/src/rabbit_mgmt_ff.erl
+++ b/deps/rabbitmq_management_agent/src/rabbit_mgmt_ff.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2018-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2018-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_ff).
diff --git a/deps/rabbitmq_management_agent/src/rabbit_mgmt_format.erl b/deps/rabbitmq_management_agent/src/rabbit_mgmt_format.erl
index 4c9e8c189f..c8634e4e24 100644
--- a/deps/rabbitmq_management_agent/src/rabbit_mgmt_format.erl
+++ b/deps/rabbitmq_management_agent/src/rabbit_mgmt_format.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_format).
@@ -30,6 +30,7 @@
-export([args_hash/1]).
-import(rabbit_misc, [pget/2, pget/3, pset/3]).
+-import(rabbit_data_coercion, [to_binary/1]).
-include_lib("rabbit_common/include/rabbit.hrl").
-include_lib("rabbit_common/include/rabbit_framing.hrl").
@@ -176,7 +177,9 @@ protocol(unknown) ->
protocol(Version = {_Major, _Minor, _Revision}) ->
protocol({'AMQP', Version});
protocol({Family, Version}) ->
- print("~s ~s", [Family, protocol_version(Version)]).
+ print("~s ~s", [Family, protocol_version(Version)]);
+protocol(Protocol) when is_binary(Protocol) ->
+ print("~s", [Protocol]).
protocol_version(Arbitrary)
when is_list(Arbitrary) -> Arbitrary;
@@ -215,15 +218,16 @@ internal_user(User) ->
{password_hash, base64:encode(internal_user:get_password_hash(User))},
{hashing_algorithm, rabbit_auth_backend_internal:hashing_module_for_user(
User)},
- {tags, tags(internal_user:get_tags(User))},
+ {tags, tags_as_binaries(internal_user:get_tags(User))},
{limits, internal_user:get_limits(User)}].
user(User) ->
[{name, User#user.username},
- {tags, tags(User#user.tags)}].
+ {tags, tags_as_binaries(User#user.tags)}].
+
+tags_as_binaries(Tags) ->
+ [to_binary(T) || T <- Tags].
-tags(Tags) ->
- list_to_binary(string:join([atom_to_list(T) || T <- Tags], ",")).
listener(#listener{node = Node, protocol = Protocol,
ip_address = IPAddress, port = Port, opts=Opts}) ->
@@ -338,6 +342,7 @@ queue(Q) when ?is_amqqueue(Q) ->
Type = case amqqueue:get_type(Q) of
rabbit_classic_queue -> classic;
rabbit_quorum_queue -> quorum;
+ rabbit_stream_queue -> stream;
T -> T
end,
format(
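
The rabbit_mgmt_format changes above replace the comma-joined tags binary with a list of binaries (and also let protocol/1 accept a plain binary and report stream queues as type stream). A before/after sketch with illustrative values:

    %% Before this commit (tags/1):
    %%     <<"administrator,monitoring">> = tags([administrator, monitoring]).
    %% After this commit (tags_as_binaries/1):
    %%     [<<"administrator">>, <<"monitoring">>] = tags_as_binaries([administrator, monitoring]).
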
diff --git a/deps/rabbitmq_management_agent/src/rabbit_mgmt_gc.erl b/deps/rabbitmq_management_agent/src/rabbit_mgmt_gc.erl
index 99ddc89a8e..8231f804a3 100644
--- a/deps/rabbitmq_management_agent/src/rabbit_mgmt_gc.erl
+++ b/deps/rabbitmq_management_agent/src/rabbit_mgmt_gc.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_gc).
@@ -105,7 +105,7 @@ gc_exchanges() ->
gc_process_and_entity(channel_exchange_stats_fine_stats, GbSet).
gc_nodes() ->
- Nodes = rabbit_mnesia:cluster_nodes(all),
+ Nodes = rabbit_nodes:all(),
GbSet = gb_sets:from_list(Nodes),
gc_entity(node_stats, GbSet),
gc_entity(node_coarse_stats, GbSet),
diff --git a/deps/rabbitmq_management_agent/src/rabbit_mgmt_metrics_collector.erl b/deps/rabbitmq_management_agent/src/rabbit_mgmt_metrics_collector.erl
index 298f17a18d..9ab07fd808 100644
--- a/deps/rabbitmq_management_agent/src/rabbit_mgmt_metrics_collector.erl
+++ b/deps/rabbitmq_management_agent/src/rabbit_mgmt_metrics_collector.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_metrics_collector).
@@ -26,7 +26,8 @@
-import(rabbit_mgmt_data, [lookup_element/3]).
-record(state, {table, interval, policies, rates_mode, lookup_queue,
- lookup_exchange, old_aggr_stats}).
+ lookup_exchange, old_aggr_stats,
+ filter_aggregated_queue_metrics_pattern}).
%% Data is stored in ETS tables:
%% * One ETS table per metric (queue_stats, channel_stats_deliver_stats...)
@@ -59,7 +60,7 @@ reset_lookups(Table) ->
gen_server:call(name(Table), reset_lookups, infinity).
init([Table]) ->
- {RatesMode, Policies} = load_config(),
+ {RatesMode, Policies, FilterPattern} = load_config(),
Policy = retention_policy(Table),
Interval = take_smaller(proplists:get_value(Policy, Policies, [])) * 1000,
erlang:send_after(Interval, self(), collect_metrics),
@@ -70,7 +71,8 @@ init([Table]) ->
rates_mode = RatesMode,
old_aggr_stats = #{},
lookup_queue = fun queue_exists/1,
- lookup_exchange = fun exchange_exists/1}}.
+ lookup_exchange = fun exchange_exists/1,
+ filter_aggregated_queue_metrics_pattern = FilterPattern}}.
handle_call(reset_lookups, _From, State) ->
{reply, ok, State#state{lookup_queue = fun queue_exists/1,
@@ -463,19 +465,19 @@ aggregate_entry({Name, Ready, Unack, Msgs, Red}, NextStats, Ops0,
#state{table = queue_coarse_metrics,
old_aggr_stats = Old,
policies = {BPolicies, _, GPolicies},
- lookup_queue = QueueFun} = State) ->
+ lookup_queue = QueueFun,
+ filter_aggregated_queue_metrics_pattern = Pattern} = State) ->
Stats = ?vhost_msg_stats(Ready, Unack, Msgs),
Diff = get_difference(Name, Stats, State),
- Ops1 = insert_entry_ops(vhost_msg_stats, vhost(Name), true, Diff, Ops0,
- GPolicies),
+ Ops1 = maybe_insert_entry_ops(Name, Pattern, vhost_msg_stats, vhost(Name),
+ true, Diff, Ops0, GPolicies),
Ops2 = case QueueFun(Name) of
true ->
QPS =?queue_process_stats(Red),
O1 = insert_entry_ops(queue_process_stats, Name, false, QPS,
Ops1, BPolicies),
QMS = ?queue_msg_stats(Ready, Unack, Msgs),
- insert_entry_ops(queue_msg_stats, Name, false, QMS,
- O1, BPolicies);
+ insert_entry_ops(queue_msg_stats, Name, false, QMS, O1, BPolicies);
_ ->
Ops1
end,
@@ -583,6 +585,17 @@ insert_entry_op(Table, Key, Entry, Ops) ->
end, {insert_entry, Entry}, TableOps0),
maps:put(Table, TableOps, Ops).
+maybe_insert_entry_ops(Name, Pattern, Table, Id, Incr, Entry, Ops, Policies) ->
+ case needs_filtering_out(Name, Pattern) of
+ true -> Ops;
+ false -> insert_entry_ops(Table, Id, Incr, Entry, Ops, Policies)
+ end.
+
+needs_filtering_out(_, undefined) ->
+ false;
+needs_filtering_out(#resource{name = Name}, Pattern) ->
+ match == re:run(Name, Pattern, [{capture, none}]).
+
insert_entry_ops(Table, Id, Incr, Entry, Ops, Policies) ->
lists:foldl(fun({Size, Interval}, Acc) ->
Key = {Id, Size, Interval, Incr},
@@ -688,7 +701,8 @@ index_table(node_node_coarse_stats, node) -> node_node_coarse_stats_node_index.
load_config() ->
RatesMode = rabbit_mgmt_agent_config:get_env(rates_mode),
Policies = rabbit_mgmt_agent_config:get_env(sample_retention_policies, []),
- {RatesMode, Policies}.
+ FilterPattern = rabbit_mgmt_agent_config:get_env(filter_aggregated_queue_metrics_pattern),
+ {RatesMode, Policies, FilterPattern}.
ceil(X) when X < 0 ->
trunc(X);
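
The collector now reads filter_aggregated_queue_metrics_pattern and, via needs_filtering_out/2, skips vhost-level aggregation for queues whose names match it (an unset pattern filters nothing). A minimal sketch of that match, with an assumed example pattern:

    %% The "^excluded\\." pattern is an assumption for illustration only.
    matches_filter(QueueNameBin) ->
        Pattern = "^excluded\\.",
        match =:= re:run(QueueNameBin, Pattern, [{capture, none}]).

    %% matches_filter(<<"orders">>)         => false
    %% matches_filter(<<"excluded.audit">>) => true
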
diff --git a/deps/rabbitmq_management_agent/src/rabbit_mgmt_metrics_gc.erl b/deps/rabbitmq_management_agent/src/rabbit_mgmt_metrics_gc.erl
index f1ae48e0e4..4e47f2a80d 100644
--- a/deps/rabbitmq_management_agent/src/rabbit_mgmt_metrics_gc.erl
+++ b/deps/rabbitmq_management_agent/src/rabbit_mgmt_metrics_gc.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_metrics_gc).
diff --git a/deps/rabbitmq_management_agent/src/rabbit_mgmt_storage.erl b/deps/rabbitmq_management_agent/src/rabbit_mgmt_storage.erl
index 4c5c8c18ef..9032fe9231 100644
--- a/deps/rabbitmq_management_agent/src/rabbit_mgmt_storage.erl
+++ b/deps/rabbitmq_management_agent/src/rabbit_mgmt_storage.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_storage).
-behaviour(gen_server2).
diff --git a/deps/rabbitmq_management_agent/test/exometer_slide_SUITE.erl b/deps/rabbitmq_management_agent/test/exometer_slide_SUITE.erl
index abdf24853d..cf87ebd10a 100644
--- a/deps/rabbitmq_management_agent/test/exometer_slide_SUITE.erl
+++ b/deps/rabbitmq_management_agent/test/exometer_slide_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(exometer_slide_SUITE).
diff --git a/deps/rabbitmq_management_agent/test/metrics_SUITE.erl b/deps/rabbitmq_management_agent/test/metrics_SUITE.erl
index 227a04b21c..e50ab3d5ec 100644
--- a/deps/rabbitmq_management_agent/test/metrics_SUITE.erl
+++ b/deps/rabbitmq_management_agent/test/metrics_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(metrics_SUITE).
-compile(export_all).
diff --git a/deps/rabbitmq_management_agent/test/rabbit_mgmt_gc_SUITE.erl b/deps/rabbitmq_management_agent/test/rabbit_mgmt_gc_SUITE.erl
index b5ee4e9ce2..b5dc5d39bc 100644
--- a/deps/rabbitmq_management_agent/test/rabbit_mgmt_gc_SUITE.erl
+++ b/deps/rabbitmq_management_agent/test/rabbit_mgmt_gc_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_gc_SUITE).
@@ -78,13 +78,20 @@ end_per_group(_, Config) ->
Config.
init_per_testcase(quorum_queue_stats = Testcase, Config) ->
- case rabbit_ct_broker_helpers:enable_feature_flag(Config, quorum_queue) of
- ok ->
- rabbit_ct_helpers:testcase_started(Config, Testcase),
- rabbit_ct_helpers:run_steps(
- Config, rabbit_ct_client_helpers:setup_steps());
- Skip ->
- Skip
+ case rabbit_ct_helpers:is_mixed_versions() of
+ true ->
+ {skip, "not mixed versions compatible"};
+ _ ->
+ case rabbit_ct_broker_helpers:enable_feature_flag(Config, quorum_queue) of
+ ok ->
+ rabbit_ct_helpers:testcase_started(Config, Testcase),
+ rabbit_ct_helpers:run_steps(
+ Config, rabbit_ct_client_helpers:setup_steps());
+ {skip, _} = Skip ->
+ Skip;
+ Other ->
+ {skip, Other}
+ end
end;
init_per_testcase(Testcase, Config) ->
rabbit_ct_helpers:testcase_started(Config, Testcase),
diff --git a/deps/rabbitmq_management_agent/test/rabbit_mgmt_slide_SUITE.erl b/deps/rabbitmq_management_agent/test/rabbit_mgmt_slide_SUITE.erl
index 0261606bd5..9191cb81ac 100644
--- a/deps/rabbitmq_management_agent/test/rabbit_mgmt_slide_SUITE.erl
+++ b/deps/rabbitmq_management_agent/test/rabbit_mgmt_slide_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mgmt_slide_SUITE).
diff --git a/deps/rabbitmq_mqtt/.gitignore b/deps/rabbitmq_mqtt/.gitignore
index 7f34fc74e2..4ef626d07e 100644
--- a/deps/rabbitmq_mqtt/.gitignore
+++ b/deps/rabbitmq_mqtt/.gitignore
@@ -17,6 +17,7 @@
/sbin.lock
/xrefr
debug/*
+*.plt
test/config_schema_SUITE_data/schema/
test/.idea/*
diff --git a/deps/rabbitmq_mqtt/BUILD.bazel b/deps/rabbitmq_mqtt/BUILD.bazel
new file mode 100644
index 0000000000..eb06be0365
--- /dev/null
+++ b/deps/rabbitmq_mqtt/BUILD.bazel
@@ -0,0 +1,177 @@
+load("@bazel-erlang//:bazel_erlang_lib.bzl", "erlc")
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze")
+load(
+ "//:rabbitmq.bzl",
+ "RABBITMQ_DIALYZER_OPTS",
+ "RABBITMQ_TEST_ERLC_OPTS",
+ "assert_suites",
+ "broker_for_integration_suites",
+ "rabbitmq_integration_suite",
+ "rabbitmq_lib",
+ "rabbitmq_suite",
+)
+
+APP_NAME = "rabbitmq_mqtt"
+
+APP_DESCRIPTION = "RabbitMQ MQTT Adapter"
+
+APP_MODULE = "rabbit_mqtt"
+
+APP_ENV = """[
+ {default_user, <<"guest">>},
+ {default_pass, <<"guest">>},
+ {ssl_cert_login,false},
+ %% To satisfy an unfortunate expectation from popular MQTT clients.
+ {allow_anonymous, true},
+ {vhost, <<"/">>},
+ {exchange, <<"amq.topic">>},
+ {subscription_ttl, 86400000}, %% 24 hours
+ {retained_message_store, rabbit_mqtt_retained_msg_store_dets},
+ %% only used by DETS store
+ {retained_message_store_dets_sync_interval, 2000},
+ {prefetch, 10},
+ {ssl_listeners, []},
+ {tcp_listeners, [1883]},
+ {num_tcp_acceptors, 10},
+ {num_ssl_acceptors, 10},
+ {tcp_listen_options, [{backlog, 128},
+ {nodelay, true}]},
+ {proxy_protocol, false},
+ {sparkplug, false}
+ ]"""
+
+FIRST_SRCS = [
+ "src/rabbit_mqtt_retained_msg_store.erl",
+]
+
+BUILD_DEPS = [
+ "//deps/rabbitmq_cli:rabbitmqctl",
+]
+
+DEPS = [
+ "//deps/amqp_client:bazel_erlang_lib",
+ "//deps/rabbit_common:bazel_erlang_lib",
+ "@ra//:bazel_erlang_lib",
+ "@ranch//:bazel_erlang_lib",
+]
+
+RUNTIME_DEPS = [
+ "//deps/rabbit:bazel_erlang_lib",
+]
+
+rabbitmq_lib(
+ app_description = APP_DESCRIPTION,
+ app_env = APP_ENV,
+ app_module = APP_MODULE,
+ app_name = APP_NAME,
+ build_deps = BUILD_DEPS,
+ first_srcs = FIRST_SRCS,
+ runtime_deps = RUNTIME_DEPS,
+ deps = DEPS,
+)
+
+xref(tags = ["xref"])
+
+dialyze(
+ dialyzer_opts = RABBITMQ_DIALYZER_OPTS,
+ plt = "//:base_plt",
+ tags = ["dialyze"],
+)
+
+broker_for_integration_suites()
+
+erlc(
+ name = "rabbit_auth_backend_mqtt_mock",
+ testonly = True,
+ srcs = [
+ "test/rabbit_auth_backend_mqtt_mock.erl",
+ ],
+ dest = "test",
+ erlc_opts = RABBITMQ_TEST_ERLC_OPTS,
+ deps = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+ ],
+)
+
+PACKAGE = "deps/rabbitmq_mqtt"
+
+suites = [
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "auth_SUITE",
+ additional_beam = [
+ ":rabbit_auth_backend_mqtt_mock",
+ ],
+ runtime_deps = [
+ "@emqttc//:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "cluster_SUITE",
+ size = "large",
+ flaky = True,
+ runtime_deps = [
+ "@emqttc//:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "command_SUITE",
+ runtime_deps = [
+ "@emqttc//:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "config_schema_SUITE",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "java_SUITE",
+ flaky = True,
+ ),
+ rabbitmq_suite(
+ name = "mqtt_machine_SUITE",
+ size = "small",
+ ),
+ rabbitmq_suite(
+ name = "processor_SUITE",
+ size = "small",
+ deps = [
+ "//deps/amqp_client:bazel_erlang_lib",
+ "//deps/rabbit_common:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "proxy_protocol_SUITE",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "reader_SUITE",
+ runtime_deps = [
+ "@emqttc//:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "retainer_SUITE",
+ runtime_deps = [
+ "@emqttc//:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_suite(
+ name = "util_SUITE",
+ size = "small",
+ data = [
+ "test/rabbitmq_mqtt.app",
+ ],
+ ),
+]
+
+assert_suites(
+ suites,
+ glob(["test/**/*_SUITE.erl"]),
+)
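Note: the target names above follow the conventions already visible in this diff (suite names such as auth_SUITE and library targets named ":bazel_erlang_lib"). As a rough, hypothetical sketch only, assuming a checkout where Bazel and bazel-erlang are configured and these labels resolve, the new targets could be exercised along these lines:

    # hypothetical invocations; label names are assumed from the conventions in the BUILD file above
    bazel build //deps/rabbitmq_mqtt:bazel_erlang_lib
    bazel test //deps/rabbitmq_mqtt:auth_SUITE
    bazel test --test_tag_filters=xref,dialyze //deps/rabbitmq_mqtt:all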
diff --git a/deps/rabbitmq_mqtt/Makefile b/deps/rabbitmq_mqtt/Makefile
index 1dbbfe037d..9c2bcbdca0 100644
--- a/deps/rabbitmq_mqtt/Makefile
+++ b/deps/rabbitmq_mqtt/Makefile
@@ -46,8 +46,8 @@ DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
ERLANG_MK_COMMIT = rabbitmq-tmp
-include rabbitmq-components.mk
-include erlang.mk
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
clean::
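With the plugin's vendored build files removed (see the deletion below), the Makefile now includes the shared copies at the repository root. A minimal usage sketch, assuming an umbrella checkout where ../../rabbitmq-components.mk and ../../erlang.mk exist:

    # run from deps/rabbitmq_mqtt, using the core erlang.mk targets listed in its help text
    make          # deps + app (rel is a no-op for a plugin)
    make tests    # run the project's test suites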
diff --git a/deps/rabbitmq_mqtt/erlang.mk b/deps/rabbitmq_mqtt/erlang.mk
deleted file mode 100644
index fce4be0b0a..0000000000
--- a/deps/rabbitmq_mqtt/erlang.mk
+++ /dev/null
@@ -1,7808 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
-ERLANG_MK_WITHOUT =
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
-
-# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
-
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
-
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another a key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simplify writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating Github-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = erlang library for JSON Web Token
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple non intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = redis erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += esh_mk
-pkg_esh_mk_name = esh_mk
-pkg_esh_mk_description = esh template engine plugin for erlang.mk
-pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
-pkg_esh_mk_fetch = git
-pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
-pkg_esh_mk_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = TOML language erlang parser
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = http://fixprotocol.org/ implementation.
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - an Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = The guards and for Erlang parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
-pkg_gun_homepage = http://ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang irc client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library for efficiently decoding and encoding JSON, written in C.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = erlang driver for rethinkdb
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = library to handle mimetypes
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang Oauth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transformer for using new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between record and proplist
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = automatic Erlang node discovery via redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = pipelined erlang redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang Rest Client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy as nif for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an ID generator for message services.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elastic Search
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX-style parser for broken HTML, written in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = transit format for erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU based collation Erlang module
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtension for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = a simple erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler svn repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired by rebar xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based yaml loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = ZAB protocol implemented in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang.
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
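The entries above are only defaults; nothing is fetched unless a project asks for it. A minimal sketch of a project Makefile consuming one of the indexed packages, where PROJECT and the chosen package name are illustrative:

    # With no dep_recon line of its own, the project falls back to the
    # pkg_recon_* defaults from the index above.
    PROJECT = my_app
    DEPS += recon

    include erlang.mk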
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
-
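The search target above is meant to be driven from the command line; two likely invocations, with an illustrative query string:

    # Print every package in the index, using the pkg_print format above.
    make search
    # Case-insensitive match against package names and descriptions.
    make search q=pool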
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
-
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
-
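Most of the variables above use ?= precisely so that a project Makefile (before erlang.mk is included) or the command line can override them; a small sketch with illustrative values:

    # Keep fetched dependencies and umbrella apps in non-default locations.
    DEPS_DIR = $(CURDIR)/build/deps
    APPS_DIR = $(CURDIR)/lib
    # Drop these dependencies from the build list entirely.
    IGNORE_DEPS += edown eper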
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# They both use the core_dep_plugin macro.
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
-
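A sketch of how the early-plugin hook above tends to be used; the dependency and file names are hypothetical, and the dependency still has to be listed (for example in BUILD_DEPS) so that it is fetched at all:

    # Bare name: includes $(DEPS_DIR)/build_helpers/early-plugins.mk.
    BUILD_DEPS += build_helpers
    DEP_EARLY_PLUGINS = build_helpers
    # Path form: includes that exact file from the named dependency,
    # i.e. $(DEPS_DIR)/build_helpers/mk/early.mk.
    # DEP_EARLY_PLUGINS = build_helpers/mk/early.mk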
-# Query functions.
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
-
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
-
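As a worked example of the query functions above, consider a hypothetical dependency declared directly in a project Makefile (name, URL and version are illustrative):

    # Format: dep_<name> = <fetch method> <repository> <version or commit>
    dep_cowlib = git https://github.com/ninenines/cowlib 2.12.1
    # $(call query_fetch_method,cowlib) -> git
    # $(call query_repo,cowlib)         -> https://github.com/ninenines/cowlib
    # $(call query_version,cowlib)      -> 2.12.1
    # Without a dep_cowlib line, the pkg_cowlib_* entries from the package
    # index (when present) would supply these values instead.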
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly we don't want to include it here,
-# otherwise it'll be treated both as an app and as a top-level project.
-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
-
-export NO_AUTOPATCH
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
-
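V is normally given on the command line; with the definitions above, the quiet default prints one DEP line per dependency, while V=2 shell-traces the commands:

    make deps        # quiet: prints " DEP <name> (<commit>)" per dependency
    make deps V=2    # set -x: echoes every shell command while handling deps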
-# Optimization: don't recompile deps unless truly necessary.
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create an ebin directory for all apps to make sure Erlang recognizes them
-# as proper OTP applications when using -include_lib. This is a temporary
-# fix; a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
-
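A sketch of the LOCAL_DEPS behaviour described in the comments above, with hypothetical application names in an umbrella project:

    # Without LOCAL_DEPS, every directory under $(APPS_DIR) is compiled.
    # With it, only the listed entries that actually exist under $(APPS_DIR)
    # are compiled at this level (see LOCAL_DEPS_DIRS above).
    LOCAL_DEPS = my_core my_http_api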
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies after they have been compiled
-# once. A developer working on the top-level project and some of its
-# dependencies at the same time may want to change this behavior.
-# There are two solutions:
-# 1. Set `FULL=1` so that all dependencies are visited and
-# recursively recompiled if necessary.
-# 2. Set `FORCE_REBUILD=` to the specific list of dependencies that
-# should be recompiled (instead of the whole set).
-
-FORCE_REBUILD ?=
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
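Both options from the comment above are passed on the make command line; the dependency names below are illustrative:

    # 1. Visit every dependency and recompile whatever is out of date.
    make FULL=1
    # 2. Force only the named dependencies to be rebuilt (matched by basename).
    make FORCE_REBUILD="ranch recon"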
-
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
-
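Conversely, when everything is already fetched and built, the SKIP_DEPS guard above lets the deps step be skipped:

    make SKIP_DEPS=1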
-# Deps related targets.
-
-# @todo rename GNUmakefile and makefile to Makefile first, if they exist
-# While the Makefile could also be named GNUmakefile or makefile,
-# in practice only Makefile has been needed so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
-# if given. Do it for all 3 possible Makefile file names.
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
-
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
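One of the paths through dep_autopatch above ends in dep_autopatch_gen; in that case the dependency's Makefile ends up containing nothing but these two lines, so the dependency is built by erlang.mk itself:

    ERLC_OPTS = +debug_info
    include ../../erlang.mk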
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-# Hex only has a package version. No need to look in the Erlang.mk packages.
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
-
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility purposes with older Erlang.mk configuration.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
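-
-# The fetch methods above are selected through a project's dep_NAME
-# declarations, whose first word names the method. A minimal sketch;
-# "cowlib" and its URL are only illustrative:
-#
-#   DEPS = cowlib
-#   dep_cowlib = git https://github.com/ninenines/cowlib master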
-
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
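-
-# These defaults can be adjusted from the project Makefile before erlang.mk
-# is included, for example (illustrative values only):
-#
-#   ERLC_OPTS = +debug_info +warn_export_vars +warn_missing_spec
-#   COMPILE_FIRST = my_behaviour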
-
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- Output0 = [
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ],
- Output = case "é" of
- [233] -> unicode:characters_to_binary(Output0);
- _ -> Output0
- end,
- ok = file:write_file("$(1)", Output),
- halt()
-endef
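-
-# The generated $(PROJECT).d contains one rule per module plus a
-# COMPILE_FIRST line, roughly of this shape (file names are placeholders):
-#
-#   src/my_mod.erl:: src/my_behaviour.erl include/my_header.hrl; @touch $@
-#   COMPILE_FIRST += my_behaviour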
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
-		echo "Empty {modules, []} entry not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
-
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
- @:
-
-test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
-test_erlc_verbose_2 = set -x;
-test_erlc_verbose = $(test_erlc_verbose_$(V))
-
-define compile_test_erl
- $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(1)
-endef
-
-ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
-$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
- $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want to fail due to
-# warnings when used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
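-
-# For a project with a single git dependency, the rendered rebar.config looks
-# roughly like this (dependency name and URL are illustrative):
-#
-#   {deps, [
-#   {cowlib,".*",{git,"https://github.com/ninenines/cowlib","master"}}
-#   ]}.
-#   {erl_opts, [debug_info,warn_export_vars,warn_shadow_vars,warn_obsolete_guard]}.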
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
- " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
- " list-templates List available templates"
-
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
-
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
-
-list-templates:
- $(verbose) @echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
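-
-# Typical invocations, assuming erlang.mk has already been fetched into the
-# project directory; all names are placeholders:
-#
-#   make -f erlang.mk bootstrap
-#   make new t=gen_server n=my_server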
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code. The "gcc" MSYS2 package also doesn't.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
- "The CI_OTP variable must be defined with the Erlang versions" \
- "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
-
-# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifdef CONCUERROR_TESTS
-
-.PHONY: concuerror distclean-concuerror
-
-# Configuration
-
-CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
-CONCUERROR_OPTS ?=
-
-# Core targets.
-
-check:: concuerror
-
-ifndef KEEP_LOGS
-distclean:: distclean-concuerror
-endif
-
-# Plugin-specific targets.
-
-$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
- $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
- $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
-
-$(CONCUERROR_LOGS_DIR):
- $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
-
-define concuerror_html_report
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="utf-8">
-<title>Concuerror HTML report</title>
-</head>
-<body>
-<h1>Concuerror HTML report</h1>
-<p>Generated on $(concuerror_date)</p>
-<ul>
-$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
-</ul>
-</body>
-</html>
-endef
-
-concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
- $(eval concuerror_date := $(shell date))
- $(eval concuerror_targets := $^)
- $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
-
-define concuerror_target
-.PHONY: concuerror-$1-$2
-
-concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
- $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
- --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
- -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
- $$(CONCUERROR_OPTS) -m $1 -t $2
-endef
-
-$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
-
-distclean-concuerror:
- $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
-
-endif
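# For illustration (hypothetical module and test names): with
#   CONCUERROR_TESTS = my_mod:my_test
# in the Makefile, `make concuerror` clones and builds Concuerror on first use,
# then runs `concuerror -m my_mod -t my_test` and writes
# $(CONCUERROR_LOGS_DIR)/concuerror-my_mod-my_test.txt plus an HTML index at
# $(CONCUERROR_LOGS_DIR)/concuerror.html.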
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ct apps-ct distclean-ct
-
-# Configuration.
-
-CT_OPTS ?=
-
-ifneq ($(wildcard $(TEST_DIR)),)
-ifndef CT_SUITES
-CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
-endif
-endif
-CT_SUITES ?=
-CT_LOGS_DIR ?= $(CURDIR)/logs
-
-# Core targets.
-
-tests:: ct
-
-ifndef KEEP_LOGS
-distclean:: distclean-ct
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Common_test targets:" \
- " ct Run all the common_test suites for this project" \
- "" \
- "All your common_test suites have their associated targets." \
- "A suite named http_SUITE can be ran using the ct-http target."
-
-# Plugin-specific targets.
-
-CT_RUN = ct_run \
- -no_auto_compile \
- -noinput \
- -pa $(CURDIR)/ebin $(TEST_DIR) \
- -dir $(TEST_DIR) \
- -logdir $(CT_LOGS_DIR)
-
-ifeq ($(CT_SUITES),)
-ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
-else
-# We do not run tests if we are in an apps/* with no test directory.
-ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
-ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
-endif
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-define ct_app_target
-apps-ct-$1: test-build
- $$(MAKE) -C $1 ct IS_APP=1
-endef
-
-$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
-
-apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
-endif
-
-ifdef t
-ifeq (,$(findstring :,$t))
-CT_EXTRA = -group $t
-else
-t_words = $(subst :, ,$t)
-CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
-endif
-else
-ifdef c
-CT_EXTRA = -case $c
-else
-CT_EXTRA =
-endif
-endif
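# For illustration (hypothetical group and case names), assuming a suite at
# $(TEST_DIR)/http_SUITE.erl:
#   $ make ct                           # every *_SUITE in $(TEST_DIR)
#   $ make ct-http                      # http_SUITE only
#   $ make ct-http t=my_group           # one group
#   $ make ct-http t=my_group:my_case   # one case inside a group
#   $ make ct-http c=my_case            # one case, regardless of group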
-
-define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
-endef
-
-$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
-
-distclean-ct:
- $(gen_verbose) rm -rf $(CT_LOGS_DIR)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: plt distclean-plt dialyze
-
-# Configuration.
-
-DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
-export DIALYZER_PLT
-
-PLT_APPS ?=
-DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-DIALYZER_PLT_OPTS ?=
-
-# Core targets.
-
-check:: dialyze
-
-distclean:: distclean-plt
-
-help::
- $(verbose) printf "%s\n" "" \
- "Dialyzer targets:" \
- " plt Build a PLT file for this project" \
- " dialyze Analyze the project using Dialyzer"
-
-# Plugin-specific targets.
-
-define filter_opts.erl
- Opts = init:get_plain_arguments(),
- {Filtered, _} = lists:foldl(fun
- (O, {Os, true}) -> {[O|Os], false};
- (O = "-D", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-I", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-pa", {Os, _}) -> {[O|Os], true};
- (_, Acc) -> Acc
- end, {[], false}, Opts),
- io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
- halt().
-endef
-
-# DIALYZER_PLT is a variable understood directly by Dialyzer.
-#
-# We append the path to erts at the end of the PLT. This works
-# because the PLT file is in the external term format and the
-# function binary_to_term/1 ignores any trailing data.
-$(DIALYZER_PLT): deps app
- $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
- while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
- $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
- erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
- $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
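# For illustration (the install path is hypothetical): after the command above,
# the last line of the PLT is the current erts directory, e.g.
#   /usr/local/lib/erlang/lib/erts-12.2
# and the dialyze target below drops and rebuilds the PLT whenever that line no
# longer matches code:lib_dir(erts) for the Erlang/OTP found in $PATH.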
-
-plt: $(DIALYZER_PLT)
-
-distclean-plt:
- $(gen_verbose) rm -f $(DIALYZER_PLT)
-
-ifneq ($(wildcard $(DIALYZER_PLT)),)
-dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
- $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
- grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
- rm $(DIALYZER_PLT); \
- $(MAKE) plt; \
- fi
-else
-dialyze: $(DIALYZER_PLT)
-endif
- $(verbose) dialyzer --no_native `$(ERL) \
- -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
- -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
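# For illustration (hypothetical template name): with the defaults above
# (DTL_PATH = templates/, DTL_SUFFIX = _dtl), a template stored at
# templates/hello.dtl compiles to ebin/hello_dtl.beam and can be rendered from
# Erlang with a call such as hello_dtl:render(Vars).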
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
- " escript Build an executable escript archive" \
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
- $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: eunit apps-eunit
-
-# Configuration
-
-EUNIT_OPTS ?=
-EUNIT_ERL_OPTS ?=
-
-# Core targets.
-
-tests:: eunit
-
-help::
- $(verbose) printf "%s\n" "" \
- "EUnit targets:" \
- " eunit Run all the EUnit tests for this project"
-
-# Plugin-specific targets.
-
-define eunit.erl
- $(call cover.erl)
- CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
- ok -> ok;
- error -> halt(2)
- end,
- CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
- halt()
-endef
-
-EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
-else
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
-endif
-else
-EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
-EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
-
-EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
- $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
-
-eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
-ifneq ($(wildcard src/ $(TEST_DIR)),)
- $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-apps-eunit: test-build
- $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
- [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
- exit $$eunit_retcode
-endif
-endif
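# For illustration (hypothetical module and test names):
#   $ make eunit                   # all EUnit tests
#   $ make eunit t=my_mod          # a single module
#   $ make eunit t=my_mod:my_test  # a single test, run as fun my_mod:my_test/0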
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper)
-.PHONY: proper
-
-# Targets.
-
-tests:: proper
-
-define proper_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- Module = fun(M) ->
- [true] =:= lists:usort([
- case atom_to_list(F) of
- "prop_" ++ _ ->
- io:format("Testing ~p:~p/0~n", [M, F]),
- proper:quickcheck(M:F(), nocolors);
- _ ->
- true
- end
- || {F, 0} <- M:module_info(exports)])
- end,
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]);
- module -> Module($(2));
- function -> proper:quickcheck($(2), nocolors)
- end,
- CoverExport("$(COVER_DATA_DIR)/proper.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-proper: test-build cover-data-dir
- $(verbose) $(call erlang,$(call proper_check.erl,module,$(t)))
-else
-proper: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)()))
-endif
-else
-proper: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Verbosity.
-
-proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
-proto_verbose = $(proto_verbose_$(V))
-
-# Core targets.
-
-ifneq ($(wildcard src/),)
-ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
-PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
-ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
-
-ifeq ($(PROTO_FILES),)
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
- $(verbose) :
-else
-# Rebuild proto files when the Makefile changes.
-# We exclude $(PROJECT).d to avoid a circular dependency.
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(PROTO_FILES); \
- fi
- $(verbose) touch $@
-
-$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
-endif
-
-ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
-define compile_proto.erl
- [begin
- protobuffs_compile:generate_source(F, [
- {output_include_dir, "./include"},
- {output_src_dir, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-else
-define compile_proto.erl
- [begin
- gpb_compile:file(F, [
- {include_as_lib, true},
- {module_name_suffix, "_pb"},
- {o_hrl, "./include"},
- {o_erl, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-endif
-
-ifneq ($(PROTO_FILES),)
-$(PROJECT).d:: $(PROTO_FILES)
- $(verbose) mkdir -p ebin/ include/
- $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
-endif
-endif
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
-
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
-endif
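# For illustration: build and start the release, or rebuild it and hot-load
# changed modules into a node that is already running:
#   $ make run
#   $ make rel RELOAD=1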
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
- " shell Run an erlang shell with SHELL_OPTS or reasonable default"
-
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
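# For illustration (the node name is hypothetical):
#   $ make shell SHELL_OPTS="-sname dev"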
-
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
- "ReST sources and 'conf.py' file are expected in directory pointed by" \
- "SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists formats to build (only" \
- "'html' format is generated by default); target directory can be specified by" \
- 'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
- "Additional Sphinx options can be set in SPHINX_OPTS."
-
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
-
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
-.PHONY: triq
-
-# Targets.
-
-tests:: triq
-
-define triq_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
- module -> triq:check($(2));
- function -> triq:check($(2))
- end,
- CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-triq: test-build cover-data-dir
- $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
-else
-triq: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
-endif
-else
-triq: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
- ' xref Run Xrefr using $$XREF_CONFIG as config file if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-COVER_REPORT_DIR ?= cover
-COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
-
-ifdef COVER
-COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
-COVER_DEPS ?=
-endif
-
-# Code coverage for Common Test.
-
-ifdef COVER
-ifdef CT_RUN
-ifneq ($(wildcard $(TEST_DIR)),)
-test-build:: $(TEST_DIR)/ct.cover.spec
-
-$(TEST_DIR)/ct.cover.spec: cover-data-dir
- $(gen_verbose) printf "%s\n" \
- "{incl_app, '$(PROJECT)', details}." \
- "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
- $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
- $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
-
-CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
-endif
-endif
-endif
-
-# Code coverage for other tools.
-
-ifdef COVER
-define cover.erl
- CoverSetup = fun() ->
- Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
- $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
- $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
- end
- end || Dir <- Dirs]
- end,
- CoverExport = fun(Filename) -> cover:export(Filename) end,
-endef
-else
-define cover.erl
- CoverSetup = fun() -> ok end,
- CoverExport = fun(_) -> ok end,
-endef
-endif
-
-# Core targets
-
-ifdef COVER
-ifneq ($(COVER_REPORT_DIR),)
-tests::
- $(verbose) $(MAKE) --no-print-directory cover-report
-endif
-
-cover-data-dir: | $(COVER_DATA_DIR)
-
-$(COVER_DATA_DIR):
- $(verbose) mkdir -p $(COVER_DATA_DIR)
-else
-cover-data-dir:
-endif
-
-clean:: coverdata-clean
-
-ifneq ($(COVER_REPORT_DIR),)
-distclean:: cover-report-clean
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Cover targets:" \
- " cover-report Generate a HTML coverage report from previously collected" \
- " cover data." \
- " all.coverdata Merge all coverdata files into all.coverdata." \
- "" \
- "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
- "target tests additionally generates a HTML coverage report from the combined" \
- "coverdata files from each of these testing tools. HTML reports can be disabled" \
- "by setting COVER_REPORT_DIR to empty."
-
-# Plugin specific targets
-
-COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
-
-.PHONY: coverdata-clean
-coverdata-clean:
- $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
-
-# Merge all coverdata files into one.
-define cover_export.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
-endef
-
-all.coverdata: $(COVERDATA) cover-data-dir
- $(gen_verbose) $(call erlang,$(cover_export.erl))
-
-# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
-# empty if you want the coverdata files but not the HTML report.
-ifneq ($(COVER_REPORT_DIR),)
-
-.PHONY: cover-report-clean cover-report
-
-cover-report-clean:
- $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
-ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
- $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
-endif
-
-ifeq ($(COVERDATA),)
-cover-report:
-else
-
-# Modules which include eunit.hrl always contain one line without coverage
-# because eunit defines test/0 which is never called. We compensate for this.
-EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
- grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
- | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
-
-define cover_report.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- Ms = cover:imported_modules(),
- [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
- ++ ".COVER.html", [html]) || M <- Ms],
- Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
- EunitHrlMods = [$(EUNIT_HRL_MODS)],
- Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
- true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
- TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
- TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
- Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
- TotalPerc = Perc(TotalY, TotalN),
- {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
- io:format(F, "<!DOCTYPE html><html>~n"
- "<head><meta charset=\"UTF-8\">~n"
- "<title>Coverage report</title></head>~n"
- "<body>~n", []),
- io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
- io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
- [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
- "<td>~p%</td></tr>~n",
- [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
- How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
- Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
- io:format(F, "</table>~n"
- "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
- "</body></html>", [How, Date]),
- halt().
-endef
-
-cover-report:
- $(verbose) mkdir -p $(COVER_REPORT_DIR)
- $(gen_verbose) $(call erlang,$(cover_report.erl))
-
-endif
-endif # ifneq ($(COVER_REPORT_DIR),)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
-
-endif
-endif
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
-
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
-
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are also included no
-# matter the type of requested dependencies.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
-
-# Allow the use of fetch-deps and $(DEP_TYPES) to fetch multiple types of
-# dependencies with a single target.
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
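# For illustration: fetch regular, documentation and test dependencies in a
# single pass:
#   $ make fetch-deps DEP_TYPES="doc test"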
-
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
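# For illustration: print each dependency on one line, prefixed with the
# project name, restricting the query to two of the default fields:
#   $ make query-deps QUERY="name repo"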
diff --git a/deps/rabbitmq_mqtt/include/mqtt_machine.hrl b/deps/rabbitmq_mqtt/include/mqtt_machine.hrl
index b670c7b32e..3d5ee22c33 100644
--- a/deps/rabbitmq_mqtt/include/mqtt_machine.hrl
+++ b/deps/rabbitmq_mqtt/include/mqtt_machine.hrl
@@ -2,7 +2,12 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
--record(machine_state, {client_ids = #{}}).
+-record(machine_state, {client_ids = #{},
+ pids = #{},
+ %% add a couple of fields for future extensibility
+ reserved_1,
+ reserved_2}).
+
diff --git a/deps/rabbit_common/include/rabbit_log.hrl b/deps/rabbitmq_mqtt/include/mqtt_machine_v0.hrl
index 9ce908e997..50ac3bb236 100644
--- a/deps/rabbit_common/include/rabbit_log.hrl
+++ b/deps/rabbitmq_mqtt/include/mqtt_machine_v0.hrl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2017-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
--define(LAGER_SINK, rabbit_log_lager_event).
+-record(machine_state, {client_ids = #{}}).
diff --git a/deps/rabbitmq_mqtt/include/rabbit_mqtt.hrl b/deps/rabbitmq_mqtt/include/rabbit_mqtt.hrl
index 912f5ad46f..f829ecf40e 100644
--- a/deps/rabbitmq_mqtt/include/rabbit_mqtt.hrl
+++ b/deps/rabbitmq_mqtt/include/rabbit_mqtt.hrl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-define(CLIENT_ID_MAXLEN, 23).
diff --git a/deps/rabbitmq_mqtt/include/rabbit_mqtt_frame.hrl b/deps/rabbitmq_mqtt/include/rabbit_mqtt_frame.hrl
index 2b06da502b..d1ac8640e4 100644
--- a/deps/rabbitmq_mqtt/include/rabbit_mqtt_frame.hrl
+++ b/deps/rabbitmq_mqtt/include/rabbit_mqtt_frame.hrl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-define(PROTOCOL_NAMES, [{3, "MQIsdp"}, {4, "MQTT"}]).
diff --git a/deps/rabbitmq_mqtt/include/rabbit_mqtt_retained_msg_store.hrl b/deps/rabbitmq_mqtt/include/rabbit_mqtt_retained_msg_store.hrl
index 52b61b5924..bd91a9b00d 100644
--- a/deps/rabbitmq_mqtt/include/rabbit_mqtt_retained_msg_store.hrl
+++ b/deps/rabbitmq_mqtt/include/rabbit_mqtt_retained_msg_store.hrl
@@ -2,5 +2,5 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
diff --git a/deps/rabbitmq_mqtt/rabbitmq-components.mk b/deps/rabbitmq_mqtt/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/rabbitmq_mqtt/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Define the default goal as `all` because this file defines some targets
-# before the inclusion of erlang.mk, leading to the wrong target becoming
-# the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
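# For illustration (hypothetical git-describe output): in a Git clone with
# neither RABBITMQ_VERSION nor git-revisions.txt, a describe result such as
# "v3.9.11-7-gabc1234" becomes PROJECT_VERSION = 3.9.11+7.gabc1234 after the
# sed substitutions above.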
-
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to checkout branches which match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch or fallback to `stable` or `master` whichever was the
-# base of the topic branch.
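# For illustration (hypothetical branch name): with this repository checked out
# on a topic branch "mqtt-fixes" created from master, each dep_* entry below
# resolves to "git_rmq <repo> mqtt-fixes master master", so dependencies are
# tried at "mqtt-fixes" first and fall back to master.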
-
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
-
-# Third-party dependencies version pinning.
-#
-# We do that in this file, which is copied in all projects, to ensure
-# all projects use the same versions. It avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# Erlang.mk does not rebuild dependencies by default once they have been
-# compiled, except for those listed in the `$(FORCE_REBUILD)`
-# variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this eases
-# the work on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project
-# repository URL, if it's a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name are replaced by the
-# target's properties:
-# eg. rabbitmq-common is replaced by rabbitmq-codegen
-# eg. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fallback to RabbitMQ
-# upstream which is GitHub.
-
-# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following pattern:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
-
-# Macro to replace both the project's name (eg. "rabbit_common") and
-# repository name (eg. "rabbitmq-common") by the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespaces in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
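# For illustration (hypothetical fork URL): with PROJECT = rabbit_common and an
# "origin" fetch URL of https://github.com/me/rabbitmq-common.git,
#   $(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),rabbitmq_codegen)
# yields https://github.com/me/rabbitmq-codegen.git; if cloning from there
# fails, dep_fetch_git_rmq falls back to the upstream
# https://github.com/rabbitmq/rabbitmq-codegen.git.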
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level's one.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is a coincidence that we found a
-# `rabbitmq-components.mk` in upper directories.
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
diff --git a/deps/rabbitmq_mqtt/src/Elixir.RabbitMQ.CLI.Ctl.Commands.DecommissionMqttNodeCommand.erl b/deps/rabbitmq_mqtt/src/Elixir.RabbitMQ.CLI.Ctl.Commands.DecommissionMqttNodeCommand.erl
index f0aefb526b..6b75e63bda 100644
--- a/deps/rabbitmq_mqtt/src/Elixir.RabbitMQ.CLI.Ctl.Commands.DecommissionMqttNodeCommand.erl
+++ b/deps/rabbitmq_mqtt/src/Elixir.RabbitMQ.CLI.Ctl.Commands.DecommissionMqttNodeCommand.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
-module('Elixir.RabbitMQ.CLI.Ctl.Commands.DecommissionMqttNodeCommand').
@@ -10,6 +10,8 @@
-behaviour('Elixir.RabbitMQ.CLI.CommandBehaviour').
+-ignore_xref({'Elixir.RabbitMQ.CLI.DefaultOutput', output, 1}).
+
-export([scopes/0,
switches/0,
aliases/0,
diff --git a/deps/rabbitmq_mqtt/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ListMqttConnectionsCommand.erl b/deps/rabbitmq_mqtt/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ListMqttConnectionsCommand.erl
index a5745a7f58..bbacf9245f 100644
--- a/deps/rabbitmq_mqtt/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ListMqttConnectionsCommand.erl
+++ b/deps/rabbitmq_mqtt/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ListMqttConnectionsCommand.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
-module('Elixir.RabbitMQ.CLI.Ctl.Commands.ListMqttConnectionsCommand').
@@ -10,6 +10,14 @@
-behaviour('Elixir.RabbitMQ.CLI.CommandBehaviour').
+-ignore_xref([
+ {'Elixir.RabbitMQ.CLI.DefaultOutput', output, 1},
+ {'Elixir.RabbitMQ.CLI.Ctl.InfoKeys', prepare_info_keys, 1},
+ {'Elixir.RabbitMQ.CLI.Ctl.RpcStream', receive_list_items, 7},
+ {'Elixir.RabbitMQ.CLI.Ctl.InfoKeys', validate_info_keys, 2},
+ {'Elixir.Enum', join, 2}
+]).
+
-export([formatter/0,
scopes/0,
switches/0,
diff --git a/deps/rabbitmq_mqtt/src/mqtt_machine.erl b/deps/rabbitmq_mqtt/src/mqtt_machine.erl
index 334aa9e32c..dda63a4543 100644
--- a/deps/rabbitmq_mqtt/src/mqtt_machine.erl
+++ b/deps/rabbitmq_mqtt/src/mqtt_machine.erl
@@ -2,14 +2,16 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(mqtt_machine).
-behaviour(ra_machine).
-include("mqtt_machine.hrl").
--export([init/1,
+-export([version/0,
+ which_module/1,
+ init/1,
apply/3,
state_enter/2,
notify_connection/2]).
@@ -24,6 +26,10 @@
-type command() :: {register, client_id(), pid()} |
{unregister, client_id(), pid()} |
list.
+version() -> 1.
+
+which_module(1) -> ?MODULE;
+which_module(0) -> mqtt_machine_v0.
-spec init(config()) -> state().
init(_Conf) ->
@@ -31,29 +37,62 @@ init(_Conf) ->
-spec apply(map(), command(), state()) ->
{state(), reply(), ra_machine:effects()}.
-apply(_Meta, {register, ClientId, Pid}, #machine_state{client_ids = Ids} = State0) ->
- {Effects, Ids1} =
+apply(_Meta, {register, ClientId, Pid},
+ #machine_state{client_ids = Ids,
+ pids = Pids0} = State0) ->
+ {Effects, Ids1, Pids} =
case maps:find(ClientId, Ids) of
{ok, OldPid} when Pid =/= OldPid ->
Effects0 = [{demonitor, process, OldPid},
{monitor, process, Pid},
- {mod_call, ?MODULE, notify_connection, [OldPid, duplicate_id]}],
- {Effects0, maps:remove(ClientId, Ids)};
- _ ->
- Effects0 = [{monitor, process, Pid}],
- {Effects0, Ids}
+ {mod_call, ?MODULE, notify_connection,
+ [OldPid, duplicate_id]}],
+ Pids2 = case maps:take(OldPid, Pids0) of
+ error ->
+ Pids0;
+ {[ClientId], Pids1} ->
+ Pids1;
+ {ClientIds, Pids1} ->
+ Pids1#{ClientId => lists:delete(ClientId, ClientIds)}
+ end,
+ Pids3 = maps:update_with(Pid, fun(CIds) -> [ClientId | CIds] end,
+ [ClientId], Pids2),
+ {Effects0, maps:remove(ClientId, Ids), Pids3};
+
+ {ok, Pid} ->
+ {[], Ids, Pids0};
+ error ->
+ Pids1 = maps:update_with(Pid, fun(CIds) -> [ClientId | CIds] end,
+ [ClientId], Pids0),
+ Effects0 = [{monitor, process, Pid}],
+ {Effects0, Ids, Pids1}
end,
- State = State0#machine_state{client_ids = maps:put(ClientId, Pid, Ids1)},
+ State = State0#machine_state{client_ids = maps:put(ClientId, Pid, Ids1),
+ pids = Pids},
{State, ok, Effects};
-apply(Meta, {unregister, ClientId, Pid}, #machine_state{client_ids = Ids} = State0) ->
+apply(Meta, {unregister, ClientId, Pid}, #machine_state{client_ids = Ids,
+ pids = Pids0} = State0) ->
State = case maps:find(ClientId, Ids) of
- {ok, Pid} -> State0#machine_state{client_ids = maps:remove(ClientId, Ids)};
- %% don't delete client id that might belong to a newer connection
- %% that kicked the one with Pid out
- {ok, _AnotherPid} -> State0;
- error -> State0
- end,
+ {ok, Pid} ->
+ Pids = case maps:get(Pid, Pids0, undefined) of
+ undefined ->
+ Pids0;
+ [ClientId] ->
+ maps:remove(Pid, Pids0);
+ Cids ->
+ Pids0#{Pid => lists:delete(ClientId, Cids)}
+ end,
+
+ State0#machine_state{client_ids = maps:remove(ClientId, Ids),
+ pids = Pids};
+ %% don't delete client id that might belong to a newer connection
+ %% that kicked the one with Pid out
+ {ok, _AnotherPid} ->
+ State0;
+ error ->
+ State0
+ end,
Effects0 = [{demonitor, process, Pid}],
%% snapshot only when the map has changed
Effects = case State of
@@ -69,18 +108,21 @@ apply(_Meta, {down, DownPid, noconnection}, State) ->
Effect = {monitor, node, node(DownPid)},
{State, ok, Effect};
-apply(Meta, {down, DownPid, _}, #machine_state{client_ids = Ids} = State0) ->
- Ids1 = maps:filter(fun (_ClientId, Pid) when Pid =:= DownPid ->
- false;
- (_, _) ->
- true
- end, Ids),
- State = State0#machine_state{client_ids = Ids1},
- Delta = maps:keys(Ids) -- maps:keys(Ids1),
- Effects = lists:map(fun(Id) ->
- [{mod_call, rabbit_log, debug,
- ["MQTT connection with client id '~s' failed", [Id]]}] end, Delta),
- {State, ok, Effects ++ snapshot_effects(Meta, State)};
+apply(Meta, {down, DownPid, _}, #machine_state{client_ids = Ids,
+ pids = Pids0} = State0) ->
+ case maps:get(DownPid, Pids0, undefined) of
+ undefined ->
+ {State0, ok, []};
+ ClientIds ->
+ Ids1 = maps:without(ClientIds, Ids),
+ State = State0#machine_state{client_ids = Ids1,
+ pids = maps:remove(DownPid, Pids0)},
+ Effects = lists:map(fun(Id) ->
+ [{mod_call, rabbit_log, debug,
+ ["MQTT connection with client id '~s' failed", [Id]]}]
+ end, ClientIds),
+ {State, ok, Effects ++ snapshot_effects(Meta, State)}
+ end;
apply(_Meta, {nodeup, Node}, State) ->
%% Work out if any pids that were disconnected are still
@@ -91,26 +133,42 @@ apply(_Meta, {nodeup, Node}, State) ->
apply(_Meta, {nodedown, _Node}, State) ->
{State, ok};
-apply(Meta, {leave, Node}, #machine_state{client_ids = Ids} = State0) ->
- Ids1 = maps:filter(fun (_ClientId, Pid) -> node(Pid) =/= Node end, Ids),
- Delta = maps:keys(Ids) -- maps:keys(Ids1),
-
- Effects = lists:foldl(fun (ClientId, Acc) ->
- Pid = maps:get(ClientId, Ids),
- [
- {demonitor, process, Pid},
- {mod_call, ?MODULE, notify_connection, [Pid, decommission_node]},
- {mod_call, rabbit_log, debug,
- ["MQTT will remove client ID '~s' from known "
- "as its node has been decommissioned", [ClientId]]}
- ] ++ Acc
- end, [], Delta),
-
- State = State0#machine_state{client_ids = Ids1},
+apply(Meta, {leave, Node}, #machine_state{client_ids = Ids,
+ pids = Pids0} = State0) ->
+ {Keep, Remove} = maps:fold(
+ fun (ClientId, Pid, {In, Out}) ->
+ case node(Pid) =/= Node of
+ true ->
+ {In#{ClientId => Pid}, Out};
+ false ->
+ {In, Out#{ClientId => Pid}}
+ end
+ end, {#{}, #{}}, Ids),
+ Effects = maps:fold(fun (ClientId, _Pid, Acc) ->
+ Pid = maps:get(ClientId, Ids),
+ [
+ {demonitor, process, Pid},
+ {mod_call, ?MODULE, notify_connection, [Pid, decommission_node]},
+ {mod_call, rabbit_log, debug,
+ ["MQTT will remove client ID '~s' from known "
+ "as its node has been decommissioned", [ClientId]]}
+ ] ++ Acc
+ end, [], Remove),
+
+ State = State0#machine_state{client_ids = Keep,
+ pids = maps:without(maps:keys(Remove), Pids0)},
{State, ok, Effects ++ snapshot_effects(Meta, State)};
-
+apply(_Meta, {machine_version, 0, 1}, {machine_state, Ids}) ->
+ Pids = maps:fold(
+ fun(Id, Pid, Acc) ->
+ maps:update_with(Pid,
+ fun(CIds) -> [Id | CIds] end,
+ [Id], Acc)
+ end, #{}, Ids),
+ {#machine_state{client_ids = Ids,
+ pids = Pids}, ok, []};
apply(_Meta, Unknown, State) ->
- error_logger:error_msg("MQTT Raft state machine received unknown command ~p~n", [Unknown]),
+ logger:error("MQTT Raft state machine v1 received unknown command ~p", [Unknown]),
{State, {error, {unknown_command, Unknown}}, []}.
state_enter(leader, State) ->
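
The hunks above thread a new pids field through mqtt_machine's state: a pid-to-client-id reverse index that lets the {down, DownPid, _} clause drop every client id registered by a failed connection in a single lookup instead of filtering the whole client_ids map. A condensed sketch of that bookkeeping follows; it is not part of the patch and the helper names are illustrative only:

%% Sketch: keeping a pid() => [client_id()] reverse index in step with the
%% client_ids map, mirroring the maps:update_with/4 and maps:take/2 calls
%% in the register clause above.
add_client(Pid, ClientId, Pids) ->
    maps:update_with(Pid, fun(CIds) -> [ClientId | CIds] end, [ClientId], Pids).

remove_client(Pid, ClientId, Pids) ->
    case maps:take(Pid, Pids) of
        error              -> Pids;   %% pid not tracked, nothing to do
        {[ClientId], Rest} -> Rest;   %% this was the last client id for the pid
        {ClientIds, Rest}  -> Rest#{Pid => lists:delete(ClientId, ClientIds)}
    end.
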
diff --git a/deps/rabbitmq_mqtt/src/mqtt_machine_v0.erl b/deps/rabbitmq_mqtt/src/mqtt_machine_v0.erl
new file mode 100644
index 0000000000..77cea727dc
--- /dev/null
+++ b/deps/rabbitmq_mqtt/src/mqtt_machine_v0.erl
@@ -0,0 +1,134 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+-module(mqtt_machine_v0).
+-behaviour(ra_machine).
+
+-include("mqtt_machine_v0.hrl").
+
+-export([init/1,
+ apply/3,
+ state_enter/2,
+ notify_connection/2]).
+
+-type state() :: #machine_state{}.
+
+-type config() :: map().
+
+-type reply() :: {ok, term()} | {error, term()}.
+-type client_id() :: term().
+
+-type command() :: {register, client_id(), pid()} |
+ {unregister, client_id(), pid()} |
+ list.
+
+-spec init(config()) -> state().
+init(_Conf) ->
+ #machine_state{}.
+
+-spec apply(map(), command(), state()) ->
+ {state(), reply(), ra_machine:effects()}.
+apply(_Meta, {register, ClientId, Pid}, #machine_state{client_ids = Ids} = State0) ->
+ {Effects, Ids1} =
+ case maps:find(ClientId, Ids) of
+ {ok, OldPid} when Pid =/= OldPid ->
+ Effects0 = [{demonitor, process, OldPid},
+ {monitor, process, Pid},
+ {mod_call, ?MODULE, notify_connection, [OldPid, duplicate_id]}],
+ {Effects0, maps:remove(ClientId, Ids)};
+ _ ->
+ Effects0 = [{monitor, process, Pid}],
+ {Effects0, Ids}
+ end,
+ State = State0#machine_state{client_ids = maps:put(ClientId, Pid, Ids1)},
+ {State, ok, Effects};
+
+apply(Meta, {unregister, ClientId, Pid}, #machine_state{client_ids = Ids} = State0) ->
+ State = case maps:find(ClientId, Ids) of
+ {ok, Pid} -> State0#machine_state{client_ids = maps:remove(ClientId, Ids)};
+ %% don't delete client id that might belong to a newer connection
+ %% that kicked the one with Pid out
+ {ok, _AnotherPid} -> State0;
+ error -> State0
+ end,
+ Effects0 = [{demonitor, process, Pid}],
+ %% snapshot only when the map has changed
+ Effects = case State of
+ State0 -> Effects0;
+ _ -> Effects0 ++ snapshot_effects(Meta, State)
+ end,
+ {State, ok, Effects};
+
+apply(_Meta, {down, DownPid, noconnection}, State) ->
+ %% Monitor the node the pid is on (see {nodeup, Node} below)
+ %% so that we can detect when the node is re-connected and discover the
+ %% actual fate of the connection processes on it
+ Effect = {monitor, node, node(DownPid)},
+ {State, ok, Effect};
+
+apply(Meta, {down, DownPid, _}, #machine_state{client_ids = Ids} = State0) ->
+ Ids1 = maps:filter(fun (_ClientId, Pid) when Pid =:= DownPid ->
+ false;
+ (_, _) ->
+ true
+ end, Ids),
+ State = State0#machine_state{client_ids = Ids1},
+ Delta = maps:keys(Ids) -- maps:keys(Ids1),
+ Effects = lists:map(fun(Id) ->
+ [{mod_call, rabbit_log, debug,
+ ["MQTT connection with client id '~s' failed", [Id]]}] end, Delta),
+ {State, ok, Effects ++ snapshot_effects(Meta, State)};
+
+apply(_Meta, {nodeup, Node}, State) ->
+ %% Work out if any pids that were disconnected are still
+ %% alive.
+ %% Re-request the monitor for the pids on the now-back node.
+ Effects = [{monitor, process, Pid} || Pid <- all_pids(State), node(Pid) == Node],
+ {State, ok, Effects};
+apply(_Meta, {nodedown, _Node}, State) ->
+ {State, ok};
+
+apply(Meta, {leave, Node}, #machine_state{client_ids = Ids} = State0) ->
+ Ids1 = maps:filter(fun (_ClientId, Pid) -> node(Pid) =/= Node end, Ids),
+ Delta = maps:keys(Ids) -- maps:keys(Ids1),
+
+ Effects = lists:foldl(fun (ClientId, Acc) ->
+ Pid = maps:get(ClientId, Ids),
+ [
+ {demonitor, process, Pid},
+ {mod_call, ?MODULE, notify_connection, [Pid, decommission_node]},
+ {mod_call, rabbit_log, debug,
+ ["MQTT will remove client ID '~s' from known "
+ "as its node has been decommissioned", [ClientId]]}
+ ] ++ Acc
+ end, [], Delta),
+
+ State = State0#machine_state{client_ids = Ids1},
+ {State, ok, Effects ++ snapshot_effects(Meta, State)};
+
+apply(_Meta, Unknown, State) ->
+ logger:error("MQTT Raft state machine received an unknown command ~p", [Unknown]),
+ {State, {error, {unknown_command, Unknown}}, []}.
+
+state_enter(leader, State) ->
+    %% re-request monitors for all known pids; this cleans up
+    %% records for connections that are no longer around, e.g. right after a node restart
+ [{monitor, process, Pid} || Pid <- all_pids(State)];
+state_enter(_, _) ->
+ [].
+
+%% ==========================
+
+%% Avoids blocking the Raft leader.
+notify_connection(Pid, Reason) ->
+ spawn(fun() -> gen_server2:cast(Pid, Reason) end).
+
+-spec snapshot_effects(map(), state()) -> ra_machine:effects().
+snapshot_effects(#{index := RaftIdx}, State) ->
+ [{release_cursor, RaftIdx, State}].
+
+all_pids(#machine_state{client_ids = Ids}) ->
+ maps:values(Ids).
diff --git a/deps/rabbitmq_mqtt/src/mqtt_node.erl b/deps/rabbitmq_mqtt/src/mqtt_node.erl
index 84dcd9b3a4..3c74499ed8 100644
--- a/deps/rabbitmq_mqtt/src/mqtt_node.erl
+++ b/deps/rabbitmq_mqtt/src/mqtt_node.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(mqtt_node).
@@ -12,6 +12,7 @@
-define(START_TIMEOUT, 100000).
-define(RETRY_INTERVAL, 5000).
-define(RA_OPERATION_TIMEOUT, 60000).
+-define(RA_SYSTEM, coordination).
node_id() ->
server_id(node()).
@@ -23,7 +24,7 @@ server_id(Node) ->
{?ID_NAME, Node}.
all_node_ids() ->
- [server_id(N) || N <- rabbit_mnesia:cluster_nodes(all),
+ [server_id(N) || N <- rabbit_nodes:all(),
can_participate_in_clientid_tracking(N)].
start() ->
@@ -37,7 +38,7 @@ start(_Delay, AttemptsLeft) when AttemptsLeft =< 0 ->
start(Delay, AttemptsLeft) ->
NodeId = server_id(),
Nodes = compatible_peer_servers(),
- case ra_directory:uid_of(?ID_NAME) of
+ case ra_directory:uid_of(?RA_SYSTEM, ?ID_NAME) of
undefined ->
case Nodes of
[] ->
@@ -64,8 +65,8 @@ start(Delay, AttemptsLeft) ->
end;
_ ->
join_peers(NodeId, Nodes),
- ra:restart_server(NodeId),
- ra:trigger_election(NodeId)
+ ra:restart_server(?RA_SYSTEM, NodeId),
+ ra:trigger_election(NodeId, ?RA_OPERATION_TIMEOUT)
end,
ok.
@@ -80,23 +81,22 @@ start_server() ->
Conf = #{cluster_name => ?ID_NAME,
id => NodeId,
uid => UId,
- friendly_name => ?ID_NAME,
+ friendly_name => atom_to_list(?ID_NAME),
initial_members => Nodes,
log_init_args => #{uid => UId},
tick_timeout => Timeout,
machine => {module, mqtt_machine, #{}}
},
- ra:start_server(Conf).
+ ra:start_server(?RA_SYSTEM, Conf).
trigger_election() ->
- ra:trigger_election(server_id()).
+ ra:trigger_election(server_id(), ?RA_OPERATION_TIMEOUT).
join_peers(_NodeId, []) ->
ok;
join_peers(NodeId, Nodes) ->
join_peers(NodeId, Nodes, 100).
-join_peers(_NodeId, [], _RetriesLeft) ->
- ok;
+
join_peers(_NodeId, _Nodes, RetriesLeft) when RetriesLeft =:= 0 ->
rabbit_log:error("MQTT: exhausted all attempts while trying to rejoin cluster peers");
join_peers(NodeId, Nodes, RetriesLeft) ->
@@ -119,7 +119,7 @@ leave(Node) ->
NodeId = server_id(),
ToLeave = server_id(Node),
try
- ra:leave_and_delete_server(NodeId, ToLeave)
+ ra:leave_and_delete_server(?RA_SYSTEM, NodeId, ToLeave)
catch
exit:{{nodedown, Node}, _} ->
nodedown
diff --git a/deps/rabbitmq_mqtt/src/rabbit_mqtt.erl b/deps/rabbitmq_mqtt/src/rabbit_mqtt.erl
index 192f8a7fee..80cb6e04ff 100644
--- a/deps/rabbitmq_mqtt/src/rabbit_mqtt.erl
+++ b/deps/rabbitmq_mqtt/src/rabbit_mqtt.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mqtt).
diff --git a/deps/rabbitmq_mqtt/src/rabbit_mqtt_collector.erl b/deps/rabbitmq_mqtt/src/rabbit_mqtt_collector.erl
index 341ee46850..8940cab656 100644
--- a/deps/rabbitmq_mqtt/src/rabbit_mqtt_collector.erl
+++ b/deps/rabbitmq_mqtt/src/rabbit_mqtt_collector.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mqtt_collector).
diff --git a/deps/rabbitmq_mqtt/src/rabbit_mqtt_connection_info.erl b/deps/rabbitmq_mqtt/src/rabbit_mqtt_connection_info.erl
index 4e73a19253..5ddc57bb71 100644
--- a/deps/rabbitmq_mqtt/src/rabbit_mqtt_connection_info.erl
+++ b/deps/rabbitmq_mqtt/src/rabbit_mqtt_connection_info.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2017-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2017-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mqtt_connection_info).
diff --git a/deps/rabbitmq_mqtt/src/rabbit_mqtt_connection_sup.erl b/deps/rabbitmq_mqtt/src/rabbit_mqtt_connection_sup.erl
index 0a150caa38..20412a1a0c 100644
--- a/deps/rabbitmq_mqtt/src/rabbit_mqtt_connection_sup.erl
+++ b/deps/rabbitmq_mqtt/src/rabbit_mqtt_connection_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mqtt_connection_sup).
@@ -12,13 +12,13 @@
-include_lib("rabbit_common/include/rabbit.hrl").
--export([start_link/4, start_keepalive_link/0]).
+-export([start_link/3, start_keepalive_link/0]).
-export([init/1]).
%%----------------------------------------------------------------------------
-start_link(Ref, _Sock, _Transport, []) ->
+start_link(Ref, _Transport, []) ->
{ok, SupPid} = supervisor2:start_link(?MODULE, []),
{ok, KeepaliveSup} = supervisor2:start_child(
SupPid,
diff --git a/deps/rabbitmq_mqtt/src/rabbit_mqtt_frame.erl b/deps/rabbitmq_mqtt/src/rabbit_mqtt_frame.erl
index 950c5bd6c4..e82b4fa169 100644
--- a/deps/rabbitmq_mqtt/src/rabbit_mqtt_frame.erl
+++ b/deps/rabbitmq_mqtt/src/rabbit_mqtt_frame.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mqtt_frame).
diff --git a/deps/rabbitmq_mqtt/src/rabbit_mqtt_internal_event_handler.erl b/deps/rabbitmq_mqtt/src/rabbit_mqtt_internal_event_handler.erl
index 2a371b4142..cc2b66eb41 100644
--- a/deps/rabbitmq_mqtt/src/rabbit_mqtt_internal_event_handler.erl
+++ b/deps/rabbitmq_mqtt/src/rabbit_mqtt_internal_event_handler.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mqtt_internal_event_handler).
diff --git a/deps/rabbitmq_mqtt/src/rabbit_mqtt_processor.erl b/deps/rabbitmq_mqtt/src/rabbit_mqtt_processor.erl
index c3a25096e6..d904450cb9 100644
--- a/deps/rabbitmq_mqtt/src/rabbit_mqtt_processor.erl
+++ b/deps/rabbitmq_mqtt/src/rabbit_mqtt_processor.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mqtt_processor).
@@ -66,9 +66,18 @@ process_frame(#mqtt_frame{ fixed = #mqtt_frame_fixed{ type = Type }},
{error, connect_expected, PState};
process_frame(Frame = #mqtt_frame{ fixed = #mqtt_frame_fixed{ type = Type }},
PState) ->
- case process_request(Type, Frame, PState) of
+ try process_request(Type, Frame, PState) of
{ok, PState1} -> {ok, PState1, PState1#proc_state.connection};
Ret -> Ret
+ catch
+ _:{{shutdown, {server_initiated_close, 403, _}}, _} ->
+            %% NB: the MQTT spec says we should ack normally, i.e. pretend
+            %% there was no auth error, but here we are closing the
+            %% connection with an error. This is what happens anyway
+            %% if there is an authorization failure at the AMQP 0-9-1
+            %% client level. The error was already logged by the AMQP
+            %% channel, so there is no need for custom logging here.
+ {error, access_refused, PState}
end.
add_client_id_to_adapter_info(ClientId, #amqp_adapter_info{additional_info = AdditionalInfo0} = AdapterInfo) ->
@@ -120,7 +129,7 @@ process_request(?CONNECT,
case creds(Username, Password, SSLLoginName) of
nocreds ->
rabbit_core_metrics:auth_attempt_failed(Ip, <<>>, mqtt),
- rabbit_log_connection:error("MQTT login failed: no credentials provided~n"),
+ rabbit_log_connection:error("MQTT login failed: no credentials provided"),
{?CONNACK_CREDENTIALS, PState1};
{invalid_creds, {undefined, Pass}} when is_list(Pass) ->
rabbit_core_metrics:auth_attempt_failed(Ip, <<>>, mqtt),
@@ -133,8 +142,7 @@ process_request(?CONNECT,
{UserBin, PassBin} ->
case process_login(UserBin, PassBin, ProtoVersion, PState1) of
connack_dup_auth ->
- {SessionPresent0, PState2} = maybe_clean_sess(PState1),
- {{?CONNACK_ACCEPT, SessionPresent0}, PState2};
+ maybe_clean_sess(PState1);
{?CONNACK_ACCEPT, Conn, VHost, AState} ->
case rabbit_mqtt_collector:register(ClientId, self()) of
{ok, Corr} ->
@@ -156,8 +164,7 @@ process_request(?CONNECT,
retainer_pid = RetainerPid,
auth_state = AState,
register_state = {pending, Corr}},
- {SessionPresent1, PState4} = maybe_clean_sess(PState3),
- {{?CONNACK_ACCEPT, SessionPresent1}, PState4};
+ maybe_clean_sess(PState3);
%% e.g. this node was removed from the MQTT cluster members
{error, _} = Err ->
rabbit_log_connection:error("MQTT cannot accept a connection: "
@@ -256,44 +263,43 @@ process_request(?SUBSCRIBE,
message_id = StateMsgId,
mqtt2amqp_fun = Mqtt2AmqpFun} = PState0) ->
rabbit_log_connection:debug("Received a SUBSCRIBE for topic(s) ~p", [Topics]),
- check_subscribe(Topics, fun() ->
- {QosResponse, PState1} =
- lists:foldl(fun (#mqtt_topic{name = TopicName,
- qos = Qos}, {QosList, PState}) ->
- SupportedQos = supported_subs_qos(Qos),
- {Queue, #proc_state{subscriptions = Subs} = PState1} =
- ensure_queue(SupportedQos, PState),
- RoutingKey = Mqtt2AmqpFun(TopicName),
- Binding = #'queue.bind'{
- queue = Queue,
- exchange = Exchange,
- routing_key = RoutingKey},
- #'queue.bind_ok'{} = amqp_channel:call(Channel, Binding),
- SupportedQosList = case maps:find(TopicName, Subs) of
- {ok, L} -> [SupportedQos|L];
- error -> [SupportedQos]
- end,
- {[SupportedQos | QosList],
- PState1 #proc_state{
- subscriptions =
- maps:put(TopicName, SupportedQosList, Subs)}}
- end, {[], PState0}, Topics),
- SendFun(#mqtt_frame{fixed = #mqtt_frame_fixed{type = ?SUBACK},
- variable = #mqtt_frame_suback{
- message_id = SubscribeMsgId,
- qos_table = QosResponse}}, PState1),
- %% we may need to send up to length(Topics) messages.
- %% if QoS is > 0 then we need to generate a message id,
- %% and increment the counter.
- StartMsgId = safe_max_id(SubscribeMsgId, StateMsgId),
- N = lists:foldl(fun (Topic, Acc) ->
- case maybe_send_retained_message(RPid, Topic, Acc, PState1) of
- {true, X} -> Acc + X;
- false -> Acc
- end
- end, StartMsgId, Topics),
- {ok, PState1#proc_state{message_id = N}}
- end, PState0);
+
+ {QosResponse, PState1} =
+ lists:foldl(fun (#mqtt_topic{name = TopicName,
+ qos = Qos}, {QosList, PState}) ->
+ SupportedQos = supported_subs_qos(Qos),
+ {Queue, #proc_state{subscriptions = Subs} = PState1} =
+ ensure_queue(SupportedQos, PState),
+ RoutingKey = Mqtt2AmqpFun(TopicName),
+ Binding = #'queue.bind'{
+ queue = Queue,
+ exchange = Exchange,
+ routing_key = RoutingKey},
+ #'queue.bind_ok'{} = amqp_channel:call(Channel, Binding),
+ SupportedQosList = case maps:find(TopicName, Subs) of
+ {ok, L} -> [SupportedQos|L];
+ error -> [SupportedQos]
+ end,
+ {[SupportedQos | QosList],
+ PState1 #proc_state{
+ subscriptions =
+ maps:put(TopicName, SupportedQosList, Subs)}}
+ end, {[], PState0}, Topics),
+ SendFun(#mqtt_frame{fixed = #mqtt_frame_fixed{type = ?SUBACK},
+ variable = #mqtt_frame_suback{
+ message_id = SubscribeMsgId,
+ qos_table = QosResponse}}, PState1),
+ %% we may need to send up to length(Topics) messages.
+ %% if QoS is > 0 then we need to generate a message id,
+ %% and increment the counter.
+ StartMsgId = safe_max_id(SubscribeMsgId, StateMsgId),
+ N = lists:foldl(fun (Topic, Acc) ->
+ case maybe_send_retained_message(RPid, Topic, Acc, PState1) of
+ {true, X} -> Acc + X;
+ false -> Acc
+ end
+ end, StartMsgId, Topics),
+ {ok, PState1#proc_state{message_id = N}};
process_request(?UNSUBSCRIBE,
#mqtt_frame{
@@ -489,35 +495,65 @@ delivery_qos(Tag, Headers, #proc_state{ consumer_tags = {_, Tag} }) ->
maybe_clean_sess(PState = #proc_state { clean_sess = false,
connection = Conn,
+ auth_state = #auth_state{vhost = VHost},
client_id = ClientId }) ->
- SessionPresent = session_present(Conn, ClientId),
- {_Queue, PState1} = ensure_queue(?QOS_1, PState),
- {SessionPresent, PState1};
+ SessionPresent = session_present(VHost, ClientId),
+ case SessionPresent of
+ false ->
+            %% ensure_queue/2 not only ensures that the queue is created, but also starts consuming from it.
+            %% Let's avoid creating that queue until a client explicitly asks for it.
+            %% Then publish-only clients that connect with clean_sess=true due to some misconfiguration
+            %% will consume fewer resources.
+ {{?CONNACK_ACCEPT, SessionPresent}, PState};
+ true ->
+ try ensure_queue(?QOS_1, PState) of
+ {_Queue, PState1} -> {{?CONNACK_ACCEPT, SessionPresent}, PState1}
+ catch
+ exit:({{shutdown, {server_initiated_close, 403, _}}, _}) ->
+ %% Connection is not yet propagated to #proc_state{}, let's close it here
+ catch amqp_connection:close(Conn),
+ rabbit_log_connection:error("MQTT cannot recover a session, user is missing permissions"),
+ {?CONNACK_SERVER, PState};
+ C:E:S ->
+ %% Connection is not yet propagated to
+ %% #proc_state{}, let's close it here.
+                    %% This is an exceptional situation anyway, but
+                    %% doing this prevents a second crash from the
+                    %% amqp client from being logged.
+ catch amqp_connection:close(Conn),
+ erlang:raise(C, E, S)
+ end
+ end;
maybe_clean_sess(PState = #proc_state { clean_sess = true,
connection = Conn,
+ auth_state = #auth_state{vhost = VHost},
client_id = ClientId }) ->
{_, Queue} = rabbit_mqtt_util:subcription_queue_name(ClientId),
{ok, Channel} = amqp_connection:open_channel(Conn),
- ok = try amqp_channel:call(Channel, #'queue.delete'{ queue = Queue }) of
- #'queue.delete_ok'{} -> ok
- catch
- exit:_Error -> ok
- after
- amqp_channel:close(Channel)
- end,
- {false, PState}.
-
-session_present(Conn, ClientId) ->
+ case session_present(VHost, ClientId) of
+ false ->
+ {{?CONNACK_ACCEPT, false}, PState};
+ true ->
+ try amqp_channel:call(Channel, #'queue.delete'{ queue = Queue }) of
+ #'queue.delete_ok'{} -> {{?CONNACK_ACCEPT, false}, PState}
+ catch
+ exit:({{shutdown, {server_initiated_close, 403, _}}, _}) ->
+ %% Connection is not yet propagated to #proc_state{}, let's close it here
+ catch amqp_connection:close(Conn),
+ rabbit_log_connection:error("MQTT cannot start a clean session: "
+ "`configure` permission missing for queue `~p`", [Queue]),
+ {?CONNACK_SERVER, PState}
+ after
+ catch amqp_channel:close(Channel)
+ end
+ end.
+
+session_present(VHost, ClientId) ->
{_, QueueQ1} = rabbit_mqtt_util:subcription_queue_name(ClientId),
- Declare = #'queue.declare'{queue = QueueQ1,
- passive = true},
- {ok, Channel} = amqp_connection:open_channel(Conn),
- try
- amqp_channel:call(Channel, Declare),
- amqp_channel:close(Channel),
- true
- catch exit:{{shutdown, {server_initiated_close, ?NOT_FOUND, _Text}}, _} ->
- false
+ QueueName = rabbit_misc:r(VHost, queue, QueueQ1),
+ case rabbit_amqqueue:lookup(QueueName) of
+ {ok, _} -> true;
+ {error, not_found} -> false
end.
make_will_msg(#mqtt_frame_connect{ will_flag = false }) ->
@@ -551,8 +587,8 @@ process_login(UserBin, PassBin, ProtoVersion,
peer_addr = Addr}) ->
{ok, {_, _, _, ToPort}} = rabbit_net:socket_ends(Sock, inbound),
{VHostPickedUsing, {VHost, UsernameBin}} = get_vhost(UserBin, SslLoginName, ToPort),
- rabbit_log_connection:info(
- "MQTT vhost picked using ~s~n",
+ rabbit_log_connection:debug(
+ "MQTT vhost picked using ~s",
[human_readable_vhost_lookup_strategy(VHostPickedUsing)]),
RemoteAddress = list_to_binary(inet:ntoa(Addr)),
case rabbit_vhost:exists(VHost) of
@@ -578,34 +614,34 @@ process_login(UserBin, PassBin, ProtoVersion,
mqtt),
amqp_connection:close(Connection),
rabbit_log_connection:warning(
- "MQTT login failed for ~p access_refused "
- "(access must be from localhost)~n",
+ "MQTT login failed for user ~s: "
+ "this user's access is restricted to localhost",
[binary_to_list(UsernameBin)]),
?CONNACK_AUTH
end;
{error, {auth_failure, Explanation}} ->
rabbit_core_metrics:auth_attempt_failed(RemoteAddress, UsernameBin, mqtt),
- rabbit_log_connection:error("MQTT login failed for user '~p' auth_failure: ~s~n",
+ rabbit_log_connection:error("MQTT login failed for user '~s', authentication failed: ~s",
[binary_to_list(UserBin), Explanation]),
?CONNACK_CREDENTIALS;
{error, access_refused} ->
rabbit_core_metrics:auth_attempt_failed(RemoteAddress, UsernameBin, mqtt),
- rabbit_log_connection:warning("MQTT login failed for user '~p': access_refused "
- "(vhost access not allowed)~n",
+ rabbit_log_connection:warning("MQTT login failed for user '~s': "
+ "virtual host access not allowed",
[binary_to_list(UserBin)]),
?CONNACK_AUTH;
{error, not_allowed} ->
rabbit_core_metrics:auth_attempt_failed(RemoteAddress, UsernameBin, mqtt),
%% when vhost allowed for TLS connection
- rabbit_log_connection:warning("MQTT login failed for ~p access_refused "
- "(vhost access not allowed)~n",
+ rabbit_log_connection:warning("MQTT login failed for user '~s': "
+ "virtual host access not allowed",
[binary_to_list(UserBin)]),
?CONNACK_AUTH
end;
false ->
rabbit_core_metrics:auth_attempt_failed(RemoteAddress, UsernameBin, mqtt),
- rabbit_log_connection:error("MQTT login failed for user '~p' auth_failure: vhost ~s does not exist~n",
- [binary_to_list(UserBin), VHost]),
+ rabbit_log_connection:error("MQTT login failed for user '~s': virtual host '~s' does not exist",
+ [UserBin, VHost]),
?CONNACK_CREDENTIALS
end.
@@ -819,7 +855,7 @@ send_will(PState = #proc_state{will_msg = WillMsg = #mqtt_msg{retain = Retain,
end;
Error ->
rabbit_log:warning(
- "Could not send last will: ~p~n",
+ "Could not send last will: ~p",
[Error])
end,
case ChQos1 of
@@ -832,6 +868,22 @@ send_will(PState = #proc_state{will_msg = WillMsg = #mqtt_msg{retain = Retain,
end,
PState #proc_state{ channels = {undefined, undefined} }.
+%% TODO amqp_pub/2 publishes messages asynchronously, using
+%% amqp_channel:cast_flow/3.
+%%
+%% It performs an access check using check_publish/3 before submitting, but
+%% this is superfluous, as the actual publish will perform the same
+%% check. While check results are cached, it is still some unnecessary
+%% work.
+%%
+%% The only reason to keep it this way is that it prevents useless
+%% crash messages from flooding the logs, as there is no code to handle an
+%% async channel crash gracefully.
+%%
+%% It would be better to rework the whole thing, removing the performance
+%% penalty and some 50 lines of duplicate code, perhaps by unlinking from the
+%% channel and adding it as a child of the connection supervisor instead.
+%% But the exact details are not yet clear.
amqp_pub(undefined, PState) ->
PState;
@@ -941,28 +993,15 @@ handle_ra_event(register_timeout, PState) ->
PState;
handle_ra_event(Evt, PState) ->
%% log these?
- rabbit_log:debug("unhandled ra_event: ~w ~n", [Evt]),
+ rabbit_log:debug("unhandled ra_event: ~w ", [Evt]),
PState.
-%% NB: check_*: MQTT spec says we should ack normally, ie pretend there
-%% was no auth error, but here we are closing the connection with an error. This
-%% is what happens anyway if there is an authorization failure at the AMQP 0-9-1 client level.
-
check_publish(TopicName, Fn, PState) ->
case check_topic_access(TopicName, write, PState) of
ok -> Fn();
_ -> {error, unauthorized, PState}
end.
-check_subscribe([], Fn, _) ->
- Fn();
-
-check_subscribe([#mqtt_topic{name = TopicName} | Topics], Fn, PState) ->
- case check_topic_access(TopicName, read, PState) of
- ok -> check_subscribe(Topics, Fn, PState);
- _ -> {error, unauthorized, PState}
- end.
-
check_topic_access(TopicName, Access,
#proc_state{
auth_state = #auth_state{user = User = #user{username = Username},
@@ -1003,10 +1042,10 @@ check_topic_access(TopicName, Access,
R
catch
_:{amqp_error, access_refused, Msg, _} ->
- rabbit_log:error("operation resulted in an error (access_refused): ~p~n", [Msg]),
+ rabbit_log:error("operation resulted in an error (access_refused): ~p", [Msg]),
{error, access_refused};
_:Error ->
- rabbit_log:error("~p~n", [Error]),
+ rabbit_log:error("~p", [Error]),
{error, access_refused}
end
end.
diff --git a/deps/rabbitmq_mqtt/src/rabbit_mqtt_reader.erl b/deps/rabbitmq_mqtt/src/rabbit_mqtt_reader.erl
index 39c0761321..7f7b8eb696 100644
--- a/deps/rabbitmq_mqtt/src/rabbit_mqtt_reader.erl
+++ b/deps/rabbitmq_mqtt/src/rabbit_mqtt_reader.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mqtt_reader).
@@ -59,7 +59,7 @@ init([KeepaliveSup, Ref]) ->
RealSocket = rabbit_net:unwrap_socket(Sock),
case rabbit_net:connection_string(Sock, inbound) of
{ok, ConnStr} ->
- rabbit_log_connection:debug("MQTT accepting TCP connection ~p (~s)~n", [self(), ConnStr]),
+ rabbit_log_connection:debug("MQTT accepting TCP connection ~p (~s)", [self(), ConnStr]),
rabbit_alarm:register(
self(), {?MODULE, conserve_resources, []}),
ProcessorState = rabbit_mqtt_processor:initial_state(Sock,ssl_login_name(RealSocket)),
@@ -102,7 +102,7 @@ handle_call(Msg, From, State) ->
handle_cast(duplicate_id,
State = #state{ proc_state = PState,
conn_name = ConnName }) ->
- rabbit_log_connection:warning("MQTT disconnecting client ~p with duplicate id '~s'~n",
+ rabbit_log_connection:warning("MQTT disconnecting client ~p with duplicate id '~s'",
[ConnName, rabbit_mqtt_processor:info(client_id, PState)]),
{stop, {shutdown, duplicate_id}, State};
@@ -110,7 +110,7 @@ handle_cast(decommission_node,
State = #state{ proc_state = PState,
conn_name = ConnName }) ->
rabbit_log_connection:warning("MQTT disconnecting client ~p with client ID '~s' as its node is about"
- " to be decommissioned~n",
+ " to be decommissioned",
[ConnName, rabbit_mqtt_processor:info(client_id, PState)]),
{stop, {shutdown, decommission_node}, State};
@@ -185,7 +185,7 @@ handle_info({start_keepalives, Keepalive},
handle_info(keepalive_timeout, State = #state {conn_name = ConnStr,
proc_state = PState}) ->
- rabbit_log_connection:error("closing MQTT connection ~p (keepalive timeout)~n", [ConnStr]),
+ rabbit_log_connection:error("closing MQTT connection ~p (keepalive timeout)", [ConnStr]),
send_will_and_terminate(PState, {shutdown, keepalive_timeout}, State);
handle_info(emit_stats, State) ->
@@ -210,7 +210,7 @@ handle_pre_hibernate(State) ->
{hibernate, State}.
do_terminate({network_error, {ssl_upgrade_error, closed}, ConnStr}, _State) ->
- rabbit_log_connection:error("MQTT detected TLS upgrade error on ~s: connection closed~n",
+ rabbit_log_connection:error("MQTT detected TLS upgrade error on ~s: connection closed",
[ConnStr]);
do_terminate({network_error,
@@ -230,20 +230,20 @@ do_terminate({network_error,
{tls_alert, Alert}}, ConnStr}, _State) ->
log_tls_alert(Alert, ConnStr);
do_terminate({network_error, {ssl_upgrade_error, Reason}, ConnStr}, _State) ->
- rabbit_log_connection:error("MQTT detected TLS upgrade error on ~s: ~p~n",
+ rabbit_log_connection:error("MQTT detected TLS upgrade error on ~s: ~p",
[ConnStr, Reason]);
do_terminate({network_error, Reason, ConnStr}, _State) ->
- rabbit_log_connection:error("MQTT detected network error on ~s: ~p~n",
+ rabbit_log_connection:error("MQTT detected network error on ~s: ~p",
[ConnStr, Reason]);
do_terminate({network_error, Reason}, _State) ->
- rabbit_log_connection:error("MQTT detected network error: ~p~n", [Reason]);
+ rabbit_log_connection:error("MQTT detected network error: ~p", [Reason]);
do_terminate(normal, #state{proc_state = ProcState,
conn_name = ConnName}) ->
rabbit_mqtt_processor:close_connection(ProcState),
- rabbit_log_connection:info("closing MQTT connection ~p (~s)~n", [self(), ConnName]),
+ rabbit_log_connection:info("closing MQTT connection ~p (~s)", [self(), ConnName]),
ok;
do_terminate(_Reason, #state{proc_state = ProcState}) ->
@@ -267,17 +267,17 @@ ssl_login_name(Sock) ->
%%----------------------------------------------------------------------------
log_tls_alert(handshake_failure, ConnStr) ->
- rabbit_log_connection:error("MQTT detected TLS upgrade error on ~s: handshake failure~n",
+ rabbit_log_connection:error("MQTT detected TLS upgrade error on ~s: handshake failure",
[ConnStr]);
log_tls_alert(unknown_ca, ConnStr) ->
- rabbit_log_connection:error("MQTT detected TLS certificate verification error on ~s: alert 'unknown CA'~n",
+ rabbit_log_connection:error("MQTT detected TLS certificate verification error on ~s: alert 'unknown CA'",
[ConnStr]);
log_tls_alert(Alert, ConnStr) ->
- rabbit_log_connection:error("MQTT detected TLS upgrade error on ~s: alert ~s~n",
+ rabbit_log_connection:error("MQTT detected TLS upgrade error on ~s: alert ~s",
[ConnStr, Alert]).
log_new_connection(#state{conn_name = ConnStr, proc_state = PState}) ->
- rabbit_log_connection:info("accepting MQTT connection ~p (~s, client id: ~s)~n",
+ rabbit_log_connection:info("accepting MQTT connection ~p (~s, client id: ~s)",
[self(), ConnStr, rabbit_mqtt_processor:info(client_id, PState)]).
process_received_bytes(<<>>, State = #state{proc_state = ProcState,
@@ -310,41 +310,41 @@ process_received_bytes(Bytes,
connection = ConnPid });
%% PUBLISH and more
{error, unauthorized = Reason, ProcState1} ->
- rabbit_log_connection:error("MQTT connection ~s is closing due to an authorization failure~n", [ConnStr]),
+ rabbit_log_connection:error("MQTT connection ~s is closing due to an authorization failure", [ConnStr]),
{stop, {shutdown, Reason}, pstate(State, ProcState1)};
%% CONNECT frames only
{error, unauthenticated = Reason, ProcState1} ->
- rabbit_log_connection:error("MQTT connection ~s is closing due to an authentication failure~n", [ConnStr]),
+ rabbit_log_connection:error("MQTT connection ~s is closing due to an authentication failure", [ConnStr]),
{stop, {shutdown, Reason}, pstate(State, ProcState1)};
%% CONNECT frames only
{error, invalid_client_id = Reason, ProcState1} ->
- rabbit_log_connection:error("MQTT cannot accept connection ~s: client uses an invalid ID~n", [ConnStr]),
+ rabbit_log_connection:error("MQTT cannot accept connection ~s: client uses an invalid ID", [ConnStr]),
{stop, {shutdown, Reason}, pstate(State, ProcState1)};
%% CONNECT frames only
{error, unsupported_protocol_version = Reason, ProcState1} ->
- rabbit_log_connection:error("MQTT cannot accept connection ~s: incompatible protocol version~n", [ConnStr]),
+ rabbit_log_connection:error("MQTT cannot accept connection ~s: incompatible protocol version", [ConnStr]),
{stop, {shutdown, Reason}, pstate(State, ProcState1)};
{error, unavailable = Reason, ProcState1} ->
- rabbit_log_connection:error("MQTT cannot accept connection ~s due to an internal error or unavailable component~n",
+ rabbit_log_connection:error("MQTT cannot accept connection ~s due to an internal error or unavailable component",
[ConnStr]),
{stop, {shutdown, Reason}, pstate(State, ProcState1)};
{error, Reason, ProcState1} ->
- rabbit_log_connection:error("MQTT protocol error on connection ~s: ~p~n",
+ rabbit_log_connection:error("MQTT protocol error on connection ~s: ~p",
[ConnStr, Reason]),
{stop, {shutdown, Reason}, pstate(State, ProcState1)};
{error, Error} ->
- rabbit_log_connection:error("MQTT detected a framing error on connection ~s: ~p~n",
+ rabbit_log_connection:error("MQTT detected a framing error on connection ~s: ~p",
[ConnStr, Error]),
{stop, {shutdown, Error}, State};
{stop, ProcState1} ->
{stop, normal, pstate(State, ProcState1)}
end;
{error, {cannot_parse, Error, Stacktrace}} ->
- rabbit_log_connection:error("MQTT cannot parse a frame on connection '~s', unparseable payload: ~p, error: {~p, ~p} ~n",
+ rabbit_log_connection:error("MQTT cannot parse a frame on connection '~s', unparseable payload: ~p, error: {~p, ~p} ",
[ConnStr, Bytes, Error, Stacktrace]),
{stop, {shutdown, Error}, State};
{error, Error} ->
- rabbit_log_connection:error("MQTT detected a framing error on connection ~s: ~p~n",
+ rabbit_log_connection:error("MQTT detected a framing error on connection ~s: ~p",
[ConnStr, Error]),
{stop, {shutdown, Error}, State}
end.
@@ -382,7 +382,7 @@ network_error(closed,
State = #state{conn_name = ConnStr,
proc_state = PState}) ->
MqttConn = PState#proc_state.connection,
- Fmt = "MQTT connection ~p will terminate because peer closed TCP connection~n",
+ Fmt = "MQTT connection ~p will terminate because peer closed TCP connection",
Args = [ConnStr],
case MqttConn of
undefined -> rabbit_log_connection:debug(Fmt, Args);
@@ -393,7 +393,7 @@ network_error(closed,
network_error(Reason,
State = #state{conn_name = ConnStr,
proc_state = PState}) ->
- rabbit_log_connection:info("MQTT detected network error for ~p: ~p~n",
+ rabbit_log_connection:info("MQTT detected network error for ~p: ~p",
[ConnStr, Reason]),
send_will_and_terminate(PState, State).
diff --git a/deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store.erl b/deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store.erl
index 4b3ee95743..38689c439a 100644
--- a/deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store.erl
+++ b/deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mqtt_retained_msg_store).
diff --git a/deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store_dets.erl b/deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store_dets.erl
index 03c5942d35..ecc25ecf32 100644
--- a/deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store_dets.erl
+++ b/deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store_dets.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mqtt_retained_msg_store_dets).
diff --git a/deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store_ets.erl b/deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store_ets.erl
index 9080a6f4cf..1e620c9898 100644
--- a/deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store_ets.erl
+++ b/deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store_ets.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mqtt_retained_msg_store_ets).
diff --git a/deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store_noop.erl b/deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store_noop.erl
index 382ffbc63d..7addbd4a09 100644
--- a/deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store_noop.erl
+++ b/deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store_noop.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mqtt_retained_msg_store_noop).
diff --git a/deps/rabbitmq_mqtt/src/rabbit_mqtt_retainer.erl b/deps/rabbitmq_mqtt/src/rabbit_mqtt_retainer.erl
index 2aa873ecfb..69fad965ca 100644
--- a/deps/rabbitmq_mqtt/src/rabbit_mqtt_retainer.erl
+++ b/deps/rabbitmq_mqtt/src/rabbit_mqtt_retainer.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mqtt_retainer).
diff --git a/deps/rabbitmq_mqtt/src/rabbit_mqtt_retainer_sup.erl b/deps/rabbitmq_mqtt/src/rabbit_mqtt_retainer_sup.erl
index 86b54ce3d7..4d07b784dc 100644
--- a/deps/rabbitmq_mqtt/src/rabbit_mqtt_retainer_sup.erl
+++ b/deps/rabbitmq_mqtt/src/rabbit_mqtt_retainer_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mqtt_retainer_sup).
@@ -42,7 +42,7 @@ delete_child(VHost) ->
init([]) ->
Mod = rabbit_mqtt_retainer:store_module(),
- rabbit_log:info("MQTT retained message store: ~p~n",
+ rabbit_log:info("MQTT retained message store: ~p",
[Mod]),
{ok, {{one_for_one, 5, 5}, child_specs(Mod, rabbit_vhost:list_names())}}.
diff --git a/deps/rabbitmq_mqtt/src/rabbit_mqtt_sup.erl b/deps/rabbitmq_mqtt/src/rabbit_mqtt_sup.erl
index c00be457d3..b842105fe5 100644
--- a/deps/rabbitmq_mqtt/src/rabbit_mqtt_sup.erl
+++ b/deps/rabbitmq_mqtt/src/rabbit_mqtt_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mqtt_sup).
@@ -20,6 +20,7 @@ start_link(Listeners, []) ->
init([{Listeners, SslListeners0}]) ->
NumTcpAcceptors = application:get_env(rabbitmq_mqtt, num_tcp_acceptors, 10),
+ ConcurrentConnsSups = application:get_env(rabbitmq_mqtt, num_conns_sups, 1),
{ok, SocketOpts} = application:get_env(rabbitmq_mqtt, tcp_listen_options),
{SslOpts, NumSslAcceptors, SslListeners}
= case SslListeners0 of
@@ -36,9 +37,10 @@ init([{Listeners, SslListeners0}]) ->
{rabbit_mqtt_retainer_sup, start_link, [{local, rabbit_mqtt_retainer_sup}]},
transient, ?SUPERVISOR_WAIT, supervisor, [rabbit_mqtt_retainer_sup]} |
listener_specs(fun tcp_listener_spec/1,
- [SocketOpts, NumTcpAcceptors], Listeners) ++
+ [SocketOpts, NumTcpAcceptors, ConcurrentConnsSups], Listeners) ++
listener_specs(fun ssl_listener_spec/1,
- [SocketOpts, SslOpts, NumSslAcceptors], SslListeners)]}}.
+ [SocketOpts, SslOpts, NumSslAcceptors, ConcurrentConnsSups],
+ SslListeners)]}}.
stop_listeners() ->
rabbit_networking:stop_ranch_listener_of_protocol(?TCP_PROTOCOL),
@@ -54,17 +56,17 @@ listener_specs(Fun, Args, Listeners) ->
Listener <- Listeners,
Address <- rabbit_networking:tcp_listener_addresses(Listener)].
-tcp_listener_spec([Address, SocketOpts, NumAcceptors]) ->
+tcp_listener_spec([Address, SocketOpts, NumAcceptors, ConcurrentConnsSups]) ->
rabbit_networking:tcp_listener_spec(
rabbit_mqtt_listener_sup, Address, SocketOpts,
transport(?TCP_PROTOCOL), rabbit_mqtt_connection_sup, [],
- mqtt, NumAcceptors, "MQTT TCP listener").
+ mqtt, NumAcceptors, ConcurrentConnsSups, "MQTT TCP listener").
-ssl_listener_spec([Address, SocketOpts, SslOpts, NumAcceptors]) ->
+ssl_listener_spec([Address, SocketOpts, SslOpts, NumAcceptors, ConcurrentConnsSups]) ->
rabbit_networking:tcp_listener_spec(
rabbit_mqtt_listener_sup, Address, SocketOpts ++ SslOpts,
transport(?TLS_PROTOCOL), rabbit_mqtt_connection_sup, [],
- 'mqtt/ssl', NumAcceptors, "MQTT TLS listener").
+ 'mqtt/ssl', NumAcceptors, ConcurrentConnsSups, "MQTT TLS listener").
transport(Protocol) ->
case Protocol of
diff --git a/deps/rabbitmq_mqtt/src/rabbit_mqtt_util.erl b/deps/rabbitmq_mqtt/src/rabbit_mqtt_util.erl
index 0fbe7e8a85..a54e978495 100644
--- a/deps/rabbitmq_mqtt/src/rabbit_mqtt_util.erl
+++ b/deps/rabbitmq_mqtt/src/rabbit_mqtt_util.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_mqtt_util).
diff --git a/deps/rabbitmq_mqtt/test/auth_SUITE.erl b/deps/rabbitmq_mqtt/test/auth_SUITE.erl
index 7368139d95..b494939914 100644
--- a/deps/rabbitmq_mqtt/test/auth_SUITE.erl
+++ b/deps/rabbitmq_mqtt/test/auth_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(auth_SUITE).
-compile([export_all]).
@@ -10,13 +10,15 @@
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-define(CONNECT_TIMEOUT, 10000).
+-define(WAIT_LOG_NO_CRASHES, {["Generic server.*terminating"], fun () -> exit(there_should_be_no_crashes) end}).
all() ->
[{group, anonymous_no_ssl_user},
{group, anonymous_ssl_user},
{group, no_ssl_user},
{group, ssl_user},
- {group, client_id_propagation}].
+ {group, client_id_propagation},
+ {group, authz_handling}].
groups() ->
[{anonymous_ssl_user, [],
@@ -59,6 +61,15 @@ groups() ->
]},
{client_id_propagation, [],
[client_id_propagation]
+ },
+ {authz_handling, [],
+ [no_queue_bind_permission,
+ no_queue_consume_permission,
+ no_queue_consume_permission_on_connect,
+ no_queue_delete_permission,
+ no_queue_declare_permission,
+ no_topic_read_permission,
+ no_topic_write_permission]
}
].
@@ -69,6 +80,23 @@ init_per_suite(Config) ->
end_per_suite(Config) ->
Config.
+init_per_group(authz_handling, Config0) ->
+ User = <<"mqtt-user">>,
+ Password = <<"mqtt-password">>,
+ VHost = <<"mqtt-vhost">>,
+ MqttConfig = {rabbitmq_mqtt, [{default_user, User}
+ ,{default_pass, Password}
+ ,{allow_anonymous, true}
+ ,{vhost, VHost}
+ ,{exchange, <<"amq.topic">>}
+ ]},
+ Config1 = rabbit_ct_helpers:run_setup_steps(rabbit_ct_helpers:merge_app_env(Config0, MqttConfig),
+ rabbit_ct_broker_helpers:setup_steps() ++
+ rabbit_ct_client_helpers:setup_steps()),
+ rabbit_ct_broker_helpers:add_user(Config1, User, Password),
+ rabbit_ct_broker_helpers:add_vhost(Config1, VHost),
+ [Log|_] = rabbit_ct_broker_helpers:rpc(Config1, 0, rabbit, log_locations, []),
+ [{mqtt_user, User}, {mqtt_vhost, VHost}, {mqtt_password, Password}, {log_location, Log}|Config1];
init_per_group(Group, Config) ->
Suffix = rabbit_ct_helpers:testcase_absname(Config, "", "-"),
Config1 = rabbit_ct_helpers:set_config(Config, [
@@ -284,6 +312,26 @@ end_per_testcase(ssl_user_port_vhost_mapping_takes_precedence_over_cert_vhost_ma
ok = rabbit_ct_broker_helpers:delete_vhost(Config, VHostForPortVHostMapping),
ok = rabbit_ct_broker_helpers:clear_global_parameter(Config, mqtt_port_to_vhost_mapping),
rabbit_ct_helpers:testcase_finished(Config, ssl_user_port_vhost_mapping_takes_precedence_over_cert_vhost_mapping);
+end_per_testcase(Testcase, Config) when Testcase == no_queue_bind_permission;
+ Testcase == no_queue_consume_permission;
+ Testcase == no_queue_consume_permission_on_connect;
+ Testcase == no_queue_delete_permission;
+ Testcase == no_queue_declare_permission;
+ Testcase == no_topic_read_permission;
+ Testcase == no_topic_write_permission ->
+    %% So let's wait until the logs are surely flushed
+ Marker = "MQTT_AUTH_SUITE_MARKER",
+ rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_log, error, [Marker]),
+ wait_log(Config, erlang:system_time(microsecond) + 1000000,
+ [{[Marker], fun () -> stop end}]),
+
+ %% Preserve file contents in case some investigation is needed, before truncating.
+ file:copy(?config(log_location, Config), iolist_to_binary([?config(log_location, Config), ".", atom_to_binary(Testcase)])),
+
+    %% And provide an empty log file for the next test in this group
+ file:write_file(?config(log_location, Config), <<>>),
+
+ rabbit_ct_helpers:testcase_finished(Config, Testcase);
end_per_testcase(Testcase, Config) ->
rabbit_ct_helpers:testcase_finished(Config, Testcase).
@@ -399,7 +447,20 @@ connect_ssl(Config) ->
client_id_propagation(Config) ->
ok = rabbit_ct_broker_helpers:add_code_path_to_all_nodes(Config,
- rabbit_auth_backend_mqtt_mock),
+ rabbit_auth_backend_mqtt_mock),
+    %% setup creates the ETS table required for the mqtt auth mock;
+    %% it blocks indefinitely, so we need to spawn it
+ Self = self(),
+ _ = spawn(fun () ->
+ rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_auth_backend_mqtt_mock,
+ setup,
+ [Self])
+ end),
+ %% the setup process will notify us
+ receive
+ ok -> ok
+ after 3000 -> ok
+ end,
ClientId = <<"client-id-propagation">>,
{ok, C} = connect_user(<<"client-id-propagation">>, <<"client-id-propagation">>,
Config, ClientId),
@@ -435,9 +496,237 @@ client_id_propagation(Config) ->
emqttc:disconnect(C).
+%% These tests try to cover all operations that are listed in the
+%% table in https://www.rabbitmq.com/access-control.html#authorisation
+%% and which the MQTT plugin tries to perform.
+%%
+%% MQTT unfortunately doesn't expose any error codes in the protocol,
+%% so the only non-intrusive way to check for the `access_refused`
+%% code path is by checking logs. Every testcase in this group
+%% truncates the log file beforehand, so it is easier to analyze. There
+%% is an additional wait in the corresponding end_per_testcase that
+%% ensures that the logs for the current testcase were completely
+%% flushed and won't contaminate the following tests in this group.
+%%
+%% Each test case then asserts that the logs contain the following:
+%% 1) Handling of the access_refused error in the MQTT reader:
+%%    https://github.com/rabbitmq/rabbitmq-server/blob/69dc53fb8938c7f135bf0002b0904cf28c25c571/deps/rabbitmq_mqtt/src/rabbit_mqtt_reader.erl#L332
+%% 2) A mention of which AMQP operation caused that error (that one is
+%%    somewhat superfluous, it just makes sure that every AMQP operation
+%%    in the MQTT plugin is covered)
+no_queue_bind_permission(Config) ->
+ test_subscribe_permissions_combination(<<".*">>, <<"">>, <<".*">>, Config,
+ ["operation queue.bind caused a channel exception access_refused"]).
+
+no_queue_consume_permission(Config) ->
+ test_subscribe_permissions_combination(<<".*">>, <<".*">>, <<"^amq\\.topic">>, Config,
+ ["operation basic.consume caused a channel exception access_refused"]).
+
+no_queue_delete_permission(Config) ->
+ set_permissions(".*", ".*", ".*", Config),
+ C1 = open_mqtt_connection(Config, [{client_id, <<"no_queue_delete_permission">>}, {clean_sess, false}]),
+ emqttc:sync_subscribe(C1, {<<"test/topic">>, qos1}),
+ emqttc:disconnect(C1),
+ set_permissions(<<>>, ".*", ".*", Config),
+
+    %% And now we have a durable queue that the user doesn't have permission to delete.
+    %% An attempt to establish a clean session should fail.
+
+ expect_server_error(
+ fun () ->
+ connect_user(
+ ?config(mqtt_user, Config), ?config(mqtt_password, Config),
+ Config, ?config(mqtt_user, Config),
+ [{client_id, <<"no_queue_delete_permission">>}, {clean_sess, true}])
+ end),
+
+ wait_log(Config, erlang:system_time(microsecond) + 1000000,
+ [{["Generic server.*terminating"], fun () -> exit(there_should_be_no_crashes) end}
+ ,{["operation queue.delete caused a channel exception access_refused",
+ "MQTT cannot start a clean session: `configure` permission missing for queue"],
+ fun () -> stop end}
+ ]),
+ ok.
+
+no_queue_consume_permission_on_connect(Config) ->
+ set_permissions(".*", ".*", ".*", Config),
+ C1 = open_mqtt_connection(Config, [{client_id, <<"no_queue_consume_permission_on_connect">>}, {clean_sess, false}]),
+ emqttc:sync_subscribe(C1, {<<"test/topic">>, qos1}),
+ emqttc:disconnect(C1),
+ set_permissions(".*", ".*", "^amq\\.topic", Config),
+
+ expect_server_error(
+ fun () ->
+ connect_user(
+ ?config(mqtt_user, Config), ?config(mqtt_password, Config),
+ Config, ?config(mqtt_user, Config),
+ [{client_id, <<"no_queue_consume_permission_on_connect">>}, {clean_sess, false}])
+ end),
+
+ wait_log(Config, erlang:system_time(microsecond) + 1000000,
+ [{["Generic server.*terminating"], fun () -> exit(there_should_be_no_crashes) end}
+ ,{["operation basic.consume caused a channel exception access_refused",
+ "MQTT cannot recover a session, user is missing permissions"],
+ fun () -> stop end}
+ ]),
+ ok.
+
+
+no_queue_declare_permission(Config) ->
+ rabbit_ct_broker_helpers:set_permissions(Config, ?config(mqtt_user, Config), ?config(mqtt_vhost, Config), <<"">>, <<".*">>, <<".*">>),
+ P = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_mqtt),
+ {ok, C} = emqttc:start_link([{host, "localhost"},
+ {port, P},
+ {client_id, <<"no_queue_declare_permission">>},
+ {proto_ver, 3},
+ {logger, info},
+ {username, ?config(mqtt_user, Config)},
+ {password, ?config(mqtt_password, Config)},
+ {clean_sess, false}
+ ]),
+
+ receive
+ {mqttc, _, connected} -> ok
+ after
+ ?CONNECT_TIMEOUT -> exit(emqttc_connection_timeout)
+ end,
+
+ process_flag(trap_exit, true),
+ try emqttc:sync_subscribe(C, <<"test/topic">>) of
+ _ -> exit(this_should_not_succeed)
+ catch
+ exit:{{shutdown, tcp_closed} , _} -> ok
+ end,
+ process_flag(trap_exit, false),
+
+ wait_log(Config, erlang:system_time(microsecond) + 1000000,
+ [{["Generic server.*terminating"], fun () -> exit(there_should_be_no_crashes) end}
+ ,{["MQTT protocol error on connection.*access_refused",
+ "operation queue.declare caused a channel exception access_refused"],
+ fun () -> stop end}
+ ]),
+ ok.
+
+no_topic_read_permission(Config) ->
+ set_permissions(".*", ".*", ".*", Config),
+ set_topic_permissions("^allow-write\\..*", "^allow-read\\..*", Config),
+
+ C = open_mqtt_connection(Config),
+
+ emqttc:sync_subscribe(C, <<"allow-read/some/topic">>), %% Just to be sure that our permission setup is indeed working
+
+ expect_sync_error(fun () ->
+ emqttc:sync_subscribe(C, <<"test/topic">>)
+ end),
+ wait_log(Config, erlang:system_time(microsecond) + 1000000,
+ [?WAIT_LOG_NO_CRASHES
+ ,{["MQTT protocol error on connection.*access_refused",
+ "operation queue.bind caused a channel exception access_refused: access to topic 'test.topic' in exchange 'amq.topic' in vhost 'mqtt-vhost' refused for user 'mqtt-user'"],
+ fun () -> stop end}
+ ]),
+ ok.
+
+no_topic_write_permission(Config) ->
+ set_permissions(".*", ".*", ".*", Config),
+ set_topic_permissions("^allow-write\\..*", "^allow-read\\..*", Config),
+ C = open_mqtt_connection(Config),
+
+ emqttc:sync_publish(C, <<"allow-write/some/topic">>, <<"payload">>, qos1), %% Just to be sure that our permission setup is indeed working
+
+ expect_sync_error(fun () ->
+ emqttc:sync_publish(C, <<"some/other/topic">>, <<"payload">>, qos1)
+ end),
+ wait_log(Config, erlang:system_time(microsecond) + 1000000,
+ [?WAIT_LOG_NO_CRASHES
+ ,{["access to topic 'some.other.topic' in exchange 'amq.topic' in vhost 'mqtt-vhost' refused for user 'mqtt-user'",
+ "MQTT connection.*is closing due to an authorization failure"],
+ fun () -> stop end}
+ ]),
+ ok.
+
+
+expect_server_error(Fun) ->
+ process_flag(trap_exit, true),
+ {ok, C} = Fun(),
+ Result = receive
+ {mqttc, C, connected} -> {error, unexpected_successful_connection};
+ {'EXIT', C, {shutdown,{connack_error,'CONNACK_SERVER'}}} -> ok;
+ {'EXIT', C, {shutdown, Err}} -> {error, {unexpected_error, Err}}
+ after
+ ?CONNECT_TIMEOUT -> {error, emqttc_connection_timeout}
+ end,
+ process_flag(trap_exit, false),
+
+ case Result of
+ ok -> ok;
+ {error, E} -> exit(E)
+ end.
+
+expect_sync_error(Fun) ->
+ process_flag(trap_exit, true),
+ try Fun() of
+ _ -> exit(this_should_not_succeed)
+ catch
+ exit:{{shutdown, tcp_closed} , _} -> ok
+ after
+ process_flag(trap_exit, false)
+ end.
+
+set_topic_permissions(WritePat, ReadPat, Config) ->
+ rabbit_ct_broker_helpers:rpc(Config, 0,
+ rabbit_auth_backend_internal, set_topic_permissions,
+ [?config(mqtt_user, Config), ?config(mqtt_vhost, Config),
+ <<"amq.topic">>, WritePat, ReadPat, <<"acting-user">>]).
+
+
+set_permissions(PermConf, PermWrite, PermRead, Config) ->
+ rabbit_ct_broker_helpers:set_permissions(Config, ?config(mqtt_user, Config), ?config(mqtt_vhost, Config),
+ iolist_to_binary(PermConf),
+ iolist_to_binary(PermWrite),
+ iolist_to_binary(PermRead)).
+
+open_mqtt_connection(Config) ->
+ open_mqtt_connection(Config, []).
+open_mqtt_connection(Config, Opts) ->
+ {ok, C} = connect_user(?config(mqtt_user, Config), ?config(mqtt_password, Config), Config, ?config(mqtt_user, Config), Opts),
+ receive
+ {mqttc, _, connected} -> ok
+ after
+ ?CONNECT_TIMEOUT -> exit(emqttc_connection_timeout)
+ end,
+ C.
+
+test_subscribe_permissions_combination(PermConf, PermWrite, PermRead, Config, ExtraLogChecks) ->
+ rabbit_ct_broker_helpers:set_permissions(Config, ?config(mqtt_user, Config), ?config(mqtt_vhost, Config), PermConf, PermWrite, PermRead),
+
+ {ok, C} = connect_user(?config(mqtt_user, Config), ?config(mqtt_password, Config), Config),
+ receive
+ {mqttc, _, connected} -> ok
+ after
+ ?CONNECT_TIMEOUT -> exit(emqttc_connection_timeout)
+ end,
+
+ process_flag(trap_exit, true),
+ try emqttc:sync_subscribe(C, <<"test/topic">>) of
+ _ -> exit(this_should_not_succeed)
+ catch
+ exit:{{shutdown, tcp_closed} , _} -> ok
+ end,
+
+ process_flag(trap_exit, false),
+
+ wait_log(Config, erlang:system_time(microsecond) + 1000000,
+ [{["Generic server.*terminating"], fun () -> exit(there_should_be_no_crashes) end}
+ ,{["MQTT protocol error on connection.*access_refused"|ExtraLogChecks],
+ fun () -> stop end}
+ ]),
+ ok.
+
connect_user(User, Pass, Config) ->
- connect_user(User, Pass, Config, User).
+ connect_user(User, Pass, Config, User, []).
connect_user(User, Pass, Config, ClientID) ->
+ connect_user(User, Pass, Config, ClientID, []).
+connect_user(User, Pass, Config, ClientID, Opts) ->
Creds = case User of
undefined -> [];
_ -> [{username, User}]
@@ -446,11 +735,13 @@ connect_user(User, Pass, Config, ClientID) ->
_ -> [{password, Pass}]
end,
P = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_mqtt),
- emqttc:start_link([{host, "localhost"},
- {port, P},
- {client_id, ClientID},
- {proto_ver, 3},
- {logger, info}] ++ Creds).
+ emqttc:start_link(Opts
+ ++ [{host, "localhost"},
+ {port, P},
+ {client_id, ClientID},
+ {proto_ver, 3},
+ {logger, info}]
+ ++ Creds).
expect_successful_connection(ConnectFun, Config) ->
rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_core_metrics, reset_auth_attempt_metrics, []),
@@ -491,3 +782,38 @@ expect_authentication_failure(ConnectFun, Config) ->
ok -> ok;
{error, Err} -> exit(Err)
end.
+
+wait_log(Config, Deadline, Clauses) ->
+ {ok, Content} = file:read_file(?config(log_location, Config)),
+ case erlang:system_time(microsecond) of
+ T when T > Deadline ->
+ lists:foreach(fun
+ ({REs, _}) ->
+ Matches = [ io_lib:format("~p - ~s~n", [RE, re:run(Content, RE, [{capture, none}])]) || RE <- REs ],
+ ct:pal("Wait log clause status: ~s", [Matches])
+ end, Clauses),
+ exit(no_log_lines_detected);
+ _ -> ok
+ end,
+ case wait_log_check_clauses(Content, Clauses) of
+ stop -> ok;
+ continue ->
+ timer:sleep(50),
+ wait_log(Config, Deadline, Clauses)
+ end,
+ ok.
+
+wait_log_check_clauses(_, []) ->
+ continue;
+wait_log_check_clauses(Content, [{REs, Fun}|Rest]) ->
+ case multiple_re_match(Content, REs) of
+ true -> Fun();
+ _ ->
+ wait_log_check_clauses(Content, Rest)
+ end.
+
+multiple_re_match(Content, REs) ->
+ lists:all(fun (RE) ->
+ match == re:run(Content, RE, [{capture, none}])
+ end,
+ REs).
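
The permission tests in the hunk above all follow one pattern. The minimal sketch below is not part of the patch and its function name is hypothetical; it only restates the no_queue_consume_permission flow, assuming the helpers added above (set_permissions/4, open_mqtt_connection/1, expect_sync_error/1, wait_log/3) and the ?WAIT_LOG_NO_CRASHES macro used earlier in the suite:

%% Deny `read` on everything except the amq.topic exchange, so the binding
%% succeeds but basic.consume on the subscription queue is refused
%% (mirrors no_queue_consume_permission above).
restricted_operation_sketch(Config) ->
    set_permissions(".*", ".*", "^amq\\.topic", Config),
    C = open_mqtt_connection(Config),
    %% The broker closes the connection on access_refused, which emqttc
    %% surfaces as a tcp_closed exit caught by expect_sync_error/1.
    expect_sync_error(fun () -> emqttc:sync_subscribe(C, <<"test/topic">>) end),
    %% Poll the node log for up to one second: fail fast on gen_server
    %% crashes, stop once the expected refusal has been logged.
    wait_log(Config, erlang:system_time(microsecond) + 1000000,
             [?WAIT_LOG_NO_CRASHES,
              {["operation basic.consume caused a channel exception access_refused"],
               fun () -> stop end}]),
    ok.
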
diff --git a/deps/rabbitmq_mqtt/test/cluster_SUITE.erl b/deps/rabbitmq_mqtt/test/cluster_SUITE.erl
index 941b195ced..72fd0bde7a 100644
--- a/deps/rabbitmq_mqtt/test/cluster_SUITE.erl
+++ b/deps/rabbitmq_mqtt/test/cluster_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(cluster_SUITE).
-compile([export_all]).
diff --git a/deps/rabbitmq_mqtt/test/command_SUITE.erl b/deps/rabbitmq_mqtt/test/command_SUITE.erl
index a15c3789f7..9f30f5e045 100644
--- a/deps/rabbitmq_mqtt/test/command_SUITE.erl
+++ b/deps/rabbitmq_mqtt/test/command_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
-module(command_SUITE).
diff --git a/deps/rabbitmq_mqtt/test/config_schema_SUITE.erl b/deps/rabbitmq_mqtt/test/config_schema_SUITE.erl
index c760148cad..15e965c23e 100644
--- a/deps/rabbitmq_mqtt/test/config_schema_SUITE.erl
+++ b/deps/rabbitmq_mqtt/test/config_schema_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(config_schema_SUITE).
diff --git a/deps/rabbitmq_mqtt/test/java_SUITE.erl b/deps/rabbitmq_mqtt/test/java_SUITE.erl
index 34ec8dac19..079caa4b1f 100644
--- a/deps/rabbitmq_mqtt/test/java_SUITE.erl
+++ b/deps/rabbitmq_mqtt/test/java_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(java_SUITE).
diff --git a/deps/rabbitmq_mqtt/test/java_SUITE_data/src/test/java/com/rabbitmq/mqtt/test/MqttTest.java b/deps/rabbitmq_mqtt/test/java_SUITE_data/src/test/java/com/rabbitmq/mqtt/test/MqttTest.java
index 24c4a0be14..d5386090f4 100644
--- a/deps/rabbitmq_mqtt/test/java_SUITE_data/src/test/java/com/rabbitmq/mqtt/test/MqttTest.java
+++ b/deps/rabbitmq_mqtt/test/java_SUITE_data/src/test/java/com/rabbitmq/mqtt/test/MqttTest.java
@@ -2,7 +2,7 @@
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at https://mozilla.org/MPL/2.0/.
//
-// Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+// Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
//
package com.rabbitmq.mqtt.test;
@@ -705,7 +705,7 @@ public class MqttTest implements MqttCallback {
// let last will propagate after disconnection
waitForTestDelay();
-
+
client2.connect(client2_opts);
client2.setCallback(this);
client2.subscribe(retainedTopic, 1);
diff --git a/deps/rabbitmq_mqtt/test/mqtt_machine_SUITE.erl b/deps/rabbitmq_mqtt/test/mqtt_machine_SUITE.erl
index abdc3506dc..7ce08cbc2c 100644
--- a/deps/rabbitmq_mqtt/test/mqtt_machine_SUITE.erl
+++ b/deps/rabbitmq_mqtt/test/mqtt_machine_SUITE.erl
@@ -21,7 +21,9 @@ all() ->
all_tests() ->
[
- basics
+ basics,
+ machine_upgrade,
+ many_downs
].
groups() ->
@@ -54,17 +56,59 @@ end_per_testcase(_TestCase, _Config) ->
basics(_Config) ->
S0 = mqtt_machine:init(#{}),
ClientId = <<"id1">>,
+ OthPid = spawn(fun () -> ok end),
{S1, ok, _} = mqtt_machine:apply(meta(1), {register, ClientId, self()}, S0),
?assertMatch(#machine_state{client_ids = Ids} when map_size(Ids) == 1, S1),
- {S2, ok, _} = mqtt_machine:apply(meta(2), {register, ClientId, self()}, S1),
- ?assertMatch(#machine_state{client_ids = Ids} when map_size(Ids) == 1, S2),
- {S3, ok, _} = mqtt_machine:apply(meta(3), {down, self(), noproc}, S2),
+ ?assertMatch(#machine_state{pids = Pids} when map_size(Pids) == 1, S1),
+ {S2, ok, _} = mqtt_machine:apply(meta(2), {register, ClientId, OthPid}, S1),
+ ?assertMatch(#machine_state{client_ids = #{ClientId := OthPid} = Ids}
+ when map_size(Ids) == 1, S2),
+ {S3, ok, _} = mqtt_machine:apply(meta(3), {down, OthPid, noproc}, S2),
?assertMatch(#machine_state{client_ids = Ids} when map_size(Ids) == 0, S3),
- {S4, ok, _} = mqtt_machine:apply(meta(3), {unregister, ClientId, self()}, S2),
+ {S4, ok, _} = mqtt_machine:apply(meta(3), {unregister, ClientId, OthPid}, S2),
?assertMatch(#machine_state{client_ids = Ids} when map_size(Ids) == 0, S4),
ok.
+machine_upgrade(_Config) ->
+ S0 = mqtt_machine_v0:init(#{}),
+ ClientId = <<"id1">>,
+ Self = self(),
+ {S1, ok, _} = mqtt_machine_v0:apply(meta(1), {register, ClientId, self()}, S0),
+ ?assertMatch({machine_state, Ids} when map_size(Ids) == 1, S1),
+ {S2, ok, _} = mqtt_machine:apply(meta(2), {machine_version, 0, 1}, S1),
+ ?assertMatch(#machine_state{client_ids = #{ClientId := Self},
+ pids = #{Self := [ClientId]} = Pids}
+ when map_size(Pids) == 1, S2),
+ {S3, ok, _} = mqtt_machine:apply(meta(3), {down, self(), noproc}, S2),
+ ?assertMatch(#machine_state{client_ids = Ids,
+ pids = Pids}
+ when map_size(Ids) == 0 andalso map_size(Pids) == 0, S3),
+
+ ok.
+
+many_downs(_Config) ->
+ S0 = mqtt_machine:init(#{}),
+ Clients = [{list_to_binary(integer_to_list(I)), spawn(fun() -> ok end)}
+ || I <- lists:seq(1, 10000)],
+ S1 = lists:foldl(
+ fun ({ClientId, Pid}, Acc0) ->
+ {Acc, ok, _} = mqtt_machine:apply(meta(1), {register, ClientId, Pid}, Acc0),
+ Acc
+ end, S0, Clients),
+ _ = lists:foldl(
+ fun ({_ClientId, Pid}, Acc0) ->
+ {Acc, ok, _} = mqtt_machine:apply(meta(1), {down, Pid, noproc}, Acc0),
+ Acc
+ end, S1, Clients),
+ _ = lists:foldl(
+ fun ({ClientId, Pid}, Acc0) ->
+ {Acc, ok, _} = mqtt_machine:apply(meta(1), {unregister, ClientId,
+ Pid}, Acc0),
+ Acc
+ end, S0, Clients),
+
+ ok.
%% Utility
meta(Idx) ->
diff --git a/deps/rabbitmq_mqtt/test/processor_SUITE.erl b/deps/rabbitmq_mqtt/test/processor_SUITE.erl
index e38a1d5318..ca49aeaa86 100644
--- a/deps/rabbitmq_mqtt/test/processor_SUITE.erl
+++ b/deps/rabbitmq_mqtt/test/processor_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
-module(processor_SUITE).
diff --git a/deps/rabbitmq_mqtt/test/proxy_protocol_SUITE.erl b/deps/rabbitmq_mqtt/test/proxy_protocol_SUITE.erl
index 5403de23d3..d0075bc605 100644
--- a/deps/rabbitmq_mqtt/test/proxy_protocol_SUITE.erl
+++ b/deps/rabbitmq_mqtt/test/proxy_protocol_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(proxy_protocol_SUITE).
-compile([export_all]).
@@ -68,7 +68,7 @@ proxy_protocol(Config) ->
{ok, _Packet} = gen_tcp:recv(Socket, 0, ?TIMEOUT),
ConnectionName = rabbit_ct_broker_helpers:rpc(Config, 0,
?MODULE, connection_name, []),
- match = re:run(ConnectionName, <<"^192.168.1.1:80 ">>, [{capture, none}]),
+ match = re:run(ConnectionName, <<"^192.168.1.1:80 -> 192.168.1.2:81$">>, [{capture, none}]),
gen_tcp:close(Socket),
ok.
@@ -83,7 +83,7 @@ proxy_protocol_tls(Config) ->
{ok, _Packet} = ssl:recv(SslSocket, 0, ?TIMEOUT),
ConnectionName = rabbit_ct_broker_helpers:rpc(Config, 0,
?MODULE, connection_name, []),
- match = re:run(ConnectionName, <<"^192.168.1.1:80 ">>, [{capture, none}]),
+ match = re:run(ConnectionName, <<"^192.168.1.1:80 -> 192.168.1.2:81$">>, [{capture, none}]),
gen_tcp:close(Socket),
ok.
diff --git a/deps/rabbitmq_mqtt/test/rabbit_auth_backend_mqtt_mock.erl b/deps/rabbitmq_mqtt/test/rabbit_auth_backend_mqtt_mock.erl
index 5272138c6b..b19f4e22af 100644
--- a/deps/rabbitmq_mqtt/test/rabbit_auth_backend_mqtt_mock.erl
+++ b/deps/rabbitmq_mqtt/test/rabbit_auth_backend_mqtt_mock.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2019-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2019-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% A mock authn/authz that records information during calls. For testing purposes only.
@@ -13,13 +13,21 @@
-behaviour(rabbit_authn_backend).
-behaviour(rabbit_authz_backend).
--export([user_login_authentication/2, user_login_authorization/2,
+-export([setup/1,
+ user_login_authentication/2, user_login_authorization/2,
check_vhost_access/3, check_resource_access/4, check_topic_access/4,
state_can_expire/0,
get/1]).
-user_login_authentication(_, AuthProps) ->
+setup(CallerPid) ->
ets:new(?MODULE, [set, public, named_table]),
+ CallerPid ! ok,
+ receive
+ stop -> ok
+ end.
+
+
+user_login_authentication(_, AuthProps) ->
ets:insert(?MODULE, {authentication, AuthProps}),
{ok, #auth_user{username = <<"dummy">>,
tags = [],
diff --git a/deps/rabbitmq_mqtt/test/reader_SUITE.erl b/deps/rabbitmq_mqtt/test/reader_SUITE.erl
index b94fdb5920..08ac8aa094 100644
--- a/deps/rabbitmq_mqtt/test/reader_SUITE.erl
+++ b/deps/rabbitmq_mqtt/test/reader_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(reader_SUITE).
-compile([export_all]).
diff --git a/deps/rabbitmq_mqtt/test/retainer_SUITE.erl b/deps/rabbitmq_mqtt/test/retainer_SUITE.erl
index 22b72a8d87..28ecf2da52 100644
--- a/deps/rabbitmq_mqtt/test/retainer_SUITE.erl
+++ b/deps/rabbitmq_mqtt/test/retainer_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(retainer_SUITE).
-compile([export_all]).
diff --git a/deps/rabbitmq_mqtt/test/util_SUITE.erl b/deps/rabbitmq_mqtt/test/util_SUITE.erl
index 6694498595..ae62d61919 100644
--- a/deps/rabbitmq_mqtt/test/util_SUITE.erl
+++ b/deps/rabbitmq_mqtt/test/util_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
-module(util_SUITE).
-compile([export_all]).
diff --git a/deps/rabbitmq_peer_discovery_aws/BUILD.bazel b/deps/rabbitmq_peer_discovery_aws/BUILD.bazel
new file mode 100644
index 0000000000..d5347c27fe
--- /dev/null
+++ b/deps/rabbitmq_peer_discovery_aws/BUILD.bazel
@@ -0,0 +1,94 @@
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze", "plt")
+load(
+ "//:rabbitmq.bzl",
+ "RABBITMQ_DIALYZER_OPTS",
+ "assert_suites",
+ "broker_for_integration_suites",
+ "rabbitmq_integration_suite",
+ "rabbitmq_lib",
+ "rabbitmq_suite",
+)
+
+APP_NAME = "rabbitmq_peer_discovery_aws"
+
+APP_DESCRIPTION = "AWS-based RabbitMQ peer discovery backend"
+
+EXTRA_APPS = [
+ "inets",
+]
+
+DEPS = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+ "//deps/rabbitmq_peer_discovery_common:bazel_erlang_lib",
+]
+
+RUNTIME_DEPS = [
+ "//deps/rabbitmq_aws:bazel_erlang_lib",
+ "//deps/rabbit:bazel_erlang_lib",
+]
+
+rabbitmq_lib(
+ app_description = APP_DESCRIPTION,
+ app_name = APP_NAME,
+ extra_apps = EXTRA_APPS,
+ runtime_deps = RUNTIME_DEPS,
+ deps = DEPS,
+)
+
+xref(tags = ["xref"])
+
+plt(
+ name = "base_plt",
+ apps = EXTRA_APPS,
+ plt = "//:base_plt",
+)
+
+dialyze(
+ dialyzer_opts = RABBITMQ_DIALYZER_OPTS,
+ plt = ":base_plt",
+ tags = ["dialyze"],
+)
+
+broker_for_integration_suites()
+
+PACKAGE = "deps/rabbitmq_peer_discovery_aws"
+
+suites = [
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "config_schema_SUITE",
+ size = "medium",
+ ),
+ # NOTE: integration_SUITE requires AWS credentials and a Docker image.
+ # They can be supplied with the following bazel args:
+ # --test_env AWS_ACCESS_KEY_ID=... --test_env AWS_SECRET_ACCESS_KEY=...
+ # --test_env RABBITMQ_IMAGE=...
+ rabbitmq_suite(
+ name = "integration_SUITE",
+ size = "large",
+ additional_srcs = [
+ "test/aws_ecs_util.erl",
+ ],
+ tags = [
+ "aws",
+ "external",
+ ],
+ deps = [
+ "//deps/rabbitmq_ct_helpers:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_suite(
+ name = "unit_SUITE",
+ size = "small",
+ runtime_deps = [
+ "@meck//:bazel_erlang_lib",
+ ],
+ ),
+]
+
+assert_suites(
+ suites,
+ glob(["test/**/*_SUITE.erl"]),
+)
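
For readers of the note on integration_SUITE above, the intended invocation looks roughly like the following; the target label is inferred from PACKAGE and the suite name, so treat it as an illustration rather than a documented command:

# Illustrative only: label inferred from PACKAGE = "deps/rabbitmq_peer_discovery_aws"
# and the suite name; the credential placeholders are taken from the comment above.
bazel test //deps/rabbitmq_peer_discovery_aws:integration_SUITE \
    --test_env AWS_ACCESS_KEY_ID=... \
    --test_env AWS_SECRET_ACCESS_KEY=... \
    --test_env RABBITMQ_IMAGE=...
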
diff --git a/deps/rabbitmq_peer_discovery_aws/Makefile b/deps/rabbitmq_peer_discovery_aws/Makefile
index 411afa867a..6ec7bdddba 100644
--- a/deps/rabbitmq_peer_discovery_aws/Makefile
+++ b/deps/rabbitmq_peer_discovery_aws/Makefile
@@ -3,7 +3,7 @@ PROJECT_DESCRIPTION = AWS-based RabbitMQ peer discovery backend
LOCAL_DEPS = inets
DEPS = rabbit_common rabbitmq_peer_discovery_common rabbitmq_aws rabbit
-TEST_DEPS = rabbitmq_ct_helpers rabbitmq_ct_client_helpers ct_helper
+TEST_DEPS = rabbitmq_ct_helpers rabbitmq_ct_client_helpers ct_helper meck
dep_ct_helper = git https://github.com/extend/ct_helper.git master
DEP_EARLY_PLUGINS = rabbit_common/mk/rabbitmq-early-plugin.mk
@@ -15,5 +15,5 @@ DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
ERLANG_MK_COMMIT = rabbitmq-tmp
-include rabbitmq-components.mk
-include erlang.mk
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
diff --git a/deps/rabbitmq_peer_discovery_aws/README.md b/deps/rabbitmq_peer_discovery_aws/README.md
index 3e755de69a..1aaf661b91 100644
--- a/deps/rabbitmq_peer_discovery_aws/README.md
+++ b/deps/rabbitmq_peer_discovery_aws/README.md
@@ -53,4 +53,4 @@ See [CONTRIBUTING.md](./CONTRIBUTING.md) and our [development process overview](
## Copyright
-(c) 2007-2020 VMware, Inc. or its affiliates.
+(c) 2007-2021 VMware, Inc. or its affiliates.
diff --git a/deps/rabbitmq_peer_discovery_aws/erlang.mk b/deps/rabbitmq_peer_discovery_aws/erlang.mk
deleted file mode 100644
index fce4be0b0a..0000000000
--- a/deps/rabbitmq_peer_discovery_aws/erlang.mk
+++ /dev/null
@@ -1,7808 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
-ERLANG_MK_WITHOUT =
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
-
-# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
-
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
-
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of statc BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another a key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstact time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simplify writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating Github-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = The Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = erlang library for JSON Web Token
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for the GraphicsMagick/ImageMagick command line tool.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple, non-intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is a pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = redis erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record-based Erlang driver for MongoDB with GridFS support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += esh_mk
-pkg_esh_mk_name = esh_mk
-pkg_esh_mk_description = esh template engine plugin for erlang.mk
-pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
-pkg_esh_mk_fetch = git
-pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
-pkg_esh_mk_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple Erlang testing library that provides TAP-compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = TOML language erlang parser
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = FIX protocol (http://fixprotocol.org/) implementation.
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - an Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = Guard expression helpers for Erlang, implemented as a parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
-pkg_gun_homepage = http://ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang irc client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding and decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library for efficiently decoding and encoding JSON, written in C.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-Erlang, open-source implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = erlang driver for rethinkdb
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = library to handle mimetypes
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang OAuth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transformer for using new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between record and proplist
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = automatic Erlang node discovery via redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = pipelined erlang redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang Rest Client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy as nif for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an ID generator for message services.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elastic Search
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX style broken HTML parser in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = transit format for erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU based collation Erlang module
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtension for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = Tiny Erlang app that works in conjunction with statsderl to generate Erlang VM information for Graphite logs.
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = a simple erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler svn repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired by rebar xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based yaml loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = ZAB protocol implemented in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
-
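A minimal usage sketch of the search target above, assuming a project that includes this copy of erlang.mk: passing q= filters the package index by a case-insensitive match against package names and descriptions, while omitting it prints every entry.

    # List every known package.
    make search
    # Only packages whose name or description mentions "websocket".
    make search q=websocket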
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
-
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
-
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# They both use the core_dep_plugin macro.
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
-
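As an illustration of the core_dep_plugin expansion above, a project Makefile can register an early plugin either by dependency name (erlang.mk then includes that dependency's early-plugins.mk) or by an explicit path inside a dependency. The names below are placeholders, not real packages.

    # Hypothetical dependency shipping an early-plugins.mk at its root.
    BUILD_DEPS += my_plugins
    DEP_EARLY_PLUGINS = my_plugins
    # Equivalent explicit form pointing at a file inside the dependency:
    # DEP_EARLY_PLUGINS = my_plugins/mk/early-plugins.mk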
-# Query functions.
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
-
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
-
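To make the query functions concrete, a typical dependency declaration in a project Makefile provides the three words these functions read (fetch method, repository, commit); the package and version below are only illustrative.

    DEPS = cowlib
    dep_cowlib = git https://github.com/ninenines/cowlib 2.12.1
    # query_fetch_method -> git
    # query_repo         -> https://github.com/ninenines/cowlib
    # query_version      -> 2.12.1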
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly we don't want to include it here,
-# otherwise it'll be treated both as an app and as a top-level project.
-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
-
-export NO_AUTOPATCH
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
-
-# Optimization: don't recompile deps unless truly necessary.
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create an ebin directory for all apps to make sure Erlang recognizes them
-# as proper OTP applications when using -include_lib. This is a temporary
-# fix; a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
-
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies once they have been compiled.
-# Developers working on the top-level project and some of its
-# dependencies at the same time may want to change this behavior.
-# There are two solutions (see the usage sketch after this block):
-#     1. Set `FULL=1` so that all dependencies are visited and
-#        recursively recompiled if necessary.
-#     2. Set `FORCE_REBUILD=` to the specific list of dependencies that
-#        should be recompiled (instead of the whole set).
-
-FORCE_REBUILD ?=
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
-
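A usage sketch of the two options described above (dependency names are illustrative):

    # Visit and, if necessary, recompile every dependency.
    make FULL=1
    # Only force the listed dependencies to be rebuilt.
    make FORCE_REBUILD="cowlib ranch"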
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
-
-# Deps related targets.
-
-# @todo Rename GNUmakefile and makefile to Makefile first, if they exist.
-# While the makefile could be named GNUmakefile or makefile,
-# in practice only Makefile has been needed so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
-# if given. Do it for all 3 possible Makefile file names.
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
-
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-# Hex only has a package version. No need to look in the Erlang.mk packages.
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
-
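An example of a Hex dependency matching the fetch rule above: the second word of the dep_* variable is the package version and an optional third word overrides the package name on hex.pm (values are illustrative).

    DEPS = jsx
    dep_jsx = hex 3.1.0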
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility with older Erlang.mk configurations.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
-
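For reference, the deprecated format accepted by dep_fetch_legacy is a repository URL followed by an optional commit, defaulting to master; the values below are illustrative only.

    dep_cowboy = https://github.com/ninenines/cowboy 2.9.0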
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
-
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- Output0 = [
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ],
- Output = case "é" of
- [233] -> unicode:characters_to_binary(Output0);
- _ -> Output0
- end,
- ok = file:write_file("$(1)", Output),
- halt()
-endef
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
- echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
-
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
- @:
-
-test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
-test_erlc_verbose_2 = set -x;
-test_erlc_verbose = $(test_erlc_verbose_$(V))
-
-define compile_test_erl
- $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(1)
-endef
-
-ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
-$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
- $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want to fail due to
-# warnings when used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
-
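Assuming a project that includes this file, the target above is invoked directly and writes a rebar-compatible configuration to the project root:

    make rebar.config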
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
- " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
- " list-templates List available templates"
-
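-# For example, starting from a copy of erlang.mk in an otherwise empty
-# directory (hypothetical template and module names):
-#
-#   $ make -f erlang.mk bootstrap bootstrap-rel
-#   $ make list-templates
-#   $ make new t=gen_server n=my_worker
-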
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
-
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
-
-list-templates:
- $(verbose) @echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code. The "gcc" MSYS2 package also doesn't.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
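-# The generated env.mk records where the Erlang headers and libraries live,
-# for example (hypothetical paths; they depend on the local Erlang/OTP install):
-#
-#   ERTS_INCLUDE_DIR ?= /usr/lib/erlang/erts-12.2/include/
-#   ERL_INTERFACE_INCLUDE_DIR ?= /usr/lib/erlang/lib/erl_interface-5.2.1/include
-#   ERL_INTERFACE_LIB_DIR ?= /usr/lib/erlang/lib/erl_interface-5.2.1/lib
-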
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
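-
-# For example (hypothetical NIF name):
-#
-#   $ make new-nif n=my_nif
-#   $ make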
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
- "The CI_OTP variable must be defined with the Erlang versions" \
- "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
-
-# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifdef CONCUERROR_TESTS
-
-.PHONY: concuerror distclean-concuerror
-
-# Configuration
-
-CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
-CONCUERROR_OPTS ?=
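-
-# CONCUERROR_TESTS takes module:function pairs, for example (hypothetical
-# module and function names):
-#
-#   CONCUERROR_TESTS = my_module:my_test other_module:race_test
-#
-# Each pair gets its own target, here concuerror-my_module-my_test and
-# concuerror-other_module-race_test.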
-
-# Core targets.
-
-check:: concuerror
-
-ifndef KEEP_LOGS
-distclean:: distclean-concuerror
-endif
-
-# Plugin-specific targets.
-
-$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
- $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
- $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
-
-$(CONCUERROR_LOGS_DIR):
- $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
-
-define concuerror_html_report
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="utf-8">
-<title>Concuerror HTML report</title>
-</head>
-<body>
-<h1>Concuerror HTML report</h1>
-<p>Generated on $(concuerror_date)</p>
-<ul>
-$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
-</ul>
-</body>
-</html>
-endef
-
-concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
- $(eval concuerror_date := $(shell date))
- $(eval concuerror_targets := $^)
- $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
-
-define concuerror_target
-.PHONY: concuerror-$1-$2
-
-concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
- $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
- --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
- -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
- $$(CONCUERROR_OPTS) -m $1 -t $2
-endef
-
-$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
-
-distclean-concuerror:
- $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
-
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ct apps-ct distclean-ct
-
-# Configuration.
-
-CT_OPTS ?=
-
-ifneq ($(wildcard $(TEST_DIR)),)
-ifndef CT_SUITES
-CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
-endif
-endif
-CT_SUITES ?=
-CT_LOGS_DIR ?= $(CURDIR)/logs
-
-# Core targets.
-
-tests:: ct
-
-ifndef KEEP_LOGS
-distclean:: distclean-ct
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Common_test targets:" \
- " ct Run all the common_test suites for this project" \
- "" \
- "All your common_test suites have an associated target." \
- "A suite named http_SUITE can be run using the ct-http target."
-
-# Plugin-specific targets.
-
-CT_RUN = ct_run \
- -no_auto_compile \
- -noinput \
- -pa $(CURDIR)/ebin $(TEST_DIR) \
- -dir $(TEST_DIR) \
- -logdir $(CT_LOGS_DIR)
-
-ifeq ($(CT_SUITES),)
-ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
-else
-# We do not run tests if we are in an apps/* directory that has no test directory.
-ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
-ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
-endif
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-define ct_app_target
-apps-ct-$1: test-build
- $$(MAKE) -C $1 ct IS_APP=1
-endef
-
-$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
-
-apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
-endif
-
-ifdef t
-ifeq (,$(findstring :,$t))
-CT_EXTRA = -group $t
-else
-t_words = $(subst :, ,$t)
-CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
-endif
-else
-ifdef c
-CT_EXTRA = -case $c
-else
-CT_EXTRA =
-endif
-endif
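-
-# For example, for a suite named http_SUITE (hypothetical group and case names):
-#
-#   $ make ct-http                   # run the whole suite
-#   $ make ct-http t=tcp             # run only the tcp group
-#   $ make ct-http t=tcp:keepalive   # run only the keepalive case of that group
-#   $ make ct-http c=keepalive       # run only the keepalive case, no group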
-
-define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
-endef
-
-$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
-
-distclean-ct:
- $(gen_verbose) rm -rf $(CT_LOGS_DIR)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: plt distclean-plt dialyze
-
-# Configuration.
-
-DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
-export DIALYZER_PLT
-
-PLT_APPS ?=
-DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-DIALYZER_PLT_OPTS ?=
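-
-# For example (hypothetical applications and warning flags):
-#
-#   PLT_APPS = crypto public_key ssl
-#   DIALYZER_OPTS = -Werror_handling -Wunmatched_returns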
-
-# Core targets.
-
-check:: dialyze
-
-distclean:: distclean-plt
-
-help::
- $(verbose) printf "%s\n" "" \
- "Dialyzer targets:" \
- " plt Build a PLT file for this project" \
- " dialyze Analyze the project using Dialyzer"
-
-# Plugin-specific targets.
-
-define filter_opts.erl
- Opts = init:get_plain_arguments(),
- {Filtered, _} = lists:foldl(fun
- (O, {Os, true}) -> {[O|Os], false};
- (O = "-D", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-I", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-pa", {Os, _}) -> {[O|Os], true};
- (_, Acc) -> Acc
- end, {[], false}, Opts),
- io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
- halt().
-endef
-
-# DIALYZER_PLT is a variable understood directly by Dialyzer.
-#
-# We append the path to erts at the end of the PLT. This works
-# because the PLT file is in the external term format and the
-# function binary_to_term/1 ignores any trailing data.
-$(DIALYZER_PLT): deps app
- $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
- while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
- $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
- erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
- $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
-
-plt: $(DIALYZER_PLT)
-
-distclean-plt:
- $(gen_verbose) rm -f $(DIALYZER_PLT)
-
-ifneq ($(wildcard $(DIALYZER_PLT)),)
-dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
- $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
- grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
- rm $(DIALYZER_PLT); \
- $(MAKE) plt; \
- fi
-else
-dialyze: $(DIALYZER_PLT)
-endif
- $(verbose) dialyzer --no_native `$(ERL) \
- -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
- -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
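-
-# For example, with the default settings a template at templates/user/profile.dtl
-# is compiled to the module profile_dtl, or to user_profile_dtl when
-# DTL_FULL_PATH is set.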
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
- " escript Build an executable escript archive" \
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
- $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: eunit apps-eunit
-
-# Configuration
-
-EUNIT_OPTS ?=
-EUNIT_ERL_OPTS ?=
-
-# Core targets.
-
-tests:: eunit
-
-help::
- $(verbose) printf "%s\n" "" \
- "EUnit targets:" \
- " eunit Run all the EUnit tests for this project"
-
-# Plugin-specific targets.
-
-define eunit.erl
- $(call cover.erl)
- CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
- ok -> ok;
- error -> halt(2)
- end,
- CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
- halt()
-endef
-
-EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
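-
-# For example (hypothetical module and test function names):
-#
-#   $ make eunit t=my_module          # run the EUnit tests of my_module
-#   $ make eunit t=my_module:a_test   # run the a_test/0 test function only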
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
-else
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
-endif
-else
-EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
-EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
-
-EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
- $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
-
-eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
-ifneq ($(wildcard src/ $(TEST_DIR)),)
- $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-apps-eunit: test-build
- $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
- [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
- exit $$eunit_retcode
-endif
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper)
-.PHONY: proper
-
-# Targets.
-
-tests:: proper
-
-define proper_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- Module = fun(M) ->
- [true] =:= lists:usort([
- case atom_to_list(F) of
- "prop_" ++ _ ->
- io:format("Testing ~p:~p/0~n", [M, F]),
- proper:quickcheck(M:F(), nocolors);
- _ ->
- true
- end
- || {F, 0} <- M:module_info(exports)])
- end,
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]);
- module -> Module($(2));
- function -> proper:quickcheck($(2), nocolors)
- end,
- CoverExport("$(COVER_DATA_DIR)/proper.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-proper: test-build cover-data-dir
- $(verbose) $(call erlang,$(call proper_check.erl,module,$(t)))
-else
-proper: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)()))
-endif
-else
-proper: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Verbosity.
-
-proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
-proto_verbose = $(proto_verbose_$(V))
-
-# Core targets.
-
-ifneq ($(wildcard src/),)
-ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
-PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
-ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
-
-ifeq ($(PROTO_FILES),)
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
- $(verbose) :
-else
-# Rebuild proto files when the Makefile changes.
-# We exclude $(PROJECT).d to avoid a circular dependency.
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(PROTO_FILES); \
- fi
- $(verbose) touch $@
-
-$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
-endif
-
-ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
-define compile_proto.erl
- [begin
- protobuffs_compile:generate_source(F, [
- {output_include_dir, "./include"},
- {output_src_dir, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-else
-define compile_proto.erl
- [begin
- gpb_compile:file(F, [
- {include_as_lib, true},
- {module_name_suffix, "_pb"},
- {o_hrl, "./include"},
- {o_erl, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-endif
-
-ifneq ($(PROTO_FILES),)
-$(PROJECT).d:: $(PROTO_FILES)
- $(verbose) mkdir -p ebin/ include/
- $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
-endif
-endif
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
-
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
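-
-# For example (hypothetical application name):
-#
-#   SHELL_OPTS = -eval 'application:ensure_all_started(my_app, permanent)'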
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
- " shell Run an erlang shell with SHELL_OPTS or reasonable default"
-
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
-
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
- "ReST sources and the 'conf.py' file are expected in the directory pointed to by" \
- "SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists the formats to build (only" \
- "the 'html' format is generated by default); the target directory can be specified by" \
- 'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
- "Additional Sphinx options can be set in SPHINX_OPTS."
-
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
-
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
-.PHONY: triq
-
-# Targets.
-
-tests:: triq
-
-define triq_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
- module -> triq:check($(2));
- function -> triq:check($(2))
- end,
- CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-triq: test-build cover-data-dir
- $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
-else
-triq: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
-endif
-else
-triq: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
- ' xref Run Xrefr using $$XREF_CONFIG as config file if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-COVER_REPORT_DIR ?= cover
-COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
-
-ifdef COVER
-COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
-COVER_DEPS ?=
-endif
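-
-# For example, to collect coverage while running the test suites and then
-# merge the resulting coverdata files:
-#
-#   $ make tests COVER=1
-#   $ make all.coverdata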
-
-# Code coverage for Common Test.
-
-ifdef COVER
-ifdef CT_RUN
-ifneq ($(wildcard $(TEST_DIR)),)
-test-build:: $(TEST_DIR)/ct.cover.spec
-
-$(TEST_DIR)/ct.cover.spec: cover-data-dir
- $(gen_verbose) printf "%s\n" \
- "{incl_app, '$(PROJECT)', details}." \
- "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
- $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
- $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
-
-CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
-endif
-endif
-endif
-
-# Code coverage for other tools.
-
-ifdef COVER
-define cover.erl
- CoverSetup = fun() ->
- Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
- $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
- $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
- end
- end || Dir <- Dirs]
- end,
- CoverExport = fun(Filename) -> cover:export(Filename) end,
-endef
-else
-define cover.erl
- CoverSetup = fun() -> ok end,
- CoverExport = fun(_) -> ok end,
-endef
-endif
-
-# Core targets
-
-ifdef COVER
-ifneq ($(COVER_REPORT_DIR),)
-tests::
- $(verbose) $(MAKE) --no-print-directory cover-report
-endif
-
-cover-data-dir: | $(COVER_DATA_DIR)
-
-$(COVER_DATA_DIR):
- $(verbose) mkdir -p $(COVER_DATA_DIR)
-else
-cover-data-dir:
-endif
-
-clean:: coverdata-clean
-
-ifneq ($(COVER_REPORT_DIR),)
-distclean:: cover-report-clean
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Cover targets:" \
- " cover-report Generate an HTML coverage report from previously collected" \
- " cover data." \
- " all.coverdata Merge all coverdata files into all.coverdata." \
- "" \
- "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
- "target tests additionally generates an HTML coverage report from the combined" \
- "coverdata files from each of these testing tools. HTML reports can be disabled" \
- "by setting COVER_REPORT_DIR to empty."
-
-# Plugin specific targets
-
-COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
-
-.PHONY: coverdata-clean
-coverdata-clean:
- $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
-
-# Merge all coverdata files into one.
-define cover_export.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
-endef
-
-all.coverdata: $(COVERDATA) cover-data-dir
- $(gen_verbose) $(call erlang,$(cover_export.erl))
-
-# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
-# empty if you want the coverdata files but not the HTML report.
-ifneq ($(COVER_REPORT_DIR),)
-
-.PHONY: cover-report-clean cover-report
-
-cover-report-clean:
- $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
-ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
- $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
-endif
-
-ifeq ($(COVERDATA),)
-cover-report:
-else
-
-# Modules which include eunit.hrl always contain one line without coverage
-# because eunit defines test/0 which is never called. We compensate for this.
-EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
- grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
- | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
-
-define cover_report.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- Ms = cover:imported_modules(),
- [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
- ++ ".COVER.html", [html]) || M <- Ms],
- Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
- EunitHrlMods = [$(EUNIT_HRL_MODS)],
- Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
- true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
- TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
- TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
- Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
- TotalPerc = Perc(TotalY, TotalN),
- {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
- io:format(F, "<!DOCTYPE html><html>~n"
- "<head><meta charset=\"UTF-8\">~n"
- "<title>Coverage report</title></head>~n"
- "<body>~n", []),
- io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
- io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
- [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
- "<td>~p%</td></tr>~n",
- [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
- How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
- Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
- io:format(F, "</table>~n"
- "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
- "</body></html>", [How, Date]),
- halt().
-endef
-
-cover-report:
- $(verbose) mkdir -p $(COVER_REPORT_DIR)
- $(gen_verbose) $(call erlang,$(cover_report.erl))
-
-endif
-endif # ifneq ($(COVER_REPORT_DIR),)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
-
-endif
-endif
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
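-
-# For example (hypothetical dependency names):
-#
-#   DEP_PLUGINS = cowboy              # loads plugins.mk from the cowboy dependency
-#   DEP_PLUGINS = my_dep/mk/dist.mk   # loads that specific file from my_dep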
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
-
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
-
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are also included
-# regardless of the type of dependencies requested.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
-
-# Allow fetch-deps to be used with $(DEP_TYPES) to fetch multiple types of
-# dependencies with a single target.
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
-
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
diff --git a/deps/rabbitmq_peer_discovery_aws/priv/schema/rabbitmq_peer_discovery_aws.schema b/deps/rabbitmq_peer_discovery_aws/priv/schema/rabbitmq_peer_discovery_aws.schema
index 85a55404fd..63e76c7a1f 100644
--- a/deps/rabbitmq_peer_discovery_aws/priv/schema/rabbitmq_peer_discovery_aws.schema
+++ b/deps/rabbitmq_peer_discovery_aws/priv/schema/rabbitmq_peer_discovery_aws.schema
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% region
diff --git a/deps/rabbitmq_peer_discovery_aws/rabbitmq-components.mk b/deps/rabbitmq_peer_discovery_aws/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/rabbitmq_peer_discovery_aws/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Define default goal to `all` because this file defines some targets
-# before the inclusion of erlang.mk leading to the wrong target becoming
-# the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
-
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to checkout branches which match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch or fallback to `stable` or `master` whichever was the
-# base of the topic branch.
-
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
-
-# Third-party dependencies version pinning.
-#
-# We do that in this file, which is copied in all projects, to ensure
-# all projects use the same versions. It avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# Erlang.mk does not rebuild dependencies by default, once they were
-# compiled once, except for those listed in the `$(FORCE_REBUILD)`
-# variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this eases
-# the work on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project
-# repository URL, if it's a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name is replaced by the
-# target's properties:
-# eg. rabbitmq-common is replaced by rabbitmq-codegen
-# eg. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fallback to RabbitMQ
-# upstream which is GitHub.
-
-# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following pattern:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
-
-# Macro to replace both the project's name (eg. "rabbit_common") and
-# repository name (eg. "rabbitmq-common") by the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespaces in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level's one.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is a coincidence that we found a
-# `rabbitmq-components.mk` up upper directories.
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
diff --git a/deps/rabbitmq_peer_discovery_aws/src/rabbit_peer_discovery_aws.erl b/deps/rabbitmq_peer_discovery_aws/src/rabbit_peer_discovery_aws.erl
index a814b0ac2f..a668f5843d 100644
--- a/deps/rabbitmq_peer_discovery_aws/src/rabbit_peer_discovery_aws.erl
+++ b/deps/rabbitmq_peer_discovery_aws/src/rabbit_peer_discovery_aws.erl
@@ -4,7 +4,7 @@
%%
%% The Initial Developer of the Original Code is AWeber Communications.
%% Copyright (c) 2015-2016 AWeber Communications
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_peer_discovery_aws).
@@ -23,16 +23,6 @@
-compile(export_all).
-endif.
-% rabbitmq/rabbitmq-peer-discovery-aws#25
-
-% Note: this timeout must not be greater than the default
-% gen_server:call timeout of 5000ms. Note that `timeout`,
-% when set, is used as the connect and then request timeout
-% by `httpc`
--define(INSTANCE_ID_TIMEOUT, 2250).
--define(INSTANCE_ID_URL,
- "http://169.254.169.254/latest/meta-data/instance-id").
-
-define(CONFIG_MODULE, rabbit_peer_discovery_config).
-define(UTIL_MODULE, rabbit_peer_discovery_util).
@@ -91,14 +81,17 @@ init() ->
list_nodes() ->
M = ?CONFIG_MODULE:config_map(?BACKEND_CONFIG_KEY),
{ok, _} = application:ensure_all_started(rabbitmq_aws),
- rabbit_log:debug("Started rabbitmq_aws"),
rabbit_log:debug("Will use AWS access key of '~s'", [get_config_key(aws_access_key, M)]),
ok = maybe_set_region(get_config_key(aws_ec2_region, M)),
ok = maybe_set_credentials(get_config_key(aws_access_key, M),
get_config_key(aws_secret_key, M)),
case get_config_key(aws_autoscaling, M) of
true ->
- get_autoscaling_group_node_list(instance_id(), get_tags());
+ case rabbitmq_aws_config:instance_id() of
+              {ok, InstanceId} -> rabbit_log:debug("EC2 instance ID determined from the metadata service: ~p", [InstanceId]),
+ get_autoscaling_group_node_list(InstanceId, get_tags());
+ _ -> {error, "Failed to determine EC2 instance ID from metadata service"}
+ end;
false ->
get_node_list_from_tags(get_tags())
end.
@@ -106,9 +99,7 @@ list_nodes() ->
-spec supports_registration() -> boolean().
supports_registration() ->
- %% see rabbitmq-peer-discovery-aws#17
- true.
-
+ false.
-spec register() -> ok.
register() ->
@@ -123,19 +114,44 @@ unregister() ->
post_registration() ->
ok.
--spec lock(Node :: atom()) -> not_supported.
-
-lock(_Node) ->
- not_supported.
+-spec lock(Node :: node()) -> {ok, {{ResourceId :: string(), LockRequesterId :: node()}, Nodes :: [node()]}} |
+ {error, Reason :: string()}.
--spec unlock(Data :: term()) -> ok.
-
-unlock(_Data) ->
- ok.
+lock(Node) ->
+    %% Call list_nodes/0 via ?MODULE so that meck can mock the function in tests
+ case ?MODULE:list_nodes() of
+ {ok, {[], disc}} ->
+ {error, "Cannot lock since no nodes got discovered."};
+ {ok, {Nodes, disc}} ->
+ case lists:member(Node, Nodes) of
+ true ->
+                    rabbit_log:info("Will try to acquire a lock, connecting to nodes ~p", [Nodes]),
+ LockId = rabbit_nodes:lock_id(Node),
+ Retries = rabbit_nodes:lock_retries(),
+ case global:set_lock(LockId, Nodes, Retries) of
+ true ->
+ {ok, {LockId, Nodes}};
+ false ->
+                            {error, lists:flatten(io_lib:format("Acquiring lock taking too long, bailing out after ~b retries", [Retries]))}
+ end;
+ false ->
+ %% Don't try to acquire the global lock when our own node is not discoverable by peers.
+ %% We shouldn't run into this branch because our node is running and should have been discovered.
+ {error, lists:flatten(io_lib:format("Local node ~s is not part of discovered nodes ~p", [Node, Nodes]))}
+ end;
+ {error, _} = Error ->
+ Error
+ end.
+
+-spec unlock({{ResourceId :: string(), LockRequesterId :: node()}, Nodes :: [node()]}) -> 'ok'.
+unlock({LockId, Nodes}) ->
+ global:del_lock(LockId, Nodes),
+ ok.
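+
+%% A minimal usage sketch of the lock/unlock pair above (illustrative only;
+%% join_cluster/0 stands in for whatever work the caller performs while
+%% holding the lock and is not a real function in this module). The term
+%% returned inside {ok, ...} is passed back verbatim to unlock/1:
+%%
+%%   case lock(node()) of
+%%       {ok, LockData}  -> try join_cluster() after unlock(LockData) end;
+%%       {error, Reason} -> {error, Reason}
+%%   end
+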
%%
%% Implementation
%%
+
-spec get_config_key(Key :: atom(), Map :: #{atom() => peer_discovery_config_value()})
-> peer_discovery_config_value().
@@ -160,14 +176,18 @@ maybe_set_credentials(AccessKey, SecretKey) ->
%% @doc Set the region from the configuration value, if it was set.
%% @end
%%
-maybe_set_region("undefined") -> ok;
+maybe_set_region("undefined") ->
+ case rabbitmq_aws_config:region() of
+ {ok, Region} -> maybe_set_region(Region);
+ _ -> ok
+ end;
maybe_set_region(Value) ->
rabbit_log:debug("Setting AWS region to ~p", [Value]),
rabbitmq_aws:set_region(Value).
get_autoscaling_group_node_list(error, _) ->
rabbit_log:warning("Cannot discover any nodes: failed to fetch this node's EC2 "
- "instance id from ~s", [?INSTANCE_ID_URL]),
+ "instance id from ~s", [rabbitmq_aws_config:instance_id_url()]),
{ok, {[], disc}};
get_autoscaling_group_node_list(Instance, Tag) ->
case get_all_autoscaling_instances([]) of
@@ -222,7 +242,7 @@ get_all_autoscaling_instances(Accum, NextToken) ->
fetch_all_autoscaling_instances(QArgs, Accum) ->
Path = "/?" ++ rabbitmq_aws_urilib:build_query_string(QArgs),
- case api_get_request("autoscaling", Path) of
+ case rabbitmq_aws:api_get_request("autoscaling", Path) of
{ok, Payload} ->
Instances = flatten_autoscaling_datastructure(Payload),
NextToken = get_next_token(Payload),
@@ -244,16 +264,6 @@ get_next_token(Value) ->
NextToken = proplists:get_value("NextToken", Result),
NextToken.
-api_get_request(Service, Path) ->
- case rabbitmq_aws:get(Service, Path) of
- {ok, {_Headers, Payload}} ->
- rabbit_log:debug("AWS request: ~s~nResponse: ~p~n",
- [Path, Payload]),
- {ok, Payload};
- {error, {credentials, _}} -> {error, credentials};
- {error, Message, _} -> {error, Message}
- end.
-
-spec find_autoscaling_group(Instances :: list(), Instance :: string())
-> string() | error.
%% @private
@@ -320,7 +330,7 @@ get_hostname_name_from_reservation_set([{"item", RI}|T], Accum) ->
get_hostname_name_from_reservation_set(T, Accum ++ Hostnames).
get_hostname_names(Path) ->
- case api_get_request("ec2", Path) of
+ case rabbitmq_aws:api_get_request("ec2", Path) of
{ok, Payload} ->
Response = proplists:get_value("DescribeInstancesResponse", Payload),
ReservationSet = proplists:get_value("reservationSet", Response),
@@ -351,24 +361,6 @@ select_hostname() ->
_ -> "privateDnsName"
end.
--spec instance_id() -> string() | error.
-%% @private
-%% @doc Return the local instance ID from the EC2 metadata service
-%% @end
-%%
-instance_id() ->
- case httpc:request(get, {?INSTANCE_ID_URL, []},
- [{timeout, ?INSTANCE_ID_TIMEOUT}], []) of
- {ok, {{_, 200, _}, _, Value}} ->
- rabbit_log:debug("Fetched EC2 instance ID from ~p: ~p",
- [?INSTANCE_ID_URL, Value]),
- Value;
- Other ->
- rabbit_log:error("Failed to fetch EC2 instance ID from ~p: ~p",
- [?INSTANCE_ID_URL, Other]),
- error
- end.
-
-spec get_tags() -> tags().
get_tags() ->
Tags = get_config_key(aws_ec2_tags, ?CONFIG_MODULE:config_map(?BACKEND_CONFIG_KEY)),
diff --git a/deps/rabbitmq_peer_discovery_aws/src/rabbitmq_peer_discovery_aws.erl b/deps/rabbitmq_peer_discovery_aws/src/rabbitmq_peer_discovery_aws.erl
index f60667dfd3..840d7e7687 100644
--- a/deps/rabbitmq_peer_discovery_aws/src/rabbitmq_peer_discovery_aws.erl
+++ b/deps/rabbitmq_peer_discovery_aws/src/rabbitmq_peer_discovery_aws.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% This module exists as an alias for rabbit_peer_discovery_aws.
@@ -45,10 +45,10 @@ unregister() ->
post_registration() ->
?DELEGATE:post_registration().
--spec lock(Node :: atom()) -> not_supported.
+-spec lock(Node :: node()) -> {ok, {{ResourceId :: string(), LockRequesterId :: node()}, Nodes :: [node()]}} | {error, Reason :: string()}.
lock(Node) ->
?DELEGATE:lock(Node).
--spec unlock(Data :: term()) -> ok.
+-spec unlock({{ResourceId :: string(), LockRequesterId :: node()}, Nodes :: [node()]}) -> 'ok'.
unlock(Data) ->
?DELEGATE:unlock(Data).
diff --git a/deps/rabbitmq_peer_discovery_aws/test/aws_ecs_util.erl b/deps/rabbitmq_peer_discovery_aws/test/aws_ecs_util.erl
new file mode 100644
index 0000000000..2d43f6701a
--- /dev/null
+++ b/deps/rabbitmq_peer_discovery_aws/test/aws_ecs_util.erl
@@ -0,0 +1,358 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(aws_ecs_util).
+
+-include_lib("common_test/include/ct.hrl").
+-include_lib("rabbitmq_ct_helpers/include/rabbit_assert.hrl").
+
+-export([ensure_aws_cli/1,
+ ensure_ecs_cli/1,
+ init_aws_credentials/1,
+ ensure_rabbitmq_image/1,
+ start_ecs_cluster/1,
+ destroy_ecs_cluster/1,
+ register_task/2,
+ deregister_tasks/1,
+ create_service/1,
+ delete_service/1,
+ public_dns_names/1,
+ fetch_nodes_endpoint/2]).
+
+-define(ECS_CLUSTER_TIMEOUT, 300_000).
+
+%% NOTE:
+%% These helpers assume certain permissions are associated with the AWS
+%% credentials in use. The IAM user must have at least a policy with:
+%% {
+%% "Version": "2012-10-17",
+%% "Statement": [
+%% {
+%% "Sid": "VisualEditor0",
+%% "Effect": "Allow",
+%% "Action": [
+%% "iam:CreateInstanceProfile",
+%% "iam:DeleteInstanceProfile",
+%% "iam:PassRole",
+%% "iam:DetachRolePolicy",
+%% "iam:DeleteRolePolicy",
+%% "iam:RemoveRoleFromInstanceProfile",
+%% "iam:CreateRole",
+%% "iam:DeleteRole",
+%% "iam:AttachRolePolicy",
+%% "iam:AddRoleToInstanceProfile"
+%% ],
+%% "Resource": "*"
+%% }
+%% ]
+%% }
+%%
+%% Additionally, there must be a role called 'ecs-peer-discovery-aws' with the
+%% following policies:
+%% - AmazonEC2FullAccess
+%% - AmazonECS_FullAccess
+%% - AmazonEC2ContainerServiceforEC2Role
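+%%
+%% All three are AWS-managed policies; creating the role and attaching
+%% them (for example with `aws iam attach-role-policy`) is assumed to be
+%% done out of band, before this suite runs.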
+
+ensure_aws_cli(Config) ->
+ Aws = "aws",
+ case rabbit_ct_helpers:exec([Aws, "--version"], [{match_stdout, "aws-cli"}]) of
+ {ok, _} -> rabbit_ct_helpers:set_config(Config, {aws_cmd, Aws});
+ _ -> {skip, "aws cli required"}
+ end.
+
+ensure_ecs_cli(Config) ->
+ Ecs = "ecs-cli",
+ case rabbit_ct_helpers:exec([Ecs, "--version"], [{match_stdout, "ecs-cli"}]) of
+ {ok, _} -> rabbit_ct_helpers:set_config(Config, {ecs_cli_cmd, Ecs});
+ _ -> {skip, "ecs-cli required"}
+ end.
+
+init_aws_credentials(Config) ->
+ AccessKeyId = get_env_var_or_awscli_config_key(
+ "AWS_ACCESS_KEY_ID", "aws_access_key_id"),
+ SecretAccessKey = get_env_var_or_awscli_config_key(
+ "AWS_SECRET_ACCESS_KEY", "aws_secret_access_key"),
+ rabbit_ct_helpers:set_config(
+ Config,
+ [{aws_access_key_id, AccessKeyId},
+ {aws_secret_access_key, SecretAccessKey}]).
+
+get_env_var_or_awscli_config_key(EnvVar, AwscliKey) ->
+ case os:getenv(EnvVar) of
+ false -> get_awscli_config_key(AwscliKey);
+ Value -> Value
+ end.
+
+get_awscli_config_key(AwscliKey) ->
+ AwscliConfig = read_awscli_config(),
+ maps:get(AwscliKey, AwscliConfig, undefined).
+
+read_awscli_config() ->
+ Filename = filename:join([os:getenv("HOME"), ".aws", "credentials"]),
+ case filelib:is_regular(Filename) of
+ true -> read_awscli_config(Filename);
+ false -> #{}
+ end.
+
+read_awscli_config(Filename) ->
+ {ok, Content} = file:read_file(Filename),
+ Lines = string:tokens(binary_to_list(Content), "\n"),
+ read_awscli_config(Lines, #{}).
+
+read_awscli_config([Line | Rest], AwscliConfig) ->
+ Line1 = string:strip(Line),
+ case Line1 of
+ [$# | _] ->
+ read_awscli_config(Rest, AwscliConfig);
+ [$[ | _] ->
+ read_awscli_config(Rest, AwscliConfig);
+ _ ->
+ [Key, Value] = string:tokens(Line1, "="),
+ Key1 = string:strip(Key),
+ Value1 = string:strip(Value),
+ read_awscli_config(Rest, AwscliConfig#{Key1 => Value1})
+ end;
+read_awscli_config([], AwscliConfig) ->
+ AwscliConfig.
+
+ensure_rabbitmq_image(Config) ->
+ Image = case rabbit_ct_helpers:get_config(Config, rabbitmq_image) of
+ undefined -> os:getenv("RABBITMQ_IMAGE");
+ I -> I
+ end,
+ case Image of
+ false ->
+            {skip, "rabbitmq image required, " ++
+ "please set RABBITMQ_IMAGE or 'rabbitmq_image' " ++
+ "in ct config"};
+ Img ->
+ rabbit_ct_helpers:set_config(
+ Config, {rabbitmq_image, Img})
+ end.
+
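+%% The helpers below drive the ecs-cli and aws command line tools to manage
+%% the test cluster: ecs_configure/1 writes a named cluster configuration,
+%% ecs_configure_profile/1 stores the credentials under a profile, ecs_up/1
+%% launches the EC2-backed cluster, ecs_down/1 destroys it, and
+%% adjust_security_group/1 opens intra-cluster traffic.
+%% start_ecs_cluster/1 chains the setup steps and calls ecs_down/1 before
+%% re-raising if any of them throws.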
+ecs_configure(Config) ->
+ EcsCliCmd = ?config(ecs_cli_cmd, Config),
+ ClusterName = ?config(ecs_cluster_name, Config),
+ Region = ?config(ecs_region, Config),
+ ConfigureCmd = [EcsCliCmd, "configure",
+ "--cluster", ClusterName,
+ "--default-launch-type", "EC2",
+ "--config-name", ClusterName,
+ "--region", Region],
+ case rabbit_ct_helpers:exec(ConfigureCmd, []) of
+ {ok, _} -> Config;
+ _ -> {skip, "Could not configure ecs"}
+ end.
+
+ecs_configure_profile(Config) ->
+ EcsCliCmd = ?config(ecs_cli_cmd, Config),
+ ProfileName = ?config(ecs_profile_name, Config),
+ AccessKeyId = ?config(aws_access_key_id, Config),
+ SecretAccessKey = ?config(aws_secret_access_key, Config),
+ ConfigureProfileCmd = [EcsCliCmd, "configure", "profile",
+ "--access-key", AccessKeyId,
+ "--secret-key", SecretAccessKey,
+ "--profile-name", ProfileName],
+ case rabbit_ct_helpers:exec(ConfigureProfileCmd, []) of
+ {ok, _} -> Config;
+ _ -> {skip, "Could not configure ecs profile"}
+ end.
+
+ecs_up(Config) ->
+ EcsCliCmd = ?config(ecs_cli_cmd, Config),
+ InstanceRole = ?config(ecs_instance_role, Config),
+ ClusterName = ?config(ecs_cluster_name, Config),
+ ProfileName = ?config(ecs_profile_name, Config),
+ ClusterSize = ?config(ecs_cluster_size, Config),
+ UpCmd = [EcsCliCmd, "up",
+ "--instance-role", InstanceRole,
+ "--size", integer_to_list(ClusterSize),
+ "--instance-type", "t2.medium",
+ "--keypair", "id_rsa_terraform",
+ "--port", "15672",
+ "--cluster-config", ClusterName,
+ "--ecs-profile", ProfileName,
+ "--tags", "service=rabbitmq"],
+ case rabbit_ct_helpers:exec(UpCmd, []) of
+ {ok, _} -> Config;
+ _ -> {skip, "Could not start ecs cluster"}
+ end.
+
+ecs_down(Config) ->
+ EcsCliCmd = ?config(ecs_cli_cmd, Config),
+ ClusterName = ?config(ecs_cluster_name, Config),
+ ProfileName = ?config(ecs_profile_name, Config),
+ DownCmd = [EcsCliCmd, "down",
+ "--force",
+ "--cluster-config", ClusterName,
+ "--ecs-profile", ProfileName],
+ rabbit_ct_helpers:exec(DownCmd, []),
+ Config.
+
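+%% `ecs-cli up` above only exposes port 15672 externally; the RabbitMQ
+%% containers also need to reach each other on the epmd and Erlang
+%% distribution ports (the task definitions used by this suite run with
+%% host networking), so this step authorizes all TCP ports between
+%% instances that share the cluster's security group.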
+adjust_security_group(Config) ->
+ AwsCmd = ?config(aws_cmd, Config),
+ ClusterName = ?config(ecs_cluster_name, Config),
+ Region = ?config(ecs_region, Config),
+ {ok, [GroupId]} = ?awaitMatch({ok, L} when length(L) == 1,
+ security_group_ids(AwsCmd, ClusterName, Region),
+ ?ECS_CLUSTER_TIMEOUT),
+ AuthorizeSecurityGroupIngress = [AwsCmd, "ec2", "authorize-security-group-ingress",
+ "--region", Region,
+ "--group-id", GroupId,
+ "--protocol", "tcp",
+ "--port", "1-65535",
+ "--source-group", GroupId],
+ {ok, _} = rabbit_ct_helpers:exec(AuthorizeSecurityGroupIngress, []),
+ Config.
+
+start_ecs_cluster(Config) ->
+ try rabbit_ct_helpers:run_steps(Config,
+ [fun ecs_configure/1,
+ fun ecs_configure_profile/1,
+ fun ecs_up/1,
+ fun adjust_security_group/1])
+ catch
+ Class:Reason:Stacktrace ->
+ ecs_down(Config),
+ erlang:raise(Class, Reason, Stacktrace)
+ end.
+
+destroy_ecs_cluster(Config) ->
+ ecs_down(Config).
+
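+%% Query helpers: list_container_instances/3 returns the cluster's container
+%% instance ARNs, describe_container_instances/3 maps those ARNs to EC2
+%% instance ids, and describe_instances/4 runs a JMESPath --query over
+%% `aws ec2 describe-instances` for those ids (used below to look up
+%% security group ids and public DNS names). CLI output is decoded with
+%% rabbit_json.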
+list_container_instances(AwsCmd, ClusterName, Region) ->
+ ListContainerInstances = [AwsCmd, "ecs", "list-container-instances",
+ "--cluster", ClusterName,
+ "--region", Region],
+ case rabbit_ct_helpers:exec(ListContainerInstances, [binary]) of
+ {ok, Response} -> rabbit_json:try_decode(Response);
+ Error -> Error
+ end.
+
+describe_container_instances(AwsCmd, ClusterName, Region) ->
+ case list_container_instances(AwsCmd, ClusterName, Region) of
+ {ok, #{<<"containerInstanceArns">> := []}} ->
+ {error, no_instances};
+ {ok, #{<<"containerInstanceArns">> := ContainerInstanceArns}} ->
+ InputJson = rabbit_json:encode(#{<<"cluster">> => list_to_binary(ClusterName),
+ <<"containerInstances">> => ContainerInstanceArns}),
+ DescribeContainerInstances = [AwsCmd, "ecs", "describe-container-instances",
+ "--region", Region,
+ "--query", "containerInstances[*].ec2InstanceId",
+ "--cli-input-json", InputJson],
+ case rabbit_ct_helpers:exec(DescribeContainerInstances, []) of
+ {ok, Response} -> rabbit_json:try_decode(list_to_binary(Response));
+ Error -> Error
+ end;
+ Error ->
+ Error
+ end.
+
+describe_instances(AwsCmd, ClusterName, Region, Query) ->
+ case describe_container_instances(AwsCmd, ClusterName, Region) of
+ {ok, InstanceIds} ->
+ DescribeInstances = [AwsCmd, "ec2", "describe-instances",
+ "--region", Region,
+ "--query", Query,
+ "--instance-ids"] ++ InstanceIds,
+ case rabbit_ct_helpers:exec(DescribeInstances, []) of
+ {ok, Response} -> rabbit_json:try_decode(list_to_binary(Response));
+ Error -> Error
+ end;
+ Error ->
+ Error
+ end.
+
+security_group_ids(AwsCmd, ClusterName, Region) ->
+ Query = "Reservations[*].Instances[].NetworkInterfaces[].Groups[].GroupId",
+ case describe_instances(AwsCmd, ClusterName, Region, Query) of
+ {ok, InstancesResponse} ->
+ {ok, lists:usort(InstancesResponse)};
+ Error ->
+ Error
+ end.
+
+register_task(Config, TaskJson) ->
+ AwsCmd = ?config(aws_cmd, Config),
+ Region = ?config(ecs_region, Config),
+ Cmd = [AwsCmd, "ecs", "register-task-definition",
+ "--region", Region,
+ "--cli-input-json", TaskJson],
+ case rabbit_ct_helpers:exec(Cmd, []) of
+ {ok, _} -> Config;
+ _ -> {skip, "Failed to register task with ecs"}
+ end.
+
+deregister_tasks(Config) ->
+ AwsCmd = ?config(aws_cmd, Config),
+ Region = ?config(ecs_region, Config),
+ ServiceName = ?config(ecs_service_name, Config),
+ ListCmd = [AwsCmd, "ecs", "list-task-definitions",
+ "--region", Region,
+ "--family-prefix", ServiceName],
+ {ok, Defs} = rabbit_ct_helpers:exec(ListCmd, [binary]),
+ #{<<"taskDefinitionArns">> := Arns} = rabbit_json:decode(Defs),
+ [begin
+ DelCmd = [AwsCmd, "ecs", "deregister-task-definition",
+ "--region", Region,
+ "--task-definition", Arn],
+ rabbit_ct_helpers:exec(DelCmd, [])
+ end || Arn <- Arns],
+ Config.
+
+create_service(Config) ->
+ AwsCmd = ?config(aws_cmd, Config),
+ Region = ?config(ecs_region, Config),
+ ClusterName = ?config(ecs_cluster_name, Config),
+ ServiceName = ?config(ecs_service_name, Config),
+ ClusterSize = ?config(ecs_cluster_size, Config),
+ Cmd = [AwsCmd, "ecs", "create-service",
+ "--region", Region,
+ "--cluster", ClusterName,
+ "--service-name", ServiceName,
+ "--desired-count", integer_to_list(ClusterSize),
+ "--launch-type", "EC2",
+ "--task-definition", ServiceName],
+ case rabbit_ct_helpers:exec(Cmd, []) of
+ {ok, _} -> Config;
+ _ -> {skip, "Failed to create service in ecs"}
+ end.
+
+delete_service(Config) ->
+ AwsCmd = ?config(aws_cmd, Config),
+ Region = ?config(ecs_region, Config),
+ ClusterName = ?config(ecs_cluster_name, Config),
+ ServiceName = ?config(ecs_service_name, Config),
+ Cmd = [AwsCmd, "ecs", "delete-service",
+ "--region", Region,
+ "--cluster", ClusterName,
+ "--service", ServiceName,
+ "--force"],
+ rabbit_ct_helpers:exec(Cmd, []),
+ Config.
+
+public_dns_names(Config) ->
+ AwsCmd = ?config(aws_cmd, Config),
+ ClusterName = ?config(ecs_cluster_name, Config),
+ Region = ?config(ecs_region, Config),
+ Query = "Reservations[*].Instances[].PublicDnsName",
+ describe_instances(AwsCmd, ClusterName, Region, Query).
+
+fetch_nodes_endpoint(Config, Host) when is_list(Host) ->
+ DefaultUser = ?config(rabbitmq_default_user, Config),
+ DefaultPass = ?config(rabbitmq_default_pass, Config),
+ Url = "http://" ++ Host ++ ":15672/api/nodes",
+ case httpc:request(
+ get,
+ {Url, [rabbit_mgmt_test_util:auth_header(DefaultUser, DefaultPass)]},
+ [{version, "HTTP/1.0"}, {autoredirect, false}, {timeout, 10000}],
+ [{body_format, binary}]) of
+ {ok, {{"HTTP/1.1",200,"OK"}, _Headers, Body}} ->
+ rabbit_json:try_decode(Body);
+ Other ->
+ Other
+ end.
diff --git a/deps/rabbitmq_peer_discovery_aws/test/config_schema_SUITE.erl b/deps/rabbitmq_peer_discovery_aws/test/config_schema_SUITE.erl
index 87a6edded7..0eeb35a04c 100644
--- a/deps/rabbitmq_peer_discovery_aws/test/config_schema_SUITE.erl
+++ b/deps/rabbitmq_peer_discovery_aws/test/config_schema_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(config_schema_SUITE).
diff --git a/deps/rabbitmq_peer_discovery_aws/test/integration_SUITE.erl b/deps/rabbitmq_peer_discovery_aws/test/integration_SUITE.erl
index e574f3c00c..cbd356ed6a 100644
--- a/deps/rabbitmq_peer_discovery_aws/test/integration_SUITE.erl
+++ b/deps/rabbitmq_peer_discovery_aws/test/integration_SUITE.erl
@@ -2,13 +2,17 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(integration_SUITE).
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
+-include_lib("rabbitmq_ct_helpers/include/rabbit_assert.hrl").
+
+-define(CLUSTER_SIZE, 3).
+-define(TIMEOUT_MILLIS, 180_000).
-export([all/0,
suite/0,
@@ -28,7 +32,7 @@ all() ->
suite() ->
[
- {timetrap, {hours, 1}}
+ {timetrap, {minutes, 20}}
].
groups() ->
@@ -38,153 +42,134 @@ groups() ->
].
init_per_suite(Config) ->
- rabbit_ct_helpers:log_environment(),
- Config1 = rabbit_ct_helpers:set_config(
- Config,
- [
- {terraform_files_suffix, rabbit_ct_helpers:random_term_checksum()},
- {terraform_aws_ec2_region, "eu-west-1"},
- {rmq_nodes_clustered, false}
- ]),
- Config2 = init_aws_credentials(Config1),
- rabbit_ct_helpers:run_setup_steps(Config2).
+ case rabbit_ct_helpers:is_mixed_versions() of
+ true ->
+            %% These tests would likely pass in mixed versions, but they won't
+            %% actually honor mixed versions as currently specified via the env var
+ {skip, "not mixed versions compatible"};
+ _ ->
+ inets:start(),
+ rabbit_ct_helpers:log_environment(),
+ Config1 = rabbit_ct_helpers:set_config(
+ Config, [
+ {ecs_region, "eu-west-1"},
+ {ecs_cluster_name, os:getenv("AWS_ECS_CLUSTER_NAME", "rabbitmq-peer-discovery-aws")},
+ {ecs_profile_name, "rabbitmq-peer-discovery-aws-profile"},
+ {ecs_instance_role, "ecs-peer-discovery-aws"},
+ {ecs_cluster_size, ?CLUSTER_SIZE},
+ {rabbitmq_default_user, "test"},
+ {rabbitmq_default_pass, rabbit_ct_helpers:random_term_checksum()},
+ {rabbitmq_erlang_cookie, rabbit_ct_helpers:random_term_checksum()}
+ ]),
+ Config2 = rabbit_ct_helpers:register_teardown_step(Config1, fun aws_ecs_util:destroy_ecs_cluster/1),
+ rabbit_ct_helpers:run_steps(
+ Config2, [
+ fun rabbit_ct_helpers:init_skip_as_error_flag/1,
+ fun rabbit_ct_helpers:start_long_running_testsuite_monitor/1,
+ fun aws_ecs_util:ensure_aws_cli/1,
+ fun aws_ecs_util:ensure_ecs_cli/1,
+ fun aws_ecs_util:init_aws_credentials/1,
+ fun aws_ecs_util:ensure_rabbitmq_image/1,
+ fun aws_ecs_util:start_ecs_cluster/1
+ ])
+ end.
end_per_suite(Config) ->
rabbit_ct_helpers:run_teardown_steps(Config).
init_per_group(using_tags, Config) ->
- TfConfigDir = rabbit_ct_vm_helpers:aws_autoscaling_group_module(Config),
- AccessKeyId = ?config(aws_access_key_id, Config),
- SecretAccessKey = ?config(aws_secret_access_key, Config),
- Suffix = ?config(terraform_files_suffix, Config),
Config1 = rabbit_ct_helpers:set_config(
- Config, {terraform_config_dir, TfConfigDir}),
- rabbit_ct_helpers:merge_app_env(
- Config1,
- {rabbit,
- [{cluster_formation,
- [{peer_discovery_backend, rabbit_peer_discovery_aws},
- {peer_discovery_aws,
- [
- {aws_ec2_region, ?config(terraform_aws_ec2_region, Config)},
- {aws_access_key, AccessKeyId},
- {aws_secret_key, SecretAccessKey},
- {aws_ec2_tags, [{"rabbitmq-testing-suffix", Suffix}]}
- ]}]}]});
+ Config,
+ [{ecs_service_name, "rabbitmq-tagged"}]),
+ rabbit_ct_helpers:run_steps(Config1, [fun register_tagged_task/1,
+ fun aws_ecs_util:create_service/1]);
init_per_group(using_autoscaling_group, Config) ->
- TfConfigDir = rabbit_ct_vm_helpers:aws_autoscaling_group_module(Config),
- AccessKeyId = ?config(aws_access_key_id, Config),
- SecretAccessKey = ?config(aws_secret_access_key, Config),
Config1 = rabbit_ct_helpers:set_config(
- Config, {terraform_config_dir, TfConfigDir}),
- rabbit_ct_helpers:merge_app_env(
- Config1,
- {rabbit,
- [{cluster_formation,
- [{peer_discovery_backend, rabbit_peer_discovery_aws},
- {peer_discovery_aws,
- [
- {aws_ec2_region, ?config(terraform_aws_ec2_region, Config)},
- {aws_access_key, AccessKeyId},
- {aws_secret_key, SecretAccessKey},
- {aws_autoscaling, true}
- ]}]}]}).
+ Config,
+ [{ecs_service_name, "rabbitmq-autoscaled"}]),
+ rabbit_ct_helpers:run_steps(Config1, [fun register_autoscaled_task/1,
+ fun aws_ecs_util:create_service/1]).
end_per_group(_Group, Config) ->
- Config.
+ rabbit_ct_helpers:run_steps(Config, [fun aws_ecs_util:delete_service/1,
+ fun (C) ->
+ % A short delay so that all tasks
+ % associated with the service can
+ % be deregistered
+ timer:sleep(15000),
+ C
+ end,
+ fun aws_ecs_util:deregister_tasks/1]).
init_per_testcase(Testcase, Config) ->
- rabbit_ct_helpers:testcase_started(Config, Testcase),
- InstanceName = rabbit_ct_helpers:testcase_absname(Config, Testcase),
- InstanceCount = 2,
- ClusterSize = InstanceCount,
- Config1 = rabbit_ct_helpers:set_config(
- Config,
- [{terraform_instance_name, InstanceName},
- {terraform_instance_count, InstanceCount},
- {rmq_nodename_suffix, Testcase},
- {rmq_nodes_count, ClusterSize}]),
- rabbit_ct_helpers:run_steps(
- Config1,
- [fun rabbit_ct_broker_helpers:run_make_dist/1] ++
- rabbit_ct_vm_helpers:setup_steps() ++
- rabbit_ct_broker_helpers:setup_steps_for_vms()).
+ rabbit_ct_helpers:testcase_started(Config, Testcase).
end_per_testcase(Testcase, Config) ->
- Config1 = rabbit_ct_helpers:run_steps(
- Config,
- rabbit_ct_broker_helpers:teardown_steps_for_vms() ++
- rabbit_ct_vm_helpers:teardown_steps()),
- rabbit_ct_helpers:testcase_finished(Config1, Testcase).
-
-init_aws_credentials(Config) ->
- AccessKeyId = get_env_var_or_awscli_config_key(
- "AWS_ACCESS_KEY_ID", "aws_access_key_id"),
- SecretAccessKey = get_env_var_or_awscli_config_key(
- "AWS_SECRET_ACCESS_KEY", "aws_secret_access_key"),
- rabbit_ct_helpers:set_config(
- Config,
- [
- {aws_access_key_id, AccessKeyId},
- {aws_secret_access_key, SecretAccessKey}
- ]).
-
-get_env_var_or_awscli_config_key(EnvVar, AwscliKey) ->
- case os:getenv(EnvVar) of
- false -> get_awscli_config_key(AwscliKey);
- Value -> Value
- end.
-
-get_awscli_config_key(AwscliKey) ->
- AwscliConfig = read_awscli_config(),
- maps:get(AwscliKey, AwscliConfig, undefined).
-
-read_awscli_config() ->
- Filename = filename:join([os:getenv("HOME"), ".aws", "credentials"]),
- case filelib:is_regular(Filename) of
- true -> read_awscli_config(Filename);
- false -> #{}
- end.
-
-read_awscli_config(Filename) ->
- {ok, Content} = file:read_file(Filename),
- Lines = string:tokens(binary_to_list(Content), "\n"),
- read_awscli_config(Lines, #{}).
-
-read_awscli_config([Line | Rest], AwscliConfig) ->
- Line1 = string:strip(Line),
- case Line1 of
- [$# | _] ->
- read_awscli_config(Rest, AwscliConfig);
- [$[ | _] ->
- read_awscli_config(Rest, AwscliConfig);
- _ ->
- [Key, Value] = string:tokens(Line1, "="),
- Key1 = string:strip(Key),
- Value1 = string:strip(Value),
- read_awscli_config(Rest, AwscliConfig#{Key1 => Value1})
- end;
-read_awscli_config([], AwscliConfig) ->
- AwscliConfig.
-
-%% -------------------------------------------------------------------
-%% Run arbitrary code.
-%% -------------------------------------------------------------------
+ rabbit_ct_helpers:testcase_finished(Config, Testcase).
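+
+%% Waits until the ECS service exposes ?CLUSTER_SIZE public DNS names, then
+%% queries each node's /api/nodes management endpoint and asserts that all
+%% of them report the same membership, i.e. that a single three-node
+%% cluster was formed.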
cluster_was_formed(Config) ->
- CTPeers = rabbit_ct_vm_helpers:get_ct_peers(Config),
- ?assertEqual(lists:duplicate(length(CTPeers), false),
- [rabbit:is_running(CTPeer) || CTPeer <- CTPeers]),
- RabbitMQNodes = lists:sort(
- rabbit_ct_broker_helpers:get_node_configs(
- Config, nodename)),
- ?assertEqual(lists:duplicate(length(RabbitMQNodes), true),
- [rabbit:is_running(Node) || Node <- RabbitMQNodes]),
-
- ?assertEqual(lists:duplicate(length(RabbitMQNodes), true),
- rabbit_ct_broker_helpers:rpc_all(
- Config, rabbit_mnesia, is_clustered, [])),
- ClusteredNodes = lists:sort(
- rabbit_ct_broker_helpers:rpc(
- Config, 0, rabbit_mnesia, cluster_nodes, [running])),
- ?assertEqual(ClusteredNodes, RabbitMQNodes).
+ {ok, [H1, H2, H3]} = ?awaitMatch({ok, L} when length(L) == ?CLUSTER_SIZE,
+ aws_ecs_util:public_dns_names(Config),
+ ?TIMEOUT_MILLIS),
+
+ [N1Nodes, N2Nodes, N3Nodes] =
+ [begin
+ {ok, R} = ?awaitMatch({ok, R} when is_list(R) andalso length(R) == ?CLUSTER_SIZE,
+ aws_ecs_util:fetch_nodes_endpoint(Config, binary_to_list(H)),
+ ?TIMEOUT_MILLIS),
+ [maps:get(<<"name">>, N) || N <- R]
+ end || H <- [H1, H2, H3]],
+
+ ?assertEqual(lists:sort(N1Nodes), lists:sort(N2Nodes)),
+ ?assertEqual(lists:sort(N2Nodes), lists:sort(N3Nodes)).
+
+register_tagged_task(Config) ->
+ RabbitmqDefaultUser = ?config(rabbitmq_default_user, Config),
+ RabbitmqDefaultPass = ?config(rabbitmq_default_pass, Config),
+ RabbitmqConf = string:join([
+ "default_user = " ++ RabbitmqDefaultUser,
+ "default_pass = " ++ RabbitmqDefaultPass,
+ "cluster_formation.peer_discovery_backend = aws",
+ "cluster_formation.aws.instance_tags.service = rabbitmq",
+ ""
+ ], "\n"),
+ TaskJson = task_json(Config, RabbitmqConf),
+ aws_ecs_util:register_task(Config, TaskJson).
+
+register_autoscaled_task(Config) ->
+ RabbitmqDefaultUser = ?config(rabbitmq_default_user, Config),
+ RabbitmqDefaultPass = ?config(rabbitmq_default_pass, Config),
+ RabbitmqConf = string:join([
+ "default_user = " ++ RabbitmqDefaultUser,
+ "default_pass = " ++ RabbitmqDefaultPass,
+ "cluster_formation.peer_discovery_backend = aws",
+ "cluster_formation.aws.use_autoscaling_group = true",
+ ""
+ ], "\n"),
+ TaskJson = task_json(Config, RabbitmqConf),
+ aws_ecs_util:register_task(Config, TaskJson).
+
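+%% Renders the task definition for a test run. Loads task_definition.json
+%% from the suite data dir and fills in its PLACEHOLDER fields: the task
+%% family (the service name), the RabbitMQ image, the Erlang cookie passed
+%% via RABBITMQ_ERLANG_COOKIE, and the generated rabbitmq.conf, which is
+%% base64-encoded into the DATA variable of the "rabbitmq-config" sidecar.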
+task_json(Config, RabbitmqConf) ->
+ DataDir = ?config(data_dir, Config),
+ RabbitmqImage = ?config(rabbitmq_image, Config),
+ RabbitmqDefaultUser = ?config(rabbitmq_default_user, Config),
+ RabbitmqDefaultPass = ?config(rabbitmq_default_pass, Config),
+ RabbitmqErlangCookie = ?config(rabbitmq_erlang_cookie, Config),
+ ServiceName = ?config(ecs_service_name, Config),
+
+ {ok, Binary} = file:read_file(filename:join(DataDir, "task_definition.json")),
+ TaskDef = rabbit_json:decode(Binary),
+
+ [RabbitContainerDef, SidecarContainerDef] = maps:get(<<"containerDefinitions">>, TaskDef),
+ RabbitContainerDef1 =
+ RabbitContainerDef#{
+ <<"image">> := list_to_binary(RabbitmqImage),
+ <<"environment">> := [#{<<"name">> => <<"RABBITMQ_ERLANG_COOKIE">>,
+ <<"value">> => list_to_binary(RabbitmqErlangCookie)}]
+ },
+ SidecarContainerDef1 =
+ SidecarContainerDef#{<<"environment">> := [#{<<"name">> => <<"DATA">>,
+ <<"value">> => base64:encode(RabbitmqConf)}]},
+ rabbit_json:encode(
+ TaskDef#{<<"family">> := list_to_binary(ServiceName),
+ <<"containerDefinitions">> := [RabbitContainerDef1, SidecarContainerDef1]}).
diff --git a/deps/rabbitmq_peer_discovery_aws/test/integration_SUITE_data/task_definition.json b/deps/rabbitmq_peer_discovery_aws/test/integration_SUITE_data/task_definition.json
new file mode 100644
index 0000000000..354f6a1e2b
--- /dev/null
+++ b/deps/rabbitmq_peer_discovery_aws/test/integration_SUITE_data/task_definition.json
@@ -0,0 +1,77 @@
+{
+ "family": "PLACEHOLDER",
+ "networkMode": "host",
+ "containerDefinitions": [
+ {
+ "name": "rabbitmq",
+ "image": "PLACEHOLDER",
+ "cpu": 256,
+ "memory": 512,
+ "portMappings": [
+ {
+ "containerPort": 5672,
+ "hostPort": 5672,
+ "protocol": "tcp"
+ },
+ {
+ "containerPort": 15672,
+ "hostPort": 15672,
+ "protocol": "tcp"
+ }
+ ],
+ "essential": true,
+ "environment": [
+ {
+ "name": "RABBITMQ_ERLANG_COOKIE",
+ "value": "PLACEHOLDER"
+ }
+ ],
+ "mountPoints": [
+ {
+ "sourceVolume": "rabbitmq-conf-vol",
+ "containerPath": "/etc/rabbitmq"
+ }
+ ],
+ "dependsOn": [
+ {
+ "containerName": "rabbitmq-config",
+ "condition": "COMPLETE"
+ }
+ ]
+ },
+ {
+ "name": "rabbitmq-config",
+ "image": "bash",
+ "cpu": 256,
+ "memory": 512,
+ "essential": false,
+ "command": [
+ "-c",
+ "echo \"[rabbitmq_management, rabbitmq_peer_discovery_aws].\" > /etc/rabbitmq/enabled_plugins && echo $DATA | base64 -d - | tee /etc/rabbitmq/rabbitmq.conf"
+ ],
+ "environment": [
+ {
+ "name": "DATA",
+ "value": "PLACEHOLDER"
+ }
+ ],
+ "mountPoints": [
+ {
+ "sourceVolume": "rabbitmq-conf-vol",
+ "containerPath": "/etc/rabbitmq"
+ }
+ ]
+ }
+ ],
+ "volumes": [
+ {
+ "name": "rabbitmq-conf-vol",
+ "host": {
+ "sourcePath": "/rabbitmq-conf-vol"
+ }
+ }
+ ],
+ "requiresCompatibilities": [
+ "EC2"
+ ]
+}
diff --git a/deps/rabbitmq_peer_discovery_aws/test/rabbitmq_peer_discovery_aws_SUITE.erl b/deps/rabbitmq_peer_discovery_aws/test/unit_SUITE.erl
index 3894e99d34..e9c3d14112 100644
--- a/deps/rabbitmq_peer_discovery_aws/test/rabbitmq_peer_discovery_aws_SUITE.erl
+++ b/deps/rabbitmq_peer_discovery_aws/test/unit_SUITE.erl
@@ -2,10 +2,10 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
--module(rabbitmq_peer_discovery_aws_SUITE).
+-module(unit_SUITE).
-compile(export_all).
-include_lib("common_test/include/ct.hrl").
@@ -14,7 +14,8 @@
all() ->
[
- {group, unit}
+ {group, unit},
+ {group, lock}
].
groups() ->
@@ -23,7 +24,14 @@ groups() ->
maybe_add_tag_filters,
get_hostname_name_from_reservation_set,
registration_support
- ]}].
+ ]},
+ {lock, [], [
+ lock_single_node,
+ lock_multiple_nodes,
+ lock_local_node_not_discovered,
+ lock_list_nodes_fails
+ ]}
+ ].
%%%
%%% Testcases
@@ -63,7 +71,39 @@ get_hostname_name_from_reservation_set(_Config) ->
}.
registration_support(_Config) ->
- ?assertEqual(rabbit_peer_discovery_aws:supports_registration(), true).
+ ?assertEqual(false, rabbit_peer_discovery_aws:supports_registration()).
+
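+%% The lock tests stub list_nodes/0 with meck, which is why lock/1 calls it
+%% through ?MODULE rather than as a local call.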
+lock_single_node(_Config) ->
+ LocalNode = node(),
+ Nodes = [LocalNode],
+ meck:expect(rabbit_peer_discovery_aws, list_nodes, 0, {ok, {Nodes, disc}}),
+
+ {ok, {LockId, Nodes}} = rabbit_peer_discovery_aws:lock(LocalNode),
+ ?assertEqual(ok, rabbit_peer_discovery_aws:unlock({LockId, Nodes})).
+
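+%% Two discovered nodes contend for the lock: with internal_lock_retries set
+%% to 2, the local node's attempt fails while other@host holds the lock and
+%% succeeds once it has been released. unlock/1 is also exercised through
+%% the rabbitmq_peer_discovery_aws alias module.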
+lock_multiple_nodes(_Config) ->
+ application:set_env(rabbit, cluster_formation, [{internal_lock_retries, 2}]),
+ LocalNode = node(),
+ OtherNode = other@host,
+ Nodes = [OtherNode, LocalNode],
+ meck:expect(rabbit_peer_discovery_aws, list_nodes, 0, {ok, {Nodes, disc}}),
+
+ {ok, {{LockResourceId, OtherNode}, Nodes}} = rabbit_peer_discovery_aws:lock(OtherNode),
+ ?assertEqual({error, "Acquiring lock taking too long, bailing out after 2 retries"},
+ rabbit_peer_discovery_aws:lock(LocalNode)),
+ ?assertEqual(ok, rabbitmq_peer_discovery_aws:unlock({{LockResourceId, OtherNode}, Nodes})),
+
+ ?assertEqual({ok, {{LockResourceId, LocalNode}, Nodes}}, rabbit_peer_discovery_aws:lock(LocalNode)),
+ ?assertEqual(ok, rabbitmq_peer_discovery_aws:unlock({{LockResourceId, LocalNode}, Nodes})).
+
+lock_local_node_not_discovered(_Config) ->
+    meck:expect(rabbit_peer_discovery_aws, list_nodes, 0, {ok, {[n1@host, n2@host], disc}}),
+ Expectation = {error, "Local node me@host is not part of discovered nodes [n1@host,n2@host]"},
+ ?assertEqual(Expectation, rabbit_peer_discovery_aws:lock(me@host)).
+
+lock_list_nodes_fails(_Config) ->
+ meck:expect(rabbit_peer_discovery_aws, list_nodes, 0, {error, "failed for some reason"}),
+ ?assertEqual({error, "failed for some reason"}, rabbit_peer_discovery_aws:lock(me@host)).
%%%
%%% Implementation
diff --git a/deps/rabbitmq_peer_discovery_common/BUILD.bazel b/deps/rabbitmq_peer_discovery_common/BUILD.bazel
new file mode 100644
index 0000000000..c1f917f2aa
--- /dev/null
+++ b/deps/rabbitmq_peer_discovery_common/BUILD.bazel
@@ -0,0 +1,69 @@
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze", "plt")
+load(
+ "//:rabbitmq.bzl",
+ "RABBITMQ_DIALYZER_OPTS",
+ "RABBITMQ_ERLC_OPTS",
+ "RABBITMQ_TEST_ERLC_OPTS",
+ "assert_suites",
+ "broker_for_integration_suites",
+ "rabbitmq_integration_suite",
+ "rabbitmq_lib",
+)
+
+APP_NAME = "rabbitmq_peer_discovery_common"
+
+APP_DESCRIPTION = "Modules shared by various peer discovery backends"
+
+APP_MODULE = "rabbit_peer_discovery_common_app"
+
+EXTRA_APPS = [
+ "inets",
+]
+
+DEPS = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+]
+
+RUNTIME_DEPS = [
+ "//deps/rabbit:bazel_erlang_lib",
+]
+
+rabbitmq_lib(
+ app_description = APP_DESCRIPTION,
+ app_module = APP_MODULE,
+ app_name = APP_NAME,
+ extra_apps = EXTRA_APPS,
+ runtime_deps = RUNTIME_DEPS,
+ deps = DEPS,
+)
+
+xref(tags = ["xref"])
+
+plt(
+ name = "base_plt",
+ apps = EXTRA_APPS,
+ plt = "//:base_plt",
+)
+
+dialyze(
+ dialyzer_opts = RABBITMQ_DIALYZER_OPTS,
+ plt = ":base_plt",
+ tags = ["dialyze"],
+)
+
+broker_for_integration_suites()
+
+PACKAGE = "deps/rabbitmq_peer_discovery_common"
+
+suites = [
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "config_schema_SUITE",
+ ),
+]
+
+assert_suites(
+ suites,
+ glob(["test/**/*_SUITE.erl"]),
+)
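+
+# Assumed local usage (the generated target names come from the macros
+# above and are not spelled out in this file): something along the lines of
+# `bazel test //deps/rabbitmq_peer_discovery_common/...` should build the
+# library and run the declared suites.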
diff --git a/deps/rabbitmq_peer_discovery_common/Makefile b/deps/rabbitmq_peer_discovery_common/Makefile
index c1d3dcaffa..d7a42a699c 100644
--- a/deps/rabbitmq_peer_discovery_common/Makefile
+++ b/deps/rabbitmq_peer_discovery_common/Makefile
@@ -16,5 +16,5 @@ DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
ERLANG_MK_COMMIT = rabbitmq-tmp
-include rabbitmq-components.mk
-include erlang.mk
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
diff --git a/deps/rabbitmq_peer_discovery_common/erlang.mk b/deps/rabbitmq_peer_discovery_common/erlang.mk
deleted file mode 100644
index fce4be0b0a..0000000000
--- a/deps/rabbitmq_peer_discovery_common/erlang.mk
+++ /dev/null
@@ -1,7808 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
-ERLANG_MK_WITHOUT =
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
-
-# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
-
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
-
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another a key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simplify writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating Github-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = erlang library for JSON Web Token
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple non intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = redis erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += esh_mk
-pkg_esh_mk_name = esh_mk
-pkg_esh_mk_description = esh template engine plugin for erlang.mk
-pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
-pkg_esh_mk_fetch = git
-pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
-pkg_esh_mk_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = TOML language erlang parser
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = http://fixprotocol.org/ implementation.
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - an Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = The guards and for Erlang parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
-pkg_gun_homepage = http://ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang irc client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library for efficiently decoding and encoding JSON, written in C.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = erlang driver for rethinkdb
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = library to handle mimetypes
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang Oauth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = An embeddable document oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transformer for using new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between record and proplist
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = automatic Erlang node discovery via redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = pipelined erlang redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang Rest Client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy as nif for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in Erlang. Supports reverse proxy load balancing and forward proxy with HTTP (including CONNECT), SOCKS4, SOCKS5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an ID generator for message services.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elastic Search
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX style broken HTML parser in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = transit format for erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU based collation Erlang module
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtension for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = a simple erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler svn repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired by rebar xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based yaml loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = Zab protocol implemented in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
-
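For illustration, a hedged sketch of how the search target above is typically invoked from an erlang.mk project; the query string "pool" is only an example. Matching is case-insensitive against each package's name and description, and omitting q prints the whole index:

    make search q=pool    # list index entries whose name or description contains "pool"
    make search           # no q given: print every package entry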
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
-
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
-
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# They both use the core_dep_plugin macro.
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
-
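A minimal sketch of how a project might register early plugins with the foreach above; the dependency name ci_helpers and the path are hypothetical. A bare name includes $(DEPS_DIR)/<name>/early-plugins.mk, while an entry containing a slash is treated as a path under $(DEPS_DIR) attributed to its first component:

    # In the top-level Makefile (hypothetical names):
    DEP_EARLY_PLUGINS = ci_helpers                # includes $(DEPS_DIR)/ci_helpers/early-plugins.mk
    DEP_EARLY_PLUGINS += ci_helpers/mk/early.mk   # includes $(DEPS_DIR)/ci_helpers/mk/early.mk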
-# Query functions.
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
-
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
-
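As a hedged illustration of how the query functions above resolve a dependency (the cowboy version shown is hypothetical): an explicit dep_* line in the project Makefile takes precedence, and the pkg_* index entries earlier in this file are the fallback.

    # Hypothetical project Makefile:
    DEPS = cowboy poolboy
    dep_cowboy = git https://github.com/ninenines/cowboy 2.9.0
    # cowboy:  dep_cowboy is set, so the fetch method is word 1 (git),
    #          the repo is word 2 and the version is word 3 (2.9.0).
    # poolboy: no dep_poolboy is defined, so pkg_poolboy_fetch/_repo/_commit
    #          from the package index above are used instead.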
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly we don't want to include it here,
-# otherwise it would be treated both as an app and as a top-level project.
-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
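A sketch of the resulting environment, assuming the default APPS_DIR and DEPS_DIR above and an initially empty ERL_LIBS:

    ERL_LIBS=$(CURDIR)/apps:$(CURDIR)/deps   # exported so the Erlang code server can locate apps and deps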
-
-export NO_AUTOPATCH
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
-
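A hedged usage sketch of the verbosity switch; the V variable itself is defined elsewhere in erlang.mk and is not part of this hunk:

    make deps        # V=0 (default): one terse " DEP <name> (<commit>)" line per dependency
    make deps V=2    # V=2: prefixes recipes with 'set -x' for full shell tracing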
-# Optimization: don't recompile deps unless truly necessary.
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create an ebin directory for all apps to make sure Erlang recognizes them
-# as proper OTP applications when using -include_lib. This is a temporary
-# fix; a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
-
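A sketch of the behaviour described in the recipe comments above, with a hypothetical app name; only LOCAL_DEPS entries that actually exist under $(APPS_DIR) end up in LOCAL_DEPS_DIRS and therefore in ALL_APPS_DIRS_TO_BUILD:

    # Top-level Makefile (hypothetical):
    LOCAL_DEPS = app_a     # only apps/app_a is compiled by the apps:: target
    # With LOCAL_DEPS unset, every directory under apps/ is compiled instead.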
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies after they have been compiled
-# once. A developer working on the top-level project and on some of its
-# dependencies at the same time may want to change this behavior.
-# There are two solutions (see the sketch after the definitions below):
-# 1. Set `FULL=1` so that all dependencies are visited and
-#    recursively recompiled if necessary.
-# 2. Set `FORCE_REBUILD=` to the specific list of dependencies that
-#    should be recompiled (instead of the whole set).
-
-FORCE_REBUILD ?=
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
-
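A minimal sketch of the two options described above (the dependency names are hypothetical):

    make deps FULL=1                 # visit every dependency and recompile it if necessary
    # or, in the project Makefile, force rebuilds of specific deps only:
    FORCE_REBUILD = cowlib ranch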
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
-
-# Deps related targets.
-
-# @todo rename GNUmakefile and makefile into Makefile first, if they exist
-# While the makefile could be named GNUmakefile or makefile,
-# in practice only Makefile has been needed so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
-# if given. Do it for all 3 possible Makefile file names.
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
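For illustration, the effect of the sed call above on a dependency's own Makefile; ERLANG_MK_FILENAME is assumed to be set by the parent project:

    # Before autopatching:
    include erlang.mk
    # After autopatching:
    include $(if $(ERLANG_MK_FILENAME),$(ERLANG_MK_FILENAME),erlang.mk)
    # Setting NO_AUTOPATCH_ERLANG_MK (handled by the ifeq above) makes this rewrite a no-op.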
-
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
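The SemVsn helper above normalises rebar-style "~>" requirements into full three-part versions before they are treated as hex versions. A minimal standalone sketch of the same logic (the function name sem_vsn/1 is ours, not part of the generated Makefile):

    %% Pads "~>"-style requirements to MAJOR.MINOR.PATCH; anything else
    %% is passed through unchanged.
    sem_vsn("~>" ++ S0) ->
        S = case S0 of
            " " ++ S1 -> S1;
            _ -> S0
        end,
        case length([C || C <- S, C =:= $.]) of
            0 -> S ++ ".0.0";
            1 -> S ++ ".0";
            _ -> S
        end;
    sem_vsn(S) ->
        S.

    %% sem_vsn("~> 2.1") -> "2.1.0"
    %% sem_vsn("1.0.4")  -> "1.0.4"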
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
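Stripped of the Makefile escaping, the .app.src.script evaluation above boils down to: consult the .app.src, bind it to 'CONFIG', bind the script path to 'SCRIPT', and let file:script/2 return the rewritten terms. A sketch, assuming a hypothetical dependency named mydep:

    AppSrc = "deps/mydep/src/mydep.app.src",
    Script = AppSrc ++ ".script",
    {ok, Conf0} = file:consult(AppSrc),
    B0 = erl_eval:new_bindings(),
    B1 = erl_eval:add_binding('CONFIG', Conf0, B0),
    B  = erl_eval:add_binding('SCRIPT', Script, B1),
    %% The script's return value replaces the original .app.src contents.
    Conf = case file:script(Script, B) of
        {ok, [C]} -> C;
        {ok, C} -> C
    end,
    ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])).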
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
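The net effect of the rewrite above on a dependency's application resource, shown with hypothetical values: the modules list is emptied so Erlang.mk can fill it at build time, a registered key is ensured, and a vsn of git is replaced by the output of git describe.

    %% ebin/mydep.app before autopatching (hypothetical):
    %%   {application, mydep,
    %%       [{vsn, git}, {modules, [mydep, mydep_sup]}]}.
    %% src/mydep.app.src afterwards:
    {application, mydep, [
        {vsn, "1.2.0-3-gabc1234"},  %% from git describe --dirty --tags --always
        {modules, []},
        {registered, []}
    ]}.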
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-# Hex only has a package version. No need to look in the Erlang.mk packages.
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
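The same fetch can be expressed in plain Erlang, which makes the two-level tarball layout explicit: the outer NAME-VSN.tar contains a contents.tar.gz holding the actual sources. A sketch using inets/httpc and erl_tar (it writes the intermediate files to disk instead of streaming, and the package name and version in the example call are hypothetical):

    fetch_hex(Name, Vsn, DepsDir) ->
        {ok, _} = application:ensure_all_started(inets),
        {ok, _} = application:ensure_all_started(ssl),
        Url = "https://repo.hex.pm/tarballs/" ++ Name ++ "-" ++ Vsn ++ ".tar",
        {ok, {{_, 200, _}, _Hdrs, Body}} =
            httpc:request(get, {Url, []}, [], [{body_format, binary}]),
        Dest = filename:join(DepsDir, Name),
        ok = filelib:ensure_dir(filename:join(Dest, "dummy")),
        Outer = filename:join(DepsDir, Name ++ ".tar"),
        ok = file:write_file(Outer, Body),
        %% Pull contents.tar.gz out of the outer tarball, then unpack it
        %% into the dependency directory.
        ok = erl_tar:extract(Outer, [{files, ["contents.tar.gz"]}, {cwd, DepsDir}]),
        ok = erl_tar:extract(filename:join(DepsDir, "contents.tar.gz"),
                             [compressed, {cwd, Dest}]).

    %% fetch_hex("cowlib", "2.12.1", "deps") -- illustrative call.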
-
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility purposes with older Erlang.mk configuration.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
-
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
-
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- Output0 = [
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ],
- Output = case "é" of
- [233] -> unicode:characters_to_binary(Output0);
- _ -> Output0
- end,
- ok = file:write_file("$(1)", Output),
- halt()
-endef
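The COMPILE_FIRST ordering above comes from a dependency digraph: modules are vertices, an edge A -> B means A depends on B (behaviour, parse_transform, import, ...), and the reversed topological sort of every module that has dependants is emitted. The idea in isolation, with hypothetical module names:

    G = digraph:new([acyclic]),
    [digraph:add_vertex(G, M) || M <- [my_worker, my_behaviour, my_transform]],
    digraph:add_edge(G, my_worker, my_behaviour),
    digraph:add_edge(G, my_worker, my_transform),
    CompileFirst = [M || M <- lists:reverse(digraph_utils:topsort(G)),
                         digraph:in_neighbours(G, M) =/= []].
    %% CompileFirst contains my_behaviour and my_transform (the modules that
    %% others depend on) but not my_worker.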
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
- echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
-
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
- @:
-
-test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
-test_erlc_verbose_2 = set -x;
-test_erlc_verbose = $(test_erlc_verbose_$(V))
-
-define compile_test_erl
- $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(1)
-endef
-
-ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
-$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
- $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want to fail due to
-# warnings when used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
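For a hypothetical project with DEPS = cowlib pinned to a git tag and ERLC_OPTS = -Werror +debug_info +warn_export_vars, the rendered rebar.config would look roughly like this; note that -Werror is dropped and the remaining +options become atoms:

    {deps, [
        {cowlib, ".*", {git, "https://github.com/ninenines/cowlib", "2.12.1"}}
    ]}.
    {erl_opts, [debug_info, warn_export_vars]}.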
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
- " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
- " list-templates List available templates"
-
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
-
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
-
-list-templates:
- $(verbose) @echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code. The "gcc" MSYS2 package also doesn't.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
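The generated env.mk simply captures paths from the running VM. The same values can be inspected from an Erlang shell; the paths and versions below are illustrative:

    1> code:root_dir().
    "/usr/lib/erlang"
    2> erlang:system_info(version).
    "12.2"
    3> code:lib_dir(erl_interface, include).
    "/usr/lib/erlang/lib/erl_interface-5.2.1/include"
    4> code:lib_dir(erl_interface, lib).
    "/usr/lib/erlang/lib/erl_interface-5.2.1/lib"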
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
-# Note: the variable above appears as CI_ERLLLVM (three L's) while it is
-# defined and used elsewhere as CI_ERLLVM; with the extra L the ERLLVM
-# versions never get kerl install targets generated here.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
- "The CI_OTP variable must be defined with the Erlang versions" \
- "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
-
-# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifdef CONCUERROR_TESTS
-
-.PHONY: concuerror distclean-concuerror
-
-# Configuration
-
-CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
-CONCUERROR_OPTS ?=
-
-# Core targets.
-
-check:: concuerror
-
-ifndef KEEP_LOGS
-distclean:: distclean-concuerror
-endif
-
-# Plugin-specific targets.
-
-$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
- $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
- $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
-
-$(CONCUERROR_LOGS_DIR):
- $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
-
-define concuerror_html_report
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="utf-8">
-<title>Concuerror HTML report</title>
-</head>
-<body>
-<h1>Concuerror HTML report</h1>
-<p>Generated on $(concuerror_date)</p>
-<ul>
-$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
-</ul>
-</body>
-</html>
-endef
-
-concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
- $(eval concuerror_date := $(shell date))
- $(eval concuerror_targets := $^)
- $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
-
-define concuerror_target
-.PHONY: concuerror-$1-$2
-
-concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
- $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
- --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
- -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
- $$(CONCUERROR_OPTS) -m $1 -t $2
-endef
-
-$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
-
-distclean-concuerror:
- $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
-
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ct apps-ct distclean-ct
-
-# Configuration.
-
-CT_OPTS ?=
-
-ifneq ($(wildcard $(TEST_DIR)),)
-ifndef CT_SUITES
-CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
-endif
-endif
-CT_SUITES ?=
-CT_LOGS_DIR ?= $(CURDIR)/logs
-
-# Core targets.
-
-tests:: ct
-
-ifndef KEEP_LOGS
-distclean:: distclean-ct
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Common_test targets:" \
- " ct Run all the common_test suites for this project" \
- "" \
- "All your common_test suites have their associated targets." \
- "A suite named http_SUITE can be run using the ct-http target."
-
-# Plugin-specific targets.
-
-CT_RUN = ct_run \
- -no_auto_compile \
- -noinput \
- -pa $(CURDIR)/ebin $(TEST_DIR) \
- -dir $(TEST_DIR) \
- -logdir $(CT_LOGS_DIR)
-
-ifeq ($(CT_SUITES),)
-ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
-else
-# We do not run tests if we are in an apps/* with no test directory.
-ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
-ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
-endif
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-define ct_app_target
-apps-ct-$1: test-build
- $$(MAKE) -C $1 ct IS_APP=1
-endef
-
-$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
-
-apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
-endif
-
-ifdef t
-ifeq (,$(findstring :,$t))
-CT_EXTRA = -group $t
-else
-t_words = $(subst :, ,$t)
-CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
-endif
-else
-ifdef c
-CT_EXTRA = -case $c
-else
-CT_EXTRA =
-endif
-endif
-
-define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
-endef
-
-$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
-
-distclean-ct:
- $(gen_verbose) rm -rf $(CT_LOGS_DIR)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: plt distclean-plt dialyze
-
-# Configuration.
-
-DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
-export DIALYZER_PLT
-
-PLT_APPS ?=
-DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-DIALYZER_PLT_OPTS ?=
-
-# Core targets.
-
-check:: dialyze
-
-distclean:: distclean-plt
-
-help::
- $(verbose) printf "%s\n" "" \
- "Dialyzer targets:" \
- " plt Build a PLT file for this project" \
- " dialyze Analyze the project using Dialyzer"
-
-# Plugin-specific targets.
-
-define filter_opts.erl
- Opts = init:get_plain_arguments(),
- {Filtered, _} = lists:foldl(fun
- (O, {Os, true}) -> {[O|Os], false};
- (O = "-D", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-I", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-pa", {Os, _}) -> {[O|Os], true};
- (_, Acc) -> Acc
- end, {[], false}, Opts),
- io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
- halt().
-endef
-
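The filter_opts.erl snippet above keeps only the -D, -I and -pa erlc options (plus their separate arguments) before handing them to Dialyzer. A minimal standalone sketch of the same fold, free of the Makefile escaping (the option list is invented for illustration):

#!/usr/bin/env escript
%% Hypothetical standalone version of the fold in filter_opts.erl above;
%% only -D*, -I* and -pa options (and their arguments) survive.
main(_) ->
    Opts = ["+debug_info", "-DTEST", "-I", "include", "-pa", "ebin", "+warn_missing_spec"],
    {Filtered, _} = lists:foldl(fun
        (O, {Os, true}) -> {[O|Os], false};
        (O = "-D", {Os, _}) -> {[O|Os], true};
        (O = [$-, $D, _ | _], {Os, _}) -> {[O|Os], false};
        (O = "-I", {Os, _}) -> {[O|Os], true};
        (O = [$-, $I, _ | _], {Os, _}) -> {[O|Os], false};
        (O = "-pa", {Os, _}) -> {[O|Os], true};
        (_, Acc) -> Acc
    end, {[], false}, Opts),
    %% prints: -DTEST -I include -pa ebin
    io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]).
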
-# DIALYZER_PLT is a variable understood directly by Dialyzer.
-#
-# We append the path to erts at the end of the PLT. This works
-# because the PLT file is in the external term format and the
-# function binary_to_term/1 ignores any trailing data.
-$(DIALYZER_PLT): deps app
- $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
- while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
- $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
- erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
- $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
-
-plt: $(DIALYZER_PLT)
-
-distclean-plt:
- $(gen_verbose) rm -f $(DIALYZER_PLT)
-
-ifneq ($(wildcard $(DIALYZER_PLT)),)
-dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
- $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
- grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
- rm $(DIALYZER_PLT); \
- $(MAKE) plt; \
- fi
-else
-dialyze: $(DIALYZER_PLT)
-endif
- $(verbose) dialyzer --no_native `$(ERL) \
- -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
- -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
-
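As the comment above explains, the erts lib dir is appended to the PLT as plain text and Dialyzer still accepts the file because the external term format reader ignores trailing bytes. A small sketch of that property (the tuple and path below are placeholders, not a real PLT):

#!/usr/bin/env escript
%% Sketch only: binary_to_term/1 decodes the first complete term and ignores
%% whatever follows it, which is what makes appending the erts path safe.
main(_) ->
    FakePlt = term_to_binary({plt, "placeholder"}),
    WithPath = <<FakePlt/binary, "\n/usr/lib/erlang/lib/erts-12.2\n">>,
    %% prints: {plt,"placeholder"} -- the appended path is ignored
    io:format("~p~n", [binary_to_term(WithPath)]).
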
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
-
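The erlydtl_compile.erl snippet above derives the module name either from the full template path (DTL_FULL_PATH set) or from the basename alone. A standalone sketch of just that naming step, with an invented template path and the default "_dtl" suffix (erlydtl itself is not needed here):

#!/usr/bin/env escript
%% Invented path; mirrors the Module0/Module computation above.
main(_) ->
    F = "templates/users/list.dtl",
    %% DTL_FULL_PATH set: strip the templates/ prefix, turn "/" into "_"
    "templates/" ++ Rest = filename:rootname(F, ".dtl"),
    Full = re:replace(Rest, "/", "_", [{return, list}, global]),
    %% DTL_FULL_PATH empty: only the basename is used
    Base = filename:basename(F, ".dtl"),
    %% prints: users_list_dtl and list_dtl
    io:format("~s and ~s~n",
              [string:to_lower(Full) ++ "_dtl",
               string:to_lower(Base) ++ "_dtl"]).
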
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
- " escript Build an executable escript archive" \
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
- $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: eunit apps-eunit
-
-# Configuration
-
-EUNIT_OPTS ?=
-EUNIT_ERL_OPTS ?=
-
-# Core targets.
-
-tests:: eunit
-
-help::
- $(verbose) printf "%s\n" "" \
- "EUnit targets:" \
- " eunit Run all the EUnit tests for this project"
-
-# Plugin-specific targets.
-
-define eunit.erl
- $(call cover.erl)
- CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
- ok -> ok;
- error -> halt(2)
- end,
- CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
- halt()
-endef
-
-EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
-else
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
-endif
-else
-EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
-EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
-
-EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
- $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
-
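EUNIT_MODS drops every <mod>_tests module that has a matching <mod> in ebin, because EUnit already runs the companion _tests module when the primary module is tested. A small illustration of that filtering with invented module names:

#!/usr/bin/env escript
%% Mirrors the filter-out/patsubst combination above; names are made up.
main(_) ->
    EbinMods = [cowboy, cowboy_req],
    TestMods = [cowboy_tests, http_helper, cowboy_req_tests],
    Companion = [list_to_atom(atom_to_list(M) ++ "_tests") || M <- EbinMods],
    %% prints: [cowboy,cowboy_req,http_helper] -- the *_tests twins are implied
    io:format("~p~n", [EbinMods ++ (TestMods -- Companion)]).
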
-eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
-ifneq ($(wildcard src/ $(TEST_DIR)),)
- $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-apps-eunit: test-build
- $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
- [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
- exit $$eunit_retcode
-endif
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper)
-.PHONY: proper
-
-# Targets.
-
-tests:: proper
-
-define proper_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- Module = fun(M) ->
- [true] =:= lists:usort([
- case atom_to_list(F) of
- "prop_" ++ _ ->
- io:format("Testing ~p:~p/0~n", [M, F]),
- proper:quickcheck(M:F(), nocolors);
- _ ->
- true
- end
- || {F, 0} <- M:module_info(exports)])
- end,
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]);
- module -> Module($(2));
- function -> proper:quickcheck($(2), nocolors)
- end,
- CoverExport("$(COVER_DATA_DIR)/proper.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-proper: test-build cover-data-dir
- $(verbose) $(call erlang,$(call proper_check.erl,module,$(t)))
-else
-proper: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)()))
-endif
-else
-proper: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Verbosity.
-
-proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
-proto_verbose = $(proto_verbose_$(V))
-
-# Core targets.
-
-ifneq ($(wildcard src/),)
-ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
-PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
-ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
-
-ifeq ($(PROTO_FILES),)
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
- $(verbose) :
-else
-# Rebuild proto files when the Makefile changes.
-# We exclude $(PROJECT).d to avoid a circular dependency.
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(PROTO_FILES); \
- fi
- $(verbose) touch $@
-
-$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
-endif
-
-ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
-define compile_proto.erl
- [begin
- protobuffs_compile:generate_source(F, [
- {output_include_dir, "./include"},
- {output_src_dir, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-else
-define compile_proto.erl
- [begin
- gpb_compile:file(F, [
- {include_as_lib, true},
- {module_name_suffix, "_pb"},
- {o_hrl, "./include"},
- {o_erl, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-endif
-
-ifneq ($(PROTO_FILES),)
-$(PROJECT).d:: $(PROTO_FILES)
- $(verbose) mkdir -p ebin/ include/
- $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
-endif
-endif
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
-
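get_relx_release.erl consults relx.config and prints the release name, version and whether an extended start script is requested; that output is then split into RELX_REL_NAME, RELX_REL_VSN and RELX_REL_CMD below. A simplified sketch with an inlined config (release name and version are placeholders, and the {cmd, ...}/semver version forms are omitted):

#!/usr/bin/env escript
%% The real code reads $(RELX_CONFIG) with file:consult/1 instead of an
%% inlined list; this only shows the extraction step.
main(_) ->
    Config = [{release, {my_rel, "1.2.3"}, [my_app]},
              {extended_start_script, true}],
    {release, {Name, Vsn}, _} = lists:keyfind(release, 1, Config),
    Extended = case lists:keyfind(extended_start_script, 1, Config) of
        {_, true} -> "1";
        _ -> ""
    end,
    %% prints: my_rel 1.2.3 1
    io:format("~s ~s ~s~n", [Name, Vsn, Extended]).
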
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
- " shell Run an Erlang shell with SHELL_OPTS or reasonable defaults"
-
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
-
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
- "ReST sources and the 'conf.py' file are expected in the directory pointed to by" \
- "SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists formats to build (only" \
- "'html' format is generated by default); the target directory can be specified by" \
- 'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
- "Additional Sphinx options can be set in SPHINX_OPTS."
-
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
-
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
-.PHONY: triq
-
-# Targets.
-
-tests:: triq
-
-define triq_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
- module -> triq:check($(2));
- function -> triq:check($(2))
- end,
- CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-triq: test-build cover-data-dir
- $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
-else
-triq: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
-endif
-else
-triq: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
- ' xref Run Xrefr using $$XREF_CONFIG as config file if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-COVER_REPORT_DIR ?= cover
-COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
-
-ifdef COVER
-COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
-COVER_DEPS ?=
-endif
-
-# Code coverage for Common Test.
-
-ifdef COVER
-ifdef CT_RUN
-ifneq ($(wildcard $(TEST_DIR)),)
-test-build:: $(TEST_DIR)/ct.cover.spec
-
-$(TEST_DIR)/ct.cover.spec: cover-data-dir
- $(gen_verbose) printf "%s\n" \
- "{incl_app, '$(PROJECT)', details}." \
- "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
- $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
- $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
-
-CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
-endif
-endif
-endif
-
-# Code coverage for other tools.
-
-ifdef COVER
-define cover.erl
- CoverSetup = fun() ->
- Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
- $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
- $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
- end
- end || Dir <- Dirs]
- end,
- CoverExport = fun(Filename) -> cover:export(Filename) end,
-endef
-else
-define cover.erl
- CoverSetup = fun() -> ok end,
- CoverExport = fun(_) -> ok end,
-endef
-endif
-
-# Core targets
-
-ifdef COVER
-ifneq ($(COVER_REPORT_DIR),)
-tests::
- $(verbose) $(MAKE) --no-print-directory cover-report
-endif
-
-cover-data-dir: | $(COVER_DATA_DIR)
-
-$(COVER_DATA_DIR):
- $(verbose) mkdir -p $(COVER_DATA_DIR)
-else
-cover-data-dir:
-endif
-
-clean:: coverdata-clean
-
-ifneq ($(COVER_REPORT_DIR),)
-distclean:: cover-report-clean
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Cover targets:" \
- " cover-report Generate an HTML coverage report from previously collected" \
- " cover data." \
- " all.coverdata Merge all coverdata files into all.coverdata." \
- "" \
- "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
- "target tests additionally generates an HTML coverage report from the combined" \
- "coverdata files from each of these testing tools. HTML reports can be disabled" \
- "by setting COVER_REPORT_DIR to empty."
-
-# Plugin specific targets
-
-COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
-
-.PHONY: coverdata-clean
-coverdata-clean:
- $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
-
-# Merge all coverdata files into one.
-define cover_export.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
-endef
-
-all.coverdata: $(COVERDATA) cover-data-dir
- $(gen_verbose) $(call erlang,$(cover_export.erl))
-
-# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
-# empty if you want the coverdata files but not the HTML report.
-ifneq ($(COVER_REPORT_DIR),)
-
-.PHONY: cover-report-clean cover-report
-
-cover-report-clean:
- $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
-ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
- $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
-endif
-
-ifeq ($(COVERDATA),)
-cover-report:
-else
-
-# Modules which include eunit.hrl always contain one line without coverage
-# because eunit defines test/0 which is never called. We compensate for this.
-EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
- grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
- | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
-
-define cover_report.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- Ms = cover:imported_modules(),
- [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
- ++ ".COVER.html", [html]) || M <- Ms],
- Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
- EunitHrlMods = [$(EUNIT_HRL_MODS)],
- Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
- true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
- TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
- TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
- Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
- TotalPerc = Perc(TotalY, TotalN),
- {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
- io:format(F, "<!DOCTYPE html><html>~n"
- "<head><meta charset=\"UTF-8\">~n"
- "<title>Coverage report</title></head>~n"
- "<body>~n", []),
- io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
- io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
- [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
- "<td>~p%</td></tr>~n",
- [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
- How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
- Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
- io:format(F, "</table>~n"
- "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
- "</body></html>", [How, Date]),
- halt().
-endef
-
-cover-report:
- $(verbose) mkdir -p $(COVER_REPORT_DIR)
- $(gen_verbose) $(call erlang,$(cover_report.erl))
-
-endif
-endif # ifneq ($(COVER_REPORT_DIR),)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
-
-endif
-endif
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
-
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
-
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are also included no
-# matter the type of requested dependencies.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
-
-# Allow the use of fetch-deps and $(DEP_TYPES) to fetch multiple types of
-# dependencies with a single target.
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
-
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
diff --git a/deps/rabbitmq_peer_discovery_common/include/rabbit_peer_discovery.hrl b/deps/rabbitmq_peer_discovery_common/include/rabbit_peer_discovery.hrl
index 14aa6ad3b8..271ee08d82 100644
--- a/deps/rabbitmq_peer_discovery_common/include/rabbit_peer_discovery.hrl
+++ b/deps/rabbitmq_peer_discovery_common/include/rabbit_peer_discovery.hrl
@@ -2,9 +2,13 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
+-include_lib("rabbit_common/include/logging.hrl").
+
+-define(RMQLOG_DOMAIN_PEER_DIS, ?DEFINE_RMQLOG_DOMAIN(peer_discovery)).
+
% rabbitmq/rabbitmq-peer-discovery-aws#25
% Note: this timeout must not be greater than the default
% gen_server:call timeout of 5000ms. This `timeout`,
@@ -12,7 +16,7 @@
% by `httpc`
-define(DEFAULT_HTTP_TIMEOUT, 2250).
--type peer_discovery_config_value() :: atom() | integer() | string() | undefined.
+-type peer_discovery_config_value() :: atom() | integer() | string() | list() | map() | any() | undefined.
-record(peer_discovery_config_entry_meta,
{env_variable :: string(),
diff --git a/deps/rabbitmq_peer_discovery_common/rabbitmq-components.mk b/deps/rabbitmq_peer_discovery_common/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/rabbitmq_peer_discovery_common/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Define the default goal as `all` because this file defines some targets
-# before the inclusion of erlang.mk, leading to the wrong target becoming
-# the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
-
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to check out branches which match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch, or fall back to `stable` or `master`, whichever was the
-# base of the topic branch.
-
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
-
-# Third-party dependencies version pinning.
-#
-# We do that in this file, which is copied in all projects, to ensure
-# all projects use the same versions. It avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# Erlang.mk does not rebuild dependencies by default once they have been
-# compiled, except for those listed in the `$(FORCE_REBUILD)`
-# variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this eases
-# the work on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project
-# repository URL, if it's a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name are replaced by the
-# target's properties:
-# eg. rabbitmq-common is replaced by rabbitmq-codegen
-# eg. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fall back to the RabbitMQ
-# upstream, which is on GitHub.
-
-# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following pattern:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
-
-# Macro to replace both the project's name (eg. "rabbit_common") and
-# repository name (eg. "rabbitmq-common") by the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespaces in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level's one.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is a coincidence that we found a
-# `rabbitmq-components.mk` in upper directories.
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
diff --git a/deps/rabbitmq_peer_discovery_common/src/rabbit_peer_discovery_cleanup.erl b/deps/rabbitmq_peer_discovery_common/src/rabbit_peer_discovery_cleanup.erl
index 9adc972b14..635ee08457 100644
--- a/deps/rabbitmq_peer_discovery_common/src/rabbit_peer_discovery_cleanup.erl
+++ b/deps/rabbitmq_peer_discovery_common/src/rabbit_peer_discovery_cleanup.erl
@@ -4,13 +4,14 @@
%%
%% The Initial Developer of the Original Code is AWeber Communications.
%% Copyright (c) 2015-2016 AWeber Communications
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_peer_discovery_cleanup).
-behaviour(gen_server).
--include("include/rabbit_peer_discovery.hrl").
+-include_lib("kernel/include/logger.hrl").
+-include("rabbit_peer_discovery.hrl").
-export([start_link/0,
check_cluster/0]).
@@ -84,7 +85,9 @@ init([]) ->
Map = ?CONFIG_MODULE:config_map(?CONFIG_KEY),
case map_size(Map) of
0 ->
- rabbit_log:info("Peer discovery: node cleanup is disabled", []),
+ ?LOG_INFO(
+ "Peer discovery: node cleanup is disabled",
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
{ok, #state{}};
_ ->
Interval = ?CONFIG_MODULE:get(cleanup_interval, ?CONFIG_MAPPING, Map),
@@ -96,8 +99,10 @@ init([]) ->
true -> "will only log warnings";
false -> "will remove nodes not known to the discovery backend"
end,
- rabbit_log:info("Peer discovery: enabling node cleanup (~s). Check interval: ~p seconds.",
- [WarnMsg, State#state.interval]),
+ ?LOG_INFO(
+ "Peer discovery: enabling node cleanup (~s). Check interval: ~p seconds.",
+ [WarnMsg, State#state.interval],
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
{ok, State}
end.
@@ -118,7 +123,9 @@ init([]) ->
{stop, Reason :: term(), NewState :: #state{}}).
handle_call(check_cluster, _From, State) ->
- rabbit_log:debug("Peer discovery: checking for partitioned nodes to clean up."),
+ ?LOG_DEBUG(
+ "Peer discovery: checking for partitioned nodes to clean up.",
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
maybe_cleanup(State),
{reply, ok, State};
handle_call(_Request, _From, State) ->
@@ -226,19 +233,27 @@ maybe_cleanup(State) ->
-spec maybe_cleanup(State :: #state{},
UnreachableNodes :: [node()]) -> ok.
maybe_cleanup(_, []) ->
- rabbit_log:debug("Peer discovery: all known cluster nodes are up.");
+ ?LOG_DEBUG(
+ "Peer discovery: all known cluster nodes are up.",
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS});
maybe_cleanup(State, UnreachableNodes) ->
- rabbit_log:debug("Peer discovery: cleanup discovered unreachable nodes: ~p",
- [UnreachableNodes]),
+ ?LOG_DEBUG(
+ "Peer discovery: cleanup discovered unreachable nodes: ~p",
+ [UnreachableNodes],
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
case lists:subtract(UnreachableNodes, service_discovery_nodes()) of
[] ->
- rabbit_log:debug("Peer discovery: all unreachable nodes are still "
- "registered with the discovery backend ~p",
- [rabbit_peer_discovery:backend()]),
+ ?LOG_DEBUG(
+ "Peer discovery: all unreachable nodes are still "
+ "registered with the discovery backend ~p",
+ [rabbit_peer_discovery:backend()],
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
ok;
Nodes ->
- rabbit_log:debug("Peer discovery: unreachable nodes are not registered "
- "with the discovery backend ~p", [Nodes]),
+ ?LOG_DEBUG(
+ "Peer discovery: unreachable nodes are not registered "
+ "with the discovery backend ~p", [Nodes],
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
maybe_remove_nodes(Nodes, State#state.warn_only)
end.
@@ -254,10 +269,14 @@ maybe_cleanup(State, UnreachableNodes) ->
WarnOnly :: true | false) -> ok.
maybe_remove_nodes([], _) -> ok;
maybe_remove_nodes([Node | Nodes], true) ->
- rabbit_log:warning("Peer discovery: node ~s is unreachable", [Node]),
+ ?LOG_WARNING(
+ "Peer discovery: node ~s is unreachable", [Node],
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
maybe_remove_nodes(Nodes, true);
maybe_remove_nodes([Node | Nodes], false) ->
- rabbit_log:warning("Peer discovery: removing unknown node ~s from the cluster", [Node]),
+ ?LOG_WARNING(
+ "Peer discovery: removing unknown node ~s from the cluster", [Node],
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
rabbit_mnesia:forget_cluster_node(Node, false),
maybe_remove_nodes(Nodes, false).
@@ -288,11 +307,15 @@ service_discovery_nodes() ->
Module = rabbit_peer_discovery:backend(),
case rabbit_peer_discovery:normalize(Module:list_nodes()) of
{ok, {Nodes, _Type}} ->
- rabbit_log:debug("Peer discovery cleanup: ~p returned ~p",
- [Module, Nodes]),
+ ?LOG_DEBUG(
+ "Peer discovery cleanup: ~p returned ~p",
+ [Module, Nodes],
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
Nodes;
{error, Reason} ->
- rabbit_log:debug("Peer discovery cleanup: ~p returned error ~p",
- [Module, Reason]),
+ ?LOG_DEBUG(
+ "Peer discovery cleanup: ~p returned error ~p",
+ [Module, Reason],
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
[]
end.
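
Note (illustrative, not part of the patch): the hunks above, and the similar ones in the files that follow, all make the same change. Direct rabbit_log:* calls are replaced with the OTP Logger macros from kernel/include/logger.hrl, and every call now carries a metadata map that tags the message with the peer-discovery log domain. A minimal sketch of the resulting pattern, using a hypothetical module and function name, might look roughly like this (it assumes the two includes put ?RMQLOG_DOMAIN_PEER_DIS in scope, as they do for the modules in this diff):

    %% Hypothetical module; mirrors the logging pattern introduced above.
    -module(peer_discovery_logging_example).
    -include_lib("kernel/include/logger.hrl").
    -include("rabbit_peer_discovery.hrl").   %% assumed to provide ?RMQLOG_DOMAIN_PEER_DIS
    -export([log_unreachable/1]).

    log_unreachable(Node) ->
        %% Before: rabbit_log:warning("Peer discovery: node ~s is unreachable", [Node]).
        %% After: the standard ?LOG_WARNING/3 macro with format, arguments and metadata,
        %% so peer-discovery messages can be filtered or routed by their log domain.
        ?LOG_WARNING(
           "Peer discovery: node ~s is unreachable", [Node],
           #{domain => ?RMQLOG_DOMAIN_PEER_DIS}).
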
diff --git a/deps/rabbitmq_peer_discovery_common/src/rabbit_peer_discovery_common_app.erl b/deps/rabbitmq_peer_discovery_common/src/rabbit_peer_discovery_common_app.erl
index ee9cc02b7a..b0a5e68eb5 100644
--- a/deps/rabbitmq_peer_discovery_common/src/rabbit_peer_discovery_common_app.erl
+++ b/deps/rabbitmq_peer_discovery_common/src/rabbit_peer_discovery_common_app.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_peer_discovery_common_app).
diff --git a/deps/rabbitmq_peer_discovery_common/src/rabbit_peer_discovery_common_sup.erl b/deps/rabbitmq_peer_discovery_common/src/rabbit_peer_discovery_common_sup.erl
index 8166be5013..aac670bbd6 100644
--- a/deps/rabbitmq_peer_discovery_common/src/rabbit_peer_discovery_common_sup.erl
+++ b/deps/rabbitmq_peer_discovery_common/src/rabbit_peer_discovery_common_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_peer_discovery_common_sup).
diff --git a/deps/rabbitmq_peer_discovery_common/src/rabbit_peer_discovery_config.erl b/deps/rabbitmq_peer_discovery_common/src/rabbit_peer_discovery_config.erl
index e5e14aa67f..14fcbdb843 100644
--- a/deps/rabbitmq_peer_discovery_common/src/rabbit_peer_discovery_config.erl
+++ b/deps/rabbitmq_peer_discovery_common/src/rabbit_peer_discovery_config.erl
@@ -4,11 +4,12 @@
%%
%% The Initial Developer of the Original Code is AWeber Communications.
%% Copyright (c) 2015-2016 AWeber Communications
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_peer_discovery_config).
+-include_lib("kernel/include/logger.hrl").
-include("rabbit_peer_discovery.hrl").
-export([get/3, get_integer/3, config_map/1]).
@@ -24,7 +25,10 @@
get(Key, Mapping, Config) ->
case maps:is_key(Key, Mapping) of
false ->
- rabbit_log:error("Key ~s is not found in peer discovery config mapping ~p!", [Key, Mapping]),
+ ?LOG_ERROR(
+ "Key ~s is not found in peer discovery config mapping ~p!",
+ [Key, Mapping],
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
throw({badkey, Key});
true ->
get_with_entry_meta(Key, maps:get(Key, Mapping), Config)
@@ -37,7 +41,10 @@ get(Key, Mapping, Config) ->
get_integer(Key, Mapping, Config) ->
case maps:is_key(Key, Mapping) of
false ->
- rabbit_log:error("Key ~s is not found in peer discovery config mapping ~p!", [Key, Mapping]),
+ ?LOG_ERROR(
+ "Key ~s is not found in peer discovery config mapping ~p!",
+ [Key, Mapping],
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
throw({badkey, Key});
true ->
get_integer_with_entry_meta(Key, maps:get(Key, Mapping), Config)
diff --git a/deps/rabbitmq_peer_discovery_common/src/rabbit_peer_discovery_httpc.erl b/deps/rabbitmq_peer_discovery_common/src/rabbit_peer_discovery_httpc.erl
index 44062465c6..f0c734adc4 100644
--- a/deps/rabbitmq_peer_discovery_common/src/rabbit_peer_discovery_httpc.erl
+++ b/deps/rabbitmq_peer_discovery_common/src/rabbit_peer_discovery_httpc.erl
@@ -4,12 +4,13 @@
%%
%% The Initial Developer of the Original Code is AWeber Communications.
%% Copyright (c) 2015-2016 AWeber Communications
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_peer_discovery_httpc).
--include("include/rabbit_peer_discovery.hrl").
+-include_lib("kernel/include/logger.hrl").
+-include("rabbit_peer_discovery.hrl").
%%
%% API
@@ -138,10 +139,10 @@ get(Scheme, Host, Port, Path, Args) ->
%%
get(Scheme, Host, Port, Path, Args, Headers, HttpOpts) ->
URL = build_uri(Scheme, Host, Port, Path, Args),
- rabbit_log:debug("GET ~s", [URL]),
+ ?LOG_DEBUG("GET ~s", [URL], #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
HttpOpts1 = ensure_timeout(HttpOpts),
Response = httpc:request(get, {URL, Headers}, HttpOpts1, []),
- rabbit_log:debug("Response: ~p", [Response]),
+ ?LOG_DEBUG("Response: ~p", [Response], #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
parse_response(Response).
@@ -176,10 +177,10 @@ post(Scheme, Host, Port, Path, Args, Body) ->
%%
post(Scheme, Host, Port, Path, Args, Headers, HttpOpts, Body) ->
URL = build_uri(Scheme, Host, Port, Path, Args),
- rabbit_log:debug("POST ~s [~p]", [URL, Body]),
+ ?LOG_DEBUG("POST ~s [~p]", [URL, Body], #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
HttpOpts1 = ensure_timeout(HttpOpts),
Response = httpc:request(post, {URL, Headers, ?CONTENT_JSON, Body}, HttpOpts1, []),
- rabbit_log:debug("Response: [~p]", [Response]),
+ ?LOG_DEBUG("Response: [~p]", [Response], #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
parse_response(Response).
@@ -205,10 +206,10 @@ post(Scheme, Host, Port, Path, Args, Headers, HttpOpts, Body) ->
Body :: string() | binary() | tuple().
put(Scheme, Host, Port, Path, Args, Body) ->
URL = build_uri(Scheme, Host, Port, Path, Args),
- rabbit_log:debug("PUT ~s [~p]", [URL, Body]),
+ ?LOG_DEBUG("PUT ~s [~p]", [URL, Body], #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
HttpOpts = ensure_timeout(),
Response = httpc:request(put, {URL, [], ?CONTENT_URLENCODED, Body}, HttpOpts, []),
- rabbit_log:debug("Response: [~p]", [Response]),
+ ?LOG_DEBUG("Response: [~p]", [Response], #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
parse_response(Response).
@@ -234,10 +235,10 @@ put(Scheme, Host, Port, Path, Args, Body) ->
Body :: string() | binary() | tuple().
put(Scheme, Host, Port, Path, Args, Headers, Body) ->
URL = build_uri(Scheme, Host, Port, Path, Args),
- rabbit_log:debug("PUT ~s [~p] [~p]", [URL, Headers, Body]),
+ ?LOG_DEBUG("PUT ~s [~p] [~p]", [URL, Headers, Body], #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
HttpOpts = ensure_timeout(),
Response = httpc:request(put, {URL, Headers, ?CONTENT_URLENCODED, Body}, HttpOpts, []),
- rabbit_log:debug("Response: [~p]", [Response]),
+ ?LOG_DEBUG("Response: [~p]", [Response], #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
parse_response(Response).
@@ -258,10 +259,10 @@ delete(Scheme, Host, Port, PathSegments, Args, Body) when is_list(PathSegments)
delete(Scheme, Host, Port, Path, Args, Body);
delete(Scheme, Host, Port, Path, Args, Body) ->
URL = build_uri(Scheme, Host, Port, Path, Args),
- rabbit_log:debug("DELETE ~s [~p]", [URL, Body]),
+ ?LOG_DEBUG("DELETE ~s [~p]", [URL, Body], #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
HttpOpts = ensure_timeout(),
Response = httpc:request(delete, {URL, [], ?CONTENT_URLENCODED, Body}, HttpOpts, []),
- rabbit_log:debug("Response: [~p]", [Response]),
+ ?LOG_DEBUG("Response: [~p]", [Response], #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
parse_response(Response).
@@ -275,14 +276,18 @@ maybe_configure_proxy() ->
Map = ?CONFIG_MODULE:config_map(?CONFIG_KEY),
case map_size(Map) of
0 ->
- rabbit_log:debug("HTTP client proxy is not configured"),
+ ?LOG_DEBUG(
+ "HTTP client proxy is not configured",
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
ok;
_ ->
HttpProxy = ?CONFIG_MODULE:get(http_proxy, ?CONFIG_MAPPING, Map),
HttpsProxy = ?CONFIG_MODULE:get(https_proxy, ?CONFIG_MAPPING, Map),
ProxyExclusions = ?CONFIG_MODULE:get(proxy_exclusions, ?CONFIG_MAPPING, Map),
- rabbit_log:debug("Configured HTTP proxy: ~p, HTTPS proxy: ~p, exclusions: ~p",
- [HttpProxy, HttpsProxy, ProxyExclusions]),
+ ?LOG_DEBUG(
+ "Configured HTTP proxy: ~p, HTTPS proxy: ~p, exclusions: ~p",
+ [HttpProxy, HttpsProxy, ProxyExclusions],
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
maybe_set_proxy(proxy, HttpProxy, ProxyExclusions),
maybe_set_proxy(https_proxy, HttpsProxy, ProxyExclusions),
ok
@@ -315,9 +320,10 @@ maybe_set_proxy(Option, ProxyUrl, ProxyExclusions) ->
UriMap ->
Host = maps:get(host, UriMap),
Port = maps:get(port, UriMap, 80),
- rabbit_log:debug(
+ ?LOG_DEBUG(
"Configuring HTTP client's ~s setting: ~p, exclusions: ~p",
- [Option, {Host, Port}, ProxyExclusions]),
+ [Option, {Host, Port}, ProxyExclusions],
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
httpc:set_option(Option, {{Host, Port}, ProxyExclusions})
end.
@@ -360,9 +366,11 @@ decode_body(?CONTENT_JSON, Body) ->
{ok, Value} ->
Value;
{error, Err} ->
- rabbit_log:error("HTTP client could not decode a JSON payload "
- "(JSON parser returned an error): ~p.~n",
- [Err]),
+ ?LOG_ERROR(
+ "HTTP client could not decode a JSON payload "
+ "(JSON parser returned an error): ~p.",
+ [Err],
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
[]
end.
@@ -375,14 +383,14 @@ decode_body(?CONTENT_JSON, Body) ->
-spec parse_response({ok, integer(), string()} | {error, any()}) -> {ok, string()} | {error, any()}.
parse_response({error, Reason}) ->
- rabbit_log:debug("HTTP error ~p", [Reason]),
+ ?LOG_DEBUG("HTTP error ~p", [Reason], #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
{error, lists:flatten(io_lib:format("~p", [Reason]))};
parse_response({ok, 200, Body}) -> {ok, decode_body(?CONTENT_JSON, Body)};
parse_response({ok, 201, Body}) -> {ok, decode_body(?CONTENT_JSON, Body)};
parse_response({ok, 204, _}) -> {ok, []};
parse_response({ok, Code, Body}) ->
- rabbit_log:debug("HTTP Response (~p) ~s", [Code, Body]),
+ ?LOG_DEBUG("HTTP Response (~p) ~s", [Code, Body], #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
{error, integer_to_list(Code)};
parse_response({ok, {{_,200,_}, Headers, Body}}) ->
@@ -391,7 +399,7 @@ parse_response({ok,{{_,201,_}, Headers, Body}}) ->
{ok, decode_body(proplists:get_value("content-type", Headers, ?CONTENT_JSON), Body)};
parse_response({ok,{{_,204,_}, _, _}}) -> {ok, []};
parse_response({ok,{{_Vsn,Code,_Reason},_,Body}}) ->
- rabbit_log:debug("HTTP Response (~p) ~s", [Code, Body]),
+ ?LOG_DEBUG("HTTP Response (~p) ~s", [Code, Body], #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
{error, integer_to_list(Code)}.
%% @private
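
Note (illustrative, not part of the patch): rabbit_peer_discovery_httpc, changed above, wraps httpc with small GET/POST/PUT/DELETE helpers that build a URL, log the request and response at debug level, and run the result through parse_response/1, which decodes JSON bodies for 200/201 responses, returns {ok, []} for 204 and maps other status codes to errors. A hedged usage sketch follows; the host, port and path are hypothetical, and it assumes get/5 is exported, which the hunk context suggests but does not show:

    %% Hypothetical caller of the helper module changed above.
    -module(httpc_helper_example).
    -export([nodes_or_empty/2]).

    nodes_or_empty(Host, Port) ->
        %% get(Scheme, Host, Port, Path, Args) -> {ok, DecodedJson} | {error, Reason}
        case rabbit_peer_discovery_httpc:get("http", Host, Port, "v1/catalog/nodes", []) of
            {ok, Decoded}    -> Decoded;  %% decoded JSON on 200/201, [] on 204
            {error, _Reason} -> []        %% failures are already logged by the helper
        end.
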
diff --git a/deps/rabbitmq_peer_discovery_common/src/rabbit_peer_discovery_util.erl b/deps/rabbitmq_peer_discovery_common/src/rabbit_peer_discovery_util.erl
index 90584f13d8..f7b3a529e0 100644
--- a/deps/rabbitmq_peer_discovery_common/src/rabbit_peer_discovery_util.erl
+++ b/deps/rabbitmq_peer_discovery_common/src/rabbit_peer_discovery_util.erl
@@ -4,7 +4,7 @@
%%
%% The Initial Developer of the Original Code is AWeber Communications.
%% Copyright (c) 2015-2016 AWeber Communications
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_peer_discovery_util).
@@ -24,6 +24,8 @@
maybe_backend_configured/4
]).
+-include_lib("kernel/include/logger.hrl").
+-include("rabbit_peer_discovery.hrl").
%% Export all for unit tests
-ifdef(TEST).
@@ -90,8 +92,10 @@ as_atom(Value) when is_binary(Value) ->
as_atom(Value) when is_list(Value) ->
list_to_atom(Value);
as_atom(Value) ->
- rabbit_log:error("Unexpected data type for atom value: ~p~n",
- [Value]),
+ ?LOG_ERROR(
+ "Unexpected data type for atom value: ~p",
+ [Value],
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
Value.
@@ -109,8 +113,10 @@ as_integer(Value) when is_list(Value) ->
as_integer(Value) when is_integer(Value) ->
Value;
as_integer(Value) ->
- rabbit_log:error("Unexpected data type for integer value: ~p~n",
- [Value]),
+ ?LOG_ERROR(
+ "Unexpected data type for integer value: ~p",
+ [Value],
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
Value.
@@ -131,8 +137,10 @@ as_string(Value) when is_integer(Value) ->
as_string(Value) when is_list(Value) ->
lists:flatten(Value);
as_string(Value) ->
- rabbit_log:error("Unexpected data type for list value: ~p~n",
- [Value]),
+ ?LOG_ERROR(
+ "Unexpected data type for list value: ~p",
+ [Value],
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
Value.
@@ -310,13 +318,17 @@ as_proplist(List) when is_list(List) ->
[{binary_to_list(K), binary_to_list(V)}
|| {K, V} <- maps:to_list(Map)];
{error, Error} ->
- rabbit_log:error("Unexpected data type for proplist value: ~p. JSON parser returned an error: ~p!~n",
- [Value, Error]),
+ ?LOG_ERROR(
+ "Unexpected data type for proplist value: ~p. JSON parser returned an error: ~p!",
+ [Value, Error],
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
[]
end;
as_proplist(Value) ->
- rabbit_log:error("Unexpected data type for proplist value: ~p.~n",
- [Value]),
+ ?LOG_ERROR(
+ "Unexpected data type for proplist value: ~p.",
+ [Value],
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
[].
%%--------------------------------------------------------------------
@@ -337,15 +349,19 @@ as_map(List) when is_list(List) ->
{ok, Map} ->
Map;
{error, Error} ->
- rabbit_log:error("Unexpected data type for map value: ~p. JSON parser returned an error: ~p!~n",
- [Value, Error]),
+ ?LOG_ERROR(
+ "Unexpected data type for map value: ~p. JSON parser returned an error: ~p!",
+ [Value, Error],
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
[]
end;
as_map(Map) when is_map(Map) ->
Map;
as_map(Value) ->
- rabbit_log:error("Unexpected data type for map value: ~p.~n",
- [Value]),
+ ?LOG_ERROR(
+ "Unexpected data type for map value: ~p.",
+ [Value],
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
[].
-spec stringify_error({ok, term()} | {error, term()}) -> {ok, term()} | {error, string()}.
@@ -368,12 +384,18 @@ maybe_backend_configured(BackendConfigKey,
undefined ->
ClusterFormationUndefinedFun();
{ok, ClusterFormation} ->
- rabbit_log:debug("Peer discovery: translated cluster formation configuration: ~p", [ClusterFormation]),
+ ?LOG_DEBUG(
+ "Peer discovery: translated cluster formation configuration: ~p",
+ [ClusterFormation],
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
case proplists:get_value(BackendConfigKey, ClusterFormation) of
undefined ->
BackendUndefinedFun();
Proplist ->
- rabbit_log:debug("Peer discovery: cluster formation backend configuration: ~p", [Proplist]),
+ ?LOG_DEBUG(
+ "Peer discovery: cluster formation backend configuration: ~p",
+ [Proplist],
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
ConfiguredFun(Proplist)
end
end.
@@ -399,6 +421,8 @@ as_list(Value) when is_list(Value) ->
false -> Value
end;
as_list(Value) ->
- rabbit_log:error("Unexpected data type for list value: ~p~n",
- [Value]),
+ ?LOG_ERROR(
+ "Unexpected data type for list value: ~p",
+ [Value],
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
Value.
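
Note (illustrative, not part of the patch): the rabbit_peer_discovery_util hunks above touch a family of coercion helpers (as_atom/1, as_integer/1, as_string/1, as_proplist/1, as_map/1, as_list/1) that share one shape: convert recognised types, and for anything unexpected log an error and fall through rather than crash. Below is a standalone sketch of that shape, not a copy of the module; the list-to-integer conversion is an assumption, since the hunks show only the guard of that clause, and the real module also attaches the peer-discovery log domain:

    -module(coerce_example).
    -include_lib("kernel/include/logger.hrl").
    -export([as_integer/1]).

    %% Convert recognised representations of an integer...
    as_integer(Value) when is_list(Value)    -> list_to_integer(Value);
    as_integer(Value) when is_integer(Value) -> Value;
    %% ...and for anything else, log the problem and return the value unchanged,
    %% mirroring the fall-through clauses in the diff above.
    as_integer(Value) ->
        ?LOG_ERROR("Unexpected data type for integer value: ~p", [Value]),
        Value.
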
diff --git a/deps/rabbitmq_peer_discovery_common/test/config_schema_SUITE.erl b/deps/rabbitmq_peer_discovery_common/test/config_schema_SUITE.erl
index 4b9dea3f9e..6eaa8b3cae 100644
--- a/deps/rabbitmq_peer_discovery_common/test/config_schema_SUITE.erl
+++ b/deps/rabbitmq_peer_discovery_common/test/config_schema_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(config_schema_SUITE).
diff --git a/deps/rabbitmq_peer_discovery_consul/BUILD.bazel b/deps/rabbitmq_peer_discovery_consul/BUILD.bazel
new file mode 100644
index 0000000000..9c2a2b4e9d
--- /dev/null
+++ b/deps/rabbitmq_peer_discovery_consul/BUILD.bazel
@@ -0,0 +1,65 @@
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze")
+load(
+ "//:rabbitmq.bzl",
+ "RABBITMQ_DIALYZER_OPTS",
+ "assert_suites",
+ "broker_for_integration_suites",
+ "rabbitmq_integration_suite",
+ "rabbitmq_lib",
+ "rabbitmq_suite",
+)
+
+APP_NAME = "rabbitmq_peer_discovery_consul"
+
+APP_DESCRIPTION = "Consult-based RabbitMQ peer discovery backend"
+
+APP_MODULE = "rabbitmq_peer_discovery_consul_app"
+
+DEPS = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+ "//deps/rabbitmq_peer_discovery_common:bazel_erlang_lib",
+]
+
+RUNTIME_DEPS = [
+ "//deps/rabbit:bazel_erlang_lib",
+]
+
+rabbitmq_lib(
+ app_description = APP_DESCRIPTION,
+ app_module = APP_MODULE,
+ app_name = APP_NAME,
+ runtime_deps = RUNTIME_DEPS,
+ deps = DEPS,
+)
+
+xref(tags = ["xref"])
+
+dialyze(
+ dialyzer_opts = RABBITMQ_DIALYZER_OPTS,
+ plt = "//:base_plt",
+ tags = ["dialyze"],
+)
+
+broker_for_integration_suites()
+
+PACKAGE = "deps/rabbitmq_peer_discovery_consul"
+
+suites = [
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "config_schema_SUITE",
+ ),
+ rabbitmq_suite(
+ name = "rabbitmq_peer_discovery_consul_SUITE",
+ size = "medium",
+ runtime_deps = [
+ "@meck//:bazel_erlang_lib",
+ ],
+ ),
+]
+
+assert_suites(
+ suites,
+ glob(["test/**/*_SUITE.erl"]),
+)
diff --git a/deps/rabbitmq_peer_discovery_consul/Makefile b/deps/rabbitmq_peer_discovery_consul/Makefile
index 48b2f4477c..fe3395c548 100644
--- a/deps/rabbitmq_peer_discovery_consul/Makefile
+++ b/deps/rabbitmq_peer_discovery_consul/Makefile
@@ -15,5 +15,5 @@ DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
ERLANG_MK_COMMIT = rabbitmq-tmp
-include rabbitmq-components.mk
-include erlang.mk
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
diff --git a/deps/rabbitmq_peer_discovery_consul/erlang.mk b/deps/rabbitmq_peer_discovery_consul/erlang.mk
deleted file mode 100644
index fce4be0b0a..0000000000
--- a/deps/rabbitmq_peer_discovery_consul/erlang.mk
+++ /dev/null
@@ -1,7808 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
-ERLANG_MK_WITHOUT =
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
-
-# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
-
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
-
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simplify writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating Github-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = erlang library for JSON Web Token
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple non intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is a pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elasticsearch's REST interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = redis erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += esh_mk
-pkg_esh_mk_name = esh_mk
-pkg_esh_mk_description = esh template engine plugin for erlang.mk
-pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
-pkg_esh_mk_fetch = git
-pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
-pkg_esh_mk_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = TOML language erlang parser
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = FIX protocol (http://fixprotocol.org/) implementation.
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - an Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = Guard macros for Erlang, implemented as a parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
-pkg_gun_homepage = http://ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang irc client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding and decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library for efficiently decoding and encoding JSON, written in C.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-Erlang, open-source implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = erlang driver for rethinkdb
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = library to handle mimetypes
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang OAuth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transform for using new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between record and proplist
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = automatic Erlang node discovery via redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = pipelined erlang redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang Rest Client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy as nif for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an id generator for message service.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elastic Search
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX style broken HTML parser in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = transit format for erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU based collation Erlang module
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtension for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = Tiny Erlang app that works in conjunction with statsderl to generate Erlang VM metrics for Graphite
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = A simple Erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler SVN repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired by rebar xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based YAML loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = ZAB protocol implementation in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
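-
-# A minimal usage sketch of the search target above (the query strings are
-# illustrative, not taken from this file):
-#   $ make search q=websocket    # packages whose name or description matches
-#   $ make search                # print every known package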
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
-
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
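-
-# A minimal configuration sketch for a project Makefile (values are
-# illustrative); these variables are typically set near the top of the
-# project Makefile, before erlang.mk is included:
-#   IGNORE_DEPS += edown
-#   DEPS_DIR = $(CURDIR)/vendor
-#   APPS_DIR = $(CURDIR)/lib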
-
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# They both use the core_dep_plugin macro.
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
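-
-# A sketch of DEP_EARLY_PLUGINS values, assuming a hypothetical dependency
-# named "mydep":
-#   DEP_EARLY_PLUGINS = mydep                     # loads $(DEPS_DIR)/mydep/early-plugins.mk
-#   DEP_EARLY_PLUGINS = mydep/mk/early-plugins.mk # loads that exact file from $(DEPS_DIR)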
-
-# Query functions.
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
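-
-# A sketch of what the query functions resolve to, assuming a hypothetical
-# dependency declared as "dep_cowboy = git https://github.com/ninenines/cowboy 2.9.0":
-#   $(call query_fetch_method,cowboy)  -> git
-#   $(call query_repo,cowboy)          -> https://github.com/ninenines/cowboy
-#   $(call query_version,cowboy)       -> 2.9.0
-#   $(call query_absolute_path,cowboy) -> $(DEPS_DIR)/cowboy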
-
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
-
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly we don't want to include it here,
-# otherwise it'll be treated both as an app and as a top-level project.
-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
-
-export NO_AUTOPATCH
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
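-
-# A sketch of how verbosity is selected on the command line (assuming the
-# usual erlang.mk V variable, which defaults to 0):
-#   $ make          # terse "  DEP    name (commit)" style output
-#   $ make V=2      # echo every shell command (set -x)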
-
-# Optimization: don't recompile deps unless truly necessary.
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create an ebin directory for all apps to make sure Erlang recognizes them
-# as proper OTP applications when using -include_lib. This is a temporary
-# fix; a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
-
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies after they have been compiled
-# once. If a developer is working on the top-level project and some
-# dependencies at the same time, they may want to change this behavior.
-# There are two solutions (see the usage sketch below):
-# 1. Set `FULL=1` so that all dependencies are visited and
-# recursively recompiled if necessary.
-# 2. Set `FORCE_REBUILD=` to the specific list of dependencies that
-# should be recompiled (instead of the whole set).
-
-FORCE_REBUILD ?=
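-
-# A usage sketch for the two solutions above (dependency names are
-# illustrative):
-#   $ make FULL=1                        # visit and rebuild all dependencies
-#   $ make FORCE_REBUILD="cowlib ranch"  # rebuild these even if already built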
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
-
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
-
-# Deps related targets.
-
-# @todo rename GNUmakefile and makefile into Makefile first, if they exist
-# While the Makefile could also be named GNUmakefile or makefile,
-# in practice only Makefile has been needed so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
-# if given. Do it for all 3 possible Makefile file names.
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
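-
-# A sketch of how autopatching can be disabled (dependency names are
-# illustrative):
-#   NO_AUTOPATCH = cowboy ranch   # skip the autopatch step for these deps
-#   NO_AUTOPATCH_ERLANG_MK = 1    # leave the deps' own "include erlang.mk" line untouched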
-
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-# Hex only has a package version. No need to look in the Erlang.mk packages.
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
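-
-# A sketch of dependency declarations handled by the fetch methods above
-# (package names and versions are illustrative):
-#   DEPS = cowboy jsx
-#   dep_cowboy = git https://github.com/ninenines/cowboy 2.9.0
-#   dep_jsx = hex 3.1.0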
-
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility with older Erlang.mk configuration.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
-
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
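-
-# A sketch of compiler configuration in a project Makefile (module names are
-# illustrative):
-#   ERLC_OPTS += +warn_missing_spec
-#   COMPILE_FIRST = my_behaviour
-#   ERLC_EXCLUDE = my_generated_module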
-
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
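-
-# A sketch of the project variables consumed by the app_file templates above
-# (values are illustrative):
-#   PROJECT_DESCRIPTION = Example application
-#   PROJECT_VERSION = 1.2.3
-#   PROJECT_ENV = [{pool_size, 10}]
-#   PROJECT_REGISTERED = my_registry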
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- Output0 = [
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ],
- Output = case "é" of
- [233] -> unicode:characters_to_binary(Output0);
- _ -> Output0
- end,
- ok = file:write_file("$(1)", Output),
- halt()
-endef
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
- echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
-
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
- @:
-
-test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
-test_erlc_verbose_2 = set -x;
-test_erlc_verbose = $(test_erlc_verbose_$(V))
-
-define compile_test_erl
- $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(1)
-endef
-
-ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
-$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
- $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want to fail due to
-# warnings when used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
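-
-# Usage sketch: generate a compatibility rebar.config from the current
-# DEPS and ERLC_OPTS:
-#   $ make rebar.config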
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
- " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
- " list-templates List available templates"
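-
-# A usage sketch for the targets listed above (module and application names
-# are illustrative):
-#   $ make new t=gen_server n=my_server
-#   $ make new-lib in=my_utils
-#   $ make list-templates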
-
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
-
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
-
-list-templates:
- $(verbose) @echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code. The "gcc" MSYS2 package also doesn't.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
- "The CI_OTP variable must be defined with the Erlang versions" \
- "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
-
-# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifdef CONCUERROR_TESTS
-
-.PHONY: concuerror distclean-concuerror
-
-# Configuration
-
-CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
-CONCUERROR_OPTS ?=
-
-# Core targets.
-
-check:: concuerror
-
-ifndef KEEP_LOGS
-distclean:: distclean-concuerror
-endif
-
-# Plugin-specific targets.
-
-$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
- $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
- $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
-
-$(CONCUERROR_LOGS_DIR):
- $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
-
-define concuerror_html_report
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="utf-8">
-<title>Concuerror HTML report</title>
-</head>
-<body>
-<h1>Concuerror HTML report</h1>
-<p>Generated on $(concuerror_date)</p>
-<ul>
-$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
-</ul>
-</body>
-</html>
-endef
-
-concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
- $(eval concuerror_date := $(shell date))
- $(eval concuerror_targets := $^)
- $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
-
-define concuerror_target
-.PHONY: concuerror-$1-$2
-
-concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
- $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
- --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
- -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
- $$(CONCUERROR_OPTS) -m $1 -t $2
-endef
-
-$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
-
-distclean-concuerror:
- $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
-
-endif
-
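Each entry in CONCUERROR_TESTS is a module:test pair; the generated target above passes it to Concuerror as "-m Module -t Test", so the test is expected to be an exported 0-arity function. A minimal sketch of such a module, under made-up names (an illustration only, not part of erlang.mk):

    -module(my_mod).
    -export([race_test/0]).

    %% Two processes race to deliver a message; Concuerror explores the
    %% possible interleavings of this entry point.
    race_test() ->
        Self = self(),
        spawn(fun() -> Self ! a end),
        spawn(fun() -> Self ! b end),
        receive
            First -> First
        end.

With CONCUERROR_TESTS = my_mod:race_test, "make concuerror" would run this test and write its output to $(CONCUERROR_LOGS_DIR)/concuerror-my_mod-race_test.txt.
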
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ct apps-ct distclean-ct
-
-# Configuration.
-
-CT_OPTS ?=
-
-ifneq ($(wildcard $(TEST_DIR)),)
-ifndef CT_SUITES
-CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
-endif
-endif
-CT_SUITES ?=
-CT_LOGS_DIR ?= $(CURDIR)/logs
-
-# Core targets.
-
-tests:: ct
-
-ifndef KEEP_LOGS
-distclean:: distclean-ct
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Common_test targets:" \
- " ct Run all the common_test suites for this project" \
- "" \
- "All your common_test suites have their associated targets." \
-		"A suite named http_SUITE can be run using the ct-http target."
-
-# Plugin-specific targets.
-
-CT_RUN = ct_run \
- -no_auto_compile \
- -noinput \
- -pa $(CURDIR)/ebin $(TEST_DIR) \
- -dir $(TEST_DIR) \
- -logdir $(CT_LOGS_DIR)
-
-ifeq ($(CT_SUITES),)
-ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
-else
-# We do not run tests if we are in an apps/* with no test directory.
-ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
-ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
-endif
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-define ct_app_target
-apps-ct-$1: test-build
- $$(MAKE) -C $1 ct IS_APP=1
-endef
-
-$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
-
-apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
-endif
-
-ifdef t
-ifeq (,$(findstring :,$t))
-CT_EXTRA = -group $t
-else
-t_words = $(subst :, ,$t)
-CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
-endif
-else
-ifdef c
-CT_EXTRA = -case $c
-else
-CT_EXTRA =
-endif
-endif
-
-define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
-endef
-
-$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
-
-distclean-ct:
- $(gen_verbose) rm -rf $(CT_LOGS_DIR)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: plt distclean-plt dialyze
-
-# Configuration.
-
-DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
-export DIALYZER_PLT
-
-PLT_APPS ?=
-DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-DIALYZER_PLT_OPTS ?=
-
-# Core targets.
-
-check:: dialyze
-
-distclean:: distclean-plt
-
-help::
- $(verbose) printf "%s\n" "" \
- "Dialyzer targets:" \
- " plt Build a PLT file for this project" \
- " dialyze Analyze the project using Dialyzer"
-
-# Plugin-specific targets.
-
-define filter_opts.erl
- Opts = init:get_plain_arguments(),
- {Filtered, _} = lists:foldl(fun
- (O, {Os, true}) -> {[O|Os], false};
- (O = "-D", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-I", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-pa", {Os, _}) -> {[O|Os], true};
- (_, Acc) -> Acc
- end, {[], false}, Opts),
- io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
- halt().
-endef
-
-# DIALYZER_PLT is a variable understood directly by Dialyzer.
-#
-# We append the path to erts at the end of the PLT. This works
-# because the PLT file is in the external term format and the
-# function binary_to_term/1 ignores any trailing data.
-$(DIALYZER_PLT): deps app
- $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
- while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
- $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
- erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
- $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
-
-plt: $(DIALYZER_PLT)
-
-distclean-plt:
- $(gen_verbose) rm -f $(DIALYZER_PLT)
-
-ifneq ($(wildcard $(DIALYZER_PLT)),)
-dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
- $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
- grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
- rm $(DIALYZER_PLT); \
- $(MAKE) plt; \
- fi
-else
-dialyze: $(DIALYZER_PLT)
-endif
- $(verbose) dialyzer --no_native `$(ERL) \
- -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
- -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
-
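As the comment above the $(DIALYZER_PLT) rule explains, the erts path appended to the PLT survives because binary_to_term/1 decodes the first term it finds in a binary and ignores any trailing bytes. A minimal sketch of that behaviour, outside of erlang.mk:

    %% Encode a term, append arbitrary text, then decode again: the
    %% trailing bytes are ignored, which is what lets the plugin tack
    %% the erts path onto the end of the PLT file and read it back
    %% later with tail -n1.
    Bin = term_to_binary({plt, data}),
    {plt, data} = binary_to_term(<<Bin/binary, "\n/usr/lib/erlang/erts-12.2\n">>).
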
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
- " escript Build an executable escript archive" \
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
-	$(verbose) mkdir -p $(dir $(ESCRIPT_ZIP_FILE))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
-
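The escript assembled above is just a shebang line, an "-*- erlang -*-" comment, an emulator-arguments line and a zip archive concatenated together. The same layout can also be produced from Erlang itself with zip:create/3 and escript:create/2; a rough sketch under made-up names, not what the recipe above actually runs:

    %% Zip the compiled modules in memory, then wrap them in an escript
    %% carrying the same header lines as the printf/cat recipe above.
    {ok, {_Name, ZipBin}} = zip:create("myapp.zip",
                                       filelib:wildcard("myapp/ebin/*"),
                                       [memory]),
    ok = escript:create("myapp",
                        [shebang,                           %% /usr/bin/env escript
                         comment,                           %% -*- erlang -*- marker
                         {emu_args, "-escript main myapp"},
                         {archive, ZipBin}]),
    ok = file:change_mode("myapp", 8#755).
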
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: eunit apps-eunit
-
-# Configuration
-
-EUNIT_OPTS ?=
-EUNIT_ERL_OPTS ?=
-
-# Core targets.
-
-tests:: eunit
-
-help::
- $(verbose) printf "%s\n" "" \
- "EUnit targets:" \
- " eunit Run all the EUnit tests for this project"
-
-# Plugin-specific targets.
-
-define eunit.erl
- $(call cover.erl)
- CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
- ok -> ok;
- error -> halt(2)
- end,
- CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
- halt()
-endef
-
-EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
-else
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
-endif
-else
-EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
-EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
-
-EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
- $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
-
-eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
-ifneq ($(wildcard src/ $(TEST_DIR)),)
- $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-apps-eunit: test-build
- $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
- [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
- exit $$eunit_retcode
-endif
-endif
-
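Depending on whether the variable t contains a colon, the eunit.erl snippet above hands eunit:test/2 either a one-element list naming a module or a 0-arity fun naming a single test function; both are valid EUnit test representations. Roughly, under hypothetical names and with an example option:

    %% make eunit t=my_module         -> run every test in my_module
    eunit:test([my_module], [verbose]),
    %% make eunit t=my_module:my_test -> run just that test function
    eunit:test(fun my_module:my_test/0, [verbose]).
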
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper)
-.PHONY: proper
-
-# Targets.
-
-tests:: proper
-
-define proper_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- Module = fun(M) ->
- [true] =:= lists:usort([
- case atom_to_list(F) of
- "prop_" ++ _ ->
- io:format("Testing ~p:~p/0~n", [M, F]),
- proper:quickcheck(M:F(), nocolors);
- _ ->
- true
- end
- || {F, 0} <- M:module_info(exports)])
- end,
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]);
- module -> Module($(2));
- function -> proper:quickcheck($(2), nocolors)
- end,
- CoverExport("$(COVER_DATA_DIR)/proper.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-proper: test-build cover-data-dir
- $(verbose) $(call erlang,$(call proper_check.erl,module,$(t)))
-else
-proper: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)()))
-endif
-else
-proper: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
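The proper_check.erl snippet above treats every exported 0-arity function whose name starts with "prop_" as a property, so a module only has to follow that naming convention to be picked up by "make proper". A hypothetical example, assuming proper is already listed in DEPS or TEST_DEPS as required by the ifeq guard above:

    -module(prop_demo).
    -include_lib("proper/include/proper.hrl").
    -export([prop_reverse_twice/0]).

    %% Reversing a list twice yields the original list.
    prop_reverse_twice() ->
        ?FORALL(L, list(integer()),
                lists:reverse(lists:reverse(L)) =:= L).
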
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Verbosity.
-
-proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
-proto_verbose = $(proto_verbose_$(V))
-
-# Core targets.
-
-ifneq ($(wildcard src/),)
-ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
-PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
-ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
-
-ifeq ($(PROTO_FILES),)
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
- $(verbose) :
-else
-# Rebuild proto files when the Makefile changes.
-# We exclude $(PROJECT).d to avoid a circular dependency.
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(PROTO_FILES); \
- fi
- $(verbose) touch $@
-
-$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
-endif
-
-ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
-define compile_proto.erl
- [begin
- protobuffs_compile:generate_source(F, [
- {output_include_dir, "./include"},
- {output_src_dir, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-else
-define compile_proto.erl
- [begin
- gpb_compile:file(F, [
- {include_as_lib, true},
- {module_name_suffix, "_pb"},
- {o_hrl, "./include"},
- {o_erl, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-endif
-
-ifneq ($(PROTO_FILES),)
-$(PROJECT).d:: $(PROTO_FILES)
- $(verbose) mkdir -p ebin/ include/
- $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
-endif
-endif
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
-
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
- " shell Run an erlang shell with SHELL_OPTS or reasonable default"
-
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
-
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
-		"ReST sources and the 'conf.py' file are expected in the directory pointed to by" \
- "SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists formats to build (only" \
-		"'html' format is generated by default); the target directory can be specified by" \
- 'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
- "Additional Sphinx options can be set in SPHINX_OPTS."
-
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
-
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
-.PHONY: triq
-
-# Targets.
-
-tests:: triq
-
-define triq_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
- module -> triq:check($(2));
- function -> triq:check($(2))
- end,
- CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-triq: test-build cover-data-dir
- $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
-else
-triq: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
-endif
-else
-triq: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
- ' xref Run Xrefr using $$XREF_CONFIG as config file if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-COVER_REPORT_DIR ?= cover
-COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
-
-ifdef COVER
-COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
-COVER_DEPS ?=
-endif
-
-# Code coverage for Common Test.
-
-ifdef COVER
-ifdef CT_RUN
-ifneq ($(wildcard $(TEST_DIR)),)
-test-build:: $(TEST_DIR)/ct.cover.spec
-
-$(TEST_DIR)/ct.cover.spec: cover-data-dir
- $(gen_verbose) printf "%s\n" \
- "{incl_app, '$(PROJECT)', details}." \
- "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
- $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
- $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
-
-CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
-endif
-endif
-endif
-
-# Code coverage for other tools.
-
-ifdef COVER
-define cover.erl
- CoverSetup = fun() ->
- Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
- $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
- $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
- end
- end || Dir <- Dirs]
- end,
- CoverExport = fun(Filename) -> cover:export(Filename) end,
-endef
-else
-define cover.erl
- CoverSetup = fun() -> ok end,
- CoverExport = fun(_) -> ok end,
-endef
-endif
-
-# Core targets
-
-ifdef COVER
-ifneq ($(COVER_REPORT_DIR),)
-tests::
- $(verbose) $(MAKE) --no-print-directory cover-report
-endif
-
-cover-data-dir: | $(COVER_DATA_DIR)
-
-$(COVER_DATA_DIR):
- $(verbose) mkdir -p $(COVER_DATA_DIR)
-else
-cover-data-dir:
-endif
-
-clean:: coverdata-clean
-
-ifneq ($(COVER_REPORT_DIR),)
-distclean:: cover-report-clean
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Cover targets:" \
-		"  cover-report  Generate an HTML coverage report from previously collected" \
- " cover data." \
- " all.coverdata Merge all coverdata files into all.coverdata." \
- "" \
- "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
-		"target tests additionally generates an HTML coverage report from the combined" \
- "coverdata files from each of these testing tools. HTML reports can be disabled" \
- "by setting COVER_REPORT_DIR to empty."
-
-# Plugin specific targets
-
-COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
-
-.PHONY: coverdata-clean
-coverdata-clean:
- $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
-
-# Merge all coverdata files into one.
-define cover_export.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
-endef
-
-all.coverdata: $(COVERDATA) cover-data-dir
- $(gen_verbose) $(call erlang,$(cover_export.erl))
-
-# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
-# empty if you want the coverdata files but not the HTML report.
-ifneq ($(COVER_REPORT_DIR),)
-
-.PHONY: cover-report-clean cover-report
-
-cover-report-clean:
- $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
-ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
- $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
-endif
-
-ifeq ($(COVERDATA),)
-cover-report:
-else
-
-# Modules which include eunit.hrl always contain one line without coverage
-# because eunit defines test/0 which is never called. We compensate for this.
-EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
- grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
- | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
-
-define cover_report.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- Ms = cover:imported_modules(),
- [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
- ++ ".COVER.html", [html]) || M <- Ms],
- Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
- EunitHrlMods = [$(EUNIT_HRL_MODS)],
- Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
- true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
- TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
- TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
- Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
- TotalPerc = Perc(TotalY, TotalN),
- {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
- io:format(F, "<!DOCTYPE html><html>~n"
- "<head><meta charset=\"UTF-8\">~n"
- "<title>Coverage report</title></head>~n"
- "<body>~n", []),
- io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
- io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
- [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
- "<td>~p%</td></tr>~n",
- [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
- How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
- Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
- io:format(F, "</table>~n"
- "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
- "</body></html>", [How, Date]),
- halt().
-endef
-
-cover-report:
- $(verbose) mkdir -p $(COVER_REPORT_DIR)
- $(gen_verbose) $(call erlang,$(cover_report.erl))
-
-endif
-endif # ifneq ($(COVER_REPORT_DIR),)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
-
-endif
-endif
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
-
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
-
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are also included no
-# matter which type of dependencies is requested.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
-
-# Allow using fetch-deps and $(DEP_TYPES) to fetch multiple types of
-# dependencies with a single target.
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
-
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
diff --git a/deps/rabbitmq_peer_discovery_consul/include/rabbit_peer_discovery_consul.hrl b/deps/rabbitmq_peer_discovery_consul/include/rabbit_peer_discovery_consul.hrl
index 3870e7c1e7..1ddf340be5 100644
--- a/deps/rabbitmq_peer_discovery_consul/include/rabbit_peer_discovery_consul.hrl
+++ b/deps/rabbitmq_peer_discovery_consul/include/rabbit_peer_discovery_consul.hrl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-define(BACKEND_CONFIG_KEY, peer_discovery_consul).
diff --git a/deps/rabbitmq_peer_discovery_consul/rabbitmq-components.mk b/deps/rabbitmq_peer_discovery_consul/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/rabbitmq_peer_discovery_consul/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Define default goal to `all` because this file defines some targets
-# before the inclusion of erlang.mk leading to the wrong target becoming
-# the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
-
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to checkout branches which match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch or fallback to `stable` or `master` whichever was the
-# base of the topic branch.
-
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
-
-# Third-party dependencies version pinning.
-#
-# We do that in this file, which is copied in all projects, to ensure
-# all projects use the same versions. It avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# Erlang.mk does not rebuild dependencies by default, once they were
-# compiled once, except for those listed in the `$(FORCE_REBUILD)`
-# variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this eases
-# the work on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project
-# repository URL, if it's a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name is replaced by the
-# target's properties:
-# eg. rabbitmq-common is replaced by rabbitmq-codegen
-# eg. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fallback to RabbitMQ
-# upstream which is GitHub.
-
-# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following pattern:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
-
-# Macro to replace both the project's name (eg. "rabbit_common") and
-# repository name (eg. "rabbitmq-common") by the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespaces in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level's one.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is a coincidence that we found a
-# `rabbitmq-components.mk` in upper directories.
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
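
The deleted file above relies on erlang.mk's dep_* specification format: "hex <version>" pins a Hex package, "git <url> <ref>" pins a Git checkout, and git_rmq is the RabbitMQ-specific fetch method defined by this very file. As a minimal sketch (names are illustrative, not from this commit), a plugin Makefile using those conventions inside the monorepo would look roughly like:

    PROJECT = my_plugin

    DEPS = cowboy ra
    dep_cowboy = hex 2.8.0
    dep_ra = git https://github.com/rabbitmq/ra.git master

    DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk

    include ../../rabbitmq-components.mk
    include ../../erlang.mk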
diff --git a/deps/rabbitmq_peer_discovery_consul/src/rabbit_peer_discovery_consul.erl b/deps/rabbitmq_peer_discovery_consul/src/rabbit_peer_discovery_consul.erl
index 81125988cc..b63e87ae22 100644
--- a/deps/rabbitmq_peer_discovery_consul/src/rabbit_peer_discovery_consul.erl
+++ b/deps/rabbitmq_peer_discovery_consul/src/rabbit_peer_discovery_consul.erl
@@ -4,12 +4,13 @@
%%
%% The Initial Developer of the Original Code is AWeber Communications.
%% Copyright (c) 2015-2016 AWeber Communications
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_peer_discovery_consul).
-behaviour(rabbit_peer_discovery_backend).
+-include_lib("kernel/include/logger.hrl").
-include_lib("rabbit_common/include/rabbit.hrl").
-include_lib("rabbitmq_peer_discovery_common/include/rabbit_peer_discovery.hrl").
-include("rabbit_peer_discovery_consul.hrl").
@@ -35,7 +36,9 @@
%%
init() ->
- rabbit_log:debug("Peer discovery Consul: initialising..."),
+ ?LOG_DEBUG(
+ "Peer discovery Consul: initialising...",
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
ok = application:ensure_started(inets),
%% we cannot start this plugin yet since it depends on the rabbit app,
%% which is in the process of being started by the time this function is called
@@ -48,10 +51,14 @@ init() ->
list_nodes() ->
Fun0 = fun() -> {ok, {[], disc}} end,
Fun1 = fun() ->
- rabbit_log:warning("Peer discovery backend is set to ~s "
- "but final config does not contain rabbit.cluster_formation.peer_discovery_consul. "
- "Cannot discover any nodes because Consul cluster details are not configured!",
- [?MODULE]),
+ ?LOG_WARNING(
+ "Peer discovery backend is set to ~s but final "
+ "config does not contain "
+ "rabbit.cluster_formation.peer_discovery_consul. "
+ "Cannot discover any nodes because Consul cluster "
+ "details are not configured!",
+ [?MODULE],
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
{ok, {[], disc}}
end,
Fun2 = fun(Proplist) ->
@@ -86,7 +93,9 @@ register() ->
M = ?CONFIG_MODULE:config_map(?BACKEND_CONFIG_KEY),
case registration_body() of
{ok, Body} ->
- rabbit_log:debug("Consul registration body: ~s", [Body]),
+ ?LOG_DEBUG(
+ "Consul registration body: ~s", [Body],
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
case rabbit_peer_discovery_httpc:put(get_config_key(consul_scheme, M),
get_config_key(consul_host, M),
get_integer_config_key(consul_port, M),
@@ -105,7 +114,9 @@ register() ->
unregister() ->
M = ?CONFIG_MODULE:config_map(?BACKEND_CONFIG_KEY),
ID = service_id(),
- rabbit_log:debug("Unregistering with Consul using service ID '~s'", [ID]),
+ ?LOG_DEBUG(
+ "Unregistering with Consul using service ID '~s'", [ID],
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
case rabbit_peer_discovery_httpc:put(get_config_key(consul_scheme, M),
get_config_key(consul_host, M),
get_integer_config_key(consul_port, M),
@@ -114,11 +125,16 @@ unregister() ->
maybe_add_acl([]),
[]) of
{ok, Response} ->
- rabbit_log:info("Consul's response to the unregistration attempt: ~p", [Response]),
+ ?LOG_INFO(
+ "Consul's response to the unregistration attempt: ~p",
+ [Response],
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
ok;
Error ->
- rabbit_log:info("Failed to unregister service with ID '~s` with Consul: ~p",
- [ID, Error]),
+ ?LOG_INFO(
+ "Failed to unregister service with ID '~s` with Consul: ~p",
+ [ID, Error],
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
Error
end.
@@ -135,7 +151,9 @@ post_registration() ->
lock(Node) ->
M = ?CONFIG_MODULE:config_map(?BACKEND_CONFIG_KEY),
- rabbit_log:debug("Effective Consul peer discovery configuration: ~p", [M]),
+ ?LOG_DEBUG(
+ "Effective Consul peer discovery configuration: ~p", [M],
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
case create_session(Node, get_config_key(consul_svc_ttl, M)) of
{ok, SessionId} ->
TRef = start_session_ttl_updater(SessionId),
@@ -151,7 +169,9 @@ lock(Node) ->
unlock({SessionId, TRef}) ->
timer:cancel(TRef),
- rabbit_log:debug("Stopped session renewal"),
+ ?LOG_DEBUG(
+ "Stopped session renewal",
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
case release_lock(SessionId) of
{ok, true} ->
ok;
@@ -253,8 +273,10 @@ registration_body() ->
registration_body({ok, Body}) ->
{ok, rabbit_data_coercion:to_binary(Body)};
registration_body({error, Reason}) ->
- rabbit_log:error("Error serializing the request body: ~p",
- [Reason]),
+ ?LOG_ERROR(
+ "Error serializing the request body: ~p",
+ [Reason],
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
{error, Reason}.
@@ -300,8 +322,10 @@ registration_body_maybe_add_check(Payload) ->
registration_body_maybe_add_check(Payload, undefined) ->
case registration_body_maybe_add_deregister([]) of
[{'DeregisterCriticalServiceAfter', _}]->
- rabbit_log:warning("Can't use Consul's service deregistration feature without " ++
- "using TTL. The parameter will be ignored"),
+ ?LOG_WARNING(
+ "Can't use Consul's service deregistration feature without "
+ "using TTL. The parameter will be ignored",
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
Payload;
_ -> Payload
@@ -376,9 +400,11 @@ registration_body_maybe_add_meta(Payload, ClusterName, Meta) ->
-spec validate_addr_parameters(false | true, false | true) -> false | true.
validate_addr_parameters(false, true) ->
- rabbit_log:warning("The parameter CONSUL_SVC_ADDR_NODENAME" ++
- " can be used only if CONSUL_SVC_ADDR_AUTO is true." ++
- " CONSUL_SVC_ADDR_NODENAME value will be ignored."),
+ ?LOG_WARNING(
+ "The parameter CONSUL_SVC_ADDR_NODENAME"
+ " can be used only if CONSUL_SVC_ADDR_AUTO is true."
+ " CONSUL_SVC_ADDR_NODENAME value will be ignored.",
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
false;
validate_addr_parameters(_, _) ->
true.
@@ -455,7 +481,9 @@ maybe_add_domain(Value) ->
send_health_check_pass() ->
Service = string:join(["service", service_id()], ":"),
M = ?CONFIG_MODULE:config_map(?BACKEND_CONFIG_KEY),
- rabbit_log:debug("Running Consul health check"),
+ ?LOG_DEBUG(
+ "Running Consul health check",
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
case rabbit_peer_discovery_httpc:put(get_config_key(consul_scheme, M),
get_config_key(consul_host, M),
get_integer_config_key(consul_port, M),
@@ -466,22 +494,31 @@ send_health_check_pass() ->
{ok, []} -> ok;
{error, "429"} ->
%% Too Many Requests, see https://www.consul.io/docs/agent/checks.html
- rabbit_log:warning("Consul responded to a health check with 429 Too Many Requests"),
+ ?LOG_WARNING(
+ "Consul responded to a health check with 429 Too Many Requests",
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
ok;
{error, "500"} ->
- rabbit_log:warning("Consul responded to a health check with a 500 status, will wait and try re-registering"),
+ ?LOG_WARNING(
+ "Consul responded to a health check with a 500 status, will "
+ "wait and try re-registering",
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
maybe_re_register(wait_for_list_nodes()),
ok;
{error, Reason} ->
- rabbit_log:error("Error running Consul health check: ~p",
- [Reason]),
+ ?LOG_ERROR(
+ "Error running Consul health check: ~p",
+ [Reason],
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
ok
end.
maybe_re_register({error, Reason}) ->
- rabbit_log:error("Internal error in Consul while updating health check. "
- "Cannot obtain list of nodes registered in Consul either: ~p",
- [Reason]);
+ ?LOG_ERROR(
+ "Internal error in Consul while updating health check. "
+ "Cannot obtain list of nodes registered in Consul either: ~p",
+ [Reason],
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS});
maybe_re_register({ok, {Members, _NodeType}}) ->
maybe_re_register(Members);
maybe_re_register({ok, Members}) ->
@@ -489,11 +526,14 @@ maybe_re_register({ok, Members}) ->
maybe_re_register(Members) ->
case lists:member(node(), Members) of
true ->
- rabbit_log:error("Internal error in Consul while updating health check",
- []);
+ ?LOG_ERROR(
+ "Internal error in Consul while updating health check",
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS});
false ->
- rabbit_log:error("Internal error in Consul while updating health check, "
- "node is not registered. Re-registering", []),
+ ?LOG_ERROR(
+ "Internal error in Consul while updating health check, "
+ "node is not registered. Re-registering",
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
register()
end.
@@ -588,7 +628,9 @@ get_session_id(#{<<"ID">> := ID}) -> binary:bin_to_list(ID).
start_session_ttl_updater(SessionId) ->
M = ?CONFIG_MODULE:config_map(?BACKEND_CONFIG_KEY),
Interval = get_config_key(consul_svc_ttl, M),
- rabbit_log:debug("Starting session renewal"),
+ ?LOG_DEBUG(
+ "Starting session renewal",
+ #{domain => ?RMQLOG_DOMAIN_PEER_DIS}),
{ok, TRef} = timer:apply_interval(Interval * 500, ?MODULE,
session_ttl_update_callback, [SessionId]),
TRef.
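
The change above follows one mechanical pattern: every rabbit_log:Level(Format, Args) call is replaced by the matching ?LOG_* macro from kernel's logger.hrl, with the peer discovery log domain attached as metadata so messages can be filtered per subsystem. A minimal sketch of that pattern, assuming ?RMQLOG_DOMAIN_PEER_DIS expands to a domain list along the lines of [rabbitmq, peer_discovery] (its actual value comes from a rabbit_common include); the module and function names here are hypothetical:

    %% Sketch of the logger-macro style used in the diff above.
    -module(logger_style_sketch).
    -include_lib("kernel/include/logger.hrl").
    -export([init_backend/1]).

    init_backend(Backend) ->
        %% Format string, arguments, then a metadata map carrying the domain.
        ?LOG_DEBUG("Peer discovery ~s: initialising...", [Backend],
                   #{domain => [rabbitmq, peer_discovery]}),
        ok.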
diff --git a/deps/rabbitmq_peer_discovery_consul/src/rabbitmq_peer_discovery_consul.erl b/deps/rabbitmq_peer_discovery_consul/src/rabbitmq_peer_discovery_consul.erl
index a46ff0aa73..5ad5fc4309 100644
--- a/deps/rabbitmq_peer_discovery_consul/src/rabbitmq_peer_discovery_consul.erl
+++ b/deps/rabbitmq_peer_discovery_consul/src/rabbitmq_peer_discovery_consul.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbitmq_peer_discovery_consul).
diff --git a/deps/rabbitmq_peer_discovery_consul/src/rabbitmq_peer_discovery_consul_app.erl b/deps/rabbitmq_peer_discovery_consul/src/rabbitmq_peer_discovery_consul_app.erl
index d2dbea94be..a44df56113 100644
--- a/deps/rabbitmq_peer_discovery_consul/src/rabbitmq_peer_discovery_consul_app.erl
+++ b/deps/rabbitmq_peer_discovery_consul/src/rabbitmq_peer_discovery_consul_app.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbitmq_peer_discovery_consul_app).
diff --git a/deps/rabbitmq_peer_discovery_consul/src/rabbitmq_peer_discovery_consul_health_check_helper.erl b/deps/rabbitmq_peer_discovery_consul/src/rabbitmq_peer_discovery_consul_health_check_helper.erl
index f81787f4a9..6682300ee7 100644
--- a/deps/rabbitmq_peer_discovery_consul/src/rabbitmq_peer_discovery_consul_health_check_helper.erl
+++ b/deps/rabbitmq_peer_discovery_consul/src/rabbitmq_peer_discovery_consul_health_check_helper.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% This gen_server starts a periodic timer on behalf of
diff --git a/deps/rabbitmq_peer_discovery_consul/src/rabbitmq_peer_discovery_consul_sup.erl b/deps/rabbitmq_peer_discovery_consul/src/rabbitmq_peer_discovery_consul_sup.erl
index 8f232c4b26..e7addf230d 100644
--- a/deps/rabbitmq_peer_discovery_consul/src/rabbitmq_peer_discovery_consul_sup.erl
+++ b/deps/rabbitmq_peer_discovery_consul/src/rabbitmq_peer_discovery_consul_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbitmq_peer_discovery_consul_sup).
diff --git a/deps/rabbitmq_peer_discovery_consul/test/config_schema_SUITE.erl b/deps/rabbitmq_peer_discovery_consul/test/config_schema_SUITE.erl
index dc497b1736..453c3f356b 100644
--- a/deps/rabbitmq_peer_discovery_consul/test/config_schema_SUITE.erl
+++ b/deps/rabbitmq_peer_discovery_consul/test/config_schema_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(config_schema_SUITE).
diff --git a/deps/rabbitmq_peer_discovery_consul/test/rabbitmq_peer_discovery_consul_SUITE.erl b/deps/rabbitmq_peer_discovery_consul/test/rabbitmq_peer_discovery_consul_SUITE.erl
index ff925b3b34..acbc7690c3 100644
--- a/deps/rabbitmq_peer_discovery_consul/test/rabbitmq_peer_discovery_consul_SUITE.erl
+++ b/deps/rabbitmq_peer_discovery_consul/test/rabbitmq_peer_discovery_consul_SUITE.erl
@@ -4,7 +4,7 @@
%%
%% The Initial Developer of the Original Code is AWeber Communications.
%% Copyright (c) 2015-2016 AWeber Communications
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbitmq_peer_discovery_consul_SUITE).
@@ -325,7 +325,6 @@ list_nodes_return_value_basic_test(_Config) ->
application:set_env(rabbit, cluster_formation,
[
{peer_discovery_backend, rabbit_peer_discovery_consul},
- {randomized_startup_delay_range, {0, 1}},
{peer_discovery_consul, [
{consul_host, "localhost"},
{consul_port, 8500}
@@ -344,7 +343,6 @@ list_nodes_return_value_basic_long_node_name_test(_Config) ->
application:set_env(rabbit, cluster_formation,
[
{peer_discovery_backend, rabbit_peer_discovery_consul},
- {randomized_startup_delay_range, {0, 1}},
{peer_discovery_consul, [
{consul_host, "localhost"},
{consul_port, 8500},
@@ -364,7 +362,6 @@ list_nodes_return_value_long_node_name_and_custom_domain_test(_Config) ->
application:set_env(rabbit, cluster_formation,
[
{peer_discovery_backend, rabbit_peer_discovery_consul},
- {randomized_startup_delay_range, {0, 1}},
{peer_discovery_consul, [
{consul_host, "localhost"},
{consul_port, 8500},
@@ -385,7 +382,6 @@ list_nodes_return_value_srv_address_test(_Config) ->
application:set_env(rabbit, cluster_formation,
[
{peer_discovery_backend, rabbit_peer_discovery_consul},
- {randomized_startup_delay_range, {0, 1}},
{peer_discovery_consul, [
{consul_host, "localhost"},
{consul_port, 8500}
@@ -404,7 +400,6 @@ list_nodes_return_value_nodes_in_warning_state_included_test(_Config) ->
application:set_env(rabbit, cluster_formation,
[
{peer_discovery_backend, rabbit_peer_discovery_consul},
- {randomized_startup_delay_range, {0, 1}},
{peer_discovery_consul, [
{consul_host, "localhost"},
{consul_port, 8500}
@@ -425,7 +420,6 @@ list_nodes_return_value_nodes_in_warning_state_filtered_out_test(_Config) ->
application:set_env(rabbit, cluster_formation,
[
{peer_discovery_backend, rabbit_peer_discovery_consul},
- {randomized_startup_delay_range, {0, 1}},
{peer_discovery_consul, [
{consul_host, "localhost"},
{consul_port, 8500}
diff --git a/deps/rabbitmq_peer_discovery_etcd/BUILD.bazel b/deps/rabbitmq_peer_discovery_etcd/BUILD.bazel
new file mode 100644
index 0000000000..562ce841db
--- /dev/null
+++ b/deps/rabbitmq_peer_discovery_etcd/BUILD.bazel
@@ -0,0 +1,69 @@
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze")
+load(
+ "//:rabbitmq.bzl",
+ "RABBITMQ_DIALYZER_OPTS",
+ "assert_suites",
+ "broker_for_integration_suites",
+ "rabbitmq_integration_suite",
+ "rabbitmq_lib",
+ "rabbitmq_suite",
+)
+
+APP_NAME = "rabbitmq_peer_discovery_etcd"
+
+APP_DESCRIPTION = "etcd-based RabbitMQ peer discovery backend"
+
+APP_MODULE = "rabbitmq_peer_discovery_etcd_app"
+
+DEPS = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+ "//deps/rabbitmq_peer_discovery_common:bazel_erlang_lib",
+]
+
+RUNTIME_DEPS = [
+ "//deps/rabbit:bazel_erlang_lib",
+ "@eetcd//:bazel_erlang_lib",
+ "@gun//:bazel_erlang_lib",
+]
+
+rabbitmq_lib(
+ app_description = APP_DESCRIPTION,
+ app_module = APP_MODULE,
+ app_name = APP_NAME,
+ runtime_deps = RUNTIME_DEPS,
+ deps = DEPS,
+)
+
+xref(tags = ["xref"])
+
+dialyze(
+ dialyzer_opts = RABBITMQ_DIALYZER_OPTS,
+ plt = "//:base_plt",
+ tags = ["dialyze"],
+)
+
+broker_for_integration_suites()
+
+PACKAGE = "deps/rabbitmq_peer_discovery_etcd"
+
+suites = [
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "config_schema_SUITE",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "system_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_suite(
+ name = "unit_SUITE",
+ size = "small",
+ ),
+]
+
+assert_suites(
+ suites,
+ glob(["test/**/*_SUITE.erl"]),
+)
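
With this BUILD file in place, the declared suites would presumably be runnable as individual Bazel test targets named after each suite, for example (the label form is assumed, not taken from this commit):

    bazel test //deps/rabbitmq_peer_discovery_etcd:unit_SUITE

The xref and dialyze targets carry tags, presumably so they can be included or excluded via Bazel's --test_tag_filters.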
diff --git a/deps/rabbitmq_peer_discovery_etcd/Makefile b/deps/rabbitmq_peer_discovery_etcd/Makefile
index 5a1023c2bf..1ba8bfee1f 100644
--- a/deps/rabbitmq_peer_discovery_etcd/Makefile
+++ b/deps/rabbitmq_peer_discovery_etcd/Makefile
@@ -17,5 +17,5 @@ DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
ERLANG_MK_COMMIT = rabbitmq-tmp
-include rabbitmq-components.mk
-include erlang.mk
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
diff --git a/deps/rabbitmq_peer_discovery_etcd/erlang.mk b/deps/rabbitmq_peer_discovery_etcd/erlang.mk
deleted file mode 100644
index fce4be0b0a..0000000000
--- a/deps/rabbitmq_peer_discovery_etcd/erlang.mk
+++ /dev/null
@@ -1,7808 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
-ERLANG_MK_WITHOUT =
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
-
-# Adding erlang.mk to make Erlang scripts that call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
-
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_HIPE)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
-
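
The block above lets a project pin the Erlang/OTP version it is built with: setting ERLANG_OTP (or ERLANG_HIPE) makes erlang.mk build and install that version through kerl and prepend its bin directory to PATH, while LATEST_ERLANG_OTP instead selects the newest non-RC build already installed under KERL_INSTALL_DIR. A minimal sketch of how a project Makefile might opt in; the tag name is illustrative and can be any ref kerl can build from the OTP Git repository:

    PROJECT = my_app

    # Build and use this Erlang/OTP version via kerl (set before the include).
    ERLANG_OTP = OTP-23.1

    include erlang.mk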
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
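# A minimal sketch, assuming standard erlang.mk usage (these lines are an
# illustration, not part of this diff): a consuming project's Makefile selects
# packages from the index above by name via DEPS, and may pin its own commit
# or source instead of the index defaults, e.g. for the cowboy entry:
#
#   DEPS = cowboy
#   dep_cowboy_commit = 1.0.4
#   # or override the source entirely:
#   dep_cowboy = git https://github.com/ninenines/cowboy 1.0.4
#
#   include erlang.mk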
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simplify writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating Github-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = The Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = erlang library for JSON Web Token
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for the GraphicsMagick/ImageMagick command line tools.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple, non-intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is a pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's SpiderMonkey JavaScript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elasticsearch's REST interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = redis erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around the git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record-based Erlang driver for MongoDB with GridFS support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += esh_mk
-pkg_esh_mk_name = esh_mk
-pkg_esh_mk_description = esh template engine plugin for erlang.mk
-pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
-pkg_esh_mk_fetch = git
-pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
-pkg_esh_mk_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple Erlang testing library that provides TAP-compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = Erlang parser for the TOML language
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = FIX protocol (http://fixprotocol.org/) implementation.
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - an Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = The guards and for Erlang parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and WebSocket client written in Erlang.
-pkg_gun_homepage = http://ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang irc client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding and decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library for efficiently decoding and encoding JSON, written in C.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = erlang driver for rethinkdb
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = library to handle mimetypes
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang Oauth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transformer for using new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between record and proplist
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = automatic Erlang node discovery via redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = pipelined erlang redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang Rest Client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy as nif for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in Erlang. Supports reverse proxy load balancing and forward proxying over HTTP (including CONNECT), SOCKS4, SOCKS5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an ID generator for message services.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elastic Search
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX-style parser for broken HTML, in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = transit format for erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU based collation Erlang module
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtension for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = a simple erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler svn repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired by rebar xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based yaml loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = ZAB protocol implemented in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
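
The search target above either dumps the whole package index or filters it with a case-insensitive match on names and descriptions. A minimal sketch of how it is typically invoked from a project that includes erlang.mk (the query string is only an illustration):

    make search              # print every known package
    make search q=websocket  # only packages whose name or description matches
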
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
-
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
-
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# They both use the core_dep_plugin macro.
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
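
As a hedged illustration of the early-plugin hook above: a project Makefile can either name a dependency, in which case $(DEPS_DIR)/<dep>/early-plugins.mk is included, or point at an explicit file inside that dependency. The dependency name and path below are placeholders, not taken from this repository.

    # Include $(DEPS_DIR)/ranch/early-plugins.mk
    DEP_EARLY_PLUGINS = ranch
    # Or include an explicit file inside the dependency:
    # DEP_EARLY_PLUGINS = ranch/mk/early.mk
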
-
-# Query functions.
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
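
To make the query helpers above concrete, here is a sketch of a git dependency declaration in a user Makefile and the values the helpers would resolve for it; the cowboy URL and version are illustrative assumptions, not something defined in this file.

    DEPS += cowboy
    dep_cowboy = git https://github.com/ninenines/cowboy 2.9.0

    # query_fetch_method(cowboy)  -> git
    # query_repo(cowboy)          -> https://github.com/ninenines/cowboy
    # query_version(cowboy)       -> 2.9.0
    # query_absolute_path(cowboy) -> $(DEPS_DIR)/cowboy
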
-
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
-
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly, we don't want to include it here;
-# otherwise it would be treated both as an app and as the top-level project.
-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
-
-export NO_AUTOPATCH
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
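
The dep_verbose macro follows the usual erlang.mk verbosity switch: by default a terse "  DEP" line is printed, and V=2 traces every shell command with set -x. For example:

    make        # prints lines like "  DEP cowboy (2.9.0)"
    make V=2    # echoes the full fetch commands instead
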
-
-# Optimization: don't recompile deps unless truly necessary.
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create an ebin directory for all apps so that Erlang recognizes them
-# as proper OTP applications when using -include_lib. This is a temporary
-# fix; a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
-
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies after they have been compiled
-# once. A developer working on the top-level project and on some of its
-# dependencies at the same time may want to change this behavior.
-# There are two solutions:
-# 1. Set `FULL=1` so that all dependencies are visited and
-#    recursively recompiled if necessary.
-# 2. Set `FORCE_REBUILD=` to the specific list of dependencies that
-#    should be recompiled (instead of the whole set).
-
-FORCE_REBUILD ?=
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
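
A hedged example of using the two overrides described in the comment above, run from the top-level project (the dependency names are placeholders):

    make FULL=1                        # revisit and recompile all dependencies as needed
    make FORCE_REBUILD="cowlib ranch"  # force only these dependencies to be rebuilt
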
-
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
-
-# Deps related targets.
-
-# @todo Rename GNUmakefile and makefile to Makefile first, if they exist.
-# While the Makefile could also be named GNUmakefile or makefile,
-# in practice handling only Makefile has been sufficient so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
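
dep_autopatch rewrites a fetched dependency (rebar-based ones in particular) so that it builds with erlang.mk; the dep_target rule further below skips it for anything listed in NO_AUTOPATCH. A sketch of opting a dependency out, with an illustrative name:

    # In the top-level Makefile, before erlang.mk is included
    NO_AUTOPATCH = eleveldb
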
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
-# if given. Do it for all 3 possible Makefile file names.
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
-
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-# Hex only has a package version. No need to look in the Erlang.mk packages.
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
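
dep_fetch_hex downloads the package tarball from repo.hex.pm and unpacks its contents.tar.gz into $(DEPS_DIR). A hypothetical declaration that would be fetched this way; the second word is the version, and an optional third word overrides the Hex package name:

    DEPS += jsx
    dep_jsx = hex 3.1.0
    # dep_myapp = hex 1.0.0 hex_package_name   # when the Hex name differs from the app name
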
-
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility with older Erlang.mk dependency configurations.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
-
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
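
These ?= defaults can be overridden in a project Makefile before erlang.mk is included. A hedged example that drops -Werror and compiles one module ahead of the rest (the module name is a placeholder):

    ERLC_OPTS = +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
    COMPILE_FIRST = my_parser
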
-
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
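
Both app_file templates above are filled from project variables that a user Makefile is expected to set. A minimal, illustrative set of such definitions (names and values are placeholders):

    PROJECT = my_app
    PROJECT_DESCRIPTION = Example application
    PROJECT_VERSION = 0.1.0
    PROJECT_MOD = my_app_app
    PROJECT_ENV = [{enabled, true}]
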
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- Output0 = [
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ],
- Output = case "é" of
- [233] -> unicode:characters_to_binary(Output0);
- _ -> Output0
- end,
- ok = file:write_file("$(1)", Output),
- halt()
-endef
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
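# Editor's illustration (not part of the original file): the makedep.erl escript above
# writes $(PROJECT).d as a plain makefile fragment of roughly the shape below. Module
# and header names are hypothetical; behaviours, parse transforms and imported modules
# end up in COMPILE_FIRST, while included .hrl files only appear as prerequisites.
#
#   # Generated by Erlang.mk. Edit at your own risk!
#
#   src/my_worker.erl:: src/my_behaviour.erl include/my_records.hrl; @touch $@
#
#   COMPILE_FIRST += my_behaviour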
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
-		echo "No empty {modules, []} entry found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \

- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
-
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
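# Editor's sketch (not part of the original file): a consuming project's Makefile hooks
# into the variables above roughly like this. 'proper' is a test dependency this file
# already checks for elsewhere; the extra macro name is hypothetical.
#
#   TEST_DEPS = proper
#   TEST_ERLC_OPTS += -DSLOW_TESTS=1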
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
- @:
-
-test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
-test_erlc_verbose_2 = set -x;
-test_erlc_verbose = $(test_erlc_verbose_$(V))
-
-define compile_test_erl
- $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(1)
-endef
-
-ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
-$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
- $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want to fail due to
-# warnings when used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
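# Editor's sketch (not part of the original file): a project enables this plugin by
# listing asciideck among its doc dependencies; the overrides below merely restate the
# defaults defined above.
#
#   DOC_DEPS = asciideck
#   MAN_SECTIONS = 3 7
#   MAN_INSTALL_PATH = /usr/local/share/man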
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
- " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
- " list-templates List available templates"
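# Editor's illustration (not part of the original file): typical invocations of the
# targets listed in the help text above, with hypothetical application and module names.
#
#   make bootstrap bootstrap-rel      # skeleton OTP application plus release files
#   make new-app in=webfrontend       # new application under $(APPS_DIR)
#   make new t=gen_server n=my_server
#   make list-templates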
-
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
-
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
-
-list-templates:
- $(verbose) @echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
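# Editor's sketch (not part of the original file): per the conditional above, any
# C_SRC_TYPE other than "shared" links a standalone executable instead of a NIF shared
# object. The output path and extra flag below are illustrative.
#
#   C_SRC_TYPE = executable
#   C_SRC_OUTPUT = $(CURDIR)/priv/my_port
#   CFLAGS += -g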
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code. The "gcc" MSYS2 package also doesn't.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
- "The CI_OTP variable must be defined with the Erlang versions" \
- "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
-
-# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifdef CONCUERROR_TESTS
-
-.PHONY: concuerror distclean-concuerror
-
-# Configuration
-
-CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
-CONCUERROR_OPTS ?=
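# Editor's sketch (not part of the original file): CONCUERROR_TESTS takes module:function
# pairs, as the target generation below implies; names here are hypothetical. Each entry
# also gets its own target, e.g. concuerror-my_registry-no_duplicate_names.
#
#   CONCUERROR_TESTS = my_registry:no_duplicate_names my_cache:concurrent_put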
-
-# Core targets.
-
-check:: concuerror
-
-ifndef KEEP_LOGS
-distclean:: distclean-concuerror
-endif
-
-# Plugin-specific targets.
-
-$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
- $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
- $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
-
-$(CONCUERROR_LOGS_DIR):
- $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
-
-define concuerror_html_report
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="utf-8">
-<title>Concuerror HTML report</title>
-</head>
-<body>
-<h1>Concuerror HTML report</h1>
-<p>Generated on $(concuerror_date)</p>
-<ul>
-$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
-</ul>
-</body>
-</html>
-endef
-
-concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
- $(eval concuerror_date := $(shell date))
- $(eval concuerror_targets := $^)
- $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
-
-define concuerror_target
-.PHONY: concuerror-$1-$2
-
-concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
- $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
- --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
- -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
- $$(CONCUERROR_OPTS) -m $1 -t $2
-endef
-
-$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
-
-distclean-concuerror:
- $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
-
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ct apps-ct distclean-ct
-
-# Configuration.
-
-CT_OPTS ?=
-
-ifneq ($(wildcard $(TEST_DIR)),)
-ifndef CT_SUITES
-CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
-endif
-endif
-CT_SUITES ?=
-CT_LOGS_DIR ?= $(CURDIR)/logs
-
-# Core targets.
-
-tests:: ct
-
-ifndef KEEP_LOGS
-distclean:: distclean-ct
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Common_test targets:" \
- " ct Run all the common_test suites for this project" \
- "" \
- "All your common_test suites have their associated targets." \
-	"A suite named http_SUITE can be run using the ct-http target."
-
-# Plugin-specific targets.
-
-CT_RUN = ct_run \
- -no_auto_compile \
- -noinput \
- -pa $(CURDIR)/ebin $(TEST_DIR) \
- -dir $(TEST_DIR) \
- -logdir $(CT_LOGS_DIR)
-
-ifeq ($(CT_SUITES),)
-ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
-else
-# We do not run tests if we are in an apps/* directory with no test directory.
-ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
-ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
-endif
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-define ct_app_target
-apps-ct-$1: test-build
- $$(MAKE) -C $1 ct IS_APP=1
-endef
-
-$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
-
-apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
-endif
-
-ifdef t
-ifeq (,$(findstring :,$t))
-CT_EXTRA = -group $t
-else
-t_words = $(subst :, ,$t)
-CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
-endif
-else
-ifdef c
-CT_EXTRA = -case $c
-else
-CT_EXTRA =
-endif
-endif
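# Editor's illustration (not part of the original file): given the t/c handling above,
# suites, groups and cases can be selected as follows (suite, group and case names are
# hypothetical; ct-http is the example used in the help text above).
#
#   make ct                           # every suite in $(TEST_DIR)
#   make ct-http                      # test/http_SUITE.erl only
#   make ct-http t=slow               # one group
#   make ct-http t=slow:redirects     # one case within a group
#   make ct-http c=redirects          # one case, no group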
-
-define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
-endef
-
-$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
-
-distclean-ct:
- $(gen_verbose) rm -rf $(CT_LOGS_DIR)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: plt distclean-plt dialyze
-
-# Configuration.
-
-DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
-export DIALYZER_PLT
-
-PLT_APPS ?=
-DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-DIALYZER_PLT_OPTS ?=
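# Editor's sketch (not part of the original file): projects typically widen the PLT and
# tune the warning set through the variables above; the extra OTP applications named
# here are illustrative.
#
#   PLT_APPS = crypto public_key ssl
#   DIALYZER_OPTS = -Werror_handling -Wunmatched_returns -Wunderspecs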
-
-# Core targets.
-
-check:: dialyze
-
-distclean:: distclean-plt
-
-help::
- $(verbose) printf "%s\n" "" \
- "Dialyzer targets:" \
- " plt Build a PLT file for this project" \
- " dialyze Analyze the project using Dialyzer"
-
-# Plugin-specific targets.
-
-define filter_opts.erl
- Opts = init:get_plain_arguments(),
- {Filtered, _} = lists:foldl(fun
- (O, {Os, true}) -> {[O|Os], false};
- (O = "-D", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-I", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-pa", {Os, _}) -> {[O|Os], true};
- (_, Acc) -> Acc
- end, {[], false}, Opts),
- io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
- halt().
-endef
-
-# DIALYZER_PLT is a variable understood directly by Dialyzer.
-#
-# We append the path to erts at the end of the PLT. This works
-# because the PLT file is in the external term format and the
-# function binary_to_term/1 ignores any trailing data.
-$(DIALYZER_PLT): deps app
- $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
- while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
- $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
- erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
- $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
-
-plt: $(DIALYZER_PLT)
-
-distclean-plt:
- $(gen_verbose) rm -f $(DIALYZER_PLT)
-
-ifneq ($(wildcard $(DIALYZER_PLT)),)
-dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
- $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
- grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
- rm $(DIALYZER_PLT); \
- $(MAKE) plt; \
- fi
-else
-dialyze: $(DIALYZER_PLT)
-endif
- $(verbose) dialyzer --no_native `$(ERL) \
- -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
- -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
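# Editor's sketch (not part of the original file): EDOC_OPTS is spliced directly into the
# edoc:application/3 call below, so standard EDoc options apply. The option values and
# output directory here are assumptions about typical usage, not taken from this file.
#
#   EDOC_OPTS = {preprocess, true}, {todo, true}
#   EDOC_OUTPUT = doc/api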
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
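# Editor's illustration (not part of the original file): following the naming rules
# implemented below, a template stored at templates/users/list.dtl compiles to a module
# named my_users_list_dtl under these (hypothetical) settings:
#
#   DTL_FULL_PATH = 1
#   DTL_PREFIX = my_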
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
- " escript Build an executable escript archive" \
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
- $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: eunit apps-eunit
-
-# Configuration
-
-EUNIT_OPTS ?=
-EUNIT_ERL_OPTS ?=
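# Editor's illustration (not part of the original file): the t variable handled below
# accepts either a module or a module:function pair; module and test names are
# hypothetical, and 'verbose' is a standard EUnit option.
#
#   EUNIT_OPTS = verbose
#   make eunit                           # all EUnit tests
#   make eunit t=my_parser               # one module
#   make eunit t=my_parser:empty_test    # one test function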
-
-# Core targets.
-
-tests:: eunit
-
-help::
- $(verbose) printf "%s\n" "" \
- "EUnit targets:" \
- " eunit Run all the EUnit tests for this project"
-
-# Plugin-specific targets.
-
-define eunit.erl
- $(call cover.erl)
- CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
- ok -> ok;
- error -> halt(2)
- end,
- CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
- halt()
-endef
-
-EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
-else
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
-endif
-else
-EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
-EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
-
-EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
- $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
-
-eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
-ifneq ($(wildcard src/ $(TEST_DIR)),)
- $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-apps-eunit: test-build
- $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
- [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
- exit $$eunit_retcode
-endif
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper)
-.PHONY: proper
-
-# Targets.
-
-tests:: proper
-
-define proper_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- Module = fun(M) ->
- [true] =:= lists:usort([
- case atom_to_list(F) of
- "prop_" ++ _ ->
- io:format("Testing ~p:~p/0~n", [M, F]),
- proper:quickcheck(M:F(), nocolors);
- _ ->
- true
- end
- || {F, 0} <- M:module_info(exports)])
- end,
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]);
- module -> Module($(2));
- function -> proper:quickcheck($(2), nocolors)
- end,
- CoverExport("$(COVER_DATA_DIR)/proper.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-proper: test-build cover-data-dir
- $(verbose) $(call erlang,$(call proper_check.erl,module,$(t)))
-else
-proper: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)()))
-endif
-else
-proper: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Verbosity.
-
-proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
-proto_verbose = $(proto_verbose_$(V))
-
-# Core targets.
-
-ifneq ($(wildcard src/),)
-ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
-PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
-ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
-
-ifeq ($(PROTO_FILES),)
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
- $(verbose) :
-else
-# Rebuild proto files when the Makefile changes.
-# We exclude $(PROJECT).d to avoid a circular dependency.
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(PROTO_FILES); \
- fi
- $(verbose) touch $@
-
-$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
-endif
-
-ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
-define compile_proto.erl
- [begin
- protobuffs_compile:generate_source(F, [
- {output_include_dir, "./include"},
- {output_src_dir, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-else
-define compile_proto.erl
- [begin
- gpb_compile:file(F, [
- {include_as_lib, true},
- {module_name_suffix, "_pb"},
- {o_hrl, "./include"},
- {o_erl, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-endif
-
-ifneq ($(PROTO_FILES),)
-$(PROJECT).d:: $(PROTO_FILES)
- $(verbose) mkdir -p ebin/ include/
- $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
-endif
-endif
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
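# Editor's sketch (not part of the original file): common overrides for the relx
# integration. The output directory is illustrative; per the conditionals below, passing
# "-o DIR" as the first words of RELX_OPTS has the same effect, and any RELX_TAR value
# other than 1 skips tarball creation.
#
#   RELX_OUTPUT_DIR = _rel/production
#   RELX_TAR = 0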
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
-
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
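# Editor's sketch (not part of the original file): SHELL_OPTS is handed to plain 'erl',
# so any emulator flag works; the node name and eval expression below are illustrative.
#
#   SHELL_OPTS = -sname dev -eval 'application:ensure_all_started($(PROJECT))'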
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
- " shell Run an erlang shell with SHELL_OPTS or reasonable default"
-
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
-
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
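# Editor's illustration (not part of the original file): as the help text further below
# explains, the formats to build and their per-format output directories are
# configurable, for example:
#
#   SPHINX_FORMATS = html man
#   sphinx_html_output = output/html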
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
-	"ReST sources and the 'conf.py' file are expected in the directory pointed to by" \
- "SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists formats to build (only" \
- "'html' format is generated by default); target directory can be specified by" \
- 'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
- "Additional Sphinx options can be set in SPHINX_OPTS."
-
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
-
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
-.PHONY: triq
-
-# Targets.
-
-tests:: triq
-
-define triq_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
- module -> triq:check($(2));
- function -> triq:check($(2))
- end,
- CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-triq: test-build cover-data-dir
- $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
-else
-triq: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
-endif
-else
-triq: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
- ' xref Run Xrefr using $$XREF_CONFIG as config file if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-COVER_REPORT_DIR ?= cover
-COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
-
-ifdef COVER
-COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
-COVER_DEPS ?=
-endif
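# Editor's sketch (not part of the original file): coverage is usually switched on from
# the command line; the dependency name is illustrative and, per the help text further
# below, setting COVER_REPORT_DIR to empty keeps the coverdata but skips the HTML report.
#
#   make tests COVER=1
#   make tests COVER=1 COVER_DEPS="cowlib" COVER_REPORT_DIR=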
-
-# Code coverage for Common Test.
-
-ifdef COVER
-ifdef CT_RUN
-ifneq ($(wildcard $(TEST_DIR)),)
-test-build:: $(TEST_DIR)/ct.cover.spec
-
-$(TEST_DIR)/ct.cover.spec: cover-data-dir
- $(gen_verbose) printf "%s\n" \
- "{incl_app, '$(PROJECT)', details}." \
- "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
- $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
- $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
-
-CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
-endif
-endif
-endif
-
-# Code coverage for other tools.
-
-ifdef COVER
-define cover.erl
- CoverSetup = fun() ->
- Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
- $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
- $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
- end
- end || Dir <- Dirs]
- end,
- CoverExport = fun(Filename) -> cover:export(Filename) end,
-endef
-else
-define cover.erl
- CoverSetup = fun() -> ok end,
- CoverExport = fun(_) -> ok end,
-endef
-endif
-
-# Core targets
-
-ifdef COVER
-ifneq ($(COVER_REPORT_DIR),)
-tests::
- $(verbose) $(MAKE) --no-print-directory cover-report
-endif
-
-cover-data-dir: | $(COVER_DATA_DIR)
-
-$(COVER_DATA_DIR):
- $(verbose) mkdir -p $(COVER_DATA_DIR)
-else
-cover-data-dir:
-endif
-
-clean:: coverdata-clean
-
-ifneq ($(COVER_REPORT_DIR),)
-distclean:: cover-report-clean
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Cover targets:" \
- " cover-report Generate a HTML coverage report from previously collected" \
- " cover data." \
- " all.coverdata Merge all coverdata files into all.coverdata." \
- "" \
- "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
- "target tests additionally generates a HTML coverage report from the combined" \
- "coverdata files from each of these testing tools. HTML reports can be disabled" \
- "by setting COVER_REPORT_DIR to empty."
-
-# Plugin specific targets
-
-COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
-
-.PHONY: coverdata-clean
-coverdata-clean:
- $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
-
-# Merge all coverdata files into one.
-define cover_export.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
-endef
-
-all.coverdata: $(COVERDATA) cover-data-dir
- $(gen_verbose) $(call erlang,$(cover_export.erl))
-
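For example, after running several test targets with COVER=1, the per-tool files can be merged into one; which files exist depends on the tools that actually ran:

    make all.coverdata    # merges e.g. ct.coverdata and eunit.coverdata into $(COVER_DATA_DIR)/all.coverdata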
-# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
-# empty if you want the coverdata files but not the HTML report.
-ifneq ($(COVER_REPORT_DIR),)
-
-.PHONY: cover-report-clean cover-report
-
-cover-report-clean:
- $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
-ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
- $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
-endif
-
-ifeq ($(COVERDATA),)
-cover-report:
-else
-
-# Modules which include eunit.hrl always contain one line without coverage
-# because eunit defines test/0 which is never called. We compensate for this.
-EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
- grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
- | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
-
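As a worked example of the pipeline above, assuming a hypothetical src/my_mod.erl that includes eunit.hrl, grep prints the matching line prefixed with its file name and sed keeps only the quoted module name:

    src/my_mod.erl:-include_lib("eunit/include/eunit.hrl").   ->   'my_mod'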
-define cover_report.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- Ms = cover:imported_modules(),
- [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
- ++ ".COVER.html", [html]) || M <- Ms],
- Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
- EunitHrlMods = [$(EUNIT_HRL_MODS)],
- Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
- true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
- TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
- TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
- Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
- TotalPerc = Perc(TotalY, TotalN),
- {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
- io:format(F, "<!DOCTYPE html><html>~n"
- "<head><meta charset=\"UTF-8\">~n"
- "<title>Coverage report</title></head>~n"
- "<body>~n", []),
- io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
- io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
- [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
- "<td>~p%</td></tr>~n",
- [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
- How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
- Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
- io:format(F, "</table>~n"
- "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
- "</body></html>", [How, Date]),
- halt().
-endef
-
-cover-report:
- $(verbose) mkdir -p $(COVER_REPORT_DIR)
- $(gen_verbose) $(call erlang,$(cover_report.erl))
-
-endif
-endif # ifneq ($(COVER_REPORT_DIR),)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
-
-endif
-endif
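When the project defines a relx release and SFX is set, the rel target above also produces a self-extracting script; a usage sketch, with a hypothetical release name:

    make SFX=1
    $(RELX_OUTPUT_DIR)/my_release.run   # unpacks itself to a temporary directory and starts the release console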
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
-
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
-
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are always included,
-# no matter which type of dependencies is requested.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
-
-# Allow fetch-deps to be combined with $(DEP_TYPES) to fetch multiple types
-# of dependencies with a single target.
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
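A single invocation can therefore pull additional dependency classes into the regular fetch; the selection below is only an example:

    make fetch-deps                        # normal (and build) dependencies only
    make fetch-deps DEP_TYPES='test doc'   # additionally fetch TEST_DEPS and DOC_DEPS recursively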
-
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
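A usage sketch for the query targets defined above; the exact fields available depend on the query_* helpers defined elsewhere in erlang.mk:

    make query-deps                     # one line per dependency: name fetch_method repo version
    make query-deps QUERY='name repo'   # restrict the output to selected fields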
diff --git a/deps/rabbitmq_peer_discovery_etcd/include/rabbit_peer_discovery_etcd.hrl b/deps/rabbitmq_peer_discovery_etcd/include/rabbit_peer_discovery_etcd.hrl
index a4764f3e80..335fb3d026 100644
--- a/deps/rabbitmq_peer_discovery_etcd/include/rabbit_peer_discovery_etcd.hrl
+++ b/deps/rabbitmq_peer_discovery_etcd/include/rabbit_peer_discovery_etcd.hrl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-define(BACKEND_CONFIG_KEY, peer_discovery_etcd).
diff --git a/deps/rabbitmq_peer_discovery_etcd/rabbitmq-components.mk b/deps/rabbitmq_peer_discovery_etcd/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/rabbitmq_peer_discovery_etcd/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Set the default goal to `all` because this file defines some targets
-# before the inclusion of erlang.mk, which would otherwise make the wrong
-# target the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
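As a worked example of the git-describe fallback, assuming HEAD sits twelve commits past a hypothetical tag named rabbitmq_v3_8_9, the sed pipeline rewrites the output as follows:

    rabbitmq_v3_8_9-12-gabc1234   ->   3.8.9+12.gabc1234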
-
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to check out branches that match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch, or fall back to `stable` or `master`, whichever was the
-# base of the topic branch.
-
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
-
-# Third-party dependencies version pinning.
-#
-# We do this in this file, which is copied to all projects, to ensure
-# they all use the same versions. This avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# Once dependencies have been compiled, erlang.mk does not rebuild them
-# by default, except for those listed in the `$(FORCE_REBUILD)`
-# variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this eases
-# the work on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project
-# repository URL, if it's a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name are replaced by the
-# target's properties:
-# eg. rabbitmq-common is replaced by rabbitmq-codegen
-# eg. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fall back to the
-# RabbitMQ upstream on GitHub.
-
-# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
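For example, given the dep_* definitions above, the second word of dep_rabbit_common is its repository name:

    $(call rmq_cmp_repo_name,rabbit_common)   ->   rabbitmq-common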
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following pattern:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
-
-# Macro to replace both the project's name (eg. "rabbit_common") and
-# repository name (eg. "rabbitmq-common") by the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespace in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
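A worked example of the combined substitution, assuming the current project is rabbit_common and was cloned from a hypothetical fork at https://github.com/me/rabbitmq-common.git:

    $(call dep_rmq_repo,https://github.com/me/rabbitmq-common.git,rabbitmq_codegen)
        ->  https://github.com/me/rabbitmq-codegen.git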
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level's one.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is a coincidence that we found a
-# `rabbitmq-components.mk` in upper directories.
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
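As a sketch of the effect when this plugin is built from inside the monorepo, for instance from <repo>/deps/rabbitmq_peer_discovery_etcd:

    possible_deps_dir_1 = <repo>/deps        # named "deps", and
    <repo>/rabbitmq-components.mk exists     # => DEPS_DIR = <repo>/deps, DISABLE_DISTCLEAN = 1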
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
diff --git a/deps/rabbitmq_peer_discovery_etcd/src/rabbit_peer_discovery_etcd.erl b/deps/rabbitmq_peer_discovery_etcd/src/rabbit_peer_discovery_etcd.erl
index 0aec413edd..4216339d19 100644
--- a/deps/rabbitmq_peer_discovery_etcd/src/rabbit_peer_discovery_etcd.erl
+++ b/deps/rabbitmq_peer_discovery_etcd/src/rabbit_peer_discovery_etcd.erl
@@ -4,7 +4,7 @@
%%
%% The Initial Developer of the Original Code is AWeber Communications.
%% Copyright (c) 2015-2016 AWeber Communications
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_peer_discovery_etcd).
diff --git a/deps/rabbitmq_peer_discovery_etcd/src/rabbitmq_peer_discovery_etcd.erl b/deps/rabbitmq_peer_discovery_etcd/src/rabbitmq_peer_discovery_etcd.erl
index d9778b0afd..0b0cb5723c 100644
--- a/deps/rabbitmq_peer_discovery_etcd/src/rabbitmq_peer_discovery_etcd.erl
+++ b/deps/rabbitmq_peer_discovery_etcd/src/rabbitmq_peer_discovery_etcd.erl
@@ -4,7 +4,7 @@
%%
%% The Initial Developer of the Original Code is AWeber Communications.
%% Copyright (c) 2015-2016 AWeber Communications
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbitmq_peer_discovery_etcd).
diff --git a/deps/rabbitmq_peer_discovery_etcd/src/rabbitmq_peer_discovery_etcd_app.erl b/deps/rabbitmq_peer_discovery_etcd/src/rabbitmq_peer_discovery_etcd_app.erl
index e43a820385..9ce799389a 100644
--- a/deps/rabbitmq_peer_discovery_etcd/src/rabbitmq_peer_discovery_etcd_app.erl
+++ b/deps/rabbitmq_peer_discovery_etcd/src/rabbitmq_peer_discovery_etcd_app.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbitmq_peer_discovery_etcd_app).
diff --git a/deps/rabbitmq_peer_discovery_etcd/src/rabbitmq_peer_discovery_etcd_sup.erl b/deps/rabbitmq_peer_discovery_etcd/src/rabbitmq_peer_discovery_etcd_sup.erl
index 6d220f8092..cfd5bbfcaa 100644
--- a/deps/rabbitmq_peer_discovery_etcd/src/rabbitmq_peer_discovery_etcd_sup.erl
+++ b/deps/rabbitmq_peer_discovery_etcd/src/rabbitmq_peer_discovery_etcd_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbitmq_peer_discovery_etcd_sup).
diff --git a/deps/rabbitmq_peer_discovery_etcd/src/rabbitmq_peer_discovery_etcd_v3_client.erl b/deps/rabbitmq_peer_discovery_etcd/src/rabbitmq_peer_discovery_etcd_v3_client.erl
index 864767c25e..60855086f3 100644
--- a/deps/rabbitmq_peer_discovery_etcd/src/rabbitmq_peer_discovery_etcd_v3_client.erl
+++ b/deps/rabbitmq_peer_discovery_etcd/src/rabbitmq_peer_discovery_etcd_v3_client.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbitmq_peer_discovery_etcd_v3_client).
@@ -39,7 +39,7 @@
%% don't allow node lease key TTL to be lower than this
%% as overly low values can cause annoying timeouts in etcd client operations
-define(MINIMUM_NODE_KEY_LEASE_TTL, 15).
-%% default randomized delay range is 5s to 60s, so this value
+%% default randomized delay range was 5s to 60s, so this value
%% produces a comparable delay
-define(DEFAULT_LOCK_WAIT_TTL, 70).
%% don't allow lock lease TTL to be lower than this
diff --git a/deps/rabbitmq_peer_discovery_etcd/test/config_schema_SUITE.erl b/deps/rabbitmq_peer_discovery_etcd/test/config_schema_SUITE.erl
index 0d9e7ceeab..abff997197 100644
--- a/deps/rabbitmq_peer_discovery_etcd/test/config_schema_SUITE.erl
+++ b/deps/rabbitmq_peer_discovery_etcd/test/config_schema_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(config_schema_SUITE).
diff --git a/deps/rabbitmq_peer_discovery_etcd/test/config_schema_SUITE_data/rabbitmq_peer_discovery_etcd.snippets b/deps/rabbitmq_peer_discovery_etcd/test/config_schema_SUITE_data/rabbitmq_peer_discovery_etcd.snippets
index 2d41aa0dbe..ed1c527f14 100644
--- a/deps/rabbitmq_peer_discovery_etcd/test/config_schema_SUITE_data/rabbitmq_peer_discovery_etcd.snippets
+++ b/deps/rabbitmq_peer_discovery_etcd/test/config_schema_SUITE_data/rabbitmq_peer_discovery_etcd.snippets
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
[
@@ -20,7 +20,7 @@
]}
], [rabbitmq_peer_discovery_etcd]
},
-
+
{etcd_discovery_mechanism_shortcut,
"cluster_formation.peer_discovery_backend = etcd
cluster_formation.etcd.host = etcd.eng.megacorp.local", [
@@ -34,7 +34,7 @@
]}
], [rabbitmq_peer_discovery_etcd]
},
-
+
%% etcd hostname
{etcd_host, "cluster_formation.etcd.host = etcd.eng.megacorp.local", [
{rabbit, [
diff --git a/deps/rabbitmq_peer_discovery_etcd/test/system_SUITE.erl b/deps/rabbitmq_peer_discovery_etcd/test/system_SUITE.erl
index 14533148b0..bc3cc9234f 100644
--- a/deps/rabbitmq_peer_discovery_etcd/test/system_SUITE.erl
+++ b/deps/rabbitmq_peer_discovery_etcd/test/system_SUITE.erl
@@ -4,7 +4,7 @@
%%
%% The Initial Developer of the Original Code is AWeber Communications.
%% Copyright (c) 2015-2016 AWeber Communications
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(system_SUITE).
diff --git a/deps/rabbitmq_peer_discovery_etcd/test/unit_SUITE.erl b/deps/rabbitmq_peer_discovery_etcd/test/unit_SUITE.erl
index 10df977f1f..0206eb53c1 100644
--- a/deps/rabbitmq_peer_discovery_etcd/test/unit_SUITE.erl
+++ b/deps/rabbitmq_peer_discovery_etcd/test/unit_SUITE.erl
@@ -4,7 +4,7 @@
%%
%% The Initial Developer of the Original Code is AWeber Communications.
%% Copyright (c) 2015-2016 AWeber Communications
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(unit_SUITE).
diff --git a/deps/rabbitmq_peer_discovery_k8s/BUILD.bazel b/deps/rabbitmq_peer_discovery_k8s/BUILD.bazel
new file mode 100644
index 0000000000..cd0597b507
--- /dev/null
+++ b/deps/rabbitmq_peer_discovery_k8s/BUILD.bazel
@@ -0,0 +1,65 @@
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze")
+load(
+ "//:rabbitmq.bzl",
+ "RABBITMQ_DIALYZER_OPTS",
+ "assert_suites",
+ "broker_for_integration_suites",
+ "rabbitmq_integration_suite",
+ "rabbitmq_lib",
+ "rabbitmq_suite",
+)
+
+APP_NAME = "rabbitmq_peer_discovery_k8s"
+
+APP_DESCRIPTION = "Kubernetes-based RabbitMQ peer discovery backend"
+
+APP_MODULE = "rabbitmq_peer_discovery_k8s_app"
+
+DEPS = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+ "//deps/rabbitmq_peer_discovery_common:bazel_erlang_lib",
+]
+
+RUNTIME_DEPS = [
+ "//deps/rabbit:bazel_erlang_lib",
+]
+
+rabbitmq_lib(
+ app_description = APP_DESCRIPTION,
+ app_module = APP_MODULE,
+ app_name = APP_NAME,
+ runtime_deps = RUNTIME_DEPS,
+ deps = DEPS,
+)
+
+xref(tags = ["xref"])
+
+dialyze(
+ dialyzer_opts = RABBITMQ_DIALYZER_OPTS,
+ plt = "//:base_plt",
+ tags = ["dialyze"],
+)
+
+broker_for_integration_suites()
+
+PACKAGE = "deps/rabbitmq_peer_discovery_k8s"
+
+suites = [
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "config_schema_SUITE",
+ ),
+ rabbitmq_suite(
+ name = "rabbitmq_peer_discovery_k8s_SUITE",
+ size = "small",
+ runtime_deps = [
+ "@meck//:bazel_erlang_lib",
+ ],
+ ),
+]
+
+assert_suites(
+ suites,
+ glob(["test/**/*_SUITE.erl"]),
+)
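Assuming the Bazel workspace is configured as in the repository root, the suites declared above would be run along these lines; the target names are an assumption about what the rabbitmq_integration_suite and rabbitmq_suite macros generate:

    bazel test //deps/rabbitmq_peer_discovery_k8s:config_schema_SUITE
    bazel test //deps/rabbitmq_peer_discovery_k8s:rabbitmq_peer_discovery_k8s_SUITE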
diff --git a/deps/rabbitmq_peer_discovery_k8s/Makefile b/deps/rabbitmq_peer_discovery_k8s/Makefile
index 12c3ade92a..9a12e3bc10 100644
--- a/deps/rabbitmq_peer_discovery_k8s/Makefile
+++ b/deps/rabbitmq_peer_discovery_k8s/Makefile
@@ -15,5 +15,5 @@ DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
ERLANG_MK_COMMIT = rabbitmq-tmp
-include rabbitmq-components.mk
-include erlang.mk
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
diff --git a/deps/rabbitmq_peer_discovery_k8s/erlang.mk b/deps/rabbitmq_peer_discovery_k8s/erlang.mk
deleted file mode 100644
index fce4be0b0a..0000000000
--- a/deps/rabbitmq_peer_discovery_k8s/erlang.mk
+++ /dev/null
@@ -1,7808 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
-ERLANG_MK_WITHOUT =
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
-
-# Adding erlang.mk to make Erlang scripts that call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
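A minimal sketch of how this helper is invoked: the first argument is an Erlang expression (newlines are stripped and double quotes escaped), and the optional second argument passes extra flags to erl.

    $(call erlang,halt(0).)   # expands to: erl +A1 -noinput -boot no_dot_erlang -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "halt(0)." -- erlang.mk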
-
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_HIPE)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
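A usage sketch, assuming OTP-24.2 is a valid tag of the erlang/otp repository: setting ERLANG_OTP makes erlang.mk build that version with kerl on first use, install it under $(KERL_INSTALL_DIR), and prepend it to PATH for the rest of the build.

    make ERLANG_OTP=OTP-24.2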
-
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / OpenStack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simplify writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating Github-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = erlang library for JSON Web Token
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple, non-intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is a pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elasticsearch's REST interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = redis erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += esh_mk
-pkg_esh_mk_name = esh_mk
-pkg_esh_mk_description = esh template engine plugin for erlang.mk
-pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
-pkg_esh_mk_fetch = git
-pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
-pkg_esh_mk_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = TOML language erlang parser
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = http://fixprotocol.org/ implementation.
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - an Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = The guards and for Erlang parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
-pkg_gun_homepage = http://ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang irc client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSON Schema Erlang) is an implementation of a JSON Schema validator for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding and decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library for efficient JSON decoding and encoding, written in C.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-Erlang, open-source implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = erlang driver for rethinkdb
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = library to handle mimetypes
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map an internal port to an external one using UPnP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang OAuth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transform for using new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with a convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid simple, internal pub/sub event bus written in and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between record and proplist
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = automatic Erlang node discovery via redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = pipelined erlang redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang Rest Client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy as nif for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an ID generator for message services.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elastic Search
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX style broken HTML parser in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = transit format for erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU based collation Erlang module
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = a simple erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler svn repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired by rebar xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based yaml loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = Zab protocol implemented in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
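As a quick usage sketch of the search target above (the query term is just an example), the package index can be queried from the command line:

    # Print every known package
    make search
    # Print only packages whose name or description matches the term
    make search q=mqtt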
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
-
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
-
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# Both kinds of plugins use the core_dep_plugin macro.
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
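A minimal sketch of how a project could hook into this mechanism, based on the foreach above (the dependency name and file path are hypothetical):

    # Includes $(DEPS_DIR)/my_dep/early-plugins.mk
    DEP_EARLY_PLUGINS = my_dep
    # Or include a specific file from within a dependency
    DEP_EARLY_PLUGINS = my_dep/mk/early-plugins.mk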
-
-# Query functions.
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
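To illustrate how these helpers resolve a dependency (the dep line below is hypothetical), a declaration such as

    dep_my_dep = git https://github.com/example/my_dep 1.2.3

makes query_fetch_method return git, query_repo the repository URL, query_version 1.2.3 and query_absolute_path $(DEPS_DIR)/my_dep.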
-
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
-
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly we don't want to include it here,
-# otherwise it would be treated both as an app and as a top-level project.
-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
-
-export NO_AUTOPATCH
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
-
-# Optimization: don't recompile deps unless truly necessary.
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
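In practice a dependency is considered built once its marker file exists; with the default DEPS_DIR this is, for a hypothetical dependency:

    # Touched by the deps target further below after a successful build:
    #   deps/my_dep/ebin/dep_built
    # Deleting it forces that single dependency to be rebuilt.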
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create the ebin directory for all apps to make sure Erlang recognizes them
-# as proper OTP applications when using -include_lib. This is a temporary
-# fix; a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
-
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies once they have been compiled.
-# Developers working on the top-level project and on some of its
-# dependencies at the same time may want to change this behavior.
-# There are two ways to do so (see the usage sketch below):
-#     1. Set `FULL=1` so that all dependencies are visited and
-#        recursively recompiled if necessary.
-#     2. Set `FORCE_REBUILD=` to the specific list of dependencies that
-#        should be recompiled (instead of the whole set).
-
-FORCE_REBUILD ?=
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
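A usage sketch of the two options described above (dependency names are placeholders):

    # Visit all dependencies and recompile them if necessary
    make FULL=1
    # Or, in the project Makefile, always revisit just these dependencies
    FORCE_REBUILD = cowlib ranch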
-
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
-
-# Deps related targets.
-
-# @todo rename GNUmakefile and makefile into Makefile first, if they exist
-# While the Makefile could also be named GNUmakefile or makefile,
-# in practice only Makefile has been needed so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
-# if given. Do it for all 3 possible Makefile file names.
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
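The effect of that substitution on a dependency's own Makefile, sketched as a before/after pair:

    # Before autopatching:
    #   include erlang.mk
    # After autopatching:
    #   include $(if $(ERLANG_MK_FILENAME),$(ERLANG_MK_FILENAME),erlang.mk)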
-
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-# Hex only has a package version. No need to look in the Erlang.mk packages.
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
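A sketch of the dep lines this fetch method expects (package name and versions are made up); the second word is the Hex version, and an optional third word names the Hex package when it differs from the app name:

    DEPS += my_dep
    dep_my_dep = hex 1.0.0
    # Hex package published under a different name than the app
    dep_my_dep = hex 1.0.0 my_dep_pkg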
-
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility purposes with older Erlang.mk configuration.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
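For reference, the deprecated dependency format handled here looks roughly like this (URL and branch are illustrative); the first word is the repository and the optional second word the commit, defaulting to master:

    dep_my_dep = https://github.com/example/my_dep some-branch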
-
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
-
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- Output0 = [
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ],
- Output = case "é" of
- [233] -> unicode:characters_to_binary(Output0);
- _ -> Output0
- end,
- ok = file:write_file("$(1)", Output),
- halt()
-endef
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
-		echo "The required {modules, []} entry was not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
-
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
- @:
-
-test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
-test_erlc_verbose_2 = set -x;
-test_erlc_verbose = $(test_erlc_verbose_$(V))
-
-define compile_test_erl
- $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(1)
-endef
-
-ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
-$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
- $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want to fail due to
-# warnings when used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
- " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
- " list-templates List available templates"
-
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
-
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
-
-list-templates:
-	$(verbose) echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code. The "gcc" MSYS2 package also doesn't.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
- "The CI_OTP variable must be defined with the Erlang versions" \
- "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
-
-# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifdef CONCUERROR_TESTS
-
-.PHONY: concuerror distclean-concuerror
-
-# Configuration
-
-CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
-CONCUERROR_OPTS ?=
-
-# Core targets.
-
-check:: concuerror
-
-ifndef KEEP_LOGS
-distclean:: distclean-concuerror
-endif
-
-# Plugin-specific targets.
-
-$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
- $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
- $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
-
-$(CONCUERROR_LOGS_DIR):
- $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
-
-define concuerror_html_report
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="utf-8">
-<title>Concuerror HTML report</title>
-</head>
-<body>
-<h1>Concuerror HTML report</h1>
-<p>Generated on $(concuerror_date)</p>
-<ul>
-$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
-</ul>
-</body>
-</html>
-endef
-
-concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
- $(eval concuerror_date := $(shell date))
- $(eval concuerror_targets := $^)
- $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
-
-define concuerror_target
-.PHONY: concuerror-$1-$2
-
-concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
- $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
- --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
- -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
- $$(CONCUERROR_OPTS) -m $1 -t $2
-endef
-
-$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
-
-distclean-concuerror:
- $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
-
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ct apps-ct distclean-ct
-
-# Configuration.
-
-CT_OPTS ?=
-
-ifneq ($(wildcard $(TEST_DIR)),)
-ifndef CT_SUITES
-CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
-endif
-endif
-CT_SUITES ?=
-CT_LOGS_DIR ?= $(CURDIR)/logs
-
-# Core targets.
-
-tests:: ct
-
-ifndef KEEP_LOGS
-distclean:: distclean-ct
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Common_test targets:" \
- " ct Run all the common_test suites for this project" \
- "" \
- "All your common_test suites have their associated targets." \
-		"A suite named http_SUITE can be run using the ct-http target."
-
-# Plugin-specific targets.
-
-CT_RUN = ct_run \
- -no_auto_compile \
- -noinput \
- -pa $(CURDIR)/ebin $(TEST_DIR) \
- -dir $(TEST_DIR) \
- -logdir $(CT_LOGS_DIR)
-
-ifeq ($(CT_SUITES),)
-ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
-else
-# We do not run tests if we are in an apps/* with no test directory.
-ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
-ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
-endif
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-define ct_app_target
-apps-ct-$1: test-build
- $$(MAKE) -C $1 ct IS_APP=1
-endef
-
-$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
-
-apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
-endif
-
-ifdef t
-ifeq (,$(findstring :,$t))
-CT_EXTRA = -group $t
-else
-t_words = $(subst :, ,$t)
-CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
-endif
-else
-ifdef c
-CT_EXTRA = -case $c
-else
-CT_EXTRA =
-endif
-endif
-
-define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
-endef
-
-$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
-
-distclean-ct:
- $(gen_verbose) rm -rf $(CT_LOGS_DIR)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: plt distclean-plt dialyze
-
-# Configuration.
-
-DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
-export DIALYZER_PLT
-
-PLT_APPS ?=
-DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-DIALYZER_PLT_OPTS ?=
-
-# Core targets.
-
-check:: dialyze
-
-distclean:: distclean-plt
-
-help::
- $(verbose) printf "%s\n" "" \
- "Dialyzer targets:" \
- " plt Build a PLT file for this project" \
- " dialyze Analyze the project using Dialyzer"
-
-# Plugin-specific targets.
-
-define filter_opts.erl
- Opts = init:get_plain_arguments(),
- {Filtered, _} = lists:foldl(fun
- (O, {Os, true}) -> {[O|Os], false};
- (O = "-D", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-I", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-pa", {Os, _}) -> {[O|Os], true};
- (_, Acc) -> Acc
- end, {[], false}, Opts),
- io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
- halt().
-endef
-
-# DIALYZER_PLT is a variable understood directly by Dialyzer.
-#
-# We append the path to erts at the end of the PLT. This works
-# because the PLT file is in the external term format and the
-# function binary_to_term/1 ignores any trailing data.
-$(DIALYZER_PLT): deps app
- $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
- while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
- $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
- erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
- $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
-
-plt: $(DIALYZER_PLT)
-
-distclean-plt:
- $(gen_verbose) rm -f $(DIALYZER_PLT)
-
-ifneq ($(wildcard $(DIALYZER_PLT)),)
-dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
- $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
- grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
- rm $(DIALYZER_PLT); \
- $(MAKE) plt; \
- fi
-else
-dialyze: $(DIALYZER_PLT)
-endif
- $(verbose) dialyzer --no_native `$(ERL) \
- -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
- -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
-		" escript Build an executable escript archive"
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
- $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: eunit apps-eunit
-
-# Configuration
-
-EUNIT_OPTS ?=
-EUNIT_ERL_OPTS ?=
-
-# Core targets.
-
-tests:: eunit
-
-help::
- $(verbose) printf "%s\n" "" \
- "EUnit targets:" \
- " eunit Run all the EUnit tests for this project"
-
-# Plugin-specific targets.
-
-define eunit.erl
- $(call cover.erl)
- CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
- ok -> ok;
- error -> halt(2)
- end,
- CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
- halt()
-endef
-
-EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
-else
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
-endif
-else
-EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
-EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
-
-EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
- $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
-
-eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
-ifneq ($(wildcard src/ $(TEST_DIR)),)
- $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-apps-eunit: test-build
- $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
- [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
- exit $$eunit_retcode
-endif
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper)
-.PHONY: proper
-
-# Targets.
-
-tests:: proper
-
-define proper_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- Module = fun(M) ->
- [true] =:= lists:usort([
- case atom_to_list(F) of
- "prop_" ++ _ ->
- io:format("Testing ~p:~p/0~n", [M, F]),
- proper:quickcheck(M:F(), nocolors);
- _ ->
- true
- end
- || {F, 0} <- M:module_info(exports)])
- end,
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]);
- module -> Module($(2));
- function -> proper:quickcheck($(2), nocolors)
- end,
- CoverExport("$(COVER_DATA_DIR)/proper.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-proper: test-build cover-data-dir
- $(verbose) $(call erlang,$(call proper_check.erl,module,$(t)))
-else
-proper: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)()))
-endif
-else
-proper: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Verbosity.
-
-proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
-proto_verbose = $(proto_verbose_$(V))
-
-# Core targets.
-
-ifneq ($(wildcard src/),)
-ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
-PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
-ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
-
-ifeq ($(PROTO_FILES),)
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
- $(verbose) :
-else
-# Rebuild proto files when the Makefile changes.
-# We exclude $(PROJECT).d to avoid a circular dependency.
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(PROTO_FILES); \
- fi
- $(verbose) touch $@
-
-$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
-endif
-
-ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
-define compile_proto.erl
- [begin
- protobuffs_compile:generate_source(F, [
- {output_include_dir, "./include"},
- {output_src_dir, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-else
-define compile_proto.erl
- [begin
- gpb_compile:file(F, [
- {include_as_lib, true},
- {module_name_suffix, "_pb"},
- {o_hrl, "./include"},
- {o_erl, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-endif
-
-ifneq ($(PROTO_FILES),)
-$(PROJECT).d:: $(PROTO_FILES)
- $(verbose) mkdir -p ebin/ include/
- $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
-endif
-endif
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
-
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
- " shell Run an erlang shell with SHELL_OPTS or reasonable default"
-
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
-
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
-		"ReST sources and the 'conf.py' file are expected in the directory pointed to by" \
-		"SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists the formats to build (only" \
-		"the 'html' format is generated by default); the target directory can be specified by" \
- 'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
- "Additional Sphinx options can be set in SPHINX_OPTS."
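-# For illustration only (not part of the original file): a hypothetical invocation
-# such as `make sphinx SPHINX_FORMATS="html man" sphinx_man_output=output/man`
-# would build HTML into ./html and man pages into ./output/man.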
-
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
-
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
-.PHONY: triq
-
-# Targets.
-
-tests:: triq
-
-define triq_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
- module -> triq:check($(2));
- function -> triq:check($(2))
- end,
- CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-triq: test-build cover-data-dir
- $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
-else
-triq: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
-endif
-else
-triq: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
-		'  xref        Run Xrefr using $$XREF_CONFIG as the config file if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-COVER_REPORT_DIR ?= cover
-COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
-
-ifdef COVER
-COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
-COVER_DEPS ?=
-endif
-
-# Code coverage for Common Test.
-
-ifdef COVER
-ifdef CT_RUN
-ifneq ($(wildcard $(TEST_DIR)),)
-test-build:: $(TEST_DIR)/ct.cover.spec
-
-$(TEST_DIR)/ct.cover.spec: cover-data-dir
- $(gen_verbose) printf "%s\n" \
- "{incl_app, '$(PROJECT)', details}." \
- "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
- $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
- $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
-
-CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
-endif
-endif
-endif
-
-# Code coverage for other tools.
-
-ifdef COVER
-define cover.erl
- CoverSetup = fun() ->
- Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
- $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
- $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
- end
- end || Dir <- Dirs]
- end,
- CoverExport = fun(Filename) -> cover:export(Filename) end,
-endef
-else
-define cover.erl
- CoverSetup = fun() -> ok end,
- CoverExport = fun(_) -> ok end,
-endef
-endif
-
-# Core targets
-
-ifdef COVER
-ifneq ($(COVER_REPORT_DIR),)
-tests::
- $(verbose) $(MAKE) --no-print-directory cover-report
-endif
-
-cover-data-dir: | $(COVER_DATA_DIR)
-
-$(COVER_DATA_DIR):
- $(verbose) mkdir -p $(COVER_DATA_DIR)
-else
-cover-data-dir:
-endif
-
-clean:: coverdata-clean
-
-ifneq ($(COVER_REPORT_DIR),)
-distclean:: cover-report-clean
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Cover targets:" \
-		"  cover-report  Generate an HTML coverage report from previously collected" \
- " cover data." \
- " all.coverdata Merge all coverdata files into all.coverdata." \
- "" \
- "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
-		"target tests additionally generates an HTML coverage report from the combined" \
- "coverdata files from each of these testing tools. HTML reports can be disabled" \
- "by setting COVER_REPORT_DIR to empty."
-
-# Plugin specific targets
-
-COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
-
-.PHONY: coverdata-clean
-coverdata-clean:
- $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
-
-# Merge all coverdata files into one.
-define cover_export.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
-endef
-
-all.coverdata: $(COVERDATA) cover-data-dir
- $(gen_verbose) $(call erlang,$(cover_export.erl))
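-# For illustration only (not part of the original file): running the suites with
-# `make tests COVER=1` and then `make all.coverdata` would merge e.g.
-# eunit.coverdata and ct.coverdata into $(COVER_DATA_DIR)/all.coverdata.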
-
-# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
-# empty if you want the coverdata files but not the HTML report.
-ifneq ($(COVER_REPORT_DIR),)
-
-.PHONY: cover-report-clean cover-report
-
-cover-report-clean:
- $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
-ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
- $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
-endif
-
-ifeq ($(COVERDATA),)
-cover-report:
-else
-
-# Modules that include eunit.hrl always contain one line without coverage
-# because eunit defines test/0, which is never called. We compensate for this.
-EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
- grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
- | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
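-# For illustration only (not part of the original file): if a hypothetical
-# src/my_mod.erl contains `-include_lib("eunit/include/eunit.hrl").`, the
-# grep/sed pipeline above turns it into 'my_mod' in $(EUNIT_HRL_MODS).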
-
-define cover_report.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- Ms = cover:imported_modules(),
- [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
- ++ ".COVER.html", [html]) || M <- Ms],
- Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
- EunitHrlMods = [$(EUNIT_HRL_MODS)],
- Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
- true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
- TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
- TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
- Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
- TotalPerc = Perc(TotalY, TotalN),
- {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
- io:format(F, "<!DOCTYPE html><html>~n"
- "<head><meta charset=\"UTF-8\">~n"
- "<title>Coverage report</title></head>~n"
- "<body>~n", []),
- io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
- io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
- [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
- "<td>~p%</td></tr>~n",
- [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
- How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
- Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
- io:format(F, "</table>~n"
- "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
- "</body></html>", [How, Date]),
- halt().
-endef
-
-cover-report:
- $(verbose) mkdir -p $(COVER_REPORT_DIR)
- $(gen_verbose) $(call erlang,$(cover_report.erl))
-
-endif
-endif # ifneq ($(COVER_REPORT_DIR),)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
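-# For illustration only (not part of the original file): with SFX=1 and a relx
-# release (and its tarball) already built, `make rel SFX=1` would append the
-# archive to the stub above, producing an executable
-# $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run that unpacks itself to a temporary
-# directory and starts the release console.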
-
-endif
-endif
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
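-# For illustration only (not part of the original file): with a hypothetical
-# dependency named cowboy, `DEP_PLUGINS = cowboy` would load
-# $(DEPS_DIR)/cowboy/plugins.mk, while `DEP_PLUGINS = cowboy/mk/dist.mk`
-# would load that specific file from the same dependency.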
-
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
-
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are also included
-# regardless of the type of dependencies requested.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
-
-# Allow the use of fetch-deps and $(DEP_TYPES) to fetch multiple types of
-# dependencies with a single target.
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
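-# For illustration only (not part of the original file): with the rules above,
-# `make fetch-deps DEP_TYPES="test doc"` would pull test and doc dependencies
-# into the recursive dependency list in addition to the normal ones.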
-
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
diff --git a/deps/rabbitmq_peer_discovery_k8s/include/rabbit_peer_discovery_k8s.hrl b/deps/rabbitmq_peer_discovery_k8s/include/rabbit_peer_discovery_k8s.hrl
index 9665b67023..e751843931 100644
--- a/deps/rabbitmq_peer_discovery_k8s/include/rabbit_peer_discovery_k8s.hrl
+++ b/deps/rabbitmq_peer_discovery_k8s/include/rabbit_peer_discovery_k8s.hrl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-define(CONFIG_MODULE, rabbit_peer_discovery_config).
diff --git a/deps/rabbitmq_peer_discovery_k8s/rabbitmq-components.mk b/deps/rabbitmq_peer_discovery_k8s/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/rabbitmq_peer_discovery_k8s/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Define the default goal as `all` because this file defines some targets
-# before the inclusion of erlang.mk, leading to the wrong target becoming
-# the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
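-# For illustration only (not part of the original file): a hypothetical
-# `git describe` output of rabbitmq_v3_8_9-12-gabc1234 would be rewritten by
-# the sed expressions above to 3.8.9+12.gabc1234.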
-
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to check out branches that match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch, or fall back to `stable` or `master`, whichever was the
-# base of the topic branch.
-
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
-
-# Third-party dependencies version pinning.
-#
-# We do that in this file, which is copied in all projects, to ensure
-# all projects use the same versions. It avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# Erlang.mk does not rebuild dependencies by default once they have
-# been compiled, except for those listed in the `$(FORCE_REBUILD)`
-# variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this makes it
-# easier to work on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project
-# repository URL, if it's a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name are replaced by the
-#   target's properties:
-# eg. rabbitmq-common is replaced by rabbitmq-codegen
-# eg. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fall back to the
-# RabbitMQ upstream, which is GitHub.
-
-# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following pattern:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
-
-# Macro to replace both the project's name (eg. "rabbit_common") and
-# repository name (eg. "rabbitmq-common") by the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespace in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
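-# For illustration only (not part of the original file): assuming this project
-# was cloned from https://github.com/me/rabbitmq-peer-discovery-k8s.git (a
-# hypothetical fork), $(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),rabbitmq_codegen)
-# would resolve to https://github.com/me/rabbitmq-codegen.git.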
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level's one.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is a coincidence that we found a
-# `rabbitmq-components.mk` in upper directories.
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
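-# For illustration only (not part of the original file): for a checkout laid out
-# as rabbitmq-server/deps/rabbitmq_peer_discovery_k8s, possible_deps_dir_1
-# resolves to rabbitmq-server/deps, which is named `deps` and has a
-# rabbitmq-components.mk in its parent, so DEPS_DIR points at the umbrella's
-# deps directory.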
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
diff --git a/deps/rabbitmq_peer_discovery_k8s/src/rabbit_peer_discovery_k8s.erl b/deps/rabbitmq_peer_discovery_k8s/src/rabbit_peer_discovery_k8s.erl
index 3990cf8b11..ce02cce6cd 100644
--- a/deps/rabbitmq_peer_discovery_k8s/src/rabbit_peer_discovery_k8s.erl
+++ b/deps/rabbitmq_peer_discovery_k8s/src/rabbit_peer_discovery_k8s.erl
@@ -4,7 +4,7 @@
%%
%% The Initial Developer of the Original Code is AWeber Communications.
%% Copyright (c) 2015-2016 AWeber Communications
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_peer_discovery_k8s).
@@ -14,9 +14,9 @@
-include_lib("rabbitmq_peer_discovery_common/include/rabbit_peer_discovery.hrl").
-include("rabbit_peer_discovery_k8s.hrl").
+
-export([init/0, list_nodes/0, supports_registration/0, register/0, unregister/0,
- post_registration/0, lock/1, unlock/1, randomized_startup_delay_range/0,
- send_event/3, generate_v1_event/7]).
+ post_registration/0, lock/1, unlock/1, send_event/3, generate_v1_event/7]).
-ifdef(TEST).
-compile(export_all).
@@ -38,24 +38,27 @@ init() ->
-spec list_nodes() -> {ok, {Nodes :: list(), NodeType :: rabbit_types:node_type()}} | {error, Reason :: string()}.
list_nodes() ->
- case make_request() of
- {ok, Response} ->
- Addresses = extract_node_list(Response),
- {ok, {lists:map(fun node_name/1, Addresses), disc}};
- {error, Reason} ->
- Details = io_lib:format("Failed to fetch a list of nodes from Kubernetes API: ~s", [Reason]),
- rabbit_log:error(Details),
- send_event("Warning", "Failed", Details),
- {error, Reason}
- end.
+ case make_request() of
+ {ok, Response} ->
+ Addresses = extract_node_list(Response),
+ Nodes = lists:map(fun node_name/1, Addresses),
+ {ok, {Nodes, disc}};
+ {error, Reason} ->
+ Details = io_lib:format("Failed to fetch a list of nodes from Kubernetes API: ~s", [Reason]),
+ rabbit_log:error(Details),
+ send_event("Warning", "Failed", Details),
+ {error, Reason}
+ end.
-spec supports_registration() -> boolean().
supports_registration() ->
- %% see rabbitmq-peer-discovery-aws#17,
- %% rabbitmq-peer-discovery-k8s#23
- true.
+ true. %% to send event in post_registration/0
+-spec post_registration() -> ok | {error, Reason :: string()}.
+post_registration() ->
+ Details = io_lib:format("Node ~s is registered", [node()]),
+ send_event("Normal", "Created", Details).
-spec register() -> ok.
register() ->
@@ -65,29 +68,39 @@ register() ->
unregister() ->
ok.
--spec post_registration() -> ok | {error, Reason :: string()}.
-post_registration() ->
- Details = io_lib:format("Node ~s is registered", [node()]),
- send_event("Normal", "Created", Details).
-
--spec lock(Node :: atom()) -> not_supported.
-
-lock(_Node) ->
- not_supported.
-
--spec unlock(Data :: term()) -> ok.
-
-unlock(_Data) ->
+-spec lock(Node :: node()) -> {ok, {{ResourceId :: string(), LockRequesterId :: node()}, Nodes :: [node()]}} |
+ {error, Reason :: string()}.
+
+lock(Node) ->
+ %% call list_nodes/0 externally such that meck can mock the function
+ case ?MODULE:list_nodes() of
+ {ok, {Nodes, disc}} ->
+ case lists:member(Node, Nodes) of
+ true ->
+ rabbit_log:info("Will try to lock connecting to nodes ~p", [Nodes]),
+ LockId = rabbit_nodes:lock_id(Node),
+ Retries = rabbit_nodes:lock_retries(),
+ case global:set_lock(LockId, Nodes, Retries) of
+ true ->
+ {ok, {LockId, Nodes}};
+ false ->
+ {error, io_lib:format("Acquiring lock taking too long, bailing out after ~b retries", [Retries])}
+ end;
+ false ->
+            %% Don't try to acquire the global lock when the local node is not discoverable by peers.
+            %% This branch is just an additional safety check. We should never run into it
+            %% because the local Pod is in state 'Running' and we listed both ready and not-ready addresses.
+ {error, lists:flatten(io_lib:format("Local node ~s is not part of discovered nodes ~p", [Node, Nodes]))}
+ end;
+ {error, _} = Error ->
+ Error
+ end.
+
+-spec unlock({{ResourceId :: string(), LockRequestedId :: atom()}, Nodes :: [atom()]}) -> 'ok'.
+unlock({LockId, Nodes}) ->
+ global:del_lock(LockId, Nodes),
ok.
--spec randomized_startup_delay_range() -> {integer(), integer()}.
-
-randomized_startup_delay_range() ->
- %% Pods in a stateful set are initialized one by one,
- %% so RSD is not really necessary for this plugin.
- %% See https://www.rabbitmq.com/cluster-formation.html#peer-discovery-k8s for details.
- {0, 2}.
-
%%
%% Implementation
%%
@@ -125,32 +138,28 @@ node_name(Address) ->
?UTIL_MODULE:node_name(
?UTIL_MODULE:as_string(Address) ++ get_config_key(k8s_hostname_suffix, M)).
-
-%% @spec maybe_ready_address(k8s_subsets()) -> list()
-%% @doc Return a list of ready nodes
-%% SubSet can contain also "notReadyAddresses"
+%% @spec address(k8s_subsets()) -> list()
+%% @doc Return a list of both ready and not-ready nodes.
+%% For the purpose of peer discovery, consider both ready and not-ready addresses.
+%% Discover peers as quickly as possible, without waiting for their readiness checks to succeed.
%% @end
%%
--spec maybe_ready_address([map()]) -> list().
+-spec address([map()]) -> list().
-maybe_ready_address(Subset) ->
- case maps:get(<<"notReadyAddresses">>, Subset, undefined) of
- undefined -> ok;
- NotReadyAddresses ->
- Formatted = string:join([binary_to_list(get_address(X)) || X <- NotReadyAddresses], ", "),
- rabbit_log:info("k8s endpoint listing returned nodes not yet ready: ~s", [Formatted])
- end,
- maps:get(<<"addresses">>, Subset, []).
+address(Subset) ->
+ maps:get(<<"notReadyAddresses">>, Subset, []) ++
+ maps:get(<<"addresses">>, Subset, []).
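+%% For illustration only (not part of the original change): given a subset such as
+%% #{<<"addresses">> => [#{<<"ip">> => <<"172.17.0.4">>}],
+%%   <<"notReadyAddresses">> => [#{<<"ip">> => <<"172.17.0.2">>}]},
+%% address/1 returns both entries, so pods that are not yet ready are still discovered.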
%% @doc Return a list of nodes
-%% see https://kubernetes.io/docs/api-reference/v1/definitions/#_v1_endpoints
+%% "The set of all endpoints is the union of all subsets."
+%% https://kubernetes.io/docs/reference/kubernetes-api/service-resources/endpoints-v1/
%% @end
%%
-spec extract_node_list(map()) -> list().
extract_node_list(Response) ->
IpLists = [[get_address(Address)
- || Address <- maybe_ready_address(Subset)] || Subset <- maps:get(<<"subsets">>, Response, [])],
+ || Address <- address(Subset)] || Subset <- maps:get(<<"subsets">>, Response, [])],
sets:to_list(sets:union(lists:map(fun sets:from_list/1, IpLists))).
@@ -228,4 +237,4 @@ send_event(Type, Reason, Message) ->
[{"Authorization", "Bearer " ++ rabbit_data_coercion:to_list(Token1)}],
[{ssl, [{cacertfile, get_config_key(k8s_cert_path, M)}]}],
Body
- ). \ No newline at end of file
+ ).
diff --git a/deps/rabbitmq_peer_discovery_k8s/src/rabbitmq_peer_discovery_k8s.erl b/deps/rabbitmq_peer_discovery_k8s/src/rabbitmq_peer_discovery_k8s.erl
index a0d455ef4e..9e2a6e3406 100644
--- a/deps/rabbitmq_peer_discovery_k8s/src/rabbitmq_peer_discovery_k8s.erl
+++ b/deps/rabbitmq_peer_discovery_k8s/src/rabbitmq_peer_discovery_k8s.erl
@@ -2,18 +2,17 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-%% This module exists as an alias for rabbit_peer_discovery_aws.
+%% This module exists as an alias for rabbit_peer_discovery_k8s.
%% Some users assume that the discovery module is the same as plugin
%% name. This module tries to fill the naming gap between module and plugin names.
-module(rabbitmq_peer_discovery_k8s).
-behaviour(rabbit_peer_discovery_backend).
-export([init/0, list_nodes/0, supports_registration/0, register/0, unregister/0,
- post_registration/0, lock/1, unlock/1, randomized_startup_delay_range/0,
- send_event/3, generate_v1_event/7]).
+ post_registration/0, lock/1, unlock/1, send_event/3, generate_v1_event/7]).
-define(DELEGATE, rabbit_peer_discovery_k8s).
@@ -45,18 +44,14 @@ unregister() ->
post_registration() ->
?DELEGATE:post_registration().
--spec lock(Node :: atom()) -> not_supported.
+-spec lock(Node :: node()) -> {ok, {ResourceId :: string(), LockRequesterId :: node()}} | {error, Reason :: string()}.
lock(Node) ->
?DELEGATE:lock(Node).
--spec unlock(Data :: term()) -> ok.
+-spec unlock({{ResourceId :: string(), LockRequestedId :: atom()}, Nodes :: [atom()]}) -> 'ok'.
unlock(Data) ->
?DELEGATE:unlock(Data).
--spec randomized_startup_delay_range() -> {integer(), integer()}.
-randomized_startup_delay_range() ->
- ?DELEGATE:randomized_startup_delay_range().
-
generate_v1_event(Namespace, Name, Type, Message, Reason, Timestamp, HostName) ->
?DELEGATE:generate_v1_event(Namespace, Name, Type, Message, Reason, Timestamp, HostName).
diff --git a/deps/rabbitmq_peer_discovery_k8s/src/rabbitmq_peer_discovery_k8s_app.erl b/deps/rabbitmq_peer_discovery_k8s/src/rabbitmq_peer_discovery_k8s_app.erl
index 0a77e1305e..57df5964b0 100644
--- a/deps/rabbitmq_peer_discovery_k8s/src/rabbitmq_peer_discovery_k8s_app.erl
+++ b/deps/rabbitmq_peer_discovery_k8s/src/rabbitmq_peer_discovery_k8s_app.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbitmq_peer_discovery_k8s_app).
diff --git a/deps/rabbitmq_peer_discovery_k8s/src/rabbitmq_peer_discovery_k8s_node_monitor.erl b/deps/rabbitmq_peer_discovery_k8s/src/rabbitmq_peer_discovery_k8s_node_monitor.erl
index 60bf73a23d..be256ed4a8 100644
--- a/deps/rabbitmq_peer_discovery_k8s/src/rabbitmq_peer_discovery_k8s_node_monitor.erl
+++ b/deps/rabbitmq_peer_discovery_k8s/src/rabbitmq_peer_discovery_k8s_node_monitor.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% This gen_server receives node monitoring events from net_kernel
diff --git a/deps/rabbitmq_peer_discovery_k8s/src/rabbitmq_peer_discovery_k8s_sup.erl b/deps/rabbitmq_peer_discovery_k8s/src/rabbitmq_peer_discovery_k8s_sup.erl
index f7a0b1c094..6652037c7f 100644
--- a/deps/rabbitmq_peer_discovery_k8s/src/rabbitmq_peer_discovery_k8s_sup.erl
+++ b/deps/rabbitmq_peer_discovery_k8s/src/rabbitmq_peer_discovery_k8s_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbitmq_peer_discovery_k8s_sup).
diff --git a/deps/rabbitmq_peer_discovery_k8s/test/config_schema_SUITE.erl b/deps/rabbitmq_peer_discovery_k8s/test/config_schema_SUITE.erl
index 6fa29b19c6..e5455fea36 100644
--- a/deps/rabbitmq_peer_discovery_k8s/test/config_schema_SUITE.erl
+++ b/deps/rabbitmq_peer_discovery_k8s/test/config_schema_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(config_schema_SUITE).
diff --git a/deps/rabbitmq_peer_discovery_k8s/test/config_schema_SUITE_data/rabbitmq_peer_discovery_k8s.snippets b/deps/rabbitmq_peer_discovery_k8s/test/config_schema_SUITE_data/rabbitmq_peer_discovery_k8s.snippets
index 64ad3f0d9f..5c7223bd25 100644
--- a/deps/rabbitmq_peer_discovery_k8s/test/config_schema_SUITE_data/rabbitmq_peer_discovery_k8s.snippets
+++ b/deps/rabbitmq_peer_discovery_k8s/test/config_schema_SUITE_data/rabbitmq_peer_discovery_k8s.snippets
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
[
@@ -19,7 +19,7 @@
]}
], [rabbitmq_peer_discovery_k8s]
},
-
+
{k8s_discovery_mechanism_as_alias1,
"cluster_formation.peer_discovery_backend = k8s
cluster_formation.k8s.host = k8s.eng.megacorp.local", [
@@ -33,7 +33,7 @@
]}
], [rabbitmq_peer_discovery_k8s]
},
-
+
{k8s_discovery_mechanism_as_alias2,
"cluster_formation.peer_discovery_backend = kubernetes
cluster_formation.k8s.host = k8s.eng.megacorp.local", [
@@ -47,7 +47,7 @@
]}
], [rabbitmq_peer_discovery_k8s]
},
-
+
{k8s_host, "cluster_formation.k8s.host = k8s.eng.megacorp.local", [
{rabbit, [
{cluster_formation, [
diff --git a/deps/rabbitmq_peer_discovery_k8s/test/rabbitmq_peer_discovery_k8s_SUITE.erl b/deps/rabbitmq_peer_discovery_k8s/test/rabbitmq_peer_discovery_k8s_SUITE.erl
index 546f48d0f2..88fe2843f2 100644
--- a/deps/rabbitmq_peer_discovery_k8s/test/rabbitmq_peer_discovery_k8s_SUITE.erl
+++ b/deps/rabbitmq_peer_discovery_k8s/test/rabbitmq_peer_discovery_k8s_SUITE.erl
@@ -4,7 +4,7 @@
%%
%% The Initial Developer of the Original Code is AWeber Communications.
%% Copyright (c) 2015-2016 AWeber Communications
-%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2016-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbitmq_peer_discovery_k8s_SUITE).
@@ -14,9 +14,14 @@
-include_lib("eunit/include/eunit.hrl").
+%% rabbitmq/cluster-operator contains an implicit integration test
+%% for the rabbitmq_peer_discovery_k8s plugin added by
+%% https://github.com/rabbitmq/cluster-operator/pull/704
+
all() ->
[
- {group, unit}
+ {group, unit},
+ {group, lock}
].
groups() ->
@@ -31,15 +36,21 @@ groups() ->
node_name_suffix_test,
registration_support,
event_v1_test
- ]}].
+ ]},
+ {lock, [], [
+ lock_single_node,
+ lock_multiple_nodes,
+ lock_local_node_not_discovered,
+ lock_list_nodes_fails
+ ]}
+ ].
init_per_testcase(T, Config) when T == node_name_empty_test;
T == node_name_suffix_test ->
meck:new(net_kernel, [passthrough, unstick]),
meck:expect(net_kernel, longnames, fun() -> true end),
Config;
-init_per_testcase(_, Config) ->
- Config.
+init_per_testcase(_, Config) -> Config.
end_per_testcase(_, _Config) ->
meck:unload(),
@@ -52,7 +63,7 @@ end_per_testcase(_, _Config) ->
%%%
registration_support(_Config) ->
- ?assertEqual(rabbit_peer_discovery_k8s:supports_registration(), true).
+ ?assertEqual(true, rabbit_peer_discovery_k8s:supports_registration()).
extract_node_list_long_test(_Config) ->
{ok, Response} =
@@ -92,9 +103,10 @@ extract_node_list_with_not_ready_addresses_test(_Config) ->
{ok, Response} =
rabbit_json:try_decode(
rabbit_data_coercion:to_binary(
- "{\"kind\":\"Endpoints\",\"apiVersion\":\"v1\",\"metadata\":{\"name\":\"rabbitmq\",\"namespace\":\"test-rabbitmq\",\"selfLink\":\"\/api\/v1\/namespaces\/test-rabbitmq\/endpoints\/rabbitmq\",\"uid\":\"4ff733b8-3ad2-11e7-a40d-080027cbdcae\",\"resourceVersion\":\"170098\",\"creationTimestamp\":\"2017-05-17T07:27:41Z\",\"labels\":{\"app\":\"rabbitmq\",\"type\":\"LoadBalancer\"}},\"subsets\":[{\"notReadyAddresses\":[{\"ip\":\"172.17.0.2\",\"hostname\":\"rabbitmq-0\",\"nodeName\":\"minikube\",\"targetRef\":{\"kind\":\"Pod\",\"namespace\":\"test-rabbitmq\",\"name\":\"rabbitmq-0\",\"uid\":\"e980fe5a-3afd-11e7-a40d-080027cbdcae\",\"resourceVersion\":\"170044\"}},{\"ip\":\"172.17.0.4\",\"hostname\":\"rabbitmq-1\",\"nodeName\":\"minikube\",\"targetRef\":{\"kind\":\"Pod\",\"namespace\":\"test-rabbitmq\",\"name\":\"rabbitmq-1\",\"uid\":\"f6285603-3afd-11e7-a40d-080027cbdcae\",\"resourceVersion\":\"170071\"}},{\"ip\":\"172.17.0.5\",\"hostname\":\"rabbitmq-2\",\"nodeName\":\"minikube\",\"targetRef\":{\"kind\":\"Pod\",\"namespace\":\"test-rabbitmq\",\"name\":\"rabbitmq-2\",\"uid\":\"fd5a86dc-3afd-11e7-a40d-080027cbdcae\",\"resourceVersion\":\"170096\"}}],\"ports\":[{\"name\":\"amqp\",\"port\":5672,\"protocol\":\"TCP\"},{\"name\":\"http\",\"port\":15672,\"protocol\":\"TCP\"}]}]}")),
- Expectation = [],
- ?assertEqual(Expectation, rabbit_peer_discovery_k8s:extract_node_list(Response)).
+ "{\"kind\":\"Endpoints\",\"apiVersion\":\"v1\",\"metadata\":{\"name\":\"rabbitmq\",\"namespace\":\"test-rabbitmq\",\"selfLink\":\"\/api\/v1\/namespaces\/test-rabbitmq\/endpoints\/rabbitmq\",\"uid\":\"4ff733b8-3ad2-11e7-a40d-080027cbdcae\",\"resourceVersion\":\"170098\",\"creationTimestamp\":\"2017-05-17T07:27:41Z\",\"labels\":{\"app\":\"rabbitmq\",\"type\":\"LoadBalancer\"}},\"subsets\":[{\"addresses\":[{\"ip\":\"10.1.29.8\",\"targetRef\":{\"kind\":\"Pod\",\"namespace\":\"default\",\"name\":\"mariadb-tco7k\",\"uid\":\"fb59cc71-558c-11e6-86e9-ecf4bbd91e6c\",\"resourceVersion\":\"13034802\"}}],\"ports\":[{\"name\":\"mysql\",\"port\":3306,\"protocol\":\"TCP\"}]},{\"notReadyAddresses\":[{\"ip\":\"172.17.0.2\",\"hostname\":\"rabbitmq-0\",\"nodeName\":\"minikube\",\"targetRef\":{\"kind\":\"Pod\",\"namespace\":\"test-rabbitmq\",\"name\":\"rabbitmq-0\",\"uid\":\"e980fe5a-3afd-11e7-a40d-080027cbdcae\",\"resourceVersion\":\"170044\"}},{\"ip\":\"172.17.0.4\",\"hostname\":\"rabbitmq-1\",\"nodeName\":\"minikube\",\"targetRef\":{\"kind\":\"Pod\",\"namespace\":\"test-rabbitmq\",\"name\":\"rabbitmq-1\",\"uid\":\"f6285603-3afd-11e7-a40d-080027cbdcae\",\"resourceVersion\":\"170071\"}},{\"ip\":\"172.17.0.5\",\"hostname\":\"rabbitmq-2\",\"nodeName\":\"minikube\",\"targetRef\":{\"kind\":\"Pod\",\"namespace\":\"test-rabbitmq\",\"name\":\"rabbitmq-2\",\"uid\":\"fd5a86dc-3afd-11e7-a40d-080027cbdcae\",\"resourceVersion\":\"170096\"}}],\"ports\":[{\"name\":\"amqp\",\"port\":5672,\"protocol\":\"TCP\"},{\"name\":\"http\",\"port\":15672,\"protocol\":\"TCP\"}]}]}")),
+ Expectation = [<<"10.1.29.8">>,
+ <<"172.17.0.2">>, <<"172.17.0.4">>, <<"172.17.0.5">>],
+ ?assertEqual(Expectation, lists:sort(rabbit_peer_discovery_k8s:extract_node_list(Response))).
node_name_empty_test(_Config) ->
Expectation = 'rabbit@rabbitmq-0',
@@ -130,3 +142,33 @@ event_v1_test(_Config) ->
?assertEqual(Expectation,
rabbit_peer_discovery_k8s:generate_v1_event(<<"namespace">>, "test",
"Normal", "Reason", "MyMessage", "2019-12-06T15:10:23+00:00", "MyHostName")).
+
+lock_single_node(_Config) ->
+ LocalNode = node(),
+ Nodes = [LocalNode],
+ meck:expect(rabbit_peer_discovery_k8s, list_nodes, 0, {ok, {[LocalNode], disc}}),
+
+ {ok, {LockId, Nodes}} = rabbit_peer_discovery_k8s:lock(LocalNode),
+ ?assertEqual(ok, rabbit_peer_discovery_k8s:unlock({LockId, Nodes})).
+
+lock_multiple_nodes(_Config) ->
+ application:set_env(rabbit, cluster_formation, [{internal_lock_retries, 2}]),
+ LocalNode = node(),
+ OtherNode = other@host,
+ Nodes = [OtherNode, LocalNode],
+ meck:expect(rabbit_peer_discovery_k8s, list_nodes, 0, {ok, {Nodes, disc}}),
+
+ {ok, {{LockResourceId, OtherNode}, Nodes}} = rabbit_peer_discovery_k8s:lock(OtherNode),
+ ?assertEqual({error, "Acquiring lock taking too long, bailing out after 2 retries"}, rabbit_peer_discovery_k8s:lock(LocalNode)),
+ ?assertEqual(ok, rabbitmq_peer_discovery_k8s:unlock({{LockResourceId, OtherNode}, Nodes})),
+ ?assertEqual({ok, {{LockResourceId, LocalNode}, Nodes}}, rabbit_peer_discovery_k8s:lock(LocalNode)),
+ ?assertEqual(ok, rabbitmq_peer_discovery_k8s:unlock({{LockResourceId, LocalNode}, Nodes})).
+
+lock_local_node_not_discovered(_Config) ->
+ meck:expect(rabbit_peer_discovery_k8s, list_nodes, 0, {ok, {[n1@host, n2@host], disc}} ),
+ Expectation = {error, "Local node me@host is not part of discovered nodes [n1@host,n2@host]"},
+ ?assertEqual(Expectation, rabbit_peer_discovery_k8s:lock(me@host)).
+
+lock_list_nodes_fails(_Config) ->
+ meck:expect(rabbit_peer_discovery_k8s, list_nodes, 0, {error, "K8s API unavailable"}),
+ ?assertEqual({error, "K8s API unavailable"}, rabbit_peer_discovery_k8s:lock(me@host)).
diff --git a/deps/rabbitmq_prometheus/.dockerignore b/deps/rabbitmq_prometheus/.dockerignore
deleted file mode 100644
index 00ebe93b6d..0000000000
--- a/deps/rabbitmq_prometheus/.dockerignore
+++ /dev/null
@@ -1,8 +0,0 @@
-.erlang.mk
-.git
-ebin
-logs
-prometheus
-src
-test
-tmp
diff --git a/deps/rabbitmq_prometheus/BUILD.bazel b/deps/rabbitmq_prometheus/BUILD.bazel
new file mode 100644
index 0000000000..622bdba958
--- /dev/null
+++ b/deps/rabbitmq_prometheus/BUILD.bazel
@@ -0,0 +1,73 @@
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze")
+load(
+ "//:rabbitmq.bzl",
+ "RABBITMQ_DIALYZER_OPTS",
+ "assert_suites",
+ "broker_for_integration_suites",
+ "rabbitmq_integration_suite",
+ "rabbitmq_lib",
+)
+
+APP_NAME = "rabbitmq_prometheus"
+
+APP_MODULE = "rabbit_prometheus_app"
+
+APP_ENV = """[
+ {return_per_object_metrics, false}
+]"""
+
+BUILD_DEPS = [
+ "//deps/amqp_client:bazel_erlang_lib",
+ "//deps/rabbit_common:bazel_erlang_lib",
+]
+
+DEPS = [
+ "@prometheus//:bazel_erlang_lib",
+]
+
+RUNTIME_DEPS = [
+ "//deps/rabbit:bazel_erlang_lib",
+ "//deps/rabbitmq_management_agent:bazel_erlang_lib",
+ "//deps/rabbitmq_web_dispatch:bazel_erlang_lib",
+ "@accept//:bazel_erlang_lib",
+]
+
+rabbitmq_lib(
+ app_module = APP_MODULE,
+ app_name = APP_NAME,
+ build_deps = BUILD_DEPS,
+ runtime_deps = RUNTIME_DEPS,
+ deps = DEPS,
+)
+
+xref(tags = ["xref"])
+
+dialyze(
+ dialyzer_opts = RABBITMQ_DIALYZER_OPTS,
+ plt = "//:base_plt",
+ tags = ["dialyze"],
+)
+
+broker_for_integration_suites()
+
+PACKAGE = "deps/rabbitmq_prometheus"
+
+suites = [
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "config_schema_SUITE",
+ size = "small",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "rabbit_prometheus_http_SUITE",
+ size = "medium",
+ flaky = True,
+ ),
+]
+
+assert_suites(
+ suites,
+ glob(["test/**/*_SUITE.erl"]),
+)
diff --git a/deps/rabbitmq_prometheus/Dockerfile b/deps/rabbitmq_prometheus/Dockerfile
deleted file mode 100644
index 3452115475..0000000000
--- a/deps/rabbitmq_prometheus/Dockerfile
+++ /dev/null
@@ -1,315 +0,0 @@
-# The official Canonical Ubuntu Bionic image is ideal from a security perspective,
-# especially for the enterprises that we, the RabbitMQ team, have to deal with
-FROM ubuntu:18.04
-
-RUN set -eux; \
- apt-get update; \
- apt-get install -y --no-install-recommends \
-# grab gosu for easy step-down from root
- gosu \
- ; \
- rm -rf /var/lib/apt/lists/*; \
-# verify that the "gosu" binary works
- gosu nobody true
-
-# Default to a PGP keyserver that pgp-happy-eyeballs recognizes, but allow for substitutions locally
-ARG PGP_KEYSERVER=ha.pool.sks-keyservers.net
-# If you are building this image locally and are getting `gpg: keyserver receive failed: No data` errors,
-# run the build with a different PGP_KEYSERVER, e.g. docker build --tag rabbitmq:3.7 --build-arg PGP_KEYSERVER=pgpkeys.eu 3.7/ubuntu
-# For context, see https://github.com/docker-library/official-images/issues/4252
-
-# Using the latest OpenSSL LTS release, with support until September 2023 - https://www.openssl.org/source/
-ENV OPENSSL_VERSION 1.1.1g
-ENV OPENSSL_SOURCE_SHA256="ddb04774f1e32f0c49751e21b67216ac87852ceb056b75209af2443400636d46"
-# https://www.openssl.org/community/omc.html
-ENV OPENSSL_PGP_KEY_IDS="0x8657ABB260F056B1E5190839D9C4D26D0E604491 0x5B2545DAB21995F4088CEFAA36CEE4DEB00CFE33 0xED230BEC4D4F2518B9D7DF41F0DB4D21C1D35231 0xC1F33DD8CE1D4CC613AF14DA9195C48241FBF7DD 0x7953AC1FBC3DC8B3B292393ED5E9E43F7DF9EE8C 0xE5E52560DD91C556DDBDA5D02064C53641C25E5D"
-
-# Use the latest stable Erlang/OTP release - make find-latest-otp - https://github.com/erlang/otp/tags
-ARG OTP_VERSION
-ENV OTP_VERSION ${OTP_VERSION}
-# TODO add PGP checking when the feature will be added to Erlang/OTP's build system
-# http://erlang.org/pipermail/erlang-questions/2019-January/097067.html
-ARG OTP_SHA256
-ENV OTP_SOURCE_SHA256=${OTP_SHA256}
-
-# Install dependencies required to build Erlang/OTP from source
-# http://erlang.org/doc/installation_guide/INSTALL.html
-# autoconf: Required to configure Erlang/OTP before compiling
-# dpkg-dev: Required to set up host & build type when compiling Erlang/OTP
-# gnupg: Required to verify OpenSSL artefacts
-# libncurses5-dev: Required for Erlang/OTP new shell & observer_cli - https://github.com/zhongwencool/observer_cli
-RUN set -eux; \
- \
- savedAptMark="$(apt-mark showmanual)"; \
- apt-get update; \
- apt-get install --yes --no-install-recommends \
- autoconf \
- ca-certificates \
- dpkg-dev \
- gcc \
- gnupg \
- libncurses5-dev \
- make \
- wget \
- ; \
- rm -rf /var/lib/apt/lists/*; \
- \
- OPENSSL_SOURCE_URL="https://www.openssl.org/source/openssl-$OPENSSL_VERSION.tar.gz"; \
- OPENSSL_PATH="/usr/local/src/openssl-$OPENSSL_VERSION"; \
- OPENSSL_CONFIG_DIR=/usr/local/etc/ssl; \
- \
-# Required by the crypto & ssl Erlang/OTP applications
- wget --progress dot:giga --output-document "$OPENSSL_PATH.tar.gz.asc" "$OPENSSL_SOURCE_URL.asc"; \
- wget --progress dot:giga --output-document "$OPENSSL_PATH.tar.gz" "$OPENSSL_SOURCE_URL"; \
- export GNUPGHOME="$(mktemp -d)"; \
- for key in $OPENSSL_PGP_KEY_IDS; do \
- gpg --batch --keyserver "$PGP_KEYSERVER" --recv-keys "$key" || true; \
- done; \
- gpg --batch --verify "$OPENSSL_PATH.tar.gz.asc" "$OPENSSL_PATH.tar.gz"; \
- gpgconf --kill all; \
- rm -rf "$GNUPGHOME"; \
- echo "$OPENSSL_SOURCE_SHA256 *$OPENSSL_PATH.tar.gz" | sha256sum --check --strict -; \
- mkdir -p "$OPENSSL_PATH"; \
- tar --extract --file "$OPENSSL_PATH.tar.gz" --directory "$OPENSSL_PATH" --strip-components 1; \
- \
-# Configure OpenSSL for compilation
- cd "$OPENSSL_PATH"; \
-# OpenSSL's "config" script uses a lot of "uname"-based target detection...
- MACHINE="$(dpkg-architecture --query DEB_BUILD_GNU_CPU)" \
- RELEASE="4.x.y-z" \
- SYSTEM='Linux' \
- BUILD='???' \
- ./config \
- --openssldir="$OPENSSL_CONFIG_DIR" \
-# add -rpath to avoid conflicts between our OpenSSL's "libssl.so" and the libssl package by making sure /usr/local/lib is searched first (but only for Erlang/OpenSSL to avoid issues with other tools using libssl; https://github.com/docker-library/rabbitmq/issues/364)
- -Wl,-rpath=/usr/local/lib \
- ; \
-# Compile, install OpenSSL, verify that the command-line works & development headers are present
- make -j "$(getconf _NPROCESSORS_ONLN)"; \
- make install_sw install_ssldirs; \
- cd ..; \
- rm -rf "$OPENSSL_PATH"*; \
- ldconfig; \
-# use Debian's CA certificates
- rmdir "$OPENSSL_CONFIG_DIR/certs" "$OPENSSL_CONFIG_DIR/private"; \
- ln -sf /etc/ssl/certs /etc/ssl/private "$OPENSSL_CONFIG_DIR"; \
-# smoke test
- openssl version; \
- \
- OTP_SOURCE_URL="https://github.com/erlang/otp/archive/OTP-$OTP_VERSION.tar.gz"; \
- OTP_PATH="/usr/local/src/otp-$OTP_VERSION"; \
- \
-# Download, verify & extract OTP_SOURCE
- mkdir -p "$OTP_PATH"; \
- wget --progress dot:giga --output-document "$OTP_PATH.tar.gz" "$OTP_SOURCE_URL"; \
- echo "$OTP_SOURCE_SHA256 *$OTP_PATH.tar.gz" | sha256sum --check --strict -; \
- tar --extract --file "$OTP_PATH.tar.gz" --directory "$OTP_PATH" --strip-components 1; \
- \
-# Configure Erlang/OTP for compilation, disable unused features & applications
-# http://erlang.org/doc/applications.html
-# ERL_TOP is required for Erlang/OTP makefiles to find the absolute path for the installation
- cd "$OTP_PATH"; \
- export ERL_TOP="$OTP_PATH"; \
- ./otp_build autoconf; \
- CFLAGS="$(dpkg-buildflags --get CFLAGS)"; export CFLAGS; \
-# add -rpath to avoid conflicts between our OpenSSL's "libssl.so" and the libssl package by making sure /usr/local/lib is searched first (but only for Erlang/OpenSSL to avoid issues with other tools using libssl; https://github.com/docker-library/rabbitmq/issues/364)
- export CFLAGS="$CFLAGS -Wl,-rpath=/usr/local/lib"; \
- hostArch="$(dpkg-architecture --query DEB_HOST_GNU_TYPE)"; \
- buildArch="$(dpkg-architecture --query DEB_BUILD_GNU_TYPE)"; \
- dpkgArch="$(dpkg --print-architecture)"; dpkgArch="${dpkgArch##*-}"; \
- ./configure \
- --host="$hostArch" \
- --build="$buildArch" \
- --disable-dynamic-ssl-lib \
- --disable-hipe \
- --disable-sctp \
- --disable-silent-rules \
- --enable-clock-gettime \
- --enable-hybrid-heap \
- --enable-kernel-poll \
- --enable-shared-zlib \
- --enable-smp-support \
- --enable-threads \
- --with-microstate-accounting=extra \
- --without-common_test \
- --without-debugger \
- --without-dialyzer \
- --without-diameter \
- --without-edoc \
- --without-erl_docgen \
- --without-erl_interface \
- --without-et \
- --without-eunit \
- --without-ftp \
- --without-hipe \
- --without-jinterface \
- --without-megaco \
- --without-observer \
- --without-odbc \
- --without-reltool \
- --without-ssh \
- --without-tftp \
- --without-wx \
- ; \
-# Compile & install Erlang/OTP
- make -j "$(getconf _NPROCESSORS_ONLN)" GEN_OPT_FLGS="-O2 -fno-strict-aliasing"; \
- make install; \
- cd ..; \
- rm -rf \
- "$OTP_PATH"* \
- /usr/local/lib/erlang/lib/*/examples \
- /usr/local/lib/erlang/lib/*/src \
- ; \
- \
-# reset apt-mark's "manual" list so that "purge --auto-remove" will remove all build dependencies
- apt-mark auto '.*' > /dev/null; \
- [ -z "$savedAptMark" ] || apt-mark manual $savedAptMark; \
- find /usr/local -type f -executable -exec ldd '{}' ';' \
- | awk '/=>/ { print $(NF-1) }' \
- | sort -u \
- | xargs -r dpkg-query --search \
- | cut -d: -f1 \
- | sort -u \
- | xargs -r apt-mark manual \
- ; \
- apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false; \
- \
-# Check that OpenSSL still works after purging build dependencies
- openssl version; \
-# Check that Erlang/OTP crypto & ssl were compiled against OpenSSL correctly
- erl -noshell -eval 'io:format("~p~n~n~p~n~n", [crypto:supports(), ssl:versions()]), init:stop().'
-
-ENV RABBITMQ_DATA_DIR=/var/lib/rabbitmq
-# Create rabbitmq system user & group, fix permissions & allow root user to connect to the RabbitMQ Erlang VM
-RUN set -eux; \
- groupadd --gid 999 --system rabbitmq; \
- useradd --uid 999 --system --home-dir "$RABBITMQ_DATA_DIR" --gid rabbitmq rabbitmq; \
- mkdir -p "$RABBITMQ_DATA_DIR" /etc/rabbitmq /tmp/rabbitmq-ssl /var/log/rabbitmq; \
- chown -fR rabbitmq:rabbitmq "$RABBITMQ_DATA_DIR" /etc/rabbitmq /tmp/rabbitmq-ssl /var/log/rabbitmq; \
- chmod 777 "$RABBITMQ_DATA_DIR" /etc/rabbitmq /tmp/rabbitmq-ssl /var/log/rabbitmq; \
- ln -sf "$RABBITMQ_DATA_DIR/.erlang.cookie" /root/.erlang.cookie
-
-# Use the latest alpha RabbitMQ 3.8 release - https://dl.bintray.com/rabbitmq/all-dev/rabbitmq-server/
-ARG RABBITMQ_VERSION
-ENV RABBITMQ_VERSION=${RABBITMQ_VERSION}
-ARG RABBITMQ_BUILD_NUMBER
-ENV RABBITMQ_BUILD_NUMBER=${RABBITMQ_BUILD_NUMBER}
-# https://www.rabbitmq.com/signatures.html#importing-gpg
-ENV RABBITMQ_PGP_KEY_ID="0x0A9AF2115F4687BD29803A206B73A36E6026DFCA"
-ENV RABBITMQ_HOME=/opt/rabbitmq
-
-# Add RabbitMQ to PATH, send all logs to TTY
-ENV PATH=$RABBITMQ_HOME/sbin:$PATH \
- RABBITMQ_LOGS=- RABBITMQ_SASL_LOGS=-
-
-# Install RabbitMQ
-RUN set -eux; \
- \
- savedAptMark="$(apt-mark showmanual)"; \
- apt-get update; \
- apt-get install --yes --no-install-recommends \
- ca-certificates \
- gnupg \
- wget \
- xz-utils \
- ; \
- rm -rf /var/lib/apt/lists/*; \
- \
- RABBITMQ_SOURCE_URL="https://s3-eu-west-1.amazonaws.com/server-release-pipeline/v3.9.x/unverified-packages/rabbitmq-server-${RABBITMQ_VERSION}-build-${RABBITMQ_BUILD_NUMBER}-generic-unix-latest-toolchain.tar"; \
- RABBITMQ_PATH="/usr/local/src/rabbitmq-$RABBITMQ_VERSION"; \
- \
- # wget --progress dot:giga --output-document "$RABBITMQ_PATH.tar.xz.asc" "$RABBITMQ_SOURCE_URL.asc"; \
- wget --progress dot:giga --output-document "$RABBITMQ_PATH.tar" "$RABBITMQ_SOURCE_URL"; \
- tar --extract --file "$RABBITMQ_PATH.tar"; \
- \
- # export GNUPGHOME="$(mktemp -d)"; \
- # gpg --batch --keyserver "$PGP_KEYSERVER" --recv-keys "$RABBITMQ_PGP_KEY_ID"; \
- # gpg --batch --verify "$RABBITMQ_PATH.tar.xz.asc" "$RABBITMQ_PATH.tar.xz"; \
- # gpgconf --kill all; \
- # rm -rf "$GNUPGHOME"; \
- \
- mkdir -p "$RABBITMQ_HOME"; \
- tar --extract --file "rabbitmq-server-generic-unix-latest-toolchain-${RABBITMQ_VERSION}.tar.xz" --directory "$RABBITMQ_HOME" --strip-components 1; \
- rm -rf "$RABBITMQ_PATH"* rabbitmq-server-generic-unix*; \
-# Do not default SYS_PREFIX to RABBITMQ_HOME, leave it empty
- grep -qE '^SYS_PREFIX=\$\{RABBITMQ_HOME\}$' "$RABBITMQ_HOME/sbin/rabbitmq-defaults"; \
- sed -i 's/^SYS_PREFIX=.*$/SYS_PREFIX=/' "$RABBITMQ_HOME/sbin/rabbitmq-defaults"; \
- grep -qE '^SYS_PREFIX=$' "$RABBITMQ_HOME/sbin/rabbitmq-defaults"; \
- chown -R rabbitmq:rabbitmq "$RABBITMQ_HOME"; \
- \
- apt-mark auto '.*' > /dev/null; \
- apt-mark manual $savedAptMark; \
- apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false; \
- \
-# verify assumption of no stale cookies
- [ ! -e "$RABBITMQ_DATA_DIR/.erlang.cookie" ]; \
-# Ensure RabbitMQ was installed correctly by running a few commands that do not depend on a running server, as the rabbitmq user
-# If they all succeed, it's safe to assume that things have been set up correctly
- gosu rabbitmq rabbitmqctl help; \
- gosu rabbitmq rabbitmqctl list_ciphers; \
- gosu rabbitmq rabbitmq-plugins list; \
-# no stale cookies
- rm "$RABBITMQ_DATA_DIR/.erlang.cookie"
-
-# Added for backwards compatibility - users can simply COPY custom plugins to /plugins
-RUN ln -sf /opt/rabbitmq/plugins /plugins
-
-# set home so that any `--user` knows where to put the erlang cookie
-ENV HOME $RABBITMQ_DATA_DIR
-# Hint that the data (a.k.a. home dir) dir should be separate volume
-VOLUME $RABBITMQ_DATA_DIR
-
-# warning: the VM is running with native name encoding of latin1 which may cause Elixir to malfunction as it expects utf8. Please ensure your locale is set to UTF-8 (which can be verified by running "locale" in your shell)
-# Setting all environment variables that control language preferences, behaviour differs - https://www.gnu.org/software/gettext/manual/html_node/The-LANGUAGE-variable.html#The-LANGUAGE-variable
-# https://docs.docker.com/samples/library/ubuntu/#locales
-ENV LANG=C.UTF-8 LANGUAGE=C.UTF-8 LC_ALL=C.UTF-8
-
-COPY docker/docker-entrypoint.sh /usr/local/bin/
-ENTRYPOINT ["docker-entrypoint.sh"]
-
-EXPOSE 4369 5671 5672 25672
-CMD ["rabbitmq-server"]
-
-# rabbitmq_management
-RUN rabbitmq-plugins enable --offline rabbitmq_management && \
- rabbitmq-plugins is_enabled rabbitmq_management --offline
-# extract "rabbitmqadmin" from inside the "rabbitmq_management-X.Y.Z.ez" plugin zipfile
-# see https://github.com/docker-library/rabbitmq/issues/207
-RUN set -eux; \
- erl -noinput -eval ' \
- { ok, AdminBin } = zip:foldl(fun(FileInArchive, GetInfo, GetBin, Acc) -> \
- case Acc of \
- "" -> \
- case lists:suffix("/rabbitmqadmin", FileInArchive) of \
- true -> GetBin(); \
- false -> Acc \
- end; \
- _ -> Acc \
- end \
- end, "", init:get_plain_arguments()), \
- io:format("~s", [ AdminBin ]), \
- init:stop(). \
- ' -- /plugins/rabbitmq_management-*.ez > /usr/local/bin/rabbitmqadmin; \
- [ -s /usr/local/bin/rabbitmqadmin ]; \
- chmod +x /usr/local/bin/rabbitmqadmin; \
- apt-get update; apt-get install -y --no-install-recommends python3; rm -rf /var/lib/apt/lists/*; \
- rabbitmqadmin --version
-EXPOSE 15671 15672
-
-# rabbitmq_top
-RUN rabbitmq-plugins enable --offline rabbitmq_top && \
- rabbitmq-plugins is_enabled rabbitmq_top --offline
-
-# rabbitmq_prometheus
-RUN rm /plugins/prometheus*.ez
-COPY plugins/prometheus*.ez /plugins/
-RUN rm /plugins/rabbitmq_prometheus*.ez
-COPY plugins/rabbitmq_prometheus*.ez /plugins/
-
-ARG RABBITMQ_PROMETHEUS_VERSION
-RUN chmod --recursive --verbose a+r /plugins/*.ez && \
- chown --recursive --verbose rabbitmq:rabbitmq /plugins && \
- rabbitmq-plugins enable --offline rabbitmq_prometheus && \
- rabbitmq-plugins is_enabled rabbitmq_prometheus --offline && \
- rabbitmq-plugins list | grep "rabbitmq_prometheus.*${RABBITMQ_PROMETHEUS_VERSION}"
-EXPOSE 15692
diff --git a/deps/rabbitmq_prometheus/Makefile b/deps/rabbitmq_prometheus/Makefile
index 178429c7ae..333a3a3823 100644
--- a/deps/rabbitmq_prometheus/Makefile
+++ b/deps/rabbitmq_prometheus/Makefile
@@ -1,16 +1,3 @@
-TODAY := $(shell date -u +'%Y.%m.%d')
-# Use the latest alpha RabbitMQ 3.9 release - https://ci.rabbitmq.com/teams/main/pipelines/server-release:v3.9.x/jobs/build-test-package-generic-unix-latest-toolchain
-BASED_ON_RABBITMQ_VERSION := 3.9.0-alpha.349
-DOCKER_IMAGE_NAME := pivotalrabbitmq/rabbitmq-prometheus
-DOCKER_IMAGE_VERSION := $(BASED_ON_RABBITMQ_VERSION)-$(TODAY)
-# RABBITMQ_VERSION is used in rabbitmq-components.mk to set PROJECT_VERSION
-RABBITMQ_VERSION ?= $(DOCKER_IMAGE_VERSION)
-# This is taken from the CI job above
-RABBITMQ_BUILD_NUMBER := 375
-# make find-latest-otp
-OTP_VERSION := 23.0.2
-OTP_SHA256 := 6bab92d1a1b20cc319cd845c23db3611cc99f8c99a610d117578262e3c108af3
-
define PROJECT_ENV
[
{return_per_object_metrics, false}
@@ -19,10 +6,8 @@ endef
PROJECT := rabbitmq_prometheus
PROJECT_MOD := rabbit_prometheus_app
-DEPS = rabbit rabbitmq_management_agent prometheus rabbitmq_web_dispatch
-# Deps that are not applications
-# rabbitmq_management is added so that we build a custom version, for the Docker image
-BUILD_DEPS = accept amqp_client rabbit_common rabbitmq_management
+DEPS = accept rabbit rabbitmq_management_agent prometheus rabbitmq_web_dispatch
+BUILD_DEPS = amqp_client rabbit_common rabbitmq_management
TEST_DEPS = rabbitmq_ct_helpers rabbitmq_ct_client_helpers eunit_formatters
EUNIT_OPTS = no_tty, {report, {eunit_progress, [colored, profile]}}
@@ -37,112 +22,27 @@ ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
ERLANG_MK_COMMIT = rabbitmq-tmp
ifneq ($(DISABLE_METRICS_COLLECTOR),)
-BUILD_DEPS = accept amqp_client rabbit_common
RABBITMQ_CONFIG_FILE = $(CURDIR)/rabbitmq-disable-metrics-collector.conf
export RABBITMQ_CONFIG_FILE
endif
-include rabbitmq-components.mk
-include erlang.mk
-
-define MAKE_TARGETS
- awk -F: '/^[^\.%\t][a-zA-Z\._\-]*:+.*$$/ { printf "%s\n", $$1 }' $(MAKEFILE_LIST)
-endef
-define BASH_AUTOCOMPLETE
- complete -W \"$$($(MAKE_TARGETS) | sort | uniq)\" make gmake m
-endef
-.PHONY: autocomplete
-autocomplete: ## ac | Configure shell for autocompletion - eval "$(gmake autocomplete)"
- @echo "$(BASH_AUTOCOMPLETE)"
-.PHONY: ac
-ac: autocomplete
-# Continuous Feedback for the ac target - run in a separate pane while iterating on it
-.PHONY: CFac
-CFac:
- @watch -c $(MAKE) ac
-
-.PHONY: clean-docker
-clean-docker: ## cd | Clean all Docker containers & volumes
- @docker system prune -f && \
- docker volume prune -f
-.PHONY: cd
-cd: clean-docker
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
.PHONY: readme
-readme: ## r | Preview README & live reload on edit
+readme: # Preview README & live reload on edit
@docker run --interactive --tty --rm --name changelog_md \
--volume $(CURDIR):/data \
--volume $(HOME)/.grip:/.grip \
--expose 5000 --publish 5000:5000 \
mbentley/grip --context=. 0.0.0.0:5000
-.PHONY: pre
-pre: preview-readme
-
-define CTOP_CONTAINER
-docker pull quay.io/vektorlab/ctop:latest && \
-docker run --rm --interactive --tty \
- --cpus 0.5 --memory 128M \
- --volume /var/run/docker.sock:/var/run/docker.sock \
- --name ctop_$(USER) \
- quay.io/vektorlab/ctop:latest
-endef
-.PHONY: ctop
-ctop: ## c | Interact with all containers via a top-like utility
- @$(CTOP_CONTAINER)
-.PHONY: c
-c: ctop
-
-.PHONY: dockerhub-login
-dockerhub-login: ## dl | Login to Docker Hub as pivotalrabbitmq
- @echo "$$(lpass show --password 7672183166535202820)" | \
- docker login --username pivotalrabbitmq --password-stdin
-.PHONY: dl
-dl: dockerhub-login
-
-.PHONY: docker-image
-docker-image: docker-image-build docker-image-push ## di | Build & push Docker image to Docker Hub
-.PHONY: di
-di: docker-image
-
-.PHONY: docker-image-build
-docker-image-build: ## dib | Build Docker image locally - make tests
- @docker build --pull \
- --build-arg PGP_KEYSERVER=pgpkeys.uk \
- --build-arg OTP_VERSION=$(OTP_VERSION) \
- --build-arg OTP_SHA256=$(OTP_SHA256) \
- --build-arg RABBITMQ_VERSION=$(BASED_ON_RABBITMQ_VERSION) \
- --build-arg RABBITMQ_BUILD_NUMBER=$(RABBITMQ_BUILD_NUMBER) \
- --build-arg RABBITMQ_PROMETHEUS_VERSION=$(RABBITMQ_VERSION) \
- --tag $(DOCKER_IMAGE_NAME):$(DOCKER_IMAGE_VERSION) \
- --tag $(DOCKER_IMAGE_NAME):latest .
-.PHONY: dib
-dib: docker-image-build
.PHONY: docker-image-bump
-docker-image-bump: ## diu | Bump Docker image version across all docker-compose-* files
+docker-image-bump: # Bump Docker image version across all docker-compose-* files
@sed -i '' \
-e 's|$(DOCKER_IMAGE_NAME):.*|$(DOCKER_IMAGE_NAME):$(DOCKER_IMAGE_VERSION)|g' \
-e 's|pivotalrabbitmq/perf-test:.*|pivotalrabbitmq/perf-test:2.11.0-ubuntu|g' \
- docker/docker-compose-{overview,dist-tls,qq}.yml
-.PHONY: diu
-diu: docker-image-bump
-
-.PHONY: docker-image-push
-docker-image-push: ## dip | Push local Docker image to Docker Hub
- @docker push $(DOCKER_IMAGE_NAME):$(DOCKER_IMAGE_VERSION) && \
- docker push $(DOCKER_IMAGE_NAME):latest
-.PHONY: dip
-dip: docker-image-push
-
-.PHONY: docker-image-run
-docker-image-run: ## dir | Run container with local Docker image
- @docker run --interactive --tty \
- --publish=5672:5672 \
- --publish=15672:15672 \
- --publish=15692:15692 \
- $(DOCKER_IMAGE_NAME):$(DOCKER_IMAGE_VERSION)
-.PHONY: dir
-dir: docker-image-run
+ docker/docker-compose-{overview,dist-tls,qq,dist-metrics}.yml
RUN ?= up --detach && docker-compose --file $(@F) logs --follow
DOCKER_COMPOSE_FILES := $(wildcard docker/docker-compose-*.yml)
@@ -153,30 +53,7 @@ $(DOCKER_COMPOSE_FILES):
true
.PHONY: down
down: RUN = down
-down: $(DOCKER_COMPOSE_FILES) ## d | Stop all containers
-.PHONY: d
-d: down
-
-JQ := /usr/local/bin/jq
-$(JQ):
- @brew install jq
-
-OTP_CURRENT_STABLE_MAJOR := 23
-define LATEST_STABLE_OTP_VERSION
-curl --silent --fail https://api.github.com/repos/erlang/otp/git/refs/tags | \
- $(JQ) -r '.[].ref | sub("refs/tags/OTP.{1}";"") | match("^$(OTP_CURRENT_STABLE_MAJOR)[0-9.]+$$") | .string' | \
- tail -n 1
-endef
-.PHONY: find-latest-otp
-find-latest-otp: $(JQ) ## flo | Find latest OTP version archive + sha1
- @printf "Version: " && \
- export VERSION="$$($(LATEST_STABLE_OTP_VERSION))" && \
- echo "$$VERSION" && \
- printf "Checksum: " && \
- wget --continue --quiet --output-document="/tmp/OTP-$$VERSION.tar.gz" "https://github.com/erlang/otp/archive/OTP-$$VERSION.tar.gz" && \
- shasum -a 256 "/tmp/OTP-$$VERSION.tar.gz"
-.PHONY: flo
-flo: find-latest-otp
+down: $(DOCKER_COMPOSE_FILES) # Stop all containers
# Defined as explicit, individual targets so that autocompletion works
define DOCKER_COMPOSE_UP
@@ -184,34 +61,25 @@ cd docker && \
docker-compose --file docker-compose-$(@F).yml up --detach
endef
.PHONY: metrics
-metrics: ## m | Run all metrics containers: Grafana, Prometheus & friends
+metrics: # Run all metrics containers: Grafana, Prometheus & friends
@$(DOCKER_COMPOSE_UP)
-.PHONY: m
-m: metrics
.PHONY: overview
-overview: ## o | Make RabbitMQ Overview panels come alive
+overview: # Make RabbitMQ Overview panels come alive
@$(DOCKER_COMPOSE_UP)
-.PHONY: o
-o: overview
.PHONY: dist-tls
-dist-tls: ## dt | Make Erlang-Distribution panels come alive - HIGH LOAD
+dist-tls: # Make Erlang-Distribution panels come alive - HIGH LOAD
@$(DOCKER_COMPOSE_UP)
-.PHONY: dt
-dt: dist-tls
.PHONY: qq
-qq: ## q | Make RabbitMQ-Quorum-Queues-Raft panels come alive - HIGH LOAD
+qq: # Make RabbitMQ-Quorum-Queues-Raft panels come alive - HIGH LOAD
@$(DOCKER_COMPOSE_UP)
-.PHONY: q
-q: qq
-.PHONY: h
-h:
- @awk -F"[:#]" '/^[^\.][a-zA-Z\._\-]+:+.+##.+$$/ { printf "\033[36m%-24s\033[0m %s\n", $$1, $$4 }' $(MAKEFILE_LIST) \
- | sort
-# Continuous Feedback for the h target - run in a separate pane while iterating on it
-.PHONY: CFh
-CFh:
- @watch -c $(MAKE) h
+.PHONY: dist-metrics
+dist-metrics: # Make inet_tcp_metrics come alive
+ @$(DOCKER_COMPOSE_UP)
+
+JQ := /usr/local/bin/jq
+$(JQ):
+ @brew install jq
# Defined as explicit, individual targets so that autocompletion works
DASHBOARDS_TO_PATH := $(CURDIR)/docker/grafana/dashboards
@@ -242,4 +110,7 @@ RabbitMQ-Quorum-Queues-Raft.json: $(JQ)
.PHONY: rabbitmq-exporter_vs_rabbitmq-prometheus.json
rabbitmq-exporter_vs_rabbitmq-prometheus.json: $(JQ)
@$(GENERATE_DASHBOARD)
+.PHONY: RabbitMQ-Stream.json
+RabbitMQ-Stream.json: $(JQ)
+ @$(GENERATE_DASHBOARD)
diff --git a/deps/rabbitmq_prometheus/README.md b/deps/rabbitmq_prometheus/README.md
index 5aeb625ca3..61a5a2b903 100644
--- a/deps/rabbitmq_prometheus/README.md
+++ b/deps/rabbitmq_prometheus/README.md
@@ -66,7 +66,14 @@ When metrics are returned per object, nodes with 80k queues have been measured t
In order to not put unnecessary pressure on your metrics system, metrics are aggregated by default.
When debugging, it may be useful to return metrics per object (unaggregated).
-This can be enabled on-the-fly, without restarting or configuring RabbitMQ, using the following command:
+
+This can be done by scraping the `/metrics/per-object` endpoint:
+```shell
+curl -v -H "Accept:text/plain" "http://localhost:15692/metrics/per-object"
+```
+
+This can also be enabled as the default behavior of the `/metrics` endpoint on-the-fly,
+without restarting or configuring RabbitMQ, using the following command:
```
rabbitmqctl eval 'application:set_env(rabbitmq_prometheus, return_per_object_metrics, true).'
@@ -78,6 +85,20 @@ To go back to aggregated metrics on-the-fly, run the following command:
rabbitmqctl eval 'application:set_env(rabbitmq_prometheus, return_per_object_metrics, false).'
```
+## Selective querying of per-object metrics
+
+As mentioned in the previous section, returning a lot of per-object metrics is quite a computationally expensive process. One of the reasons is that `/metrics/per-object` returns every possible metric for every possible object - even ones that are of no use in day-to-day monitoring.
+
+That's why there is an additional endpoint, `/metrics/detailed`, that always returns per-object metrics and allows one to explicitly query only the metrics that are relevant. By default it returns nothing at all; the required metric families and virtual host filters are specified as GET parameters. For example, scraping `/metrics/detailed?vhost=vhost-1&vhost=vhost-2&family=queue_coarse_metrics&family=queue_consumer_count` will only return the requested metrics (and not, for example, channel metrics whose labels include an Erlang PID).
+
+This endpoint supports the following parameters:
+
+* Zero or more `family` parameters - only the requested metric families will be returned. The full list is documented in [metrics-detailed](metrics-detailed.md).
+* Zero or more `vhost` parameters - if given, queue-related metrics (`queue_coarse_metrics`, `queue_consumer_count` and `queue_metrics`) will be returned only for the specified vhost(s).
+
+The returned metrics use a different prefix, `rabbitmq_detailed_` (instead of the plain `rabbitmq_` used by other endpoints), so this endpoint can be used simultaneously with `/metrics` without affecting existing dashboards.
+
+Here are the performance gains you can expect from using this endpoint. On a test system with 10k queues, 10k consumers and 10k producers, `/metrics/per-object` took a bit over 2 minutes. Querying `/metrics/detailed?family=queue_coarse_metrics&family=queue_consumer_count` provides just enough metrics to see how many messages sit in every queue and how many consumers each of these queues has, and it takes only about 2 seconds - a significant improvement over the indiscriminate `/metrics/per-object`.
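+
+For example, a minimal scrape of that same query (reusing the node and port from the earlier examples; the exact metric names returned depend on the families requested):
+
+```shell
+curl -v -H "Accept:text/plain" "http://localhost:15692/metrics/detailed?vhost=vhost-1&family=queue_coarse_metrics&family=queue_consumer_count"
+```
+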
## Contributing
diff --git a/deps/rabbitmq_prometheus/docker/docker-compose-dist-metrics.yml b/deps/rabbitmq_prometheus/docker/docker-compose-dist-metrics.yml
new file mode 100644
index 0000000000..b582b52ae5
--- /dev/null
+++ b/deps/rabbitmq_prometheus/docker/docker-compose-dist-metrics.yml
@@ -0,0 +1,77 @@
+# https://docs.docker.com/compose/compose-file/
+version: "3.6"
+
+# https://docs.docker.com/compose/compose-file/#networks
+networks:
+ rabbitmq-prometheus:
+
+# https://docs.docker.com/compose/compose-file/#volumes
+volumes:
+ rabbitmq-prometheus_prometheus:
+ rabbitmq-prometheus_grafana:
+
+services:
+ rmq0-dist-metrics: &rabbitmq
+ # https://network.pivotal.io/products/rabbitmq/
+ # This is a commercial edition of RabbitMQ that requires a valid Tanzu Network account
+ # Learn more: https://rabbitmq.com/tanzu
+ image: dev.registry.pivotal.io/rabbitmq/vmware-tanzu-rabbitmq:1.0.0
+ networks:
+ - "rabbitmq-prometheus"
+ ports:
+ - "15680:15672"
+ - "15700:15692"
+ # https://unix.stackexchange.com/questions/71940/killing-tcp-connection-in-linux
+ # https://en.wikipedia.org/wiki/Tcpkill
+ # https://www.digitalocean.com/community/tutorials/iptables-essentials-common-firewall-rules-and-commands#block-an-ip-address
+ cap_add:
+ - ALL
+ hostname: rmq0-dist-metrics
+ environment:
+ # RABBITMQ_ERLANG_COOKIE: rabbitmq-prometheus
+ RABBITMQ_SERVER_START_ARGS: -proto_dist inet_tcp_metrics
+ # Uncomment the following line if you want debug logs & colour
+ # RABBITMQ_LOG: debug,+color
+ volumes:
+ # This does not work that well on Windows
+ # https://github.com/rabbitmq/rabbitmq-prometheus/commit/c4b04ea9bae877ff7d22a7085475965016933d91#commitcomment-40660523
+ - ./erlang.cookie:/var/lib/rabbitmq/.erlang.cookie
+ - ./rabbitmq-dist-metrics.conf:/etc/rabbitmq/rabbitmq.conf:ro
+ - ./rabbitmq-dist-metrics-definitions.json:/etc/rabbitmq/rabbitmq-definitions.json:ro
+ # we want to simulate hitting thresholds
+ ulimits:
+ nofile:
+ soft: 2000
+ hard: 2000
+ rmq1-dist-metrics:
+ << : *rabbitmq
+ hostname: rmq1-dist-metrics
+ ports:
+ - "15681:15672"
+ - "15701:15692"
+ rmq2-dist-metrics:
+ << : *rabbitmq
+ hostname: rmq2-dist-metrics
+ ports:
+ - "15682:15672"
+ - "15702:15692"
+
+ qq:
+ image: &perf-test-image pivotalrabbitmq/perf-test:2.15.0-ubuntu
+ networks:
+ - "rabbitmq-prometheus"
+ environment:
+ URIS: "amqp://guest:guest@rmq0-dist-metrics:5672/%2f,amqp://guest:guest@rmq1-dist-metrics:5672/%2f,amqp://guest:guest@rmq2-dist-metrics:5672/%2f"
+ CONFIRM: 50
+ QUEUE_PATTERN: "qq%d"
+ QUEUE_PATTERN_FROM: 1
+ QUEUE_PATTERN_TO: 3
+ PRODUCERS: 3
+ CONSUMERS: 3
+ QUEUE_ARGS: x-queue-type=quorum,x-max-length=1000
+ FLAG: persistent
+ AUTO_DELETE: "false"
+ RATE: 10
+ AUTOACK: "false"
+ SERVERS_STARTUP_TIMEOUT: &startup_timeout 30
+ METRICS_PROMETHEUS: "true"
diff --git a/deps/rabbitmq_prometheus/docker/docker-compose-dist-tls.yml b/deps/rabbitmq_prometheus/docker/docker-compose-dist-tls.yml
index 241de1c919..8a341c3a06 100644
--- a/deps/rabbitmq_prometheus/docker/docker-compose-dist-tls.yml
+++ b/deps/rabbitmq_prometheus/docker/docker-compose-dist-tls.yml
@@ -12,12 +12,11 @@ volumes:
services:
rmq0-dist-tls: &rabbitmq
- # https://hub.docker.com/r/pivotalrabbitmq/rabbitmq-prometheus/tags
- image: pivotalrabbitmq/rabbitmq-prometheus:3.9.0-alpha.349-2020.06.18
+ # https://hub.docker.com/r/pivotalrabbitmq/rabbitmq/tags
+ image: pivotalrabbitmq/rabbitmq:master-otp-max
networks:
- "rabbitmq-prometheus"
ports:
- - "5676:5672"
- "15676:15672"
- "15696:15692"
# https://unix.stackexchange.com/questions/71940/killing-tcp-connection-in-linux
@@ -42,25 +41,23 @@ services:
# we want to simulate hitting thresholds
ulimits:
nofile:
- soft: "2000"
- hard: "2000"
+ soft: 2000
+ hard: 2000
rmq1-dist-tls:
<< : *rabbitmq
hostname: rmq1-dist-tls
ports:
- - "5677:5672"
- "15677:15672"
- "15697:15692"
rmq2-dist-tls:
<< : *rabbitmq
hostname: rmq2-dist-tls
ports:
- - "5678:5672"
- "15678:15672"
- "15698:15692"
stress-dist-tls:
# https://hub.docker.com/r/pivotalrabbitmq/perf-test/tags
- image: &perf-test-image pivotalrabbitmq/perf-test:2.11.0-ubuntu
+ image: &perf-test-image pivotalrabbitmq/perf-test:2.15.0-ubuntu
networks:
- "rabbitmq-prometheus"
environment:
@@ -79,7 +76,7 @@ services:
METRICS_PROMETHEUS: "true"
rabbitmq-exporter:
# https://hub.docker.com/r/kbudde/rabbitmq-exporter/tags
- image: kbudde/rabbitmq-exporter:v0.29.0
+ image: kbudde/rabbitmq-exporter:v1.0.0-RC9
networks:
- "rabbitmq-prometheus"
environment:
diff --git a/deps/rabbitmq_prometheus/docker/docker-compose-metrics.yml b/deps/rabbitmq_prometheus/docker/docker-compose-metrics.yml
index c977821f8d..8a3da12e9b 100644
--- a/deps/rabbitmq_prometheus/docker/docker-compose-metrics.yml
+++ b/deps/rabbitmq_prometheus/docker/docker-compose-metrics.yml
@@ -13,7 +13,7 @@ volumes:
services:
grafana:
# https://hub.docker.com/r/grafana/grafana/tags
- image: grafana/grafana:7.3.2
+ image: grafana/grafana:8.0.6
ports:
- "3000:3000"
networks:
@@ -34,7 +34,7 @@ services:
GF_INSTALL_PLUGINS: "flant-statusmap-panel,grafana-piechart-panel"
prometheus:
# https://hub.docker.com/r/prom/prometheus/tags
- image: prom/prometheus:v2.22.1
+ image: prom/prometheus:v2.28.1
networks:
- "rabbitmq-prometheus"
ports:
@@ -51,7 +51,7 @@ services:
expose:
- 9100
# https://hub.docker.com/r/prom/node-exporter/tags
- image: prom/node-exporter:v1.0.1
+ image: prom/node-exporter:v1.2.0
networks:
- "rabbitmq-prometheus"
volumes:
diff --git a/deps/rabbitmq_prometheus/docker/docker-compose-overview.yml b/deps/rabbitmq_prometheus/docker/docker-compose-overview.yml
index 92f039b30a..cc440c4a7f 100644
--- a/deps/rabbitmq_prometheus/docker/docker-compose-overview.yml
+++ b/deps/rabbitmq_prometheus/docker/docker-compose-overview.yml
@@ -12,12 +12,11 @@ volumes:
services:
rmq0: &rabbitmq
- # https://hub.docker.com/r/pivotalrabbitmq/rabbitmq-prometheus/tags
- image: pivotalrabbitmq/rabbitmq-prometheus:3.9.0-alpha.349-2020.06.18
+ # https://hub.docker.com/r/pivotalrabbitmq/rabbitmq/tags
+ image: pivotalrabbitmq/rabbitmq:master-otp-max
networks:
- "rabbitmq-prometheus"
ports:
- - "5673:5672"
- "15673:15672"
- "15693:15692"
# https://unix.stackexchange.com/questions/71940/killing-tcp-connection-in-linux
@@ -39,26 +38,24 @@ services:
# we want to simulate hitting thresholds
ulimits:
nofile:
- soft: "2000"
- hard: "2000"
+ soft: 2000
+ hard: 2000
rmq1:
<< : *rabbitmq
hostname: rmq1
ports:
- - "5674:5672"
- "15674:15672"
- "15694:15692"
rmq2:
<< : *rabbitmq
hostname: rmq2
ports:
- - "5675:5672"
- "15675:15672"
- "15695:15692"
basic-get:
# https://hub.docker.com/r/pivotalrabbitmq/perf-test/tags
- image: &perf-test-image pivotalrabbitmq/perf-test:2.11.0-ubuntu
+ image: &perf-test-image pivotalrabbitmq/perf-test:2.15.0-ubuntu
networks:
- "rabbitmq-prometheus"
environment:
@@ -173,6 +170,12 @@ services:
CONSUMERS: 0
SERVERS_STARTUP_TIMEOUT: *startup_timeout
METRICS_PROMETHEUS: "true"
+ stream:
+ image: pivotalrabbitmq/stream-perf-test
+ networks:
+ - "rabbitmq-prometheus"
+ command: "--uris rabbitmq-stream://guest:guest@rmq2:5552/%2f --max-length-bytes 100mb --rate 1000"
+ restart: on-failure
# many-queues:
# image: *perf-test-image
# networks:
diff --git a/deps/rabbitmq_prometheus/docker/docker-compose-qq.yml b/deps/rabbitmq_prometheus/docker/docker-compose-qq.yml
index 846fd1ba64..7b8d601649 100644
--- a/deps/rabbitmq_prometheus/docker/docker-compose-qq.yml
+++ b/deps/rabbitmq_prometheus/docker/docker-compose-qq.yml
@@ -12,12 +12,11 @@ volumes:
services:
rmq0-qq: &rabbitmq
- # https://hub.docker.com/r/pivotalrabbitmq/rabbitmq-prometheus/tags
- image: pivotalrabbitmq/rabbitmq-prometheus:3.9.0-alpha.349-2020.06.18
+ # https://hub.docker.com/r/pivotalrabbitmq/rabbitmq/tags
+ image: pivotalrabbitmq/rabbitmq:master-otp-max
networks:
- "rabbitmq-prometheus"
ports:
- - "5679:5672"
- "15679:15672"
- "15699:15692"
# https://unix.stackexchange.com/questions/71940/killing-tcp-connection-in-linux
@@ -41,18 +40,16 @@ services:
<< : *rabbitmq
hostname: rmq1-qq
ports:
- - "5680:5672"
- "15680:15672"
- "15700:15692"
rmq2-qq:
<< : *rabbitmq
hostname: rmq2-qq
ports:
- - "5681:5672"
- "15681:15672"
- "15701:15692"
qq-moderate-load:
- image: &perf-test-image pivotalrabbitmq/perf-test:2.11.0-ubuntu
+ image: &perf-test-image pivotalrabbitmq/perf-test:2.15.0-ubuntu
networks:
- "rabbitmq-prometheus"
environment:
diff --git a/deps/rabbitmq_prometheus/docker/grafana/dashboards/Erlang-Distribution.json b/deps/rabbitmq_prometheus/docker/grafana/dashboards/Erlang-Distribution.json
index 96550fd9e3..5729f399d3 100644
--- a/deps/rabbitmq_prometheus/docker/grafana/dashboards/Erlang-Distribution.json
+++ b/deps/rabbitmq_prometheus/docker/grafana/dashboards/Erlang-Distribution.json
@@ -4,13 +4,13 @@
"type": "grafana",
"id": "grafana",
"name": "Grafana",
- "version": "6.0.0"
+ "version": "7.0.0"
},
{
"type": "datasource",
"id": "prometheus",
"name": "prometheus",
- "version": "1.0.0"
+ "version": "2.0.0"
},
{
"type": "panel",
@@ -34,7 +34,7 @@
"type": "panel",
"id": "flant-statusmap-panel",
"name": "Statusmap",
- "version": "0.1.1"
+ "version": "0.3.4"
}
],
"annotations": {
@@ -114,7 +114,7 @@
"tableColumn": "",
"targets": [
{
- "expr": "count(erlang_vm_dist_node_state * on(instance) group_left(rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}) OR vector(0)",
+ "expr": "count(erlang_vm_dist_node_state * on(instance) group_left(rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) OR vector(0)",
"format": "time_series",
"interval": "",
"intervalFactor": 1,
@@ -198,7 +198,7 @@
"tableColumn": "",
"targets": [
{
- "expr": "count(erlang_vm_dist_node_state * on(instance) group_left(rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"} == 3) OR vector(0)",
+ "expr": "count(erlang_vm_dist_node_state * on(instance) group_left(rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"} == 3) OR vector(0)",
"format": "time_series",
"interval": "",
"intervalFactor": 1,
@@ -282,7 +282,7 @@
"tableColumn": "",
"targets": [
{
- "expr": "count(erlang_vm_dist_node_state * on(instance) group_left(rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"} == 1) OR vector(0)",
+ "expr": "count(erlang_vm_dist_node_state * on(instance) group_left(rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"} == 1) OR vector(0)",
"format": "time_series",
"interval": "",
"intervalFactor": 1,
@@ -366,7 +366,7 @@
"tableColumn": "",
"targets": [
{
- "expr": "count(erlang_vm_dist_node_state * on(instance) group_left(rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"} == 2) OR vector(0)",
+ "expr": "count(erlang_vm_dist_node_state * on(instance) group_left(rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"} == 2) OR vector(0)",
"format": "time_series",
"interval": "",
"intervalFactor": 1,
@@ -461,7 +461,7 @@
"displayAliasType": "Warning / Critical",
"displayType": "Regular",
"displayValueWithAlias": "Never",
- "expr": "erlang_vm_dist_node_state * on(instance) group_left(rabbitmq_node, rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}",
+ "expr": "erlang_vm_dist_node_state * on(instance) group_left(rabbitmq_node, rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "{{rabbitmq_node}} -> {{peer}}",
@@ -532,52 +532,52 @@
"seriesOverrides": [
{
"$$hashKey": "object:130",
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
"$$hashKey": "object:131",
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
"$$hashKey": "object:132",
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
"$$hashKey": "object:133",
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
"$$hashKey": "object:134",
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
"$$hashKey": "object:135",
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
"$$hashKey": "object:136",
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
"$$hashKey": "object:137",
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
"$$hashKey": "object:138",
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
"$$hashKey": "object:139",
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -586,7 +586,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "erlang_vm_dist_node_queue_size_bytes * on(instance) group_left(rabbitmq_node, rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}",
+ "expr": "erlang_vm_dist_node_queue_size_bytes * on(instance) group_left(rabbitmq_node, rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}",
"format": "time_series",
"interval": "",
"intervalFactor": 1,
@@ -702,43 +702,43 @@
"renderer": "flot",
"seriesOverrides": [
{
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -747,7 +747,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "rate(erlang_vm_dist_send_bytes[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}",
+ "expr": "rate(erlang_vm_dist_send_bytes[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "{{rabbitmq_node}} -> {{peer}}",
@@ -839,52 +839,52 @@
"seriesOverrides": [
{
"$$hashKey": "object:276",
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
"$$hashKey": "object:277",
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
"$$hashKey": "object:278",
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
"$$hashKey": "object:279",
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
"$$hashKey": "object:280",
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
"$$hashKey": "object:281",
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
"$$hashKey": "object:282",
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
"$$hashKey": "object:283",
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
"$$hashKey": "object:284",
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
"$$hashKey": "object:285",
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -893,7 +893,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "rate(erlang_vm_dist_recv_bytes[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}",
+ "expr": "rate(erlang_vm_dist_recv_bytes[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "{{rabbitmq_node}} <- {{peer}}",
@@ -986,43 +986,43 @@
"renderer": "flot",
"seriesOverrides": [
{
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -1031,7 +1031,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "rate(erlang_vm_dist_send_cnt[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}",
+ "expr": "rate(erlang_vm_dist_send_cnt[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "{{rabbitmq_node}} -> {{peer}}",
@@ -1120,43 +1120,43 @@
"renderer": "flot",
"seriesOverrides": [
{
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -1165,7 +1165,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "rate(erlang_vm_dist_recv_cnt[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}",
+ "expr": "rate(erlang_vm_dist_recv_cnt[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "{{rabbitmq_node}} <- {{peer}}",
@@ -1254,43 +1254,43 @@
"renderer": "flot",
"seriesOverrides": [
{
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -1299,7 +1299,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "(rate(erlang_vm_dist_send_bytes[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}) / \n(rate(erlang_vm_dist_send_cnt[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"})",
+ "expr": "(rate(erlang_vm_dist_send_bytes[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) / \n(rate(erlang_vm_dist_send_cnt[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "{{rabbitmq_node}} -> {{peer}}",
@@ -1390,43 +1390,43 @@
"renderer": "flot",
"seriesOverrides": [
{
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -1435,7 +1435,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "(rate(erlang_vm_dist_recv_bytes[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}) / \n(rate(erlang_vm_dist_recv_cnt[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"})",
+ "expr": "(rate(erlang_vm_dist_recv_bytes[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) / \n(rate(erlang_vm_dist_recv_cnt[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "{{rabbitmq_node}} <- {{peer}}",
@@ -1538,43 +1538,43 @@
"renderer": "flot",
"seriesOverrides": [
{
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -1583,7 +1583,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "erlang_vm_dist_port_memory_bytes * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}",
+ "expr": "erlang_vm_dist_port_memory_bytes * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "{{rabbitmq_node}} -> {{peer}}",
@@ -1681,7 +1681,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "erlang_vm_dist_port_queue_size_bytes * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}",
+ "expr": "erlang_vm_dist_port_queue_size_bytes * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "{{rabbitmq_node}} -> {{peer}}",
@@ -1828,7 +1828,7 @@
"displayAliasType": "Warning / Critical",
"displayType": "Regular",
"displayValueWithAlias": "Never",
- "expr": "erlang_vm_dist_proc_status{type=\"$erlang_vm_dist_proc_type\"} * on(instance) group_left(rabbitmq_node, rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"} ",
+ "expr": "erlang_vm_dist_proc_status{type=\"$erlang_vm_dist_proc_type\"} * on(instance) group_left(rabbitmq_node, rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"} ",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "{{rabbitmq_node}} -> {{peer}}",
@@ -1907,7 +1907,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "erlang_vm_dist_proc_message_queue_len{type=\"$erlang_vm_dist_proc_type\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}",
+ "expr": "erlang_vm_dist_proc_message_queue_len{type=\"$erlang_vm_dist_proc_type\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "{{rabbitmq_node}} -> {{peer}}",
@@ -2005,43 +2005,43 @@
"renderer": "flot",
"seriesOverrides": [
{
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -2050,7 +2050,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "erlang_vm_dist_proc_memory_bytes{type=\"$erlang_vm_dist_proc_type\"} * on(instance) group_left(rabbitmq_node, rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}",
+ "expr": "erlang_vm_dist_proc_memory_bytes{type=\"$erlang_vm_dist_proc_type\"} * on(instance) group_left(rabbitmq_node, rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "{{rabbitmq_node}} -> {{peer}}",
@@ -2139,43 +2139,43 @@
"renderer": "flot",
"seriesOverrides": [
{
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -2184,7 +2184,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "rate(erlang_vm_dist_proc_reductions{type=\"$erlang_vm_dist_proc_type\"}[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}",
+ "expr": "rate(erlang_vm_dist_proc_reductions{type=\"$erlang_vm_dist_proc_type\"}[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "{{rabbitmq_node}} -> {{peer}}",
@@ -2243,13 +2243,50 @@
"templating": {
"list": [
{
- "allValue": null,
"current": {
- "text": "rabbitmq-qq",
- "value": "rabbitmq-qq"
+ "selected": false,
+ "text": "default",
+ "value": "default"
},
+ "hide": 2,
+ "includeAll": false,
+ "label": "datasource",
+ "multi": false,
+ "name": "DS_PROMETHEUS",
+ "options": [],
+ "query": "prometheus",
+ "refresh": 1,
+ "regex": "",
+ "skipUrlSync": false,
+ "type": "datasource"
+ },
+ {
+ "allValue": null,
+ "current": {},
+ "datasource": null,
+ "definition": "label_values(rabbitmq_identity_info, namespace)",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Namespace",
+ "multi": false,
+ "name": "namespace",
+ "options": [],
+ "query": "label_values(rabbitmq_identity_info, namespace)",
+ "refresh": 2,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 1,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ },
+ {
+ "allValue": null,
+ "current": {},
"datasource": null,
- "definition": "label_values(rabbitmq_identity_info, rabbitmq_cluster)",
+ "definition": "label_values(rabbitmq_identity_info{namespace=\"$namespace\"}, rabbitmq_cluster)",
"hide": 0,
"includeAll": false,
"index": -1,
@@ -2257,7 +2294,7 @@
"multi": false,
"name": "rabbitmq_cluster",
"options": [],
- "query": "label_values(rabbitmq_identity_info, rabbitmq_cluster)",
+ "query": "label_values(rabbitmq_identity_info{namespace=\"$namespace\"}, rabbitmq_cluster)",
"refresh": 2,
"regex": "",
"skipUrlSync": false,
@@ -2328,5 +2365,5 @@
"variables": {
"list": []
},
- "version": 4
+ "version": 20210322
}
diff --git a/deps/rabbitmq_prometheus/docker/grafana/dashboards/Erlang-Distributions-Compare.json b/deps/rabbitmq_prometheus/docker/grafana/dashboards/Erlang-Distributions-Compare.json
index fd4cb651d0..a595b15790 100644
--- a/deps/rabbitmq_prometheus/docker/grafana/dashboards/Erlang-Distributions-Compare.json
+++ b/deps/rabbitmq_prometheus/docker/grafana/dashboards/Erlang-Distributions-Compare.json
@@ -4,13 +4,13 @@
"type": "grafana",
"id": "grafana",
"name": "Grafana",
- "version": "6.0.0"
+ "version": "7.0.0"
},
{
"type": "datasource",
"id": "prometheus",
"name": "prometheus",
- "version": "1.0.0"
+ "version": "2.0.0"
},
{
"type": "table",
@@ -137,7 +137,7 @@
],
"targets": [
{
- "expr": "rate(erlang_vm_dist_send_bytes[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=~\"$rabbitmq_cluster\"}",
+ "expr": "rate(erlang_vm_dist_send_bytes[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=~\"$rabbitmq_cluster\", namespace=\"$namespace\"}",
"legendFormat": "{{rabbitmq_node}} -> {{peer}}",
"refId": "A"
}
@@ -190,43 +190,43 @@
"renderer": "flot",
"seriesOverrides": [
{
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -235,7 +235,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "rate(erlang_vm_dist_send_bytes[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=~\"$rabbitmq_cluster\"}",
+ "expr": "rate(erlang_vm_dist_send_bytes[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=~\"$rabbitmq_cluster\", namespace=\"$namespace\"}",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "{{rabbitmq_node}} -> {{peer}}",
@@ -1642,6 +1642,46 @@
"templating": {
"list": [
{
+ "current": {
+ "selected": false,
+ "text": "default",
+ "value": "default"
+ },
+ "hide": 2,
+ "includeAll": false,
+ "label": "datasource",
+ "multi": false,
+ "name": "DS_PROMETHEUS",
+ "options": [],
+ "query": "prometheus",
+ "refresh": 1,
+ "regex": "",
+ "skipUrlSync": false,
+ "type": "datasource"
+ },
+ {
+ "allValue": null,
+ "current": {},
+ "datasource": null,
+ "definition": "label_values(rabbitmq_identity_info, namespace)",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Namespace",
+ "multi": false,
+ "name": "namespace",
+ "options": [],
+ "query": "label_values(rabbitmq_identity_info, namespace)",
+ "refresh": 2,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 1,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ },
+ {
"allValue": null,
"current": {
"text": "All",
@@ -1650,14 +1690,14 @@
]
},
"datasource": null,
- "definition": "label_values(rabbitmq_identity_info, rabbitmq_cluster)",
+ "definition": "label_values(rabbitmq_identity_info{namespace=\"$namespace\"}, rabbitmq_cluster)",
"hide": 0,
"includeAll": true,
"label": "RabbitMQ Cluster",
"multi": true,
"name": "rabbitmq_cluster",
"options": [],
- "query": "label_values(rabbitmq_identity_info, rabbitmq_cluster)",
+ "query": "label_values(rabbitmq_identity_info{namespace=\"$namespace\"}, rabbitmq_cluster)",
"refresh": 2,
"regex": "",
"skipUrlSync": false,
@@ -1803,5 +1843,5 @@
"timezone": "",
"title": "Erlang-Distributions-Compare",
"uid": "C0jeDstZk",
- "version": 1
+ "version": 20210322
}
diff --git a/deps/rabbitmq_prometheus/docker/grafana/dashboards/Erlang-Memory-Allocators.json b/deps/rabbitmq_prometheus/docker/grafana/dashboards/Erlang-Memory-Allocators.json
index ffaa8d2b2c..dc968a020f 100644
--- a/deps/rabbitmq_prometheus/docker/grafana/dashboards/Erlang-Memory-Allocators.json
+++ b/deps/rabbitmq_prometheus/docker/grafana/dashboards/Erlang-Memory-Allocators.json
@@ -4,13 +4,13 @@
"type": "grafana",
"id": "grafana",
"name": "Grafana",
- "version": "6.0.0"
+ "version": "7.0.0"
},
{
"type": "datasource",
"id": "prometheus",
"name": "prometheus",
- "version": "1.0.0"
+ "version": "2.0.0"
},
{
"type": "panel",
@@ -116,7 +116,7 @@
"tableColumn": "",
"targets": [
{
- "expr": "sum (erlang_vm_allocators{usage=\"blocks_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{usage=\"carriers_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})\n* 100",
+ "expr": "sum (erlang_vm_allocators{usage=\"blocks_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{usage=\"carriers_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})\n* 100",
"format": "time_series",
"intervalFactor": 1,
"refId": "A"
@@ -199,7 +199,7 @@
"tableColumn": "",
"targets": [
{
- "expr": "(\n sum (erlang_vm_allocators{usage=\"carriers_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})\n -\n sum (erlang_vm_allocators{usage=\"blocks_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})\n) / sum (erlang_vm_allocators{usage=\"carriers_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})\n* 100",
+ "expr": "(\n sum (erlang_vm_allocators{usage=\"carriers_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})\n -\n sum (erlang_vm_allocators{usage=\"blocks_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})\n) / sum (erlang_vm_allocators{usage=\"carriers_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})\n* 100",
"format": "time_series",
"intervalFactor": 1,
"refId": "A"
@@ -283,7 +283,7 @@
"tableColumn": "",
"targets": [
{
- "expr": "sum (erlang_vm_allocators{usage=\"blocks_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum (erlang_vm_allocators{usage=\"blocks_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"format": "time_series",
"intervalFactor": 1,
"refId": "A"
@@ -367,7 +367,7 @@
"tableColumn": "",
"targets": [
{
- "expr": "sum (erlang_vm_allocators{usage=\"carriers_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})\n-\nsum (erlang_vm_allocators{usage=\"blocks_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum (erlang_vm_allocators{usage=\"carriers_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})\n-\nsum (erlang_vm_allocators{usage=\"blocks_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"format": "time_series",
"intervalFactor": 1,
"refId": "A"
@@ -451,7 +451,7 @@
"tableColumn": "",
"targets": [
{
- "expr": "sum (erlang_vm_allocators{usage=\"carriers_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum (erlang_vm_allocators{usage=\"carriers_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"format": "time_series",
"intervalFactor": 1,
"refId": "A"
@@ -535,7 +535,7 @@
"tableColumn": "",
"targets": [
{
- "expr": "sum (rabbitmq_process_resident_memory_bytes * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum (rabbitmq_process_resident_memory_bytes * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"format": "time_series",
"intervalFactor": 1,
"refId": "A"
@@ -629,17 +629,17 @@
],
"targets": [
{
- "expr": "rabbitmq_process_resident_memory_bytes * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"}",
+ "expr": "rabbitmq_process_resident_memory_bytes * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"}",
"legendFormat": "Resident Set Size",
"refId": "A"
},
{
- "expr": "sum (erlang_vm_allocators{usage=\"blocks_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum (erlang_vm_allocators{usage=\"blocks_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"legendFormat": "Allocated Used",
"refId": "B"
},
{
- "expr": "sum (erlang_vm_allocators{usage=\"carriers_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})\n-\nsum (erlang_vm_allocators{usage=\"blocks_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum (erlang_vm_allocators{usage=\"carriers_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})\n-\nsum (erlang_vm_allocators{usage=\"blocks_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"legendFormat": "Allocated Unused",
"refId": "C"
}
@@ -712,7 +712,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "rabbitmq_process_resident_memory_bytes * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"}",
+ "expr": "rabbitmq_process_resident_memory_bytes * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"}",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -720,12 +720,12 @@
"refId": "A"
},
{
- "expr": "sum (erlang_vm_allocators{usage=\"blocks_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum (erlang_vm_allocators{usage=\"blocks_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"legendFormat": "Allocated Used",
"refId": "B"
},
{
- "expr": "sum (erlang_vm_allocators{usage=\"carriers_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})\n-\nsum (erlang_vm_allocators{usage=\"blocks_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum (erlang_vm_allocators{usage=\"carriers_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})\n-\nsum (erlang_vm_allocators{usage=\"blocks_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"legendFormat": "Allocated Unused",
"refId": "C"
}
@@ -862,7 +862,7 @@
],
"targets": [
{
- "expr": "sum by(alloc) (erlang_vm_allocators{usage=\"carriers_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum by(alloc) (erlang_vm_allocators{usage=\"carriers_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"legendFormat": "{{alloc}}",
"refId": "A"
}
@@ -925,7 +925,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "sum by(alloc) (erlang_vm_allocators{usage=\"carriers_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum by(alloc) (erlang_vm_allocators{usage=\"carriers_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -1053,7 +1053,7 @@
"tableColumn": "",
"targets": [
{
- "expr": "sum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"blocks_size\", kind=\"mbcs\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"carriers_size\", kind=\"mbcs\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})\n* 100",
+ "expr": "sum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"blocks_size\", kind=\"mbcs\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"carriers_size\", kind=\"mbcs\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})\n* 100",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -1136,7 +1136,7 @@
"tableColumn": "",
"targets": [
{
- "expr": "sum (erlang_vm_allocators{kind=\"mbcs\", alloc=\"$memory_allocator\", usage=\"carriers_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{kind=\"mbcs\", alloc=\"$memory_allocator\", usage=\"carriers\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum (erlang_vm_allocators{kind=\"mbcs\", alloc=\"$memory_allocator\", usage=\"carriers_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{kind=\"mbcs\", alloc=\"$memory_allocator\", usage=\"carriers\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -1219,7 +1219,7 @@
"tableColumn": "",
"targets": [
{
- "expr": "sum (erlang_vm_allocators{kind=\"mbcs\", alloc=\"$memory_allocator\", usage=\"blocks_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{kind=\"mbcs\", alloc=\"$memory_allocator\", usage=\"blocks\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum (erlang_vm_allocators{kind=\"mbcs\", alloc=\"$memory_allocator\", usage=\"blocks_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{kind=\"mbcs\", alloc=\"$memory_allocator\", usage=\"blocks\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -1302,7 +1302,7 @@
"tableColumn": "",
"targets": [
{
- "expr": "sum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"blocks_size\", kind=\"mbcs_pool\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"carriers_size\", kind=\"mbcs_pool\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})\n* 100",
+ "expr": "sum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"blocks_size\", kind=\"mbcs_pool\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"carriers_size\", kind=\"mbcs_pool\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})\n* 100",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -1385,7 +1385,7 @@
"tableColumn": "",
"targets": [
{
- "expr": "sum (erlang_vm_allocators{kind=\"mbcs_pool\", alloc=\"$memory_allocator\", usage=\"carriers_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{kind=\"mbcs_pool\", alloc=\"$memory_allocator\", usage=\"carriers\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum (erlang_vm_allocators{kind=\"mbcs_pool\", alloc=\"$memory_allocator\", usage=\"carriers_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{kind=\"mbcs_pool\", alloc=\"$memory_allocator\", usage=\"carriers\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -1468,7 +1468,7 @@
"tableColumn": "",
"targets": [
{
- "expr": "sum (erlang_vm_allocators{kind=\"mbcs_pool\", alloc=\"$memory_allocator\", usage=\"blocks_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{kind=\"mbcs_pool\", alloc=\"$memory_allocator\", usage=\"blocks\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum (erlang_vm_allocators{kind=\"mbcs_pool\", alloc=\"$memory_allocator\", usage=\"blocks_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{kind=\"mbcs_pool\", alloc=\"$memory_allocator\", usage=\"blocks\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -1551,7 +1551,7 @@
"tableColumn": "",
"targets": [
{
- "expr": "sum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"blocks_size\", kind=\"sbcs\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"carriers_size\", kind=\"sbcs\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})\n* 100",
+ "expr": "sum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"blocks_size\", kind=\"sbcs\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"carriers_size\", kind=\"sbcs\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})\n* 100",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -1634,7 +1634,7 @@
"tableColumn": "",
"targets": [
{
- "expr": "sum (erlang_vm_allocators{kind=\"sbcs\", alloc=\"$memory_allocator\", usage=\"carriers_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{kind=\"sbcs\", alloc=\"$memory_allocator\", usage=\"carriers\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum (erlang_vm_allocators{kind=\"sbcs\", alloc=\"$memory_allocator\", usage=\"carriers_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{kind=\"sbcs\", alloc=\"$memory_allocator\", usage=\"carriers\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -1717,7 +1717,7 @@
"tableColumn": "",
"targets": [
{
- "expr": "sum (erlang_vm_allocators{kind=\"sbcs\", alloc=\"$memory_allocator\", usage=\"blocks_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{kind=\"sbcs\", alloc=\"$memory_allocator\", usage=\"blocks\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum (erlang_vm_allocators{kind=\"sbcs\", alloc=\"$memory_allocator\", usage=\"blocks_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{kind=\"sbcs\", alloc=\"$memory_allocator\", usage=\"blocks\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -1813,32 +1813,32 @@
],
"targets": [
{
- "expr": "sum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"blocks_size\", kind=\"mbcs\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"blocks_size\", kind=\"mbcs\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"legendFormat": "Multiblock - Used",
"refId": "A"
},
{
- "expr": "sum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"carriers_size\", kind=\"mbcs\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})\n-\nsum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"blocks_size\", kind=\"mbcs\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"carriers_size\", kind=\"mbcs\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})\n-\nsum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"blocks_size\", kind=\"mbcs\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"legendFormat": "Multiblock - Unused",
"refId": "B"
},
{
- "expr": "sum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"blocks_size\", kind=\"mbcs_pool\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"blocks_size\", kind=\"mbcs_pool\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"legendFormat": "Multiblock Pool - Used",
"refId": "C"
},
{
- "expr": "sum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"carriers_size\", kind=\"mbcs_pool\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})\n-\nsum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"blocks_size\", kind=\"mbcs_pool\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"carriers_size\", kind=\"mbcs_pool\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})\n-\nsum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"blocks_size\", kind=\"mbcs_pool\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"legendFormat": "Multiblock Pool - Unused",
"refId": "D"
},
{
- "expr": "sum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"blocks_size\", kind=\"sbcs\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"blocks_size\", kind=\"sbcs\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"legendFormat": "Singleblock - Used",
"refId": "E"
},
{
- "expr": "sum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"carriers_size\", kind=\"sbcs\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})\n-\nsum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"blocks_size\", kind=\"sbcs\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"carriers_size\", kind=\"sbcs\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})\n-\nsum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"blocks_size\", kind=\"sbcs\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"legendFormat": "Singleblock - Unused",
"refId": "F"
}
@@ -1918,34 +1918,34 @@
"steppedLine": false,
"targets": [
{
- "expr": "sum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"blocks_size\", kind=\"mbcs\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"blocks_size\", kind=\"mbcs\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "Multiblock - Used",
"refId": "A"
},
{
- "expr": "sum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"carriers_size\", kind=\"mbcs\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})\n-\nsum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"blocks_size\", kind=\"mbcs\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"carriers_size\", kind=\"mbcs\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})\n-\nsum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"blocks_size\", kind=\"mbcs\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"legendFormat": "Multiblock - Unused",
"refId": "B"
},
{
- "expr": "sum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"blocks_size\", kind=\"mbcs_pool\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"blocks_size\", kind=\"mbcs_pool\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"legendFormat": "Multiblock Pool - Used",
"refId": "C"
},
{
- "expr": "sum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"carriers_size\", kind=\"mbcs_pool\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})\n-\nsum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"blocks_size\", kind=\"mbcs_pool\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"carriers_size\", kind=\"mbcs_pool\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})\n-\nsum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"blocks_size\", kind=\"mbcs_pool\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"legendFormat": "Multiblock Pool - Unused",
"refId": "D"
},
{
- "expr": "sum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"blocks_size\", kind=\"sbcs\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"blocks_size\", kind=\"sbcs\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"legendFormat": "Singleblock - Used",
"refId": "E"
},
{
- "expr": "sum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"carriers_size\", kind=\"sbcs\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})\n-\nsum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"blocks_size\", kind=\"sbcs\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"carriers_size\", kind=\"sbcs\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})\n-\nsum (erlang_vm_allocators{alloc=~\"$memory_allocator\", usage=\"blocks_size\", kind=\"sbcs\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"legendFormat": "Singleblock - Unused",
"refId": "F"
}
@@ -2068,32 +2068,32 @@
],
"targets": [
{
- "expr": "sum (erlang_vm_allocators{kind=\"mbcs\", alloc=\"$memory_allocator\", usage=\"carriers_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{kind=\"mbcs\", alloc=\"$memory_allocator\", usage=\"carriers\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum (erlang_vm_allocators{kind=\"mbcs\", alloc=\"$memory_allocator\", usage=\"carriers_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{kind=\"mbcs\", alloc=\"$memory_allocator\", usage=\"carriers\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"legendFormat": "Multiblock - Carrier",
"refId": "A"
},
{
- "expr": "sum (erlang_vm_allocators{kind=\"mbcs\", alloc=\"$memory_allocator\", usage=\"blocks_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{kind=\"mbcs\", alloc=\"$memory_allocator\", usage=\"blocks\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum (erlang_vm_allocators{kind=\"mbcs\", alloc=\"$memory_allocator\", usage=\"blocks_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{kind=\"mbcs\", alloc=\"$memory_allocator\", usage=\"blocks\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"legendFormat": "Multiblock - Block",
"refId": "B"
},
{
- "expr": "sum (erlang_vm_allocators{kind=\"mbcs_pool\", alloc=\"$memory_allocator\", usage=\"carriers_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{kind=\"mbcs_pool\", alloc=\"$memory_allocator\", usage=\"carriers\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum (erlang_vm_allocators{kind=\"mbcs_pool\", alloc=\"$memory_allocator\", usage=\"carriers_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{kind=\"mbcs_pool\", alloc=\"$memory_allocator\", usage=\"carriers\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"legendFormat": "Multiblock Pool - Carrier",
"refId": "C"
},
{
- "expr": "sum (erlang_vm_allocators{kind=\"mbcs_pool\", alloc=\"$memory_allocator\", usage=\"blocks_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{kind=\"mbcs_pool\", alloc=\"$memory_allocator\", usage=\"blocks\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum (erlang_vm_allocators{kind=\"mbcs_pool\", alloc=\"$memory_allocator\", usage=\"blocks_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{kind=\"mbcs_pool\", alloc=\"$memory_allocator\", usage=\"blocks\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"legendFormat": "Multiblock Pool - Block",
"refId": "D"
},
{
- "expr": "sum (erlang_vm_allocators{kind=\"sbcs\", alloc=\"$memory_allocator\", usage=\"carriers_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{kind=\"sbcs\", alloc=\"$memory_allocator\", usage=\"carriers\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum (erlang_vm_allocators{kind=\"sbcs\", alloc=\"$memory_allocator\", usage=\"carriers_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{kind=\"sbcs\", alloc=\"$memory_allocator\", usage=\"carriers\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"legendFormat": "Singleblock - Carrier",
"refId": "E"
},
{
- "expr": "sum (erlang_vm_allocators{kind=\"sbcs\", alloc=\"$memory_allocator\", usage=\"blocks_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{kind=\"sbcs\", alloc=\"$memory_allocator\", usage=\"blocks\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum (erlang_vm_allocators{kind=\"sbcs\", alloc=\"$memory_allocator\", usage=\"blocks_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{kind=\"sbcs\", alloc=\"$memory_allocator\", usage=\"blocks\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"legendFormat": "Singleblock - Block",
"refId": "F"
}
@@ -2173,34 +2173,34 @@
"steppedLine": false,
"targets": [
{
- "expr": "sum (erlang_vm_allocators{kind=\"mbcs\", alloc=\"$memory_allocator\", usage=\"blocks_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{kind=\"mbcs\", alloc=\"$memory_allocator\", usage=\"blocks\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum (erlang_vm_allocators{kind=\"mbcs\", alloc=\"$memory_allocator\", usage=\"blocks_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{kind=\"mbcs\", alloc=\"$memory_allocator\", usage=\"blocks\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "Multiblock - Block",
"refId": "A"
},
{
- "expr": "sum (erlang_vm_allocators{kind=\"mbcs\", alloc=\"$memory_allocator\", usage=\"carriers_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{kind=\"mbcs\", alloc=\"$memory_allocator\", usage=\"carriers\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum (erlang_vm_allocators{kind=\"mbcs\", alloc=\"$memory_allocator\", usage=\"carriers_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{kind=\"mbcs\", alloc=\"$memory_allocator\", usage=\"carriers\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"legendFormat": "Multiblock - Carrier",
"refId": "B"
},
{
- "expr": "sum (erlang_vm_allocators{kind=\"mbcs_pool\", alloc=\"$memory_allocator\", usage=\"blocks_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{kind=\"mbcs_pool\", alloc=\"$memory_allocator\", usage=\"blocks\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum (erlang_vm_allocators{kind=\"mbcs_pool\", alloc=\"$memory_allocator\", usage=\"blocks_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{kind=\"mbcs_pool\", alloc=\"$memory_allocator\", usage=\"blocks\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"legendFormat": "Multiblock Pool - Block",
"refId": "C"
},
{
- "expr": "sum (erlang_vm_allocators{kind=\"mbcs_pool\", alloc=\"$memory_allocator\", usage=\"carriers_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{kind=\"mbcs_pool\", alloc=\"$memory_allocator\", usage=\"carriers\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum (erlang_vm_allocators{kind=\"mbcs_pool\", alloc=\"$memory_allocator\", usage=\"carriers_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{kind=\"mbcs_pool\", alloc=\"$memory_allocator\", usage=\"carriers\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"legendFormat": "Multiblock Pool - Carrier",
"refId": "D"
},
{
- "expr": "sum (erlang_vm_allocators{kind=\"sbcs\", alloc=\"$memory_allocator\", usage=\"blocks_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{kind=\"sbcs\", alloc=\"$memory_allocator\", usage=\"blocks\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum (erlang_vm_allocators{kind=\"sbcs\", alloc=\"$memory_allocator\", usage=\"blocks_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{kind=\"sbcs\", alloc=\"$memory_allocator\", usage=\"blocks\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"legendFormat": "Singleblock - Block",
"refId": "E"
},
{
- "expr": "sum (erlang_vm_allocators{kind=\"sbcs\", alloc=\"$memory_allocator\", usage=\"carriers_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{kind=\"sbcs\", alloc=\"$memory_allocator\", usage=\"carriers\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", rabbitmq_node=\"$rabbitmq_node\"})",
+ "expr": "sum (erlang_vm_allocators{kind=\"sbcs\", alloc=\"$memory_allocator\", usage=\"carriers_size\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})\n/\nsum (erlang_vm_allocators{kind=\"sbcs\", alloc=\"$memory_allocator\", usage=\"carriers\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\", rabbitmq_node=\"$rabbitmq_node\"})",
"legendFormat": "Singleblock - Carrier",
"refId": "F"
}
@@ -2257,17 +2257,57 @@
"templating": {
"list": [
{
+ "current": {
+ "selected": false,
+ "text": "default",
+ "value": "default"
+ },
+ "hide": 2,
+ "includeAll": false,
+ "label": "datasource",
+ "multi": false,
+ "name": "DS_PROMETHEUS",
+ "options": [],
+ "query": "prometheus",
+ "refresh": 1,
+ "regex": "",
+ "skipUrlSync": false,
+ "type": "datasource"
+ },
+ {
+ "allValue": null,
+ "current": {},
+ "datasource": null,
+ "definition": "label_values(rabbitmq_identity_info, namespace)",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Namespace",
+ "multi": false,
+ "name": "namespace",
+ "options": [],
+ "query": "label_values(rabbitmq_identity_info, namespace)",
+ "refresh": 2,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 1,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ },
+ {
"allValue": null,
"current": {},
"datasource": null,
- "definition": "label_values(rabbitmq_identity_info, rabbitmq_cluster)",
+ "definition": "label_values(rabbitmq_identity_info{namespace=\"$namespace\"}, rabbitmq_cluster)",
"hide": 0,
"includeAll": false,
"label": "RabbitMQ Cluster",
"multi": false,
"name": "rabbitmq_cluster",
"options": [],
- "query": "label_values(rabbitmq_identity_info, rabbitmq_cluster)",
+ "query": "label_values(rabbitmq_identity_info{namespace=\"$namespace\"}, rabbitmq_cluster)",
"refresh": 2,
"regex": "",
"skipUrlSync": false,
@@ -2282,14 +2322,14 @@
"allValue": null,
"current": {},
"datasource": null,
- "definition": "label_values(rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}, rabbitmq_node)",
+ "definition": "label_values(rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}, rabbitmq_node)",
"hide": 0,
"includeAll": false,
"label": "RabbitMQ Node",
"multi": false,
"name": "rabbitmq_node",
"options": [],
- "query": "label_values(rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}, rabbitmq_node)",
+ "query": "label_values(rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}, rabbitmq_node)",
"refresh": 2,
"regex": "",
"skipUrlSync": false,
@@ -2351,5 +2391,5 @@
"timezone": "",
"title": "Erlang-Memory-Allocators",
"uid": "o_rtdpWik",
- "version": 1
+ "version": 20210322
}
diff --git a/deps/rabbitmq_prometheus/docker/grafana/dashboards/RabbitMQ-Overview.json b/deps/rabbitmq_prometheus/docker/grafana/dashboards/RabbitMQ-Overview.json
index efbed6ef98..2889698023 100644
--- a/deps/rabbitmq_prometheus/docker/grafana/dashboards/RabbitMQ-Overview.json
+++ b/deps/rabbitmq_prometheus/docker/grafana/dashboards/RabbitMQ-Overview.json
@@ -10,7 +10,7 @@
"type": "datasource",
"id": "prometheus",
"name": "prometheus",
- "version": "2.0.0"
+ "version": "1.0.0"
},
{
"type": "table",
@@ -48,7 +48,7 @@
"editable": true,
"gnetId": null,
"graphTooltip": 1,
- "iteration": 1605271611134,
+ "iteration": 1610979946179,
"links": [
{
"icon": "doc",
@@ -125,10 +125,11 @@
"tableColumn": "",
"targets": [
{
- "expr": "sum(rabbitmq_queue_messages_ready * on(instance) group_left(rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"})",
+ "expr": "sum(rabbitmq_queue_messages_ready * on(instance) group_left(rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})",
"format": "time_series",
"hide": false,
"instant": false,
+ "interval": "",
"intervalFactor": 1,
"legendFormat": "",
"refId": "A"
@@ -219,9 +220,10 @@
"tableColumn": "",
"targets": [
{
- "expr": "sum(rate(rabbitmq_channel_messages_published_total[60s]) * on(instance) group_left(rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"})",
+ "expr": "sum(rate(rabbitmq_channel_messages_published_total[60s]) * on(instance) group_left(rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})",
"format": "time_series",
"instant": false,
+ "interval": "",
"intervalFactor": 1,
"legendFormat": "",
"refId": "A"
@@ -311,7 +313,7 @@
"tableColumn": "",
"targets": [
{
- "expr": "sum(rabbitmq_channels * on(instance) group_left(rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}) - sum(rabbitmq_channel_consumers * on(instance) group_left(rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"})",
+ "expr": "sum(rabbitmq_channels * on(instance) group_left(rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) - sum(rabbitmq_channel_consumers * on(instance) group_left(rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -403,9 +405,10 @@
"tableColumn": "",
"targets": [
{
- "expr": "sum(rabbitmq_connections * on(instance) group_left(rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"})",
+ "expr": "sum(rabbitmq_connections * on(instance) group_left(rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})",
"format": "time_series",
"instant": false,
+ "interval": "",
"intervalFactor": 1,
"legendFormat": "",
"refId": "A"
@@ -495,7 +498,7 @@
"tableColumn": "",
"targets": [
{
- "expr": "sum(rabbitmq_queues * on(instance) group_left(rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"})",
+ "expr": "sum(rabbitmq_queues * on(instance) group_left(rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -587,7 +590,7 @@
"tableColumn": "",
"targets": [
{
- "expr": "sum(rabbitmq_queue_messages_unacked * on(instance) group_left(rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"})",
+ "expr": "sum(rabbitmq_queue_messages_unacked * on(instance) group_left(rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})",
"format": "time_series",
"hide": false,
"instant": false,
@@ -680,7 +683,7 @@
"tableColumn": "",
"targets": [
{
- "expr": "sum(rate(rabbitmq_channel_messages_redelivered_total[60s]) * on(instance) group_left(rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}) +\nsum(rate(rabbitmq_channel_messages_delivered_total[60s]) * on(instance) group_left(rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}) +\nsum(rate(rabbitmq_channel_messages_delivered_ack_total[60s]) * on(instance) group_left(rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}) +\nsum(rate(rabbitmq_channel_get_total[60s]) * on(instance) group_left(rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}) +\nsum(rate(rabbitmq_channel_get_ack_total[60s]) * on(instance) group_left(rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"})",
+ "expr": "sum(rate(rabbitmq_channel_messages_redelivered_total[60s]) * on(instance) group_left(rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) +\nsum(rate(rabbitmq_channel_messages_delivered_total[60s]) * on(instance) group_left(rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) +\nsum(rate(rabbitmq_channel_messages_delivered_ack_total[60s]) * on(instance) group_left(rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) +\nsum(rate(rabbitmq_channel_get_total[60s]) * on(instance) group_left(rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) +\nsum(rate(rabbitmq_channel_get_ack_total[60s]) * on(instance) group_left(rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})",
"format": "time_series",
"hide": false,
"instant": false,
@@ -773,7 +776,7 @@
"tableColumn": "",
"targets": [
{
- "expr": "sum(rabbitmq_channel_consumers * on(instance) group_left(rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"})",
+ "expr": "sum(rabbitmq_channel_consumers * on(instance) group_left(rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -865,7 +868,7 @@
"tableColumn": "",
"targets": [
{
- "expr": "sum(rabbitmq_channels * on(instance) group_left(rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"})",
+ "expr": "sum(rabbitmq_channels * on(instance) group_left(rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -957,7 +960,7 @@
"tableColumn": "",
"targets": [
{
- "expr": "sum(rabbitmq_build_info * on(instance) group_left(rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"})",
+ "expr": "sum(rabbitmq_build_info * on(instance) group_left(rabbitmq_cluster) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -1206,7 +1209,7 @@
],
"targets": [
{
- "expr": "rabbitmq_build_info * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}",
+ "expr": "rabbitmq_build_info * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}",
"format": "table",
"instant": true,
"intervalFactor": 1,
@@ -1268,43 +1271,43 @@
"renderer": "flot",
"seriesOverrides": [
{
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -1313,7 +1316,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "(rabbitmq_resident_memory_limit_bytes * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}) -\n(rabbitmq_process_resident_memory_bytes * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"})",
+ "expr": "(rabbitmq_resident_memory_limit_bytes * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) -\n(rabbitmq_process_resident_memory_bytes * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -1429,43 +1432,43 @@
"renderer": "flot",
"seriesOverrides": [
{
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -1474,7 +1477,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "rabbitmq_disk_space_available_bytes * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}",
+ "expr": "rabbitmq_disk_space_available_bytes * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -1591,43 +1594,43 @@
"renderer": "flot",
"seriesOverrides": [
{
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -1636,7 +1639,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "(rabbitmq_process_max_fds * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}) -\n(rabbitmq_process_open_fds * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"})",
+ "expr": "(rabbitmq_process_max_fds * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) -\n(rabbitmq_process_open_fds * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -1752,43 +1755,43 @@
"renderer": "flot",
"seriesOverrides": [
{
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -1797,7 +1800,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "(rabbitmq_process_max_tcp_sockets * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}) -\n(rabbitmq_process_open_tcp_sockets * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"})",
+ "expr": "(rabbitmq_process_max_tcp_sockets * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) -\n(rabbitmq_process_open_tcp_sockets * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -1928,43 +1931,43 @@
"renderer": "flot",
"seriesOverrides": [
{
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -1973,7 +1976,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "sum(rabbitmq_queue_messages_ready * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}) by(rabbitmq_node)",
+ "expr": "sum(rabbitmq_queue_messages_ready * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) by(rabbitmq_node)",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -2073,43 +2076,43 @@
"renderer": "flot",
"seriesOverrides": [
{
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -2118,7 +2121,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "sum(rabbitmq_queue_messages_unacked * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}) by(rabbitmq_node)",
+ "expr": "sum(rabbitmq_queue_messages_unacked * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) by(rabbitmq_node)",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -2232,43 +2235,43 @@
"renderer": "flot",
"seriesOverrides": [
{
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -2277,7 +2280,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "sum(rate(rabbitmq_channel_messages_published_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}) by(rabbitmq_node)",
+ "expr": "sum(rate(rabbitmq_channel_messages_published_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) by(rabbitmq_node)",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -2377,43 +2380,43 @@
"renderer": "flot",
"seriesOverrides": [
{
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -2422,7 +2425,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "sum(rate(rabbitmq_channel_messages_confirmed_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}) by(rabbitmq_node)",
+ "expr": "sum(rate(rabbitmq_channel_messages_confirmed_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) by(rabbitmq_node)",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -2522,43 +2525,43 @@
"renderer": "flot",
"seriesOverrides": [
{
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -2567,7 +2570,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "sum(rate(rabbitmq_queue_messages_published_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}) by(rabbitmq_node)",
+ "expr": "sum(rate(rabbitmq_queue_messages_published_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) by(rabbitmq_node)",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -2667,43 +2670,43 @@
"renderer": "flot",
"seriesOverrides": [
{
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -2712,7 +2715,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "sum(rate(rabbitmq_channel_messages_unconfirmed[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}) by(rabbitmq_node)",
+ "expr": "sum(rate(rabbitmq_channel_messages_unconfirmed[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) by(rabbitmq_node)",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -2802,6 +2805,9 @@
"linewidth": 1,
"links": [],
"nullPointMode": "null as zero",
+ "options": {
+ "alertThreshold": true
+ },
"percentage": false,
"pluginVersion": "7.3.2",
"pointradius": 2,
@@ -2818,7 +2824,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "sum(rate(rabbitmq_channel_messages_unroutable_dropped_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}) by(rabbitmq_node)",
+ "expr": "sum(rate(rabbitmq_channel_messages_unroutable_dropped_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) by(rabbitmq_node)",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -2917,6 +2923,9 @@
"linewidth": 1,
"links": [],
"nullPointMode": "null as zero",
+ "options": {
+ "alertThreshold": true
+ },
"percentage": false,
"pluginVersion": "7.3.2",
"pointradius": 2,
@@ -2933,7 +2942,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "sum(rate(rabbitmq_channel_messages_unroutable_returned_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}) by(rabbitmq_node)",
+ "expr": "sum(rate(rabbitmq_channel_messages_unroutable_returned_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) by(rabbitmq_node)",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -3029,6 +3038,7 @@
"x": 0,
"y": 42
},
+ "hiddenSeries": false,
"id": 14,
"legend": {
"alignAsTable": true,
@@ -3045,6 +3055,9 @@
"linewidth": 1,
"links": [],
"nullPointMode": "null as zero",
+ "options": {
+ "alertThreshold": true
+ },
"percentage": false,
"pluginVersion": "7.3.2",
"pointradius": 2,
@@ -3052,43 +3065,43 @@
"renderer": "flot",
"seriesOverrides": [
{
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -3097,7 +3110,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "sum(\n (rate(rabbitmq_channel_messages_delivered_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}) +\n (rate(rabbitmq_channel_messages_delivered_ack_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"})\n) by(rabbitmq_node)",
+ "expr": "sum(\n (rate(rabbitmq_channel_messages_delivered_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) +\n (rate(rabbitmq_channel_messages_delivered_ack_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})\n) by(rabbitmq_node)",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -3170,6 +3183,7 @@
"x": 12,
"y": 42
},
+ "hiddenSeries": false,
"id": 15,
"legend": {
"alignAsTable": true,
@@ -3186,6 +3200,9 @@
"linewidth": 1,
"links": [],
"nullPointMode": "null as zero",
+ "options": {
+ "alertThreshold": true
+ },
"percentage": false,
"pluginVersion": "7.3.2",
"pointradius": 2,
@@ -3193,43 +3210,43 @@
"renderer": "flot",
"seriesOverrides": [
{
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -3238,7 +3255,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "sum(rate(rabbitmq_channel_messages_redelivered_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}) by(rabbitmq_node)",
+ "expr": "sum(rate(rabbitmq_channel_messages_redelivered_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) by(rabbitmq_node)",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -3328,6 +3345,7 @@
"x": 0,
"y": 47
},
+ "hiddenSeries": false,
"id": 20,
"legend": {
"alignAsTable": true,
@@ -3344,6 +3362,9 @@
"linewidth": 1,
"links": [],
"nullPointMode": "null as zero",
+ "options": {
+ "alertThreshold": true
+ },
"percentage": false,
"pluginVersion": "7.3.2",
"pointradius": 2,
@@ -3351,43 +3372,43 @@
"renderer": "flot",
"seriesOverrides": [
{
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -3396,7 +3417,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "sum(rate(rabbitmq_channel_messages_delivered_ack_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}) by(rabbitmq_node)",
+ "expr": "sum(rate(rabbitmq_channel_messages_delivered_ack_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) by(rabbitmq_node)",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -3469,6 +3490,7 @@
"x": 12,
"y": 47
},
+ "hiddenSeries": false,
"id": 21,
"legend": {
"alignAsTable": true,
@@ -3485,6 +3507,9 @@
"linewidth": 1,
"links": [],
"nullPointMode": "null as zero",
+ "options": {
+ "alertThreshold": true
+ },
"percentage": false,
"pluginVersion": "7.3.2",
"pointradius": 2,
@@ -3492,43 +3517,43 @@
"renderer": "flot",
"seriesOverrides": [
{
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -3537,7 +3562,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "sum(rate(rabbitmq_channel_messages_delivered_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}) by(rabbitmq_node)",
+ "expr": "sum(rate(rabbitmq_channel_messages_delivered_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) by(rabbitmq_node)",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -3610,6 +3635,7 @@
"x": 0,
"y": 52
},
+ "hiddenSeries": false,
"id": 22,
"legend": {
"alignAsTable": true,
@@ -3626,6 +3652,9 @@
"linewidth": 1,
"links": [],
"nullPointMode": "null as zero",
+ "options": {
+ "alertThreshold": true
+ },
"percentage": false,
"pluginVersion": "7.3.2",
"pointradius": 2,
@@ -3633,43 +3662,43 @@
"renderer": "flot",
"seriesOverrides": [
{
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -3678,7 +3707,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "sum(rate(rabbitmq_channel_messages_acked_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}) by(rabbitmq_node)",
+ "expr": "sum(rate(rabbitmq_channel_messages_acked_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) by(rabbitmq_node)",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -3751,6 +3780,7 @@
"x": 12,
"y": 52
},
+ "hiddenSeries": false,
"id": 24,
"legend": {
"alignAsTable": true,
@@ -3767,6 +3797,9 @@
"linewidth": 1,
"links": [],
"nullPointMode": "null as zero",
+ "options": {
+ "alertThreshold": true
+ },
"percentage": false,
"pluginVersion": "7.3.2",
"pointradius": 2,
@@ -3783,7 +3816,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "sum(rate(rabbitmq_channel_get_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}) by(rabbitmq_node)",
+ "expr": "sum(rate(rabbitmq_channel_get_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) by(rabbitmq_node)",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -3865,6 +3898,7 @@
"x": 0,
"y": 57
},
+ "hiddenSeries": false,
"id": 25,
"legend": {
"alignAsTable": true,
@@ -3882,6 +3916,9 @@
"linewidth": 1,
"links": [],
"nullPointMode": "null as zero",
+ "options": {
+ "alertThreshold": true
+ },
"percentage": false,
"pluginVersion": "7.3.2",
"pointradius": 2,
@@ -3898,7 +3935,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "sum(rate(rabbitmq_channel_get_empty_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}) by(rabbitmq_node)",
+ "expr": "sum(rate(rabbitmq_channel_get_empty_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) by(rabbitmq_node)",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -3980,6 +4017,7 @@
"x": 12,
"y": 57
},
+ "hiddenSeries": false,
"id": 23,
"legend": {
"alignAsTable": true,
@@ -3996,6 +4034,9 @@
"linewidth": 1,
"links": [],
"nullPointMode": "null as zero",
+ "options": {
+ "alertThreshold": true
+ },
"percentage": false,
"pluginVersion": "7.3.2",
"pointradius": 2,
@@ -4012,7 +4053,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "sum(rate(rabbitmq_channel_get_ack_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}) by(rabbitmq_node)",
+ "expr": "sum(rate(rabbitmq_channel_get_ack_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) by(rabbitmq_node)",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -4108,6 +4149,7 @@
"x": 0,
"y": 63
},
+ "hiddenSeries": false,
"id": 57,
"legend": {
"alignAsTable": true,
@@ -4124,6 +4166,9 @@
"linewidth": 1,
"links": [],
"nullPointMode": "null as zero",
+ "options": {
+ "alertThreshold": true
+ },
"percentage": false,
"pluginVersion": "7.3.2",
"pointradius": 2,
@@ -4131,43 +4176,43 @@
"renderer": "flot",
"seriesOverrides": [
{
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -4176,9 +4221,10 @@
"steppedLine": false,
"targets": [
{
- "expr": "rabbitmq_queues * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}",
+ "expr": "rabbitmq_queues * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}",
"format": "time_series",
"instant": false,
+ "interval": "",
"intervalFactor": 1,
"legendFormat": "{{rabbitmq_node}}",
"refId": "A"
@@ -4249,6 +4295,7 @@
"x": 12,
"y": 63
},
+ "hiddenSeries": false,
"id": 58,
"legend": {
"alignAsTable": true,
@@ -4265,6 +4312,9 @@
"linewidth": 1,
"links": [],
"nullPointMode": "null as zero",
+ "options": {
+ "alertThreshold": true
+ },
"percentage": false,
"pluginVersion": "7.3.2",
"pointradius": 2,
@@ -4272,43 +4322,43 @@
"renderer": "flot",
"seriesOverrides": [
{
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -4317,7 +4367,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "sum(rate(rabbitmq_queues_declared_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}) by(rabbitmq_node)",
+ "expr": "sum(rate(rabbitmq_queues_declared_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) by(rabbitmq_node)",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -4407,6 +4457,7 @@
"x": 16,
"y": 63
},
+ "hiddenSeries": false,
"id": 60,
"legend": {
"alignAsTable": true,
@@ -4423,6 +4474,9 @@
"linewidth": 1,
"links": [],
"nullPointMode": "null as zero",
+ "options": {
+ "alertThreshold": true
+ },
"percentage": false,
"pluginVersion": "7.3.2",
"pointradius": 2,
@@ -4430,43 +4484,43 @@
"renderer": "flot",
"seriesOverrides": [
{
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -4475,7 +4529,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "sum(rate(rabbitmq_queues_created_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}) by(rabbitmq_node)",
+ "expr": "sum(rate(rabbitmq_queues_created_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) by(rabbitmq_node)",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -4565,6 +4619,7 @@
"x": 20,
"y": 63
},
+ "hiddenSeries": false,
"id": 59,
"legend": {
"alignAsTable": true,
@@ -4581,6 +4636,9 @@
"linewidth": 1,
"links": [],
"nullPointMode": "null as zero",
+ "options": {
+ "alertThreshold": true
+ },
"percentage": false,
"pluginVersion": "7.3.2",
"pointradius": 2,
@@ -4588,43 +4646,43 @@
"renderer": "flot",
"seriesOverrides": [
{
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -4633,7 +4691,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "sum(rate(rabbitmq_queues_deleted_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}) by(rabbitmq_node)",
+ "expr": "sum(rate(rabbitmq_queues_deleted_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) by(rabbitmq_node)",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -4737,6 +4795,7 @@
"x": 0,
"y": 69
},
+ "hiddenSeries": false,
"id": 54,
"legend": {
"alignAsTable": true,
@@ -4753,6 +4812,9 @@
"linewidth": 1,
"links": [],
"nullPointMode": "null as zero",
+ "options": {
+ "alertThreshold": true
+ },
"percentage": false,
"pluginVersion": "7.3.2",
"pointradius": 2,
@@ -4760,43 +4822,43 @@
"renderer": "flot",
"seriesOverrides": [
{
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -4805,7 +4867,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "rabbitmq_channels * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}",
+ "expr": "rabbitmq_channels * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -4878,6 +4940,7 @@
"x": 12,
"y": 69
},
+ "hiddenSeries": false,
"id": 55,
"legend": {
"alignAsTable": true,
@@ -4894,6 +4957,9 @@
"linewidth": 1,
"links": [],
"nullPointMode": "null as zero",
+ "options": {
+ "alertThreshold": true
+ },
"percentage": false,
"pluginVersion": "7.3.2",
"pointradius": 2,
@@ -4901,43 +4967,43 @@
"renderer": "flot",
"seriesOverrides": [
{
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -4946,7 +5012,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "sum(rate(rabbitmq_channels_opened_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}) by(rabbitmq_node)",
+ "expr": "sum(rate(rabbitmq_channels_opened_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) by(rabbitmq_node)",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -5036,6 +5102,7 @@
"x": 18,
"y": 69
},
+ "hiddenSeries": false,
"id": 56,
"legend": {
"alignAsTable": true,
@@ -5052,6 +5119,9 @@
"linewidth": 1,
"links": [],
"nullPointMode": "null as zero",
+ "options": {
+ "alertThreshold": true
+ },
"percentage": false,
"pluginVersion": "7.3.2",
"pointradius": 2,
@@ -5059,43 +5129,43 @@
"renderer": "flot",
"seriesOverrides": [
{
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -5104,7 +5174,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "sum(rate(rabbitmq_channels_closed_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}) by(rabbitmq_node)",
+ "expr": "sum(rate(rabbitmq_channels_closed_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) by(rabbitmq_node)",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -5208,6 +5278,7 @@
"x": 0,
"y": 75
},
+ "hiddenSeries": false,
"id": 47,
"legend": {
"alignAsTable": true,
@@ -5224,6 +5295,9 @@
"linewidth": 1,
"links": [],
"nullPointMode": "null as zero",
+ "options": {
+ "alertThreshold": true
+ },
"percentage": false,
"pluginVersion": "7.3.2",
"pointradius": 2,
@@ -5231,43 +5305,43 @@
"renderer": "flot",
"seriesOverrides": [
{
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -5276,7 +5350,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "rabbitmq_connections * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}",
+ "expr": "rabbitmq_connections * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -5349,6 +5423,7 @@
"x": 12,
"y": 75
},
+ "hiddenSeries": false,
"id": 48,
"legend": {
"alignAsTable": true,
@@ -5365,6 +5440,9 @@
"linewidth": 1,
"links": [],
"nullPointMode": "null as zero",
+ "options": {
+ "alertThreshold": true
+ },
"percentage": false,
"pluginVersion": "7.3.2",
"pointradius": 2,
@@ -5372,43 +5450,43 @@
"renderer": "flot",
"seriesOverrides": [
{
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -5417,7 +5495,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "sum(rate(rabbitmq_connections_opened_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}) by(rabbitmq_node)",
+ "expr": "sum(rate(rabbitmq_connections_opened_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) by(rabbitmq_node)",
"format": "time_series",
"instant": false,
"interval": "",
@@ -5508,6 +5586,7 @@
"x": 18,
"y": 75
},
+ "hiddenSeries": false,
"id": 49,
"legend": {
"alignAsTable": true,
@@ -5524,6 +5603,9 @@
"linewidth": 1,
"links": [],
"nullPointMode": "null as zero",
+ "options": {
+ "alertThreshold": true
+ },
"percentage": false,
"pluginVersion": "7.3.2",
"pointradius": 2,
@@ -5531,43 +5613,43 @@
"renderer": "flot",
"seriesOverrides": [
{
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -5576,7 +5658,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "sum(rate(rabbitmq_connections_closed_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}) by(rabbitmq_node)",
+ "expr": "sum(rate(rabbitmq_connections_closed_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) by(rabbitmq_node)",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -5653,10 +5735,50 @@
"templating": {
"list": [
{
+ "current": {
+ "selected": false,
+ "text": "default",
+ "value": "default"
+ },
+ "hide": 2,
+ "includeAll": false,
+ "label": "datasource",
+ "multi": false,
+ "name": "DS_PROMETHEUS",
+ "options": [],
+ "query": "prometheus",
+ "refresh": 1,
+ "regex": "",
+ "skipUrlSync": false,
+ "type": "datasource"
+ },
+ {
+ "allValue": null,
+ "current": {},
+ "datasource": null,
+ "definition": "label_values(rabbitmq_identity_info, namespace)",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Namespace",
+ "multi": false,
+ "name": "namespace",
+ "options": [],
+ "query": "label_values(rabbitmq_identity_info, namespace)",
+ "refresh": 2,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 1,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ },
+ {
"allValue": null,
"current": {},
"datasource": null,
- "definition": "label_values(rabbitmq_identity_info, rabbitmq_cluster)",
+ "definition": "label_values(rabbitmq_identity_info{namespace=\"$namespace\"}, rabbitmq_cluster)",
"error": null,
"hide": 0,
"includeAll": false,
@@ -5664,7 +5786,7 @@
"multi": false,
"name": "rabbitmq_cluster",
"options": [],
- "query": "label_values(rabbitmq_identity_info, rabbitmq_cluster)",
+ "query": "label_values(rabbitmq_identity_info{namespace=\"$namespace\"}, rabbitmq_cluster)",
"refresh": 2,
"regex": "",
"skipUrlSync": false,
@@ -5704,5 +5826,5 @@
"timezone": "",
"title": "RabbitMQ-Overview",
"uid": "Kn5xm-gZk",
- "version": 7
+ "version": 20210322
}
diff --git a/deps/rabbitmq_prometheus/docker/grafana/dashboards/RabbitMQ-PerfTest.json b/deps/rabbitmq_prometheus/docker/grafana/dashboards/RabbitMQ-PerfTest.json
index 374616a3d9..f6fa898a58 100644
--- a/deps/rabbitmq_prometheus/docker/grafana/dashboards/RabbitMQ-PerfTest.json
+++ b/deps/rabbitmq_prometheus/docker/grafana/dashboards/RabbitMQ-PerfTest.json
@@ -4,13 +4,13 @@
"type": "grafana",
"id": "grafana",
"name": "Grafana",
- "version": "6.0.0"
+ "version": "7.0.0"
},
{
"type": "datasource",
"id": "prometheus",
"name": "prometheus",
- "version": "1.0.0"
+ "version": "2.0.0"
},
{
"type": "table",
@@ -1655,6 +1655,24 @@
"templating": {
"list": [
{
+ "current": {
+ "selected": false,
+ "text": "default",
+ "value": "default"
+ },
+ "hide": 2,
+ "includeAll": false,
+ "label": "datasource",
+ "multi": false,
+ "name": "DS_PROMETHEUS",
+ "options": [],
+ "query": "prometheus",
+ "refresh": 1,
+ "regex": "",
+ "skipUrlSync": false,
+ "type": "datasource"
+ },
+ {
"allValue": null,
"current": {
"text": "All",
@@ -1735,5 +1753,5 @@
"timezone": "",
"title": "RabbitMQ-PerfTest",
"uid": "pK9UatSiz",
- "version": 1
+ "version": 20210322
}
diff --git a/deps/rabbitmq_prometheus/docker/grafana/dashboards/RabbitMQ-Quorum-Queues-Raft.json b/deps/rabbitmq_prometheus/docker/grafana/dashboards/RabbitMQ-Quorum-Queues-Raft.json
index 08413a7f73..b8d18ab491 100644
--- a/deps/rabbitmq_prometheus/docker/grafana/dashboards/RabbitMQ-Quorum-Queues-Raft.json
+++ b/deps/rabbitmq_prometheus/docker/grafana/dashboards/RabbitMQ-Quorum-Queues-Raft.json
@@ -4,7 +4,13 @@
"type": "grafana",
"id": "grafana",
"name": "Grafana",
- "version": "6.0.0"
+ "version": "7.0.0"
+ },
+ {
+ "type": "datasource",
+ "id": "prometheus",
+ "name": "Prometheus",
+ "version": "2.0.0"
},
{
"type": "panel",
@@ -17,12 +23,6 @@
"id": "heatmap",
"name": "Heatmap",
"version": ""
- },
- {
- "type": "datasource",
- "id": "prometheus",
- "name": "Prometheus",
- "version": "1.0.0"
}
],
"annotations": {
@@ -99,43 +99,43 @@
"renderer": "flot",
"seriesOverrides": [
{
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -144,7 +144,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "sum(rate(rabbitmq_raft_log_commit_index[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}) by(rabbitmq_node)",
+ "expr": "sum(rate(rabbitmq_raft_log_commit_index[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) by(rabbitmq_node)",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -228,7 +228,7 @@
"reverseYBuckets": false,
"targets": [
{
- "expr": "rabbitmq_raft_entry_commit_latency_seconds * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}",
+ "expr": "rabbitmq_raft_entry_commit_latency_seconds * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -308,43 +308,43 @@
"renderer": "flot",
"seriesOverrides": [
{
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -353,7 +353,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "sum(\n (rabbitmq_raft_log_last_written_index * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}) -\n (rabbitmq_raft_log_commit_index * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"})\n) by(rabbitmq_node)",
+ "expr": "sum(\n (rabbitmq_raft_log_last_written_index * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) -\n (rabbitmq_raft_log_commit_index * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})\n) by(rabbitmq_node)",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -447,43 +447,43 @@
"renderer": "flot",
"seriesOverrides": [
{
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -492,7 +492,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "sum(rate(rabbitmq_raft_term_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}) by(rabbitmq_node)",
+ "expr": "sum(rate(rabbitmq_raft_term_total[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) by(rabbitmq_node)",
"format": "time_series",
"instant": false,
"intervalFactor": 1,
@@ -595,43 +595,43 @@
"renderer": "flot",
"seriesOverrides": [
{
- "alias": "/^rabbit@[\\w.-]+0/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/",
"color": "#56A64B"
},
{
- "alias": "/^rabbit@[\\w.-]+1/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/",
"color": "#F2CC0C"
},
{
- "alias": "/^rabbit@[\\w.-]+2/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/",
"color": "#3274D9"
},
{
- "alias": "/^rabbit@[\\w.-]+3/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/",
"color": "#A352CC"
},
{
- "alias": "/^rabbit@[\\w.-]+4/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/",
"color": "#FF780A"
},
{
- "alias": "/^rabbit@[\\w.-]+5/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/",
"color": "#96D98D"
},
{
- "alias": "/^rabbit@[\\w.-]+6/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/",
"color": "#FFEE52"
},
{
- "alias": "/^rabbit@[\\w.-]+7/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/",
"color": "#8AB8FF"
},
{
- "alias": "/^rabbit@[\\w.-]+8/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/",
"color": "#CA95E5"
},
{
- "alias": "/^rabbit@[\\w.-]+9/",
+ "alias": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/",
"color": "#FFB357"
}
],
@@ -640,7 +640,7 @@
"steppedLine": false,
"targets": [
{
- "expr": "sum(\n (rabbitmq_raft_log_last_written_index * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"}) - \n (rabbitmq_raft_log_snapshot_index * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\"})\n) by(queue, rabbitmq_node) > 5000",
+ "expr": "sum(\n (rabbitmq_raft_log_last_written_index * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) - \n (rabbitmq_raft_log_snapshot_index * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})\n) by(queue, rabbitmq_node) > 5000",
"hide": false,
"legendFormat": "{{rabbitmq_node}} {{queue}}",
"refId": "A"
@@ -698,20 +698,60 @@
"templating": {
"list": [
{
+ "current": {
+ "selected": false,
+ "text": "default",
+ "value": "default"
+ },
+ "hide": 2,
+ "includeAll": false,
+ "label": "datasource",
+ "multi": false,
+ "name": "DS_PROMETHEUS",
+ "options": [],
+ "query": "prometheus",
+ "refresh": 1,
+ "regex": "",
+ "skipUrlSync": false,
+ "type": "datasource"
+ },
+ {
+ "allValue": null,
+ "current": {},
+ "datasource": null,
+ "definition": "label_values(rabbitmq_identity_info, namespace)",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Namespace",
+ "multi": false,
+ "name": "namespace",
+ "options": [],
+ "query": "label_values(rabbitmq_identity_info, namespace)",
+ "refresh": 2,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 1,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ },
+ {
"allValue": null,
"current": {
"text": "",
"value": ""
},
"datasource": null,
- "definition": "label_values(rabbitmq_identity_info, rabbitmq_cluster)",
+ "definition": "label_values(rabbitmq_identity_info{namespace=\"$namespace\"}, rabbitmq_cluster)",
"hide": 0,
"includeAll": false,
"label": "RabbitMQ Cluster",
"multi": false,
"name": "rabbitmq_cluster",
"options": [],
- "query": "label_values(rabbitmq_identity_info, rabbitmq_cluster)",
+ "query": "label_values(rabbitmq_identity_info{namespace=\"$namespace\"}, rabbitmq_cluster)",
"refresh": 2,
"regex": "",
"skipUrlSync": false,
@@ -751,5 +791,5 @@
"timezone": "",
"title": "RabbitMQ-Quorum-Queues-Raft",
"uid": "f1Mee9nZz",
- "version": 1
+ "version": 20210322
}
diff --git a/deps/rabbitmq_prometheus/docker/grafana/dashboards/RabbitMQ-Stream.json b/deps/rabbitmq_prometheus/docker/grafana/dashboards/RabbitMQ-Stream.json
new file mode 100644
index 0000000000..521e9048d5
--- /dev/null
+++ b/deps/rabbitmq_prometheus/docker/grafana/dashboards/RabbitMQ-Stream.json
@@ -0,0 +1,3005 @@
+{
+ "__requires": [
+ {
+ "type": "grafana",
+ "id": "grafana",
+ "name": "Grafana",
+ "version": "8.0.6"
+ },
+ {
+ "type": "panel",
+ "id": "piechart",
+ "name": "Pie chart",
+ "version": ""
+ },
+ {
+ "type": "datasource",
+ "id": "prometheus",
+ "name": "prometheus",
+ "version": "1.0.0"
+ },
+ {
+ "type": "panel",
+ "id": "stat",
+ "name": "Stat",
+ "version": ""
+ },
+ {
+ "type": "panel",
+ "id": "text",
+ "name": "Text",
+ "version": ""
+ },
+ {
+ "type": "panel",
+ "id": "timeseries",
+ "name": "Time series",
+ "version": ""
+ }
+ ],
+ "annotations": {
+ "list": [
+ {
+ "builtIn": 1,
+ "datasource": "-- Grafana --",
+ "enable": true,
+ "hide": true,
+ "iconColor": "rgba(0, 211, 255, 1)",
+ "name": "Annotations & Alerts",
+ "type": "dashboard"
+ }
+ ]
+ },
+ "description": "Stream protocol message rates and errors",
+ "editable": true,
+ "gnetId": 14798,
+ "graphTooltip": 1,
+ "id": null,
+ "iteration": 1628683079578,
+ "links": [],
+ "panels": [
+ {
+ "datasource": "${DS_PROMETHEUS}",
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "mode": "thresholds"
+ },
+ "decimals": 0,
+ "mappings": [],
+ "min": 0,
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "transparent",
+ "value": null
+ }
+ ]
+ },
+ "unit": "short"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 4,
+ "w": 4,
+ "x": 0,
+ "y": 0
+ },
+ "id": 2,
+ "options": {
+ "colorMode": "background",
+ "graphMode": "none",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": [
+ "last"
+ ],
+ "fields": "",
+ "values": false
+ },
+ "text": {},
+ "textMode": "auto"
+ },
+ "pluginVersion": "8.0.6",
+ "targets": [
+ {
+ "exemplar": true,
+ "expr": "sum by(rabbitmq_cluster) (rabbitmq_global_publishers{protocol=\"stream\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})",
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "title": "Stream publishers",
+ "type": "stat"
+ },
+ {
+ "datasource": "${DS_PROMETHEUS}",
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "mode": "thresholds"
+ },
+ "decimals": 1,
+ "mappings": [],
+ "min": 0,
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "transparent",
+ "value": null
+ }
+ ]
+ },
+ "unit": "short"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 4,
+ "w": 4,
+ "x": 4,
+ "y": 0
+ },
+ "id": 14,
+ "options": {
+ "colorMode": "background",
+ "graphMode": "none",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": [
+ "last"
+ ],
+ "fields": "",
+ "values": false
+ },
+ "text": {},
+ "textMode": "value"
+ },
+ "pluginVersion": "8.0.6",
+ "targets": [
+ {
+ "exemplar": true,
+ "expr": "sum by(rabbitmq_cluster) (irate(rabbitmq_global_messages_received_total{protocol=\"stream\"}[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "title": "Stream messages received / s",
+ "type": "stat"
+ },
+ {
+ "datasource": "${DS_PROMETHEUS}",
+ "description": "",
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "mode": "thresholds"
+ },
+ "decimals": 1,
+ "mappings": [],
+ "min": 0,
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "transparent",
+ "value": null
+ }
+ ]
+ },
+ "unit": "short"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 4,
+ "w": 4,
+ "x": 8,
+ "y": 0
+ },
+ "id": 10,
+ "options": {
+ "colorMode": "background",
+ "graphMode": "none",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": [
+ "last"
+ ],
+ "fields": "",
+ "values": false
+ },
+ "text": {},
+ "textMode": "value"
+ },
+ "pluginVersion": "8.0.6",
+ "targets": [
+ {
+ "exemplar": true,
+ "expr": "sum by(rabbitmq_cluster) (irate(rabbitmq_global_messages_confirmed_total{protocol=\"stream\"}[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "title": "Stream messages confirmed to publishers / s",
+ "type": "stat"
+ },
+ {
+ "datasource": "${DS_PROMETHEUS}",
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "mode": "thresholds"
+ },
+ "decimals": 0,
+ "mappings": [],
+ "min": 0,
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "transparent",
+ "value": null
+ }
+ ]
+ },
+ "unit": "short"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 4,
+ "w": 4,
+ "x": 12,
+ "y": 0
+ },
+ "id": 4,
+ "options": {
+ "colorMode": "background",
+ "graphMode": "none",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": [
+ "last"
+ ],
+ "fields": "",
+ "values": false
+ },
+ "text": {},
+ "textMode": "auto"
+ },
+ "pluginVersion": "8.0.6",
+ "targets": [
+ {
+ "exemplar": true,
+ "expr": "sum by(rabbitmq_cluster) (rabbitmq_global_consumers{protocol=\"stream\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})",
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "title": "Stream consumers",
+ "type": "stat"
+ },
+ {
+ "datasource": "${DS_PROMETHEUS}",
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "mode": "thresholds"
+ },
+ "decimals": 1,
+ "mappings": [],
+ "min": 0,
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "transparent",
+ "value": null
+ }
+ ]
+ },
+ "unit": "short"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 4,
+ "w": 4,
+ "x": 16,
+ "y": 0
+ },
+ "id": 8,
+ "options": {
+ "colorMode": "background",
+ "graphMode": "none",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": [
+ "last"
+ ],
+ "fields": "",
+ "values": false
+ },
+ "text": {},
+ "textMode": "value"
+ },
+ "pluginVersion": "8.0.6",
+ "targets": [
+ {
+ "exemplar": true,
+ "expr": "sum by(rabbitmq_cluster) (irate(rabbitmq_global_messages_delivered_total{protocol=\"stream\"}[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "title": "Stream messages delivered / s",
+ "type": "stat"
+ },
+ {
+ "datasource": "${DS_PROMETHEUS}",
+ "description": "",
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "mode": "thresholds"
+ },
+ "decimals": 0,
+ "mappings": [],
+ "min": 0,
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "transparent",
+ "value": null
+ }
+ ]
+ },
+ "unit": "short"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 4,
+ "w": 4,
+ "x": 20,
+ "y": 0
+ },
+ "id": 21,
+ "options": {
+ "colorMode": "background",
+ "graphMode": "none",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": [
+ "last"
+ ],
+ "fields": "",
+ "values": false
+ },
+ "text": {},
+ "textMode": "auto"
+ },
+ "pluginVersion": "8.0.6",
+ "targets": [
+ {
+ "exemplar": true,
+ "expr": "sum by(rabbitmq_cluster) (rabbitmq_global_stream_error_access_refused_total{protocol=\"stream\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) +\n\nsum by(rabbitmq_cluster) (rabbitmq_global_stream_error_authentication_failure_total{protocol=\"stream\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) +\n\nsum by(rabbitmq_cluster) (rabbitmq_global_stream_error_frame_too_large_total{protocol=\"stream\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) +\n\nsum by(rabbitmq_cluster) (rabbitmq_global_stream_error_internal_error_total{protocol=\"stream\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) +\n\nsum by(rabbitmq_cluster) (rabbitmq_global_stream_error_precondition_failed_total{protocol=\"stream\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) +\n\nsum by(rabbitmq_cluster) (rabbitmq_global_stream_error_publisher_does_not_exist_total{protocol=\"stream\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) +\n\nsum by(rabbitmq_cluster) (rabbitmq_global_stream_error_sasl_authentication_failure_loopback_total{protocol=\"stream\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) +\n\nsum by(rabbitmq_cluster) (rabbitmq_global_stream_error_sasl_challenge_total{protocol=\"stream\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) +\n\nsum by(rabbitmq_cluster) (rabbitmq_global_stream_error_sasl_error_total{protocol=\"stream\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) +\n\nsum by(rabbitmq_cluster) (rabbitmq_global_stream_error_sasl_mechanism_not_supported_total{protocol=\"stream\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) +\n\nsum by(rabbitmq_cluster) (rabbitmq_global_stream_error_stream_already_exists_total{protocol=\"stream\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) +\n\nsum by(rabbitmq_cluster) (rabbitmq_global_stream_error_stream_does_not_exist_total{protocol=\"stream\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) +\n\nsum by(rabbitmq_cluster) (rabbitmq_global_stream_error_stream_not_available_total{protocol=\"stream\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) +\n\nsum by(rabbitmq_cluster) (rabbitmq_global_stream_error_subscription_id_already_exists_total{protocol=\"stream\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) 
rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) +\n\nsum by(rabbitmq_cluster) (rabbitmq_global_stream_error_subscription_id_does_not_exist_total{protocol=\"stream\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) +\n\nsum by(rabbitmq_cluster) (rabbitmq_global_stream_error_unknown_frame_total{protocol=\"stream\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}) +\n\nsum by(rabbitmq_cluster) (rabbitmq_global_stream_error_vhost_access_failure_total{protocol=\"stream\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})\n",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "title": "Errors since boot",
+ "type": "stat"
+ },
+ {
+ "cacheTimeout": null,
+ "datasource": "${DS_PROMETHEUS}",
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "mode": "palette-classic"
+ },
+ "custom": {
+ "hideFrom": {
+ "legend": false,
+ "tooltip": false,
+ "viz": false
+ }
+ },
+ "decimals": 0,
+ "mappings": [],
+ "unit": "short"
+ },
+ "overrides": [
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(86, 166, 75, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(242, 204, 12, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(50, 116, 217, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(163, 82, 204, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(255, 120, 10, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(150, 217, 141, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(255, 238, 82, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(138, 184, 255, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(202, 149, 229, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(255, 179, 87, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ }
+ ]
+ },
+ "gridPos": {
+ "h": 6,
+ "w": 4,
+ "x": 0,
+ "y": 4
+ },
+ "id": 16,
+ "links": [],
+ "options": {
+ "displayLabels": [
+ "value"
+ ],
+ "legend": {
+ "calcs": [],
+ "displayMode": "hidden",
+ "placement": "bottom",
+ "values": [
+ "value"
+ ]
+ },
+ "pieType": "pie",
+ "reduceOptions": {
+ "calcs": [
+ "last"
+ ],
+ "fields": "",
+ "values": false
+ },
+ "tooltip": {
+ "mode": "multi"
+ }
+ },
+ "pluginVersion": "8.0.3",
+ "repeat": null,
+ "targets": [
+ {
+ "exemplar": true,
+ "expr": "sort_desc(sum by(rabbitmq_node) (rabbitmq_global_publishers{protocol=\"stream\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}))",
+ "format": "time_series",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "{{rabbitmq_node}}",
+ "refId": "A"
+ }
+ ],
+ "type": "piechart"
+ },
+ {
+ "datasource": "${DS_PROMETHEUS}",
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "fixedColor": "rgba(128, 128, 128, 1)",
+ "mode": "palette-classic"
+ },
+ "custom": {
+ "hideFrom": {
+ "legend": false,
+ "tooltip": false,
+ "viz": false
+ }
+ },
+ "mappings": [],
+ "unit": "short"
+ },
+ "overrides": [
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(86, 166, 75, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(242, 204, 12, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(50, 116, 217, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(163, 82, 204, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(255, 120, 10, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(150, 217, 141, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(255, 238, 82, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(138, 184, 255, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(202, 149, 229, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(255, 179, 87, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ }
+ ]
+ },
+ "gridPos": {
+ "h": 6,
+ "w": 4,
+ "x": 4,
+ "y": 4
+ },
+ "id": 17,
+ "options": {
+ "displayLabels": [
+ "value"
+ ],
+ "legend": {
+ "displayMode": "hidden",
+ "placement": "bottom"
+ },
+ "pieType": "pie",
+ "reduceOptions": {
+ "calcs": [
+ "last"
+ ],
+ "fields": "",
+ "values": false
+ },
+ "tooltip": {
+ "mode": "multi"
+ }
+ },
+ "pluginVersion": "8.0.3",
+ "targets": [
+ {
+ "exemplar": true,
+ "expr": "sort_desc(sum by(rabbitmq_node) (irate(rabbitmq_global_messages_received_total{protocol=\"stream\"}[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}))",
+ "format": "time_series",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "{{rabbitmq_node}}",
+ "refId": "A"
+ }
+ ],
+ "type": "piechart"
+ },
+ {
+ "datasource": "${DS_PROMETHEUS}",
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "fixedColor": "rgba(128, 128, 128, 1)",
+ "mode": "palette-classic"
+ },
+ "custom": {
+ "hideFrom": {
+ "legend": false,
+ "tooltip": false,
+ "viz": false
+ }
+ },
+ "mappings": [],
+ "unit": "short"
+ },
+ "overrides": [
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(86, 166, 75, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(242, 204, 12, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(50, 116, 217, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(163, 82, 204, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(255, 120, 10, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(150, 217, 141, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(255, 238, 82, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(138, 184, 255, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(202, 149, 229, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(255, 179, 87, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ }
+ ]
+ },
+ "gridPos": {
+ "h": 6,
+ "w": 4,
+ "x": 8,
+ "y": 4
+ },
+ "id": 18,
+ "options": {
+ "displayLabels": [
+ "value"
+ ],
+ "legend": {
+ "displayMode": "hidden",
+ "placement": "bottom",
+ "values": []
+ },
+ "pieType": "pie",
+ "reduceOptions": {
+ "calcs": [
+ "last"
+ ],
+ "fields": "",
+ "values": false
+ },
+ "tooltip": {
+ "mode": "multi"
+ }
+ },
+ "pluginVersion": "8.0.3",
+ "targets": [
+ {
+ "exemplar": true,
+ "expr": "sort_desc(sum by(rabbitmq_node) (irate(rabbitmq_global_messages_confirmed_total{protocol=\"stream\"}[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}))",
+ "format": "time_series",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "{{rabbitmq_node}}",
+ "refId": "A"
+ }
+ ],
+ "type": "piechart"
+ },
+ {
+ "datasource": "${DS_PROMETHEUS}",
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "fixedColor": "rgba(128, 128, 128, 1)",
+ "mode": "palette-classic"
+ },
+ "custom": {
+ "hideFrom": {
+ "legend": false,
+ "tooltip": false,
+ "viz": false
+ }
+ },
+ "mappings": [],
+ "unit": "short"
+ },
+ "overrides": [
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(86, 166, 75, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(242, 204, 12, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(50, 116, 217, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(163, 82, 204, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(255, 120, 10, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(150, 217, 141, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(255, 238, 82, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(138, 184, 255, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(202, 149, 229, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(255, 179, 87, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ }
+ ]
+ },
+ "gridPos": {
+ "h": 6,
+ "w": 4,
+ "x": 12,
+ "y": 4
+ },
+ "id": 19,
+ "options": {
+ "displayLabels": [
+ "value"
+ ],
+ "legend": {
+ "displayMode": "hidden",
+ "placement": "bottom"
+ },
+ "pieType": "pie",
+ "reduceOptions": {
+ "calcs": [
+ "last"
+ ],
+ "fields": "",
+ "values": false
+ },
+ "tooltip": {
+ "mode": "multi"
+ }
+ },
+ "pluginVersion": "8.0.3",
+ "targets": [
+ {
+ "exemplar": true,
+ "expr": "sort_desc(sum by(rabbitmq_node) (rabbitmq_global_consumers{protocol=\"stream\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}))",
+ "format": "time_series",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "{{rabbitmq_node}}",
+ "refId": "A"
+ }
+ ],
+ "type": "piechart"
+ },
+ {
+ "datasource": "${DS_PROMETHEUS}",
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "fixedColor": "rgba(128, 128, 128, 1)",
+ "mode": "palette-classic"
+ },
+ "custom": {
+ "hideFrom": {
+ "legend": false,
+ "tooltip": false,
+ "viz": false
+ }
+ },
+ "mappings": [],
+ "unit": "short"
+ },
+ "overrides": [
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(86, 166, 75, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(242, 204, 12, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(50, 116, 217, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(163, 82, 204, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(255, 120, 10, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(150, 217, 141, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(255, 238, 82, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(138, 184, 255, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(202, 149, 229, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(255, 179, 87, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ }
+ ]
+ },
+ "gridPos": {
+ "h": 6,
+ "w": 4,
+ "x": 16,
+ "y": 4
+ },
+ "id": 20,
+ "options": {
+ "displayLabels": [
+ "value"
+ ],
+ "legend": {
+ "displayMode": "hidden",
+ "placement": "bottom"
+ },
+ "pieType": "pie",
+ "reduceOptions": {
+ "calcs": [
+ "last"
+ ],
+ "fields": "",
+ "values": false
+ },
+ "tooltip": {
+ "mode": "multi"
+ }
+ },
+ "pluginVersion": "8.0.3",
+ "targets": [
+ {
+ "exemplar": true,
+ "expr": "sort_desc(sum by(rabbitmq_node) (irate(rabbitmq_global_messages_delivered_total{protocol=\"stream\"}[60s]) * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"}))",
+ "format": "time_series",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "{{rabbitmq_node}}",
+ "refId": "A"
+ }
+ ],
+ "type": "piechart"
+ },
+ {
+ "datasource": "${DS_PROMETHEUS}",
+ "gridPos": {
+ "h": 6,
+ "w": 4,
+ "x": 20,
+ "y": 4
+ },
+ "id": 25,
+ "options": {
+ "content": "&nbsp; [What are Streams?](https://www.rabbitmq.com/streams.html)\n\n&nbsp; [Streams Overview + slides](https://blog.rabbitmq.com/posts/2021/07/rabbitmq-streams-overview/)\n\n&nbsp; [First Application + video](https://blog.rabbitmq.com/posts/2021/07/rabbitmq-streams-first-application/)\n\n&nbsp; [Using Stream protocol + diagrams](https://blog.rabbitmq.com/posts/2021/07/connecting-to-streams/)",
+ "mode": "markdown"
+ },
+ "pluginVersion": "8.0.6",
+ "title": "Learn more",
+ "transparent": true,
+ "type": "text"
+ },
+ {
+ "datasource": "-- Dashboard --",
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "mode": "palette-classic"
+ },
+ "custom": {
+ "axisLabel": "",
+ "axisPlacement": "auto",
+ "barAlignment": 0,
+ "drawStyle": "line",
+ "fillOpacity": 20,
+ "gradientMode": "opacity",
+ "hideFrom": {
+ "legend": false,
+ "tooltip": false,
+ "viz": false
+ },
+ "lineInterpolation": "smooth",
+ "lineWidth": 1,
+ "pointSize": 5,
+ "scaleDistribution": {
+ "type": "linear"
+ },
+ "showPoints": "never",
+ "spanNulls": true,
+ "stacking": {
+ "group": "A",
+ "mode": "none"
+ },
+ "thresholdsStyle": {
+ "mode": "off"
+ }
+ },
+ "decimals": 0,
+ "mappings": [],
+ "min": 0,
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ },
+ {
+ "color": "red",
+ "value": 80
+ }
+ ]
+ },
+ "unit": "short"
+ },
+ "overrides": [
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(86, 166, 75, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(242, 204, 12, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(50, 116, 217, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(163, 82, 204, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(255, 120, 10, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(150, 217, 141, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(255, 238, 82, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(138, 184, 255, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(202, 149, 229, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(255, 179, 87, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ }
+ ]
+ },
+ "gridPos": {
+ "h": 9,
+ "w": 12,
+ "x": 0,
+ "y": 10
+ },
+ "id": 3,
+ "options": {
+ "legend": {
+ "calcs": [
+ "mean",
+ "max",
+ "min",
+ "last"
+ ],
+ "displayMode": "table",
+ "placement": "bottom"
+ },
+ "tooltip": {
+ "mode": "single"
+ }
+ },
+ "pluginVersion": "8.0.3",
+ "targets": [
+ {
+ "panelId": 16,
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Stream publishers",
+ "type": "timeseries"
+ },
+ {
+ "datasource": "-- Dashboard --",
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "mode": "palette-classic"
+ },
+ "custom": {
+ "axisLabel": "",
+ "axisPlacement": "auto",
+ "barAlignment": 0,
+ "drawStyle": "line",
+ "fillOpacity": 20,
+ "gradientMode": "opacity",
+ "hideFrom": {
+ "legend": false,
+ "tooltip": false,
+ "viz": false
+ },
+ "lineInterpolation": "smooth",
+ "lineWidth": 1,
+ "pointSize": 5,
+ "scaleDistribution": {
+ "type": "linear"
+ },
+ "showPoints": "never",
+ "spanNulls": true,
+ "stacking": {
+ "group": "A",
+ "mode": "none"
+ },
+ "thresholdsStyle": {
+ "mode": "off"
+ }
+ },
+ "decimals": 0,
+ "mappings": [],
+ "min": 0,
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ },
+ "unit": "short"
+ },
+ "overrides": [
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(86, 166, 75, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(242, 204, 12, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(50, 116, 217, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(163, 82, 204, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(255, 120, 10, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(150, 217, 141, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(255, 238, 82, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(138, 184, 255, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(202, 149, 229, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(255, 179, 87, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ }
+ ]
+ },
+ "gridPos": {
+ "h": 9,
+ "w": 12,
+ "x": 12,
+ "y": 10
+ },
+ "id": 5,
+ "options": {
+ "legend": {
+ "calcs": [
+ "mean",
+ "max",
+ "min",
+ "last"
+ ],
+ "displayMode": "table",
+ "placement": "bottom"
+ },
+ "tooltip": {
+ "mode": "single"
+ }
+ },
+ "pluginVersion": "8.0.3",
+ "targets": [
+ {
+ "panelId": 19,
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Stream consumers",
+ "type": "timeseries"
+ },
+ {
+ "datasource": "-- Dashboard --",
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "mode": "palette-classic"
+ },
+ "custom": {
+ "axisLabel": "",
+ "axisPlacement": "auto",
+ "barAlignment": 0,
+ "drawStyle": "line",
+ "fillOpacity": 20,
+ "gradientMode": "opacity",
+ "hideFrom": {
+ "legend": false,
+ "tooltip": false,
+ "viz": false
+ },
+ "lineInterpolation": "smooth",
+ "lineWidth": 1,
+ "pointSize": 5,
+ "scaleDistribution": {
+ "type": "linear"
+ },
+ "showPoints": "never",
+ "spanNulls": true,
+ "stacking": {
+ "group": "A",
+ "mode": "none"
+ },
+ "thresholdsStyle": {
+ "mode": "off"
+ }
+ },
+ "mappings": [],
+ "min": 0,
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ },
+ {
+ "color": "red",
+ "value": 80
+ }
+ ]
+ },
+ "unit": "short"
+ },
+ "overrides": [
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(86, 166, 75, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(242, 204, 12, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(50, 116, 217, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(163, 82, 204, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(255, 120, 10, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(150, 217, 141, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(255, 238, 82, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(138, 184, 255, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(202, 149, 229, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(255, 179, 87, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ }
+ ]
+ },
+ "gridPos": {
+ "h": 9,
+ "w": 12,
+ "x": 0,
+ "y": 19
+ },
+ "id": 7,
+ "options": {
+ "legend": {
+ "calcs": [
+ "mean",
+ "max",
+ "min",
+ "last"
+ ],
+ "displayMode": "table",
+ "placement": "bottom"
+ },
+ "tooltip": {
+ "mode": "single"
+ }
+ },
+ "pluginVersion": "8.0.3",
+ "targets": [
+ {
+ "panelId": 17,
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Stream messages received / s",
+ "type": "timeseries"
+ },
+ {
+ "datasource": "-- Dashboard --",
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "mode": "palette-classic"
+ },
+ "custom": {
+ "axisLabel": "",
+ "axisPlacement": "auto",
+ "barAlignment": 0,
+ "drawStyle": "line",
+ "fillOpacity": 20,
+ "gradientMode": "opacity",
+ "hideFrom": {
+ "legend": false,
+ "tooltip": false,
+ "viz": false
+ },
+ "lineInterpolation": "smooth",
+ "lineWidth": 1,
+ "pointSize": 5,
+ "scaleDistribution": {
+ "type": "linear"
+ },
+ "showPoints": "never",
+ "spanNulls": true,
+ "stacking": {
+ "group": "A",
+ "mode": "none"
+ },
+ "thresholdsStyle": {
+ "mode": "off"
+ }
+ },
+ "mappings": [],
+ "min": 0,
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ },
+ {
+ "color": "red",
+ "value": 80
+ }
+ ]
+ },
+ "unit": "short"
+ },
+ "overrides": [
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(86, 166, 75, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(242, 204, 12, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(50, 116, 217, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(163, 82, 204, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(255, 120, 10, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(150, 217, 141, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(255, 238, 82, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(138, 184, 255, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(202, 149, 229, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(255, 179, 87, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ }
+ ]
+ },
+ "gridPos": {
+ "h": 9,
+ "w": 12,
+ "x": 12,
+ "y": 19
+ },
+ "id": 9,
+ "options": {
+ "legend": {
+ "calcs": [
+ "mean",
+ "max",
+ "min",
+ "last"
+ ],
+ "displayMode": "table",
+ "placement": "bottom"
+ },
+ "tooltip": {
+ "mode": "single"
+ }
+ },
+ "pluginVersion": "8.0.3",
+ "targets": [
+ {
+ "panelId": 20,
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Stream messages delivered / s",
+ "type": "timeseries"
+ },
+ {
+ "datasource": "-- Dashboard --",
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "mode": "palette-classic"
+ },
+ "custom": {
+ "axisLabel": "",
+ "axisPlacement": "auto",
+ "barAlignment": 0,
+ "drawStyle": "line",
+ "fillOpacity": 20,
+ "gradientMode": "opacity",
+ "hideFrom": {
+ "legend": false,
+ "tooltip": false,
+ "viz": false
+ },
+ "lineInterpolation": "smooth",
+ "lineWidth": 1,
+ "pointSize": 5,
+ "scaleDistribution": {
+ "type": "linear"
+ },
+ "showPoints": "never",
+ "spanNulls": true,
+ "stacking": {
+ "group": "A",
+ "mode": "none"
+ },
+ "thresholdsStyle": {
+ "mode": "off"
+ }
+ },
+ "mappings": [],
+ "min": 0,
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ },
+ {
+ "color": "red",
+ "value": 80
+ }
+ ]
+ },
+ "unit": "short"
+ },
+ "overrides": [
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?0(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(86, 166, 75, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?1(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(242, 204, 12, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?2(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(50, 116, 217, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?3(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(163, 82, 204, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?4(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(255, 120, 10, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?5(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(150, 217, 141, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?6(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(255, 238, 82, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?7(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(138, 184, 255, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?8(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(202, 149, 229, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byRegexp",
+ "options": "/^rabbit@[a-zA-Z\\.\\-]*?9(\\b|\\.)/"
+ },
+ "properties": [
+ {
+ "id": "color",
+ "value": {
+ "fixedColor": "rgba(255, 179, 87, 1)",
+ "mode": "fixed"
+ }
+ }
+ ]
+ }
+ ]
+ },
+ "gridPos": {
+ "h": 9,
+ "w": 12,
+ "x": 0,
+ "y": 28
+ },
+ "id": 11,
+ "options": {
+ "legend": {
+ "calcs": [
+ "mean",
+ "max",
+ "min",
+ "last"
+ ],
+ "displayMode": "table",
+ "placement": "bottom"
+ },
+ "tooltip": {
+ "mode": "single"
+ }
+ },
+ "pluginVersion": "8.0.3",
+ "targets": [
+ {
+ "panelId": 18,
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Stream messages confirmed to publishers / s",
+ "type": "timeseries"
+ },
+ {
+ "datasource": "${DS_PROMETHEUS}",
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "mode": "palette-classic"
+ },
+ "custom": {
+ "axisLabel": "",
+ "axisPlacement": "auto",
+ "barAlignment": 0,
+ "drawStyle": "line",
+ "fillOpacity": 20,
+ "gradientMode": "opacity",
+ "hideFrom": {
+ "legend": false,
+ "tooltip": false,
+ "viz": false
+ },
+ "lineInterpolation": "stepBefore",
+ "lineWidth": 1,
+ "pointSize": 5,
+ "scaleDistribution": {
+ "type": "linear"
+ },
+ "showPoints": "never",
+ "spanNulls": true,
+ "stacking": {
+ "group": "A",
+ "mode": "none"
+ },
+ "thresholdsStyle": {
+ "mode": "off"
+ }
+ },
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "dark-red",
+ "value": null
+ }
+ ]
+ },
+ "unit": "short"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 13,
+ "w": 24,
+ "x": 0,
+ "y": 37
+ },
+ "id": 23,
+ "options": {
+ "legend": {
+ "calcs": [
+ "last"
+ ],
+ "displayMode": "table",
+ "placement": "right"
+ },
+ "tooltip": {
+ "mode": "single"
+ }
+ },
+ "pluginVersion": "8.0.3",
+ "targets": [
+ {
+ "exemplar": true,
+ "expr": "sum by(rabbitmq_cluster) (rabbitmq_global_stream_error_access_refused_total{protocol=\"stream\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})",
+ "interval": "",
+ "legendFormat": "access_refused",
+ "refId": "A"
+ },
+ {
+ "exemplar": true,
+ "expr": "sum by(rabbitmq_cluster) (rabbitmq_global_stream_error_authentication_failure_total{protocol=\"stream\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})",
+ "hide": false,
+ "interval": "",
+ "legendFormat": "error_authentication_failure",
+ "refId": "B"
+ },
+ {
+ "exemplar": true,
+ "expr": "sum by(rabbitmq_cluster) (rabbitmq_global_stream_error_frame_too_large_total{protocol=\"stream\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})",
+ "hide": false,
+ "interval": "",
+ "legendFormat": "frame_too_large",
+ "refId": "C"
+ },
+ {
+ "exemplar": true,
+ "expr": "sum by(rabbitmq_cluster) (rabbitmq_global_stream_error_internal_error_total{protocol=\"stream\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})",
+ "hide": false,
+ "interval": "",
+ "legendFormat": "internal_error",
+ "refId": "D"
+ },
+ {
+ "exemplar": true,
+ "expr": "sum by(rabbitmq_cluster) (rabbitmq_global_stream_error_precondition_failed_total{protocol=\"stream\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})",
+ "hide": false,
+ "interval": "",
+ "legendFormat": "precondition_failed",
+ "refId": "E"
+ },
+ {
+ "exemplar": true,
+ "expr": "sum by(rabbitmq_cluster) (rabbitmq_global_stream_error_publisher_does_not_exist_total{protocol=\"stream\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})",
+ "hide": false,
+ "interval": "",
+ "legendFormat": "publisher_does_not_exist",
+ "refId": "F"
+ },
+ {
+ "exemplar": true,
+ "expr": "sum by(rabbitmq_cluster) (rabbitmq_global_stream_error_sasl_authentication_failure_loopback_total{protocol=\"stream\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})",
+ "hide": false,
+ "interval": "",
+ "legendFormat": "sasl_authentication_failure_loopback",
+ "refId": "G"
+ },
+ {
+ "exemplar": true,
+ "expr": "sum by(rabbitmq_cluster) (rabbitmq_global_stream_error_sasl_challenge_total{protocol=\"stream\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})",
+ "hide": false,
+ "interval": "",
+ "legendFormat": "sasl_challenge",
+ "refId": "H"
+ },
+ {
+ "exemplar": true,
+ "expr": "sum by(rabbitmq_cluster) (rabbitmq_global_stream_error_sasl_error_total{protocol=\"stream\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})",
+ "hide": false,
+ "interval": "",
+ "legendFormat": "sasl_error",
+ "refId": "I"
+ },
+ {
+ "exemplar": true,
+ "expr": "sum by(rabbitmq_cluster) (rabbitmq_global_stream_error_sasl_mechanism_not_supported_total{protocol=\"stream\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})",
+ "hide": false,
+ "interval": "",
+ "legendFormat": "sasl_mechanism_not_supported",
+ "refId": "J"
+ },
+ {
+ "exemplar": true,
+ "expr": "sum by(rabbitmq_cluster) (rabbitmq_global_stream_error_stream_already_exists_total{protocol=\"stream\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})",
+ "hide": false,
+ "interval": "",
+ "legendFormat": "stream_already_exists",
+ "refId": "K"
+ },
+ {
+ "exemplar": true,
+ "expr": "sum by(rabbitmq_cluster) (rabbitmq_global_stream_error_stream_does_not_exist_total{protocol=\"stream\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})",
+ "hide": false,
+ "interval": "",
+ "legendFormat": "stream_does_not_exist",
+ "refId": "L"
+ },
+ {
+ "exemplar": true,
+ "expr": "sum by(rabbitmq_cluster) (rabbitmq_global_stream_error_stream_not_available_total{protocol=\"stream\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})",
+ "hide": false,
+ "interval": "",
+ "legendFormat": "stream_not_available",
+ "refId": "M"
+ },
+ {
+ "exemplar": true,
+ "expr": "sum by(rabbitmq_cluster) (rabbitmq_global_stream_error_subscription_id_already_exists_total{protocol=\"stream\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})",
+ "hide": false,
+ "interval": "",
+ "legendFormat": "subscription_id_already_exists",
+ "refId": "N"
+ },
+ {
+ "exemplar": true,
+ "expr": "sum by(rabbitmq_cluster) (rabbitmq_global_stream_error_subscription_id_does_not_exist_total{protocol=\"stream\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})",
+ "hide": false,
+ "interval": "",
+ "legendFormat": "subscription_id_does_not_exist",
+ "refId": "O"
+ },
+ {
+ "exemplar": true,
+ "expr": "sum by(rabbitmq_cluster) (rabbitmq_global_stream_error_unknown_frame_total{protocol=\"stream\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})",
+ "hide": false,
+ "interval": "",
+ "legendFormat": "error_unknown_frame",
+ "refId": "P"
+ },
+ {
+ "exemplar": true,
+ "expr": "sum by(rabbitmq_cluster) (rabbitmq_global_stream_error_vhost_access_failure_total{protocol=\"stream\"} * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_cluster=\"$rabbitmq_cluster\", namespace=\"$namespace\"})",
+ "hide": false,
+ "interval": "",
+ "legendFormat": "vhost_access_failure",
+ "refId": "Q"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Errors since boot",
+ "type": "timeseries"
+ }
+ ],
+ "refresh": "15s",
+ "schemaVersion": 30,
+ "style": "dark",
+ "tags": [
+ "rabbitmq-stream",
+ "rabbitmq-prometheus"
+ ],
+ "templating": {
+ "list": [
+ {
+ "current": {
+ "selected": false,
+ "text": "default",
+ "value": "default"
+ },
+ "hide": 2,
+ "includeAll": false,
+ "label": "datasource",
+ "multi": false,
+ "name": "DS_PROMETHEUS",
+ "options": [],
+ "query": "prometheus",
+ "refresh": 1,
+ "regex": "",
+ "skipUrlSync": false,
+ "type": "datasource",
+ "datasource": "${DS_PROMETHEUS}"
+ },
+ {
+ "allValue": null,
+ "current": {},
+ "datasource": "${DS_PROMETHEUS}",
+ "definition": "label_values(rabbitmq_identity_info, namespace)",
+ "description": null,
+ "error": null,
+ "hide": 0,
+ "includeAll": false,
+ "label": "Namespace",
+ "multi": false,
+ "name": "namespace",
+ "options": [],
+ "query": {
+ "query": "label_values(rabbitmq_identity_info, namespace)",
+ "refId": "StandardVariableQuery"
+ },
+ "refresh": 2,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 1,
+ "tagValuesQuery": "",
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ },
+ {
+ "allValue": null,
+ "current": {},
+ "datasource": "${DS_PROMETHEUS}",
+ "definition": "label_values(rabbitmq_identity_info{namespace=\"$namespace\"}, rabbitmq_cluster)",
+ "description": null,
+ "error": null,
+ "hide": 0,
+ "includeAll": false,
+ "label": "RabbitMQ Cluster",
+ "multi": false,
+ "name": "rabbitmq_cluster",
+ "options": [],
+ "query": {
+ "query": "label_values(rabbitmq_identity_info{namespace=\"$namespace\"}, rabbitmq_cluster)",
+ "refId": "StandardVariableQuery"
+ },
+ "refresh": 2,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 1,
+ "tagValuesQuery": "",
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ }
+ ]
+ },
+ "time": {
+ "from": "now-1h",
+ "to": "now"
+ },
+ "timepicker": {
+ "refresh_intervals": [
+ "15s",
+ "30s",
+ "1m",
+ "5m",
+ "10m"
+ ]
+ },
+ "timezone": "",
+ "title": "RabbitMQ-Stream",
+ "uid": "j7MCpqZ7k",
+ "version": 20210811
+}
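The "Errors since boot" panel above repeats one PromQL pattern for every stream error counter: the per-node `rabbitmq_global_stream_error_*_total` series is joined onto `rabbitmq_identity_info` with `* on(instance) group_left(rabbitmq_cluster, rabbitmq_node)` so it can be filtered by cluster and namespace, then summed per cluster. The sketch below runs one of these queries directly against the Prometheus HTTP API; the Prometheus address and the label values are placeholders for whatever your environment uses.

```shell
# Minimal sketch, assuming Prometheus is reachable at $PROM and that the
# cluster/namespace label values below exist in your environment.
PROM=http://localhost:9090

QUERY='sum by(rabbitmq_cluster) (
  rabbitmq_global_stream_error_access_refused_total{protocol="stream"}
  * on(instance) group_left(rabbitmq_cluster, rabbitmq_node)
  rabbitmq_identity_info{rabbitmq_cluster="rabbitmq-stream", namespace="default"}
)'

# /api/v1/query evaluates an instant query; jq is optional and only trims the
# response down to the result vector.
curl -fsS -G "${PROM}/api/v1/query" --data-urlencode "query=${QUERY}" | jq '.data.result'
```

The same shape applies to every target in that panel; only the error counter name changes.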
diff --git a/deps/rabbitmq_prometheus/docker/grafana/dashboards/inet_tcp_metrics.json b/deps/rabbitmq_prometheus/docker/grafana/dashboards/inet_tcp_metrics.json
new file mode 100644
index 0000000000..1453905d44
--- /dev/null
+++ b/deps/rabbitmq_prometheus/docker/grafana/dashboards/inet_tcp_metrics.json
@@ -0,0 +1,1715 @@
+{
+ "__requires": [
+ {
+ "type": "panel",
+ "id": "bargauge",
+ "name": "Bar gauge",
+ "version": ""
+ },
+ {
+ "type": "grafana",
+ "id": "grafana",
+ "name": "Grafana",
+ "version": "7.0.0"
+ },
+ {
+ "type": "panel",
+ "id": "graph",
+ "name": "Graph",
+ "version": ""
+ },
+ {
+ "type": "panel",
+ "id": "heatmap",
+ "name": "Heatmap",
+ "version": ""
+ },
+ {
+ "type": "datasource",
+ "id": "prometheus",
+ "name": "Prometheus",
+ "version": "2.0.0"
+ },
+ {
+ "type": "panel",
+ "id": "table-old",
+ "name": "Table (old)",
+ "version": ""
+ },
+ {
+ "type": "panel",
+ "id": "text",
+ "name": "Text",
+ "version": ""
+ }
+ ],
+ "annotations": {
+ "list": [
+ {
+ "$$hashKey": "object:45",
+ "builtIn": 1,
+ "datasource": "-- Grafana --",
+ "enable": true,
+ "hide": true,
+ "iconColor": "rgba(0, 211, 255, 1)",
+ "name": "Annotations & Alerts",
+ "type": "dashboard"
+ }
+ ]
+ },
+ "editable": true,
+ "gnetId": null,
+ "graphTooltip": 0,
+ "id": null,
+ "iteration": 1611576247353,
+ "links": [],
+ "panels": [
+ {
+ "columns": [],
+ "datasource": null,
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fontSize": "100%",
+ "gridPos": {
+ "h": 3,
+ "w": 24,
+ "x": 0,
+ "y": 0
+ },
+ "id": 12,
+ "links": [],
+ "pageSize": null,
+ "scroll": true,
+ "showHeader": true,
+ "sort": {
+ "col": 8,
+ "desc": false
+ },
+ "styles": [
+ {
+ "$$hashKey": "object:3188",
+ "alias": "Erlang/OTP",
+ "align": "",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "decimals": 2,
+ "mappingType": 1,
+ "pattern": "erlang_version",
+ "preserveFormat": false,
+ "thresholds": [],
+ "type": "string",
+ "unit": "short"
+ },
+ {
+ "$$hashKey": "object:3209",
+ "alias": "RabbitMQ",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "decimals": 2,
+ "mappingType": 1,
+ "pattern": "rabbitmq_version",
+ "thresholds": [],
+ "type": "string",
+ "unit": "short"
+ },
+ {
+ "$$hashKey": "object:3230",
+ "alias": "Tanzu RabbitMQ",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "decimals": 2,
+ "mappingType": 1,
+ "pattern": "product_version",
+ "thresholds": [],
+ "type": "string",
+ "unit": "short"
+ },
+ {
+ "$$hashKey": "object:3355",
+ "alias": "",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "decimals": 2,
+ "mappingType": 1,
+ "pattern": "Time",
+ "thresholds": [],
+ "type": "hidden",
+ "unit": "short"
+ },
+ {
+ "$$hashKey": "object:3366",
+ "alias": "",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "decimals": 2,
+ "mappingType": 1,
+ "pattern": "Value",
+ "thresholds": [],
+ "type": "hidden",
+ "unit": "short"
+ },
+ {
+ "$$hashKey": "object:3377",
+ "alias": "prometheus.erl",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "decimals": 2,
+ "mappingType": 1,
+ "pattern": "prometheus_client_version",
+ "thresholds": [],
+ "type": "string",
+ "unit": "short"
+ },
+ {
+ "$$hashKey": "object:3398",
+ "alias": "Node name",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "decimals": 2,
+ "mappingType": 1,
+ "pattern": "rabbitmq_node",
+ "thresholds": [],
+ "type": "string",
+ "unit": "short"
+ },
+ {
+ "$$hashKey": "object:3581",
+ "alias": "Cluster name",
+ "align": "auto",
+ "colorMode": null,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "dateFormat": "YYYY-MM-DD HH:mm:ss",
+ "decimals": 2,
+ "mappingType": 1,
+ "pattern": "rabbitmq_cluster",
+ "thresholds": [],
+ "type": "string",
+ "unit": "short"
+ }
+ ],
+ "targets": [
+ {
+ "expr": "max(rabbitmq_build_info * on(instance) group_left(rabbitmq_cluster, rabbitmq_node) rabbitmq_identity_info{rabbitmq_node=~\"$src_node|$dst_node\"}) by(erlang_version, rabbitmq_version, product_version, prometheus_client_version, rabbitmq_node, rabbitmq_cluster)",
+ "format": "table",
+ "instant": true,
+ "interval": "",
+ "intervalFactor": 1,
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "",
+ "transform": "table",
+ "type": "table-old"
+ },
+ {
+ "collapsed": true,
+ "datasource": null,
+ "gridPos": {
+ "h": 1,
+ "w": 24,
+ "x": 0,
+ "y": 3
+ },
+ "id": 5,
+ "panels": [
+ {
+ "cards": {
+ "cardPadding": null,
+ "cardRound": null
+ },
+ "color": {
+ "cardColor": "rgb(255, 255, 255)",
+ "colorScale": "sqrt",
+ "colorScheme": "interpolatePlasma",
+ "exponent": 0.4,
+ "max": null,
+ "min": null,
+ "mode": "spectrum"
+ },
+ "dataFormat": "tsbuckets",
+ "datasource": null,
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 8,
+ "w": 24,
+ "x": 0,
+ "y": 4
+ },
+ "heatmap": {},
+ "hideZeroBuckets": false,
+ "highlightCards": true,
+ "id": 3,
+ "legend": {
+ "show": true
+ },
+ "pluginVersion": "6.7.4",
+ "reverseYBuckets": false,
+ "scopedVars": {
+ "traffic_direction": {
+ "selected": false,
+ "text": "incoming",
+ "value": "incoming"
+ }
+ },
+ "targets": [
+ {
+ "expr": "rate(inet_tcp_metrics_packet_size_bytes_bucket{src_node=\"$src_node\", dst_node=\"$dst_node\", traffic_direction=\"$traffic_direction\"}[$time_interval])",
+ "format": "heatmap",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "{{le}}",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Packets per second by size in bytes, measured over $time_interval",
+ "tooltip": {
+ "show": true,
+ "showHistogram": true
+ },
+ "tooltipDecimals": 1,
+ "type": "heatmap",
+ "xAxis": {
+ "show": true
+ },
+ "xBucketNumber": null,
+ "xBucketSize": null,
+ "yAxis": {
+ "decimals": null,
+ "format": "none",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true,
+ "splitFactor": null
+ },
+ "yBucketBound": "auto",
+ "yBucketNumber": null,
+ "yBucketSize": null
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": null,
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "links": []
+ },
+ "overrides": []
+ },
+ "fill": 0,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 7,
+ "w": 12,
+ "x": 0,
+ "y": 12
+ },
+ "hiddenSeries": false,
+ "id": 20,
+ "legend": {
+ "alignAsTable": false,
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "rightSide": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null",
+ "options": {
+ "alertThreshold": true
+ },
+ "percentage": false,
+ "pluginVersion": "7.2.1",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "scopedVars": {
+ "traffic_direction": {
+ "selected": false,
+ "text": "incoming",
+ "value": "incoming"
+ }
+ },
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "rate(inet_tcp_metrics_packet_type_total{src_node=\"$src_node\", dst_node=\"$dst_node\", traffic_direction=\"$traffic_direction\"}[$time_interval])",
+ "interval": "",
+ "legendFormat": "{{packet_type}}",
+ "refId": "A"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Packets by type per second, measured over $time_interval",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "$$hashKey": "object:1076",
+ "decimals": 1,
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "$$hashKey": "object:1077",
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "cards": {
+ "cardPadding": null,
+ "cardRound": null
+ },
+ "color": {
+ "cardColor": "rgb(255, 255, 255)",
+ "colorScale": "sqrt",
+ "colorScheme": "interpolatePlasma",
+ "exponent": 0.4,
+ "max": null,
+ "min": null,
+ "mode": "spectrum"
+ },
+ "dataFormat": "tsbuckets",
+ "datasource": null,
+ "description": "Messages larger than 64KB will be split into multiple fragments. This functionality was introduced in OTP-22 so that smaller messages can be interleaved between larges messages.\n\nWhen distribution runs in debug mode, messages larger than 1KB will be split into multiple fragments.\n\n* [Distribution Header for fragmented messages](http://erlang.org/doc/apps/erts/erl_ext_dist.html#distribution-header-for-fragmented-messages)",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 7,
+ "w": 12,
+ "x": 12,
+ "y": 12
+ },
+ "heatmap": {},
+ "hideZeroBuckets": false,
+ "highlightCards": true,
+ "id": 67,
+ "legend": {
+ "show": true
+ },
+ "pluginVersion": "6.7.4",
+ "reverseYBuckets": false,
+ "scopedVars": {
+ "traffic_direction": {
+ "selected": false,
+ "text": "incoming",
+ "value": "incoming"
+ }
+ },
+ "targets": [
+ {
+ "expr": "rate(inet_tcp_metrics_fragmented_messages_total_bucket{src_node=\"$src_node\", dst_node=\"$dst_node\", traffic_direction=\"$traffic_direction\"}[$time_interval])",
+ "format": "heatmap",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "{{le}}",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Fragmented messages per second by fragments, measured over $time_interval",
+ "tooltip": {
+ "show": true,
+ "showHistogram": true
+ },
+ "tooltipDecimals": 1,
+ "type": "heatmap",
+ "xAxis": {
+ "show": true
+ },
+ "xBucketNumber": null,
+ "xBucketSize": null,
+ "yAxis": {
+ "decimals": null,
+ "format": "none",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true,
+ "splitFactor": null
+ },
+ "yBucketBound": "auto",
+ "yBucketNumber": null,
+ "yBucketSize": null
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": null,
+ "description": "* [ControlMessage types](http://erlang.org/doc/apps/erts/erl_dist_protocol.html#control_message)",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "links": []
+ },
+ "overrides": []
+ },
+ "fill": 0,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 7,
+ "w": 24,
+ "x": 0,
+ "y": 19
+ },
+ "hiddenSeries": false,
+ "id": 93,
+ "legend": {
+ "alignAsTable": false,
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "rightSide": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null",
+ "options": {
+ "alertThreshold": true
+ },
+ "percentage": false,
+ "pluginVersion": "7.2.1",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "scopedVars": {
+ "traffic_direction": {
+ "selected": false,
+ "text": "incoming",
+ "value": "incoming"
+ }
+ },
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "rate(inet_tcp_metrics_control_message_total{src_node=\"$src_node\", dst_node=\"$dst_node\", traffic_direction=\"$traffic_direction\"}[$time_interval])",
+ "interval": "",
+ "legendFormat": "{{control_message}}",
+ "refId": "A"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Number of control messages per second, measured over $time_interval",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "$$hashKey": "object:1076",
+ "decimals": 1,
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "$$hashKey": "object:1077",
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "datasource": null,
+ "description": "The distribution header is sent by the erlang distribution to carry metadata about the coming control message and potential payload. It is primarily used to handle the atom cache in the Erlang distribution. Since OTP-22 it is also used to fragment large distribution messages into multiple smaller fragments.\n\n* https://erlang.org/doc/apps/erts/erl_ext_dist.html#normal-distribution-header",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "decimals": 1,
+ "links": [],
+ "mappings": [],
+ "thresholds": {
+ "mode": "percentage",
+ "steps": [
+ {
+ "color": "rgb(133, 133, 133)",
+ "value": null
+ },
+ {
+ "color": "rgb(255, 255, 255)",
+ "value": 80
+ }
+ ]
+ },
+ "unit": "short"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 7,
+ "w": 12,
+ "x": 0,
+ "y": 26
+ },
+ "id": 130,
+ "options": {
+ "displayMode": "gradient",
+ "orientation": "vertical",
+ "reduceOptions": {
+ "calcs": [
+ "max"
+ ],
+ "fields": "",
+ "limit": 100,
+ "values": false
+ },
+ "showUnfilled": false
+ },
+ "pluginVersion": "7.2.1",
+ "scopedVars": {
+ "traffic_direction": {
+ "selected": false,
+ "text": "incoming",
+ "value": "incoming"
+ }
+ },
+ "targets": [
+ {
+ "expr": "rate(inet_tcp_metrics_atom_cache_ref_new_entries_total_bucket{src_node=\"$src_node\", dst_node=\"$dst_node\", traffic_direction=\"$traffic_direction\"}[$time_interval])",
+ "format": "heatmap",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "{{le}}",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "New ATOM_CACHE_REF entries per second by entries, measured over $time_interval",
+ "type": "bargauge"
+ },
+ {
+ "cards": {
+ "cardPadding": null,
+ "cardRound": null
+ },
+ "color": {
+ "cardColor": "rgb(255, 255, 255)",
+ "colorScale": "sqrt",
+ "colorScheme": "interpolatePlasma",
+ "exponent": 0.4,
+ "max": null,
+ "min": null,
+ "mode": "spectrum"
+ },
+ "dataFormat": "tsbuckets",
+ "datasource": null,
+ "description": "The distribution header is sent by the erlang distribution to carry metadata about the coming control message and potential payload. It is primarily used to handle the atom cache in the Erlang distribution. Since OTP-22 it is also used to fragment large distribution messages into multiple smaller fragments.\n\n* https://erlang.org/doc/apps/erts/erl_ext_dist.html#normal-distribution-header",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 7,
+ "w": 12,
+ "x": 12,
+ "y": 26
+ },
+ "heatmap": {},
+ "hideZeroBuckets": false,
+ "highlightCards": true,
+ "id": 138,
+ "legend": {
+ "show": true
+ },
+ "pluginVersion": "6.7.4",
+ "reverseYBuckets": false,
+ "scopedVars": {
+ "traffic_direction": {
+ "selected": false,
+ "text": "incoming",
+ "value": "incoming"
+ }
+ },
+ "targets": [
+ {
+ "expr": "rate(inet_tcp_metrics_atom_cache_ref_new_entries_total_bucket{src_node=\"$src_node\", dst_node=\"$dst_node\", traffic_direction=\"$traffic_direction\"}[$time_interval])",
+ "format": "heatmap",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "{{le}}",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "New ATOM_CACHE_REF entries per second by entries, measured over $time_interval",
+ "tooltip": {
+ "show": true,
+ "showHistogram": true
+ },
+ "tooltipDecimals": 1,
+ "type": "heatmap",
+ "xAxis": {
+ "show": true
+ },
+ "xBucketNumber": null,
+ "xBucketSize": null,
+ "yAxis": {
+ "decimals": null,
+ "format": "none",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true,
+ "splitFactor": null
+ },
+ "yBucketBound": "auto",
+ "yBucketNumber": null,
+ "yBucketSize": null
+ },
+ {
+ "datasource": null,
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "decimals": 1,
+ "links": [],
+ "mappings": [],
+ "thresholds": {
+ "mode": "percentage",
+ "steps": [
+ {
+ "color": "rgb(133, 133, 133)",
+ "value": null
+ },
+ {
+ "color": "rgb(255, 255, 255)",
+ "value": 80
+ }
+ ]
+ },
+ "unit": "short"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 7,
+ "w": 12,
+ "x": 0,
+ "y": 33
+ },
+ "id": 147,
+ "options": {
+ "displayMode": "gradient",
+ "orientation": "vertical",
+ "reduceOptions": {
+ "calcs": [
+ "max"
+ ],
+ "fields": "",
+ "limit": 100,
+ "values": false
+ },
+ "showUnfilled": false
+ },
+ "pluginVersion": "7.2.1",
+ "scopedVars": {
+ "traffic_direction": {
+ "selected": false,
+ "text": "incoming",
+ "value": "incoming"
+ }
+ },
+ "targets": [
+ {
+ "expr": "rate(inet_tcp_metrics_atom_cache_ref_entries_total_bucket{src_node=\"$src_node\", dst_node=\"$dst_node\", traffic_direction=\"$traffic_direction\"}[$time_interval])",
+ "format": "heatmap",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "{{le}}",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Total ATOM_CACHE_REF entries per second by entries, measured over $time_interval",
+ "type": "bargauge"
+ },
+ {
+ "cards": {
+ "cardPadding": null,
+ "cardRound": null
+ },
+ "color": {
+ "cardColor": "rgb(255, 255, 255)",
+ "colorScale": "sqrt",
+ "colorScheme": "interpolatePlasma",
+ "exponent": 0.4,
+ "max": null,
+ "min": null,
+ "mode": "spectrum"
+ },
+ "dataFormat": "tsbuckets",
+ "datasource": null,
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 7,
+ "w": 12,
+ "x": 12,
+ "y": 33
+ },
+ "heatmap": {},
+ "hideZeroBuckets": false,
+ "highlightCards": true,
+ "id": 148,
+ "legend": {
+ "show": true
+ },
+ "pluginVersion": "6.7.4",
+ "reverseYBuckets": false,
+ "scopedVars": {
+ "traffic_direction": {
+ "selected": false,
+ "text": "incoming",
+ "value": "incoming"
+ }
+ },
+ "targets": [
+ {
+ "expr": "rate(inet_tcp_metrics_atom_cache_ref_entries_total_bucket{src_node=\"$src_node\", dst_node=\"$dst_node\", traffic_direction=\"$traffic_direction\"}[$time_interval])",
+ "format": "heatmap",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "{{le}}",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Total ATOM_CACHE_REF entries per second by entries, measured over $time_interval",
+ "tooltip": {
+ "show": true,
+ "showHistogram": true
+ },
+ "tooltipDecimals": 1,
+ "type": "heatmap",
+ "xAxis": {
+ "show": true
+ },
+ "xBucketNumber": null,
+ "xBucketSize": null,
+ "yAxis": {
+ "decimals": null,
+ "format": "none",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true,
+ "splitFactor": null
+ },
+ "yBucketBound": "auto",
+ "yBucketNumber": null,
+ "yBucketSize": null
+ }
+ ],
+ "repeat": "traffic_direction",
+ "title": "$src_node $traffic_direction traffic from/to $dst_node",
+ "type": "row"
+ },
+ {
+ "collapsed": true,
+ "datasource": null,
+ "gridPos": {
+ "h": 1,
+ "w": 24,
+ "x": 0,
+ "y": 5
+ },
+ "id": 159,
+ "panels": [
+ {
+ "datasource": null,
+ "description": "* `1.0e-6` 1 microsecond\n* `1.0e-5` 10 microseconds\n* `0.0001` 100 microseconds\n* `0.001` 1 millisecond",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "decimals": 1,
+ "links": [],
+ "mappings": [],
+ "thresholds": {
+ "mode": "percentage",
+ "steps": [
+ {
+ "color": "rgb(133, 133, 133)",
+ "value": null
+ },
+ {
+ "color": "rgb(255, 255, 255)",
+ "value": 80
+ }
+ ]
+ },
+ "unit": "short"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 9,
+ "w": 12,
+ "x": 0,
+ "y": 6
+ },
+ "id": 126,
+ "options": {
+ "displayMode": "gradient",
+ "orientation": "vertical",
+ "reduceOptions": {
+ "calcs": [
+ "max"
+ ],
+ "fields": "",
+ "limit": 100,
+ "values": false
+ },
+ "showUnfilled": false
+ },
+ "pluginVersion": "7.2.1",
+ "targets": [
+ {
+ "expr": "rate(inet_tcp_metrics_receive_duration_seconds_bucket{src_node=\"$src_node\", dst_node=\"$dst_node\"}[$time_interval])",
+ "format": "heatmap",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "{{le}}",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Packets received from the socket by duration, measured over $time_interval",
+ "type": "bargauge"
+ },
+ {
+ "cards": {
+ "cardPadding": null,
+ "cardRound": null
+ },
+ "color": {
+ "cardColor": "rgb(255, 255, 255)",
+ "colorScale": "sqrt",
+ "colorScheme": "interpolatePlasma",
+ "exponent": 0.4,
+ "max": null,
+ "min": null,
+ "mode": "spectrum"
+ },
+ "dataFormat": "tsbuckets",
+ "datasource": null,
+ "description": "Lowest value is in microsecond, highest is in milliseconds.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 9,
+ "w": 12,
+ "x": 12,
+ "y": 6
+ },
+ "heatmap": {},
+ "hideZeroBuckets": false,
+ "highlightCards": true,
+ "id": 127,
+ "legend": {
+ "show": true
+ },
+ "pluginVersion": "6.7.4",
+ "reverseYBuckets": false,
+ "targets": [
+ {
+ "expr": "rate(inet_tcp_metrics_receive_duration_seconds_bucket{src_node=\"$src_node\", dst_node=\"$dst_node\"}[$time_interval])",
+ "format": "heatmap",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "{{le}}",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Packets received from the socket by duration, measured over $time_interval",
+ "tooltip": {
+ "show": true,
+ "showHistogram": true
+ },
+ "tooltipDecimals": 1,
+ "type": "heatmap",
+ "xAxis": {
+ "show": true
+ },
+ "xBucketNumber": null,
+ "xBucketSize": null,
+ "yAxis": {
+ "decimals": null,
+ "format": "none",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true,
+ "splitFactor": null
+ },
+ "yBucketBound": "auto",
+ "yBucketNumber": null,
+ "yBucketSize": null
+ },
+ {
+ "datasource": null,
+ "description": "* [erlang:dist_ctrl_put_data/2](https://erlang.org/doc/man/erlang.html#dist_ctrl_put_data-2)\n\nTime duration conversion\n* `1.0e-6` 1 microsecond\n* `1.0e-5` 10 microseconds\n* `0.0001` 100 microseconds\n* `0.001` 1 millisecond",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "decimals": 1,
+ "links": [],
+ "mappings": [],
+ "thresholds": {
+ "mode": "percentage",
+ "steps": [
+ {
+ "color": "rgb(133, 133, 133)",
+ "value": null
+ },
+ {
+ "color": "rgb(255, 255, 255)",
+ "value": 80
+ }
+ ]
+ },
+ "unit": "short"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 9,
+ "w": 12,
+ "x": 0,
+ "y": 15
+ },
+ "id": 128,
+ "options": {
+ "displayMode": "gradient",
+ "orientation": "vertical",
+ "reduceOptions": {
+ "calcs": [
+ "max"
+ ],
+ "fields": "",
+ "limit": 100,
+ "values": false
+ },
+ "showUnfilled": false
+ },
+ "pluginVersion": "7.2.1",
+ "targets": [
+ {
+ "expr": "rate(inet_tcp_metrics_dist_ctrl_put_data_duration_seconds_bucket{src_node=\"$src_node\", dst_node=\"$dst_node\"}[$time_interval])",
+ "format": "heatmap",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "{{le}}",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Calls that enqueue packets into the VM by duration, measured over $time_interval",
+ "type": "bargauge"
+ },
+ {
+ "cards": {
+ "cardPadding": null,
+ "cardRound": null
+ },
+ "color": {
+ "cardColor": "rgb(255, 255, 255)",
+ "colorScale": "sqrt",
+ "colorScheme": "interpolatePlasma",
+ "exponent": 0.4,
+ "max": null,
+ "min": null,
+ "mode": "spectrum"
+ },
+ "dataFormat": "tsbuckets",
+ "datasource": null,
+ "description": "* [erlang:dist_ctrl_put_data/2](https://erlang.org/doc/man/erlang.html#dist_ctrl_put_data-2)",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 9,
+ "w": 12,
+ "x": 12,
+ "y": 15
+ },
+ "heatmap": {},
+ "hideZeroBuckets": false,
+ "highlightCards": true,
+ "id": 129,
+ "legend": {
+ "show": true
+ },
+ "pluginVersion": "6.7.4",
+ "reverseYBuckets": false,
+ "targets": [
+ {
+ "expr": "rate(inet_tcp_metrics_dist_ctrl_put_data_duration_seconds_bucket{src_node=\"$src_node\", dst_node=\"$dst_node\"}[$time_interval])",
+ "format": "heatmap",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "{{le}}",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Calls that enqueue packets into the VM by duration, measured over $time_interval",
+ "tooltip": {
+ "show": true,
+ "showHistogram": true
+ },
+ "tooltipDecimals": 1,
+ "type": "heatmap",
+ "xAxis": {
+ "show": true
+ },
+ "xBucketNumber": null,
+ "xBucketSize": null,
+ "yAxis": {
+ "decimals": null,
+ "format": "none",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true,
+ "splitFactor": null
+ },
+ "yBucketBound": "auto",
+ "yBucketNumber": null,
+ "yBucketSize": null
+ }
+ ],
+ "title": "Function calls for receiving data (incoming traffic)",
+ "type": "row"
+ },
+ {
+ "collapsed": false,
+ "datasource": null,
+ "gridPos": {
+ "h": 1,
+ "w": 24,
+ "x": 0,
+ "y": 6
+ },
+ "id": 109,
+ "panels": [],
+ "title": "Function calls for sending data (outgoing traffic)",
+ "type": "row"
+ },
+ {
+ "datasource": null,
+ "description": "* [erlang:dist_ctrl_get_data/1](https://erlang.org/doc/man/erlang.html#dist_ctrl_get_data-1)\n\nTime duration conversion\n* `1.0e-6` 1 microsecond\n* `1.0e-5` 10 microseconds\n* `0.0001` 100 microseconds\n* `0.001` 1 millisecond",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "decimals": 1,
+ "links": [],
+ "mappings": [],
+ "thresholds": {
+ "mode": "percentage",
+ "steps": [
+ {
+ "color": "rgb(133, 133, 133)",
+ "value": null
+ },
+ {
+ "color": "rgb(255, 255, 255)",
+ "value": 80
+ }
+ ]
+ },
+ "unit": "short"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 9,
+ "w": 12,
+ "x": 0,
+ "y": 7
+ },
+ "id": 116,
+ "options": {
+ "displayMode": "gradient",
+ "orientation": "vertical",
+ "reduceOptions": {
+ "calcs": [
+ "max"
+ ],
+ "fields": "",
+ "limit": 100,
+ "values": false
+ },
+ "showUnfilled": false
+ },
+ "pluginVersion": "7.2.1",
+ "targets": [
+ {
+ "expr": "rate(inet_tcp_metrics_dist_ctrl_get_data_duration_seconds_bucket{src_node=\"$src_node\", dst_node=\"$dst_node\"}[$time_interval])",
+ "format": "heatmap",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "{{le}}",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Calls that get packets from the VM by duration, measured over $time_interval",
+ "type": "bargauge"
+ },
+ {
+ "cards": {
+ "cardPadding": null,
+ "cardRound": null
+ },
+ "color": {
+ "cardColor": "rgb(255, 255, 255)",
+ "colorScale": "sqrt",
+ "colorScheme": "interpolatePlasma",
+ "exponent": 0.4,
+ "max": null,
+ "min": null,
+ "mode": "spectrum"
+ },
+ "dataFormat": "tsbuckets",
+ "datasource": null,
+ "description": "* [erlang:dist_ctrl_get_data/1](https://erlang.org/doc/man/erlang.html#dist_ctrl_get_data-1)\n\nLowest value is in microseconds, highest is in milliseconds.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 9,
+ "w": 12,
+ "x": 12,
+ "y": 7
+ },
+ "heatmap": {},
+ "hideZeroBuckets": false,
+ "highlightCards": true,
+ "id": 117,
+ "legend": {
+ "show": true
+ },
+ "pluginVersion": "6.7.4",
+ "reverseYBuckets": false,
+ "targets": [
+ {
+ "expr": "rate(inet_tcp_metrics_dist_ctrl_get_data_duration_seconds_bucket{src_node=\"$src_node\", dst_node=\"$dst_node\"}[$time_interval])",
+ "format": "heatmap",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "{{le}}",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Calls that get packets from the VM by duration, measured over $time_interval",
+ "tooltip": {
+ "show": true,
+ "showHistogram": true
+ },
+ "tooltipDecimals": 1,
+ "type": "heatmap",
+ "xAxis": {
+ "show": true
+ },
+ "xBucketNumber": null,
+ "xBucketSize": null,
+ "yAxis": {
+ "decimals": null,
+ "format": "none",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true,
+ "splitFactor": null
+ },
+ "yBucketBound": "auto",
+ "yBucketNumber": null,
+ "yBucketSize": null
+ },
+ {
+ "datasource": null,
+ "description": "The equivalent of `gen_tcp:send/1`, but for Erlang Distribution.\n\nTime duration conversion\n* `1.0e-6` 1 microsecond\n* `1.0e-5` 10 microseconds\n* `0.0001` 100 microseconds\n* `0.001` 1 millisecond",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "decimals": 1,
+ "links": [],
+ "mappings": [],
+ "thresholds": {
+ "mode": "percentage",
+ "steps": [
+ {
+ "color": "rgb(133, 133, 133)",
+ "value": null
+ },
+ {
+ "color": "rgb(255, 255, 255)",
+ "value": 80
+ }
+ ]
+ },
+ "unit": "short"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 9,
+ "w": 12,
+ "x": 0,
+ "y": 16
+ },
+ "id": 124,
+ "options": {
+ "displayMode": "gradient",
+ "orientation": "vertical",
+ "reduceOptions": {
+ "calcs": [
+ "max"
+ ],
+ "fields": "",
+ "limit": 100,
+ "values": false
+ },
+ "showUnfilled": false
+ },
+ "pluginVersion": "7.2.1",
+ "targets": [
+ {
+ "expr": "rate(inet_tcp_metrics_driver_send_duration_seconds_bucket{src_node=\"$src_node\", dst_node=\"$dst_node\"}[$time_interval])",
+ "format": "heatmap",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "{{le}}",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Calls that send packets to the socket by duration, measured over $time_interval",
+ "type": "bargauge"
+ },
+ {
+ "cards": {
+ "cardPadding": null,
+ "cardRound": null
+ },
+ "color": {
+ "cardColor": "rgb(255, 255, 255)",
+ "colorScale": "sqrt",
+ "colorScheme": "interpolatePlasma",
+ "exponent": 0.4,
+ "max": null,
+ "min": null,
+ "mode": "spectrum"
+ },
+ "dataFormat": "tsbuckets",
+ "datasource": null,
+ "description": "The equivalent of `gen_tcp:send/1`, but for Erlang Distribution.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 9,
+ "w": 12,
+ "x": 12,
+ "y": 16
+ },
+ "heatmap": {},
+ "hideZeroBuckets": false,
+ "highlightCards": true,
+ "id": 125,
+ "legend": {
+ "show": true
+ },
+ "pluginVersion": "6.7.4",
+ "reverseYBuckets": false,
+ "targets": [
+ {
+ "expr": "rate(inet_tcp_metrics_driver_send_duration_seconds_bucket{src_node=\"$src_node\", dst_node=\"$dst_node\"}[$time_interval])",
+ "format": "heatmap",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "{{le}}",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Calls that send packets to the socket by duration, measured over $time_interval",
+ "tooltip": {
+ "show": true,
+ "showHistogram": true
+ },
+ "tooltipDecimals": 1,
+ "type": "heatmap",
+ "xAxis": {
+ "show": true
+ },
+ "xBucketNumber": null,
+ "xBucketSize": null,
+ "yAxis": {
+ "decimals": null,
+ "format": "none",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true,
+ "splitFactor": null
+ },
+ "yBucketBound": "auto",
+ "yBucketNumber": null,
+ "yBucketSize": null
+ },
+ {
+ "content": "* Publish rabbitmq Docker image with inet_tcp_metrics\n* Publish dashboard to grafana.com\n* Write README\n* Share on the erlang-questions mailing list",
+ "datasource": null,
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 3,
+ "w": 24,
+ "x": 0,
+ "y": 25
+ },
+ "id": 101,
+ "mode": "markdown",
+ "options": {
+ "content": "* Publish rabbitmq Docker image with inet_tcp_metrics\n* Publish dashboard to grafana.com\n* Write README\n* Share on the erlang-questions mailing list",
+ "mode": "markdown"
+ },
+ "pluginVersion": "7.1.0",
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "TODO",
+ "type": "text"
+ }
+ ],
+ "refresh": "10s",
+ "schemaVersion": 26,
+ "style": "dark",
+ "tags": [],
+ "templating": {
+ "list": [
+ {
+ "current": {
+ "selected": false,
+ "text": "default",
+ "value": "default"
+ },
+ "hide": 2,
+ "includeAll": false,
+ "label": "datasource",
+ "multi": false,
+ "name": "DS_PROMETHEUS",
+ "options": [],
+ "query": "prometheus",
+ "refresh": 1,
+ "regex": "",
+ "skipUrlSync": false,
+ "type": "datasource"
+ },
+ {
+ "allValue": null,
+ "current": {},
+ "datasource": null,
+ "definition": "label_values(inet_tcp_metrics_packet_size_bytes_sum, src_node)",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Source node",
+ "multi": false,
+ "name": "src_node",
+ "options": [],
+ "query": "label_values(inet_tcp_metrics_packet_size_bytes_sum, src_node)",
+ "refresh": 2,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 1,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ },
+ {
+ "allValue": null,
+ "current": {},
+ "datasource": null,
+ "definition": "label_values(inet_tcp_metrics_packet_size_bytes_sum{src_node=\"$src_node\"}, dst_node)",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Destination node",
+ "multi": false,
+ "name": "dst_node",
+ "options": [],
+ "query": "label_values(inet_tcp_metrics_packet_size_bytes_sum{src_node=\"$src_node\"}, dst_node)",
+ "refresh": 2,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 1,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ },
+ {
+ "allValue": null,
+ "current": {},
+ "datasource": null,
+ "definition": "label_values(inet_tcp_metrics_packet_size_bytes_sum, traffic_direction)",
+ "hide": 2,
+ "includeAll": true,
+ "label": "Traffic direction",
+ "multi": true,
+ "name": "traffic_direction",
+ "options": [],
+ "query": "label_values(inet_tcp_metrics_packet_size_bytes_sum, traffic_direction)",
+ "refresh": 2,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 1,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ },
+ {
+ "auto": false,
+ "auto_count": 30,
+ "auto_min": "10s",
+ "current": {
+ "selected": false,
+ "text": "1m",
+ "value": "1m"
+ },
+ "hide": 0,
+ "label": "Time interval",
+ "name": "time_interval",
+ "options": [
+ {
+ "selected": true,
+ "text": "1m",
+ "value": "1m"
+ },
+ {
+ "selected": false,
+ "text": "5m",
+ "value": "5m"
+ },
+ {
+ "selected": false,
+ "text": "10m",
+ "value": "10m"
+ },
+ {
+ "selected": false,
+ "text": "30m",
+ "value": "30m"
+ },
+ {
+ "selected": false,
+ "text": "1h",
+ "value": "1h"
+ }
+ ],
+ "query": "1m,5m,10m,30m,1h",
+ "refresh": 2,
+ "skipUrlSync": false,
+ "type": "interval"
+ }
+ ]
+ },
+ "time": {
+ "from": "now-5m",
+ "to": "now"
+ },
+ "timepicker": {
+ "refresh_intervals": [
+ "5s",
+ "10s",
+ "30s",
+ "1m",
+ "5m",
+ "15m",
+ "30m",
+ "1h",
+ "2h",
+ "1d"
+ ]
+ },
+ "timezone": "",
+ "title": "inet_tcp_metrics",
+ "uid": "RCTIrQDMz",
+ "version": 20210322
+}
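Most panels in this dashboard plot `rate()` over the `inet_tcp_metrics_*` histogram buckets and label the series by `le`, which is what drives the heatmaps and bar gauges. Below is a hedged sketch of inspecting the same data by hand; the Prometheus address and node name are placeholders, and the `_count` series is assumed to follow the standard Prometheus histogram convention alongside `_bucket` and `_sum`.

```shell
# Minimal sketch; host and node names are placeholders.
PROM=http://localhost:9090

# Per-bucket packet rate, the series behind the "Packets per second by size" heatmap
curl -fsS -G "${PROM}/api/v1/query" --data-urlencode \
  'query=rate(inet_tcp_metrics_packet_size_bytes_bucket{src_node="rabbit@rmq0-dist-metrics"}[1m])'

# Mean packet size over the same window, derived from the _sum and _count series
curl -fsS -G "${PROM}/api/v1/query" --data-urlencode \
  'query=rate(inet_tcp_metrics_packet_size_bytes_sum[1m]) / rate(inet_tcp_metrics_packet_size_bytes_count[1m])'
```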
diff --git a/deps/rabbitmq_prometheus/docker/grafana/dashboards/rabbitmq-exporter_vs_rabbitmq-prometheus.json b/deps/rabbitmq_prometheus/docker/grafana/dashboards/rabbitmq-exporter_vs_rabbitmq-prometheus.json
index 1d84e11968..c59c6b4b18 100644
--- a/deps/rabbitmq_prometheus/docker/grafana/dashboards/rabbitmq-exporter_vs_rabbitmq-prometheus.json
+++ b/deps/rabbitmq_prometheus/docker/grafana/dashboards/rabbitmq-exporter_vs_rabbitmq-prometheus.json
@@ -4,13 +4,13 @@
"type": "grafana",
"id": "grafana",
"name": "Grafana",
- "version": "6.0.0"
+ "version": "7.0.0"
},
{
"type": "datasource",
"id": "prometheus",
"name": "prometheus",
- "version": "1.0.0"
+ "version": "2.0.0"
},
{
"type": "panel",
diff --git a/deps/rabbitmq_prometheus/docker/grafana/publish/rabbitmq-stream-14798.md b/deps/rabbitmq_prometheus/docker/grafana/publish/rabbitmq-stream-14798.md
new file mode 100644
index 0000000000..1d09ea8afa
--- /dev/null
+++ b/deps/rabbitmq_prometheus/docker/grafana/publish/rabbitmq-stream-14798.md
@@ -0,0 +1,32 @@
+# RabbitMQ-Stream
+
+The stream protocol was introduced in RabbitMQ 3.9.0 and is meant to be used in conjunction with Streams.
+
+Streams are a new persistent and replicated data structure which models an append-only log with non-destructive consumer semantics.
+
+Learn more about [RabbitMQ Streams](https://www.rabbitmq.com/streams.html).
+
+These blog posts expand on the documentation:
+- [Streams Overview](https://blog.rabbitmq.com/posts/2021/07/rabbitmq-streams-overview/) (includes slides)
+- [First Application with Streams](https://blog.rabbitmq.com/posts/2021/07/rabbitmq-streams-first-application/) (includes video)
+- [Connecting to Streams](https://blog.rabbitmq.com/posts/2021/07/connecting-to-streams/) (includes diagrams)
+
+## Metrics displayed
+
+- Stream publishers
+- Stream messages received / s
+- Stream messages confirmed to publishers / s
+- Stream consumers
+- Stream messages delivered / s
+- Errors since boot
+
+## Filter by
+
+- Namespace
+- RabbitMQ Cluster
+
+
+## Requires
+
+- `rabbitmq-stream` plugin to be enabled
+- `rabbitmq-prometheus` plugin to be enabled
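Both requirements can be satisfied with `rabbitmq-plugins`; the plugin names passed to the CLI use underscores (`rabbitmq_stream`, `rabbitmq_prometheus`). A minimal sketch, assuming a local node and the default Prometheus port 15692:

```shell
# Enable the two plugins the dashboard depends on
rabbitmq-plugins enable rabbitmq_stream rabbitmq_prometheus

# rabbitmq_prometheus serves metrics on port 15692; once streams see traffic,
# the stream counters this dashboard queries should show up here.
curl -fsS http://localhost:15692/metrics | grep -m 5 'rabbitmq_global_stream'
```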
diff --git a/deps/rabbitmq_prometheus/docker/grafana/publish/rabbitmq-stream-bottom-2021-07-29-original.png b/deps/rabbitmq_prometheus/docker/grafana/publish/rabbitmq-stream-bottom-2021-07-29-original.png
new file mode 100644
index 0000000000..4e8358095a
--- /dev/null
+++ b/deps/rabbitmq_prometheus/docker/grafana/publish/rabbitmq-stream-bottom-2021-07-29-original.png
Binary files differ
diff --git a/deps/rabbitmq_prometheus/docker/grafana/publish/rabbitmq-stream-bottom-2021-07-29.jpg b/deps/rabbitmq_prometheus/docker/grafana/publish/rabbitmq-stream-bottom-2021-07-29.jpg
new file mode 100644
index 0000000000..3023cba03a
--- /dev/null
+++ b/deps/rabbitmq_prometheus/docker/grafana/publish/rabbitmq-stream-bottom-2021-07-29.jpg
Binary files differ
diff --git a/deps/rabbitmq_prometheus/docker/grafana/publish/rabbitmq-stream-logo-2021-07-29-original.jpg b/deps/rabbitmq_prometheus/docker/grafana/publish/rabbitmq-stream-logo-2021-07-29-original.jpg
new file mode 100644
index 0000000000..85f5331fa4
--- /dev/null
+++ b/deps/rabbitmq_prometheus/docker/grafana/publish/rabbitmq-stream-logo-2021-07-29-original.jpg
Binary files differ
diff --git a/deps/rabbitmq_prometheus/docker/grafana/publish/rabbitmq-stream-top-2021-07-29-original.png b/deps/rabbitmq_prometheus/docker/grafana/publish/rabbitmq-stream-top-2021-07-29-original.png
new file mode 100644
index 0000000000..a729661862
--- /dev/null
+++ b/deps/rabbitmq_prometheus/docker/grafana/publish/rabbitmq-stream-top-2021-07-29-original.png
Binary files differ
diff --git a/deps/rabbitmq_prometheus/docker/grafana/publish/rabbitmq-stream-top-2021-07-29.jpg b/deps/rabbitmq_prometheus/docker/grafana/publish/rabbitmq-stream-top-2021-07-29.jpg
new file mode 100644
index 0000000000..fba1764d78
--- /dev/null
+++ b/deps/rabbitmq_prometheus/docker/grafana/publish/rabbitmq-stream-top-2021-07-29.jpg
Binary files differ
diff --git a/deps/rabbitmq_prometheus/docker/prometheus.yml b/deps/rabbitmq_prometheus/docker/prometheus.yml
index cec45c7d04..fb91751c05 100644
--- a/deps/rabbitmq_prometheus/docker/prometheus.yml
+++ b/deps/rabbitmq_prometheus/docker/prometheus.yml
@@ -46,6 +46,9 @@ scrape_configs:
- 'rmq0-qq:15692'
- 'rmq1-qq:15692'
- 'rmq2-qq:15692'
+ - 'rmq0-dist-metrics:15692'
+ - 'rmq1-dist-metrics:15692'
+ - 'rmq2-dist-metrics:15692'
- job_name: 'rabbitmq-perf-test'
static_configs:
- targets:
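The three `rmq*-dist-metrics` targets are appended to the existing static scrape job, so the new cluster is scraped alongside the others. Once Prometheus has reloaded its configuration, the targets API can confirm they are healthy; the address and `jq` filter below are a hedged sketch for the docker-compose setup.

```shell
# Minimal sketch; the Prometheus address is a placeholder.
PROM=http://localhost:9090

# /api/v1/targets lists every active target with its scrape health
curl -fsS "${PROM}/api/v1/targets" | jq -r \
  '.data.activeTargets[]
   | select(.labels.instance | test("dist-metrics"))
   | "\(.labels.instance) \(.health)"'
```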
diff --git a/deps/rabbitmq_prometheus/docker/rabbitmq-dist-metrics-definitions.json b/deps/rabbitmq_prometheus/docker/rabbitmq-dist-metrics-definitions.json
new file mode 100644
index 0000000000..c87fbb21df
--- /dev/null
+++ b/deps/rabbitmq_prometheus/docker/rabbitmq-dist-metrics-definitions.json
@@ -0,0 +1,49 @@
+{
+ "global_parameters": [
+ {"name": "cluster_name", "value": "rabbitmq-dist-metrics"}
+ ],
+ "permissions": [
+ {
+ "configure": ".*",
+ "read": ".*",
+ "user": "guest",
+ "vhost": "/",
+ "write": ".*"
+ }
+ ],
+ "policies": [
+ {
+ "apply-to": "queues",
+ "definition": {"ha-mode": "exactly", "ha-params": 1},
+ "name": "ha1",
+ "pattern": "ha1.*",
+ "priority": 0,
+ "vhost": "/"
+ },
+ {
+ "apply-to": "queues",
+ "definition": {"ha-mode": "exactly", "ha-params": 2},
+ "name": "ha2",
+ "pattern": "ha2.*",
+ "priority": 0,
+ "vhost": "/"
+ },
+ {
+ "apply-to": "queues",
+ "definition": {"ha-mode": "exactly", "ha-params": 3},
+ "name": "ha3",
+ "pattern": "ha3.*",
+ "priority": 0,
+ "vhost": "/"
+ }
+ ],
+ "users": [
+ {
+ "hashing_algorithm": "rabbit_password_hashing_sha256",
+ "name": "guest",
+ "password_hash": "hENva+fxJ7gnmaBK/WhwNHOYbvB53/QjNcqhtF4KqF7p21+x",
+ "tags": "administrator"
+ }
+ ],
+ "vhosts": [{"name": "/"}]
+}
diff --git a/deps/rabbitmq_prometheus/docker/rabbitmq-dist-metrics.conf b/deps/rabbitmq_prometheus/docker/rabbitmq-dist-metrics.conf
new file mode 100644
index 0000000000..a253d823d1
--- /dev/null
+++ b/deps/rabbitmq_prometheus/docker/rabbitmq-dist-metrics.conf
@@ -0,0 +1,32 @@
+# https://github.com/rabbitmq/rabbitmq-server/blob/master/docs/rabbitmq.conf.example
+loopback_users.guest = false
+listeners.tcp.default = 5672
+management.listener.port = 15672
+management.listener.ssl = false
+
+vm_memory_high_watermark.absolute = 768MiB
+vm_memory_high_watermark_paging_ratio = 0.2
+
+cluster_name = rabbitmq-dist-metrics
+
+cluster_formation.peer_discovery_backend = rabbit_peer_discovery_classic_config
+cluster_formation.classic_config.nodes.1 = rabbit@rmq0-dist-metrics
+cluster_formation.classic_config.nodes.2 = rabbit@rmq1-dist-metrics
+cluster_formation.classic_config.nodes.3 = rabbit@rmq2-dist-metrics
+
+load_definitions = /etc/rabbitmq/rabbitmq-definitions.json
+
+# background_gc_enabled = true
+
+# Increase the 5s default so that we stay below Prometheus' scrape interval,
+# but still refresh in time for the Prometheus scrape.
+# This is linked to the Prometheus scrape interval & the range used with rate()
+collect_statistics_interval = 10000
+
+# Run RabbitMQ Management in Management-only mode, no stats
+# https://github.com/rabbitmq/rabbitmq-management/pull/707
+# management.disable_stats = true
+
+# Return per-object metrics (unaggregated)
+# https://github.com/rabbitmq/rabbitmq-prometheus/pull/28
+# prometheus.return_per_object_metrics = true
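The `collect_statistics_interval` comment above is the reason for the 10000 ms value: statistics are refreshed just under the Prometheus scrape cadence used in this setup, so rate() ranges like the 1m default used by the inet_tcp_metrics dashboard always span several samples. A hedged sketch for checking the effective value on a running node; the node name is a placeholder.

```shell
# `rabbitmqctl environment` dumps the node's effective application environment;
# collect_statistics_interval is reported in milliseconds (10000 = 10s here).
rabbitmqctl -n rabbit@rmq0-dist-metrics environment | grep collect_statistics_interval
```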
diff --git a/deps/rabbitmq_prometheus/docker/rabbitmq-dist-tls.conf b/deps/rabbitmq_prometheus/docker/rabbitmq-dist-tls.conf
index ecc7de7633..94d6aaab01 100644
--- a/deps/rabbitmq_prometheus/docker/rabbitmq-dist-tls.conf
+++ b/deps/rabbitmq_prometheus/docker/rabbitmq-dist-tls.conf
@@ -15,7 +15,7 @@ cluster_formation.classic_config.nodes.1 = rabbit@rmq0-dist-tls
cluster_formation.classic_config.nodes.2 = rabbit@rmq1-dist-tls
cluster_formation.classic_config.nodes.3 = rabbit@rmq2-dist-tls
-management.load_definitions = /etc/rabbitmq/rabbitmq-definitions.json
+load_definitions = /etc/rabbitmq/rabbitmq-definitions.json
# background_gc_enabled = true
diff --git a/deps/rabbitmq_prometheus/docker/rabbitmq-overview.conf b/deps/rabbitmq_prometheus/docker/rabbitmq-overview.conf
index bc157213d3..b276485b27 100644
--- a/deps/rabbitmq_prometheus/docker/rabbitmq-overview.conf
+++ b/deps/rabbitmq_prometheus/docker/rabbitmq-overview.conf
@@ -14,7 +14,7 @@ cluster_formation.classic_config.nodes.1 = rabbit@rmq0
cluster_formation.classic_config.nodes.2 = rabbit@rmq1
cluster_formation.classic_config.nodes.3 = rabbit@rmq2
-management.load_definitions = /etc/rabbitmq/rabbitmq-definitions.json
+load_definitions = /etc/rabbitmq/rabbitmq-definitions.json
# background_gc_enabled = true
diff --git a/deps/rabbitmq_prometheus/docker/rabbitmq-qq.conf b/deps/rabbitmq_prometheus/docker/rabbitmq-qq.conf
index ac50706dab..a61879342f 100644
--- a/deps/rabbitmq_prometheus/docker/rabbitmq-qq.conf
+++ b/deps/rabbitmq_prometheus/docker/rabbitmq-qq.conf
@@ -15,7 +15,7 @@ cluster_formation.classic_config.nodes.1 = rabbit@rmq0-qq
cluster_formation.classic_config.nodes.2 = rabbit@rmq1-qq
cluster_formation.classic_config.nodes.3 = rabbit@rmq2-qq
-management.load_definitions = /etc/rabbitmq/rabbitmq-definitions.json
+load_definitions = /etc/rabbitmq/rabbitmq-definitions.json
# background_gc_enabled = true
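
The three hunks above switch these Docker configs from the management plugin's management.load_definitions key to the core load_definitions key, so the definitions file is imported at node boot rather than by the management plugin. A quick way to confirm a node picked up the imported definitions (a sketch, assuming the default guest credentials and the management plugin enabled):

    # policies imported from rabbitmq-definitions.json should be listed
    rabbitmqctl list_policies -p /
    # the full definitions can also be read back via the management API
    curl -s -u guest:guest http://localhost:15672/api/definitions
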
diff --git a/deps/rabbitmq_prometheus/erlang.mk b/deps/rabbitmq_prometheus/erlang.mk
deleted file mode 100644
index 4bfafd97de..0000000000
--- a/deps/rabbitmq_prometheus/erlang.mk
+++ /dev/null
@@ -1,7686 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
-ERLANG_MK_WITHOUT = plugins/proper plugins/protobuffs
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
-
-# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
-
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
-
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of statc BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another a key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstact time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour D NS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simply writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating Github-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = erlang library for JSON Web Token
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple non intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = redis erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += esh_mk
-pkg_esh_mk_name = esh_mk
-pkg_esh_mk_description = esh template engine plugin for erlang.mk
-pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
-pkg_esh_mk_fetch = git
-pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
-pkg_esh_mk_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = TOML language erlang parser
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = http://fixprotocol.org/ implementation.
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - an Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = The guards and for Erlang parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
-pkg_gun_homepage = http://ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang irc client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library for efficiently decoding and encoding JSON, written in C.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = erlang driver for rethinkdb
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = library to handle mimetypes
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang Oauth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transformer for using new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between record and proplist
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = automatic Erlang node discovery via redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = pipelined erlang redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang Rest Client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy as nif for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an ID generator for message services.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elastic Search
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX-style parser for broken HTML, in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = transit format for erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU based collation Erlang module
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtension for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = a simple erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler svn repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired by rebar xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based yaml loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = ZAB protocol implemented in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
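The index above is the data that erlang.mk's package mechanism resolves when a bare name appears in DEPS: the matching pkg_*_fetch, pkg_*_repo and pkg_*_commit variables supply the fetch method, repository and revision. A minimal sketch of a consuming project Makefile, assuming the standard erlang.mk workflow (the project name my_app and the pinned tag are hypothetical):

    # Illustrative project Makefile; recon and ranch come from the index above.
    PROJECT = my_app
    DEPS = recon ranch
    # Optional: override the index's pkg_recon_commit with a specific tag.
    dep_recon_commit = 2.5.2
    include erlang.mk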
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
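The search target above can be driven directly from the command line; a brief, hedged example (the query string "pool" is arbitrary):

    # Print every package whose name or description contains "pool" (case-insensitive):
    make search q=pool
    # Without q=, the same target prints the whole index:
    make search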
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
-
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
-
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# Both early and regular plugins use the core_dep_plugin macro.
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
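Given the core_dep_plugin expansion above, DEP_EARLY_PLUGINS is typically set in the project Makefile; a sketch in which the dependency name my_dep and the file path are placeholders:

    # Load deps/my_dep/early-plugins.mk before the rest of the build is configured:
    DEP_EARLY_PLUGINS = my_dep
    # Or point at a specific file inside the dependency:
    DEP_EARLY_PLUGINS = my_dep/mk/early-plugins.mk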
-
-# Query functions.
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
-
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
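To make the query functions above concrete, here is how a hypothetical dep_* declaration resolves (the cowlib URL and version are illustrative, not taken from this repository):

    # Given:
    #   dep_cowlib = git https://github.com/ninenines/cowlib 2.12.1
    # then:
    #   $(call query_fetch_method,cowlib) -> git
    #   $(call query_repo,cowlib)         -> https://github.com/ninenines/cowlib
    #   $(call query_version,cowlib)      -> 2.12.1
    # Without a dep_cowlib line, the pkg_cowlib_* index entries are used instead.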
-
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly, we don't want to include it here;
-# otherwise it'll be treated both as an app and as the top-level project.
-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
-
-export NO_AUTOPATCH
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
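These levels follow erlang.mk's usual V convention; a short, hedged illustration:

    # Default (V=0): print a one-line "DEP <name> (<commit>)" message per dependency.
    # V=2: trace every shell command (set -x) while dependencies are fetched and built.
    make V=2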
-
-# Optimization: don't recompile deps unless truly necessary.
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create an ebin directory for all apps to make sure Erlang recognizes them
-# as proper OTP applications when using -include_lib. This is a temporary
-# fix; a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the top level: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
-
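A hedged illustration of the LOCAL_DEPS behaviour described in the comments above (the application names are placeholders):

    # Top-level Makefile: `make apps` builds only apps/my_core and apps/my_api,
    # even if other applications exist under $(APPS_DIR):
    LOCAL_DEPS = my_core my_api
    # With LOCAL_DEPS unset, every directory under $(APPS_DIR) is built.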
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies after they have been compiled
-# once. A developer working on the top-level project and some of its
-# dependencies at the same time may want to change this behavior.
-# There are two ways to do so:
-# 1. Set `FULL=1` so that all dependencies are visited and
-#    recursively recompiled if necessary.
-# 2. Set `FORCE_REBUILD=` to the specific list of dependencies that
-#    should be recompiled (instead of the whole set).
-
-FORCE_REBUILD ?=
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
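The two options described in the comment above would typically be used as follows (the dependency names are placeholders):

    # Visit every dependency and recompile it if necessary:
    make FULL=1
    # Recompile only the listed dependencies, leaving the rest untouched:
    make FORCE_REBUILD="ranch recon"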
-
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
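One related usage note, derived from the SKIP_DEPS guard above: when SKIP_DEPS is non-empty the deps target becomes a no-op, which can speed up local rebuilds once dependencies are already in place.

    # Skip the dependency build entirely for this invocation:
    make SKIP_DEPS=1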
-
-# Deps related targets.
-
-# @todo rename GNUmakefile and makefile to Makefile first, if they exist
-# While the Makefile could also be named GNUmakefile or makefile,
-# in practice only Makefile has been needed so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
-# if one is provided. Do it for all 3 possible Makefile file names.
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
-
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-# Hex only has a package version. No need to look in the Erlang.mk packages.
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
-
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility purposes with older Erlang.mk configuration.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
-
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
-
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- Output0 = [
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ],
- Output = case "é" of
- [233] -> unicode:characters_to_binary(Output0);
- _ -> Output0
- end,
- ok = file:write_file("$(1)", Output),
- halt()
-endef
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
- echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
-
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
- @:
-
-test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
-test_erlc_verbose_2 = set -x;
-test_erlc_verbose = $(test_erlc_verbose_$(V))
-
-define compile_test_erl
- $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(1)
-endef
-
-ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
-$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
- $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want to fail due to
-# warnings when used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
- " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
- " list-templates List available templates"
-
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
-
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
-
-list-templates:
- $(verbose) @echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code. The "gcc" MSYS2 package also doesn't.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
- "The CI_OTP variable must be defined with the Erlang versions" \
- "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
-
-# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifdef CONCUERROR_TESTS
-
-.PHONY: concuerror distclean-concuerror
-
-# Configuration
-
-CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
-CONCUERROR_OPTS ?=
-
-# Core targets.
-
-check:: concuerror
-
-ifndef KEEP_LOGS
-distclean:: distclean-concuerror
-endif
-
-# Plugin-specific targets.
-
-$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
- $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
- $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
-
-$(CONCUERROR_LOGS_DIR):
- $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
-
-define concuerror_html_report
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="utf-8">
-<title>Concuerror HTML report</title>
-</head>
-<body>
-<h1>Concuerror HTML report</h1>
-<p>Generated on $(concuerror_date)</p>
-<ul>
-$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
-</ul>
-</body>
-</html>
-endef
-
-concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
- $(eval concuerror_date := $(shell date))
- $(eval concuerror_targets := $^)
- $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
-
-define concuerror_target
-.PHONY: concuerror-$1-$2
-
-concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
- $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
- --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
- -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
- $$(CONCUERROR_OPTS) -m $1 -t $2
-endef
-
-$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
-
-distclean-concuerror:
- $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
-
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ct apps-ct distclean-ct
-
-# Configuration.
-
-CT_OPTS ?=
-
-ifneq ($(wildcard $(TEST_DIR)),)
-ifndef CT_SUITES
-CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
-endif
-endif
-CT_SUITES ?=
-CT_LOGS_DIR ?= $(CURDIR)/logs
-
-# Core targets.
-
-tests:: ct
-
-ifndef KEEP_LOGS
-distclean:: distclean-ct
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Common_test targets:" \
- " ct Run all the common_test suites for this project" \
- "" \
- "All your common_test suites have their associated targets." \
- "A suite named http_SUITE can be ran using the ct-http target."
-
-# Plugin-specific targets.
-
-CT_RUN = ct_run \
- -no_auto_compile \
- -noinput \
- -pa $(CURDIR)/ebin $(TEST_DIR) \
- -dir $(TEST_DIR) \
- -logdir $(CT_LOGS_DIR)
-
-ifeq ($(CT_SUITES),)
-ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
-else
-# We do not run tests if we are in an apps/* with no test directory.
-ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
-ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
-endif
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-define ct_app_target
-apps-ct-$1: test-build
- $$(MAKE) -C $1 ct IS_APP=1
-endef
-
-$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
-
-apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
-endif
-
-ifdef t
-ifeq (,$(findstring :,$t))
-CT_EXTRA = -group $t
-else
-t_words = $(subst :, ,$t)
-CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
-endif
-else
-ifdef c
-CT_EXTRA = -case $c
-else
-CT_EXTRA =
-endif
-endif
-
-define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
-endef
-
-$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
-
-distclean-ct:
- $(gen_verbose) rm -rf $(CT_LOGS_DIR)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: plt distclean-plt dialyze
-
-# Configuration.
-
-DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
-export DIALYZER_PLT
-
-PLT_APPS ?=
-DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-DIALYZER_PLT_OPTS ?=
-
-# Core targets.
-
-check:: dialyze
-
-distclean:: distclean-plt
-
-help::
- $(verbose) printf "%s\n" "" \
- "Dialyzer targets:" \
- " plt Build a PLT file for this project" \
- " dialyze Analyze the project using Dialyzer"
-
-# Plugin-specific targets.
-
-define filter_opts.erl
- Opts = init:get_plain_arguments(),
- {Filtered, _} = lists:foldl(fun
- (O, {Os, true}) -> {[O|Os], false};
- (O = "-D", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-I", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-pa", {Os, _}) -> {[O|Os], true};
- (_, Acc) -> Acc
- end, {[], false}, Opts),
- io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
- halt().
-endef
-
-# DIALYZER_PLT is a variable understood directly by Dialyzer.
-#
-# We append the path to erts at the end of the PLT. This works
-# because the PLT file is in the external term format and the
-# function binary_to_term/1 ignores any trailing data.
-$(DIALYZER_PLT): deps app
- $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
- while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
- $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
- erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
- $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
-
-plt: $(DIALYZER_PLT)
-
-distclean-plt:
- $(gen_verbose) rm -f $(DIALYZER_PLT)
-
-ifneq ($(wildcard $(DIALYZER_PLT)),)
-dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
- $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
- grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
- rm $(DIALYZER_PLT); \
- $(MAKE) plt; \
- fi
-else
-dialyze: $(DIALYZER_PLT)
-endif
- $(verbose) dialyzer --no_native `$(ERL) \
- -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
- -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
- " escript Build an executable escript archive" \
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
- $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: eunit apps-eunit
-
-# Configuration
-
-EUNIT_OPTS ?=
-EUNIT_ERL_OPTS ?=
-
-# Core targets.
-
-tests:: eunit
-
-help::
- $(verbose) printf "%s\n" "" \
- "EUnit targets:" \
- " eunit Run all the EUnit tests for this project"
-
-# Plugin-specific targets.
-
-define eunit.erl
- $(call cover.erl)
- CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
- ok -> ok;
- error -> halt(2)
- end,
- CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
- halt()
-endef
-
-EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
-else
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
-endif
-else
-EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
-EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
-
-EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
- $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
-
-eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
-ifneq ($(wildcard src/ $(TEST_DIR)),)
- $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-apps-eunit: test-build
- $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
- [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
- exit $$eunit_retcode
-endif
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
-
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
- " shell Run an erlang shell with SHELL_OPTS or reasonable default"
-
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
-
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
- "ReST sources and 'conf.py' file are expected in directory pointed by" \
- "SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists formats to build (only" \
- "'html' format is generated by default); target directory can be specified by" \
- 'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
- "Additional Sphinx options can be set in SPHINX_OPTS."
-
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
-
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
-.PHONY: triq
-
-# Targets.
-
-tests:: triq
-
-define triq_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
- module -> triq:check($(2));
- function -> triq:check($(2))
- end,
- CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-triq: test-build cover-data-dir
- $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
-else
-triq: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
-endif
-else
-triq: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
- ' xref Run Xrefr using $$XREF_CONFIG as config file if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-COVER_REPORT_DIR ?= cover
-COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
-
-ifdef COVER
-COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
-COVER_DEPS ?=
-endif
-
-# Code coverage for Common Test.
-
-ifdef COVER
-ifdef CT_RUN
-ifneq ($(wildcard $(TEST_DIR)),)
-test-build:: $(TEST_DIR)/ct.cover.spec
-
-$(TEST_DIR)/ct.cover.spec: cover-data-dir
- $(gen_verbose) printf "%s\n" \
- "{incl_app, '$(PROJECT)', details}." \
- "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
- $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
- $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
-
-CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
-endif
-endif
-endif
-
-# Code coverage for other tools.
-
-ifdef COVER
-define cover.erl
- CoverSetup = fun() ->
- Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
- $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
- $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
- end
- end || Dir <- Dirs]
- end,
- CoverExport = fun(Filename) -> cover:export(Filename) end,
-endef
-else
-define cover.erl
- CoverSetup = fun() -> ok end,
- CoverExport = fun(_) -> ok end,
-endef
-endif
-
-# Core targets
-
-ifdef COVER
-ifneq ($(COVER_REPORT_DIR),)
-tests::
- $(verbose) $(MAKE) --no-print-directory cover-report
-endif
-
-cover-data-dir: | $(COVER_DATA_DIR)
-
-$(COVER_DATA_DIR):
- $(verbose) mkdir -p $(COVER_DATA_DIR)
-else
-cover-data-dir:
-endif
-
-clean:: coverdata-clean
-
-ifneq ($(COVER_REPORT_DIR),)
-distclean:: cover-report-clean
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Cover targets:" \
- " cover-report Generate a HTML coverage report from previously collected" \
- " cover data." \
- " all.coverdata Merge all coverdata files into all.coverdata." \
- "" \
- "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
- "target tests additionally generates a HTML coverage report from the combined" \
- "coverdata files from each of these testing tools. HTML reports can be disabled" \
- "by setting COVER_REPORT_DIR to empty."
-
-# Plugin specific targets
-
-COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
-
-.PHONY: coverdata-clean
-coverdata-clean:
- $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
-
-# Merge all coverdata files into one.
-define cover_export.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
-endef
-
-all.coverdata: $(COVERDATA) cover-data-dir
- $(gen_verbose) $(call erlang,$(cover_export.erl))
-
-# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
-# empty if you want the coverdata files but not the HTML report.
-ifneq ($(COVER_REPORT_DIR),)
-
-.PHONY: cover-report-clean cover-report
-
-cover-report-clean:
- $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
-ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
- $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
-endif
-
-ifeq ($(COVERDATA),)
-cover-report:
-else
-
-# Modules which include eunit.hrl always contain one line without coverage
-# because eunit defines test/0 which is never called. We compensate for this.
-EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
- grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
- | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
-
-define cover_report.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- Ms = cover:imported_modules(),
- [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
- ++ ".COVER.html", [html]) || M <- Ms],
- Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
- EunitHrlMods = [$(EUNIT_HRL_MODS)],
- Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
- true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
- TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
- TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
- Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
- TotalPerc = Perc(TotalY, TotalN),
- {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
- io:format(F, "<!DOCTYPE html><html>~n"
- "<head><meta charset=\"UTF-8\">~n"
- "<title>Coverage report</title></head>~n"
- "<body>~n", []),
- io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
- io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
- [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
- "<td>~p%</td></tr>~n",
- [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
- How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
- Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
- io:format(F, "</table>~n"
- "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
- "</body></html>", [How, Date]),
- halt().
-endef
-
-cover-report:
- $(verbose) mkdir -p $(COVER_REPORT_DIR)
- $(gen_verbose) $(call erlang,$(cover_report.erl))
-
-endif
-endif # ifneq ($(COVER_REPORT_DIR),)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
-
-endif
-endif
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
-
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
-
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are also included no
-# matter the type of requested dependencies.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
-
-# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of
-# dependencies with a single target.
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
-
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
diff --git a/deps/rabbitmq_prometheus/metrics-detailed.md b/deps/rabbitmq_prometheus/metrics-detailed.md
new file mode 100644
index 0000000000..bec732f8db
--- /dev/null
+++ b/deps/rabbitmq_prometheus/metrics-detailed.md
@@ -0,0 +1,267 @@
+## Configurable RabbitMQ metric groups
+
+These are metrics that can be explicitly requested via the `/metrics/detailed` endpoint.
+
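+A minimal request sketch follows. The repeated `family` query parameter used to
+select groups is an assumption here (it is not spelled out in this document);
+the port 15692 is the default one used elsewhere in these docs:
+
+```sh
+# Ask only for the connection churn and per-queue consumer count groups;
+# groups that are not requested are skipped entirely.
+curl -s 'localhost:15692/metrics/detailed?family=connection_churn_metrics&family=queue_consumer_count'
+```
+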
+### Generic metrics
+
+These are some generic metrics, which do not refer to any specific
+queue/connection/etc.
+
+#### Connection/channel/queue churn
+
+Group `connection_churn_metrics`:
+
+| Metric | Description |
+|--------------------------------------------|--------------------------------------------------|
+| rabbitmq_detailed_connections_opened_total | Total number of connections opened |
+| rabbitmq_detailed_connections_closed_total | Total number of connections closed or terminated |
+| rabbitmq_detailed_channels_opened_total | Total number of channels opened |
+| rabbitmq_detailed_channels_closed_total | Total number of channels closed |
+| rabbitmq_detailed_queues_declared_total | Total number of queues declared |
+| rabbitmq_detailed_queues_created_total | Total number of queues created |
+| rabbitmq_detailed_queues_deleted_total | Total number of queues deleted |
+
+
+#### Erlang VM/Disk IO via RabbitMQ
+
+Group `node_coarse_metrics`:
+
+| Metric | Description |
+|-----------------------------------------------------------|-----------------------------------------------------------------------|
+| rabbitmq_detailed_process_open_fds | Open file descriptors |
+| rabbitmq_detailed_process_open_tcp_sockets | Open TCP sockets |
+| rabbitmq_detailed_process_resident_memory_bytes | Memory used in bytes |
+| rabbitmq_detailed_disk_space_available_bytes | Disk space available in bytes |
+| rabbitmq_detailed_erlang_processes_used | Erlang processes used |
+| rabbitmq_detailed_erlang_gc_runs_total | Total number of Erlang garbage collector runs |
+| rabbitmq_detailed_erlang_gc_reclaimed_bytes_total | Total number of bytes of memory reclaimed by Erlang garbage collector |
+| rabbitmq_detailed_erlang_scheduler_context_switches_total | Total number of Erlang scheduler context switches |
+
+Group `node_metrics`:
+
+| Metric | Description |
+|----------------------------------------------------|----------------------------------------|
+| rabbitmq_detailed_process_max_fds | Open file descriptors limit |
+| rabbitmq_detailed_process_max_tcp_sockets | Open TCP sockets limit |
+| rabbitmq_detailed_resident_memory_limit_bytes | Memory high watermark in bytes |
+| rabbitmq_detailed_disk_space_available_limit_bytes | Free disk space low watermark in bytes |
+| rabbitmq_detailed_erlang_processes_limit | Erlang processes limit |
+| rabbitmq_detailed_erlang_scheduler_run_queue | Erlang scheduler run queue |
+| rabbitmq_detailed_erlang_net_ticktime_seconds | Inter-node heartbeat interval |
+| rabbitmq_detailed_erlang_uptime_seconds | Node uptime |
+
+
+Group `node_persister_metrics`:
+
+| Metric | Description |
+|-------------------------------------------------------|------------------------------------------------------|
+| rabbitmq_detailed_io_read_ops_total | Total number of I/O read operations |
+| rabbitmq_detailed_io_read_bytes_total | Total number of I/O bytes read |
+| rabbitmq_detailed_io_write_ops_total | Total number of I/O write operations |
+| rabbitmq_detailed_io_write_bytes_total | Total number of I/O bytes written |
+| rabbitmq_detailed_io_sync_ops_total | Total number of I/O sync operations |
+| rabbitmq_detailed_io_seek_ops_total | Total number of I/O seek operations |
+| rabbitmq_detailed_io_open_attempt_ops_total | Total number of file open attempts |
+| rabbitmq_detailed_io_reopen_ops_total | Total number of times files have been reopened |
+| rabbitmq_detailed_schema_db_ram_tx_total | Total number of Schema DB memory transactions |
+| rabbitmq_detailed_schema_db_disk_tx_total | Total number of Schema DB disk transactions |
+| rabbitmq_detailed_msg_store_read_total | Total number of Message Store read operations |
+| rabbitmq_detailed_msg_store_write_total | Total number of Message Store write operations |
+| rabbitmq_detailed_queue_index_read_ops_total | Total number of Queue Index read operations |
+| rabbitmq_detailed_queue_index_write_ops_total | Total number of Queue Index write operations |
+| rabbitmq_detailed_queue_index_journal_write_ops_total | Total number of Queue Index Journal write operations |
+| rabbitmq_detailed_io_read_time_seconds_total | Total I/O read time |
+| rabbitmq_detailed_io_write_time_seconds_total | Total I/O write time |
+| rabbitmq_detailed_io_sync_time_seconds_total | Total I/O sync time |
+| rabbitmq_detailed_io_seek_time_seconds_total | Total I/O seek time |
+| rabbitmq_detailed_io_open_attempt_time_seconds_total | Total file open attempts time |
+
+
+#### Raft metrics
+
+Group `ra_metrics`:
+
+| Metric | Description |
+|-----------------------------------------------------|--------------------------------------------|
+| rabbitmq_detailed_raft_term_total | Current Raft term number |
+| rabbitmq_detailed_raft_log_snapshot_index | Raft log snapshot index |
+| rabbitmq_detailed_raft_log_last_applied_index | Raft log last applied index |
+| rabbitmq_detailed_raft_log_commit_index | Raft log commit index |
+| rabbitmq_detailed_raft_log_last_written_index | Raft log last written index |
+| rabbitmq_detailed_raft_entry_commit_latency_seconds | Time taken for a log entry to be committed |
+
+#### Auth metrics
+
+Group `auth_attempt_metrics`:
+
+| Metric | Description |
+|-------------------------------------------------|----------------------------------------------------|
+| rabbitmq_detailed_auth_attempts_total | Total number of authorization attempts |
+| rabbitmq_detailed_auth_attempts_succeeded_total | Total number of successful authentication attempts |
+| rabbitmq_detailed_auth_attempts_failed_total | Total number of failed authentication attempts |
+
+
+Group `auth_attempt_detailed_metrics` (when aggregated, it produces the same numbers as `auth_attempt_metrics`, so the two are mutually exclusive in aggregation mode):
+
+| Metric | Description |
+|----------------------------------------------------------|--------------------------------------------------------------------|
+| rabbitmq_detailed_auth_attempts_detailed_total | Total number of authorization attempts with source info |
+| rabbitmq_detailed_auth_attempts_detailed_succeeded_total | Total number of successful authorization attempts with source info |
+| rabbitmq_detailed_auth_attempts_detailed_failed_total | Total number of failed authorization attempts with source info |
+
+
+### Queue metrics
+
+Each metric in this group refers to a single queue in its labels. The amount of data produced and the collection cost depend entirely on the number of queues.
+
+They are listed from least expensive to collect to the most expensive.
+
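+On brokers with many queues it can help to narrow the selection further. A
+hypothetical request, assuming the endpoint also accepts a `vhost` filter
+parameter (here the default vhost `/`, URL-encoded):
+
+```sh
+# Only queue_coarse_metrics for queues in the default vhost, keeping the
+# response size proportional to that vhost's queue count.
+curl -s 'localhost:15692/metrics/detailed?vhost=%2F&family=queue_coarse_metrics'
+```
+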
+#### Queue coarse metrics
+
+Group `queue_coarse_metrics`:
+
+| Metric | Description |
+|--------------------------------------------------|--------------------------------------------------------------|
+| rabbitmq_detailed_queue_messages_ready | Messages ready to be delivered to consumers |
+| rabbitmq_detailed_queue_messages_unacked | Messages delivered to consumers but not yet acknowledged |
+| rabbitmq_detailed_queue_messages | Sum of ready and unacknowledged messages - total queue depth |
+| rabbitmq_detailed_queue_process_reductions_total | Total number of queue process reductions |
+
+#### Per-queue consumer count
+
+Group `queue_consumer_count`. This is a strict subset of `queue_metrics` that contains a single metric (if both `queue_consumer_count` and `queue_metrics` are requested, the former is automatically skipped):
+
+| Metric | Description |
+|-----------------------------------|----------------------|
+| rabbitmq_detailed_queue_consumers | Consumers on a queue |
+
+This is one of the more telling metrics, and having it in a separate group makes it possible to skip some expensive operations needed to extract/expose the other metrics from the same data source.
+
+#### Detailed queue metrics
+
+Group `queue_metrics` contains all the metrics for every queue, and can be relatively expensive to produce:
+
+| Metric | Description |
+|---------------------------------------------------|------------------------------------------------------------|
+| rabbitmq_detailed_queue_consumers | Consumers on a queue |
+| rabbitmq_detailed_queue_consumer_capacity | Consumer capacity |
+| rabbitmq_detailed_queue_consumer_utilisation | Same as consumer capacity |
+| rabbitmq_detailed_queue_process_memory_bytes | Memory in bytes used by the Erlang queue process |
+| rabbitmq_detailed_queue_messages_ram | Ready and unacknowledged messages stored in memory |
+| rabbitmq_detailed_queue_messages_ram_bytes | Size of ready and unacknowledged messages stored in memory |
+| rabbitmq_detailed_queue_messages_ready_ram | Ready messages stored in memory |
+| rabbitmq_detailed_queue_messages_unacked_ram | Unacknowledged messages stored in memory |
+| rabbitmq_detailed_queue_messages_persistent | Persistent messages |
+| rabbitmq_detailed_queue_messages_persistent_bytes | Size in bytes of persistent messages |
+| rabbitmq_detailed_queue_messages_bytes | Size in bytes of ready and unacknowledged messages |
+| rabbitmq_detailed_queue_messages_ready_bytes | Size in bytes of ready messages |
+| rabbitmq_detailed_queue_messages_unacked_bytes | Size in bytes of all unacknowledged messages |
+| rabbitmq_detailed_queue_messages_paged_out | Messages paged out to disk |
+| rabbitmq_detailed_queue_messages_paged_out_bytes | Size in bytes of messages paged out to disk |
+| rabbitmq_detailed_queue_disk_reads_total | Total number of times queue read messages from disk |
+| rabbitmq_detailed_queue_disk_writes_total | Total number of times queue wrote messages to disk |
+
+Tests show that the performance difference between this group and `queue_consumer_count` is roughly 8x. E.g. on a test broker with 10k queues/producers/consumers, scrape time was ~8 seconds versus ~1 second respectively. So while it's expensive, it's not prohibitively so - especially compared to the per-connection/channel metric groups.
+
+### Connection/channel metrics
+
+All of these include an Erlang PID in their labels, which is rarely useful once ingested into Prometheus. They are also the most expensive to produce: most of the resources spent by `/metrics/per-object` go into these groups.
+
+#### Connection metrics
+
+Group `connection_coarse_metrics`:
+
+| Metric | Description |
+|-------------------------------------------------------|------------------------------------------------|
+| rabbitmq_detailed_connection_incoming_bytes_total | Total number of bytes received on a connection |
+| rabbitmq_detailed_connection_outgoing_bytes_total | Total number of bytes sent on a connection |
+| rabbitmq_detailed_connection_process_reductions_total | Total number of connection process reductions |
+
+Group `connection_metrics`:
+
+| Metric | Description |
+|-----------------------------------------------------|------------------------------------------------------|
+| rabbitmq_detailed_connection_incoming_packets_total | Total number of packets received on a connection |
+| rabbitmq_detailed_connection_outgoing_packets_total | Total number of packets sent on a connection |
+| rabbitmq_detailed_connection_pending_packets | Number of packets waiting to be sent on a connection |
+| rabbitmq_detailed_connection_channels | Channels on a connection |
+
+#### General channel metrics
+
+Group `channel_metrics`:
+
+| Metric | Description |
+|------------------------------------------------|-----------------------------------------------------------------------|
+| rabbitmq_detailed_channel_consumers | Consumers on a channel |
+| rabbitmq_detailed_channel_messages_unacked | Delivered but not yet acknowledged messages |
+| rabbitmq_detailed_channel_messages_unconfirmed | Published but not yet confirmed messages |
+| rabbitmq_detailed_channel_messages_uncommitted | Messages received in a transaction but not yet committed |
+| rabbitmq_detailed_channel_acks_uncommitted | Message acknowledgements in a transaction not yet committed |
+| rabbitmq_detailed_consumer_prefetch | Limit of unacknowledged messages for each consumer |
+| rabbitmq_detailed_channel_prefetch | Total limit of unacknowledged messages for all consumers on a channel |
+
+
+Group `channel_process_metrics`:
+
+| Metric | Description |
+|----------------------------------------------------|--------------------------------------------|
+| rabbitmq_detailed_channel_process_reductions_total | Total number of channel process reductions |
+
+
+#### Channel metrics with queue/exchange breakdowns
+
+Group `channel_exchange_metrics`:
+
+| Metric | Description |
+|--------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------|
+| rabbitmq_detailed_channel_messages_published_total | Total number of messages published into an exchange on a channel |
+| rabbitmq_detailed_channel_messages_confirmed_total | Total number of messages published into an exchange and confirmed on the channel |
+| rabbitmq_detailed_channel_messages_unroutable_returned_total | Total number of messages published as mandatory into an exchange and returned to the publisher as unroutable |
+| rabbitmq_detailed_channel_messages_unroutable_dropped_total | Total number of messages published as non-mandatory into an exchange and dropped as unroutable |
+
+Group `channel_queue_metrics`:
+
+| Metric | Description |
+|--------------------------------------------------------|-----------------------------------------------------------------------------------|
+| rabbitmq_detailed_channel_get_ack_total | Total number of messages fetched with basic.get in manual acknowledgement mode |
+| rabbitmq_detailed_channel_get_total | Total number of messages fetched with basic.get in automatic acknowledgement mode |
+| rabbitmq_detailed_channel_messages_delivered_ack_total | Total number of messages delivered to consumers in manual acknowledgement mode |
+| rabbitmq_detailed_channel_messages_delivered_total | Total number of messages delivered to consumers in automatic acknowledgement mode |
+| rabbitmq_detailed_channel_messages_redelivered_total | Total number of messages redelivered to consumers |
+| rabbitmq_detailed_channel_messages_acked_total | Total number of messages acknowledged by consumers |
+| rabbitmq_detailed_channel_get_empty_total | Total number of times basic.get operations fetched no message |
+
+Group `channel_queue_exchange_metrics`:
+
+| Metric | Description |
+|--------------------------------------------------|----------------------------------------------|
+| rabbitmq_detailed_queue_messages_published_total | Total number of messages published to queues |
+
+### Virtual hosts and exchange metrics
+
+These additional metrics can be useful when virtual hosts or exchanges are
+created on a shared cluster in a self-service way. They are different
+from the rest of the metrics: they are cluster-wide and not node-local.
+These metrics **must not** be aggregated across cluster nodes.
+
+Group `vhost_status`:
+
+| Metric | Description |
+|-------------------------------|----------------------------------|
+| rabbitmq_cluster_vhost_status | Whether a given vhost is running |
+
+Group `exchange_names`:
+
+| Metric | Description |
+|--------------------------------|----------------------------------------------------------------------------------------------------------------------------|
+| rabbitmq_cluster_exchange_name | Enumerates exchanges without any additional info. This value is cluster-wide. A cheaper alternative to `exchange_bindings` |
+
+Group `exchange_bindings`:
+
+| Metric | Description |
+|------------------------------------|-----------------------------------------------------------------|
+| rabbitmq_cluster_exchange_bindings | Number of bindings for an exchange. This value is cluster-wide. |
+
+
+
+
diff --git a/deps/rabbitmq_prometheus/metrics.md b/deps/rabbitmq_prometheus/metrics.md
index ff76d014c3..8a8a643909 100644
--- a/deps/rabbitmq_prometheus/metrics.md
+++ b/deps/rabbitmq_prometheus/metrics.md
@@ -3,8 +3,8 @@
<!-- TOC depthFrom:2 depthTo:6 withLinks:1 updateOnSave:1 orderedList:0 -->
- [RabbitMQ](#rabbitmq)
- - [Global](#global)
- - [Overview](#overview)
+ - [Global Counters](#global-counters)
+ - [Generic](#generic)
- [Connections](#connections)
- [Channels](#channels)
- [Queues](#queues)
@@ -20,99 +20,166 @@
## RabbitMQ
-### Global
-
-| Metric | Description |
-| --- | --- |
-| rabbitmq_consumer_prefetch | Limit of unacknowledged messages for each consumer |
-| rabbitmq_channel_prefetch | Total limit of unacknowledged messages for all consumers on a channel |
-
-### Overview
-
-| Metric | Description |
-| --- | --- |
-| rabbitmq_connections_opened_total | Total number of connections opened |
-| rabbitmq_connections_closed_total | Total number of connections closed or terminated |
-| rabbitmq_channels_opened_total | Total number of channels opened |
-| rabbitmq_channels_closed_total | Total number of channels closed |
-| rabbitmq_queues_declared_total | Total number of queues declared |
-| rabbitmq_queues_created_total | Total number of queues created |
-| rabbitmq_queues_deleted_total | Total number of queues deleted |
-| rabbitmq_process_open_fds | Open file descriptors |
-| rabbitmq_process_open_tcp_sockets | Open TCP sockets |
-| rabbitmq_process_resident_memory_bytes | Memory used in bytes |
-| rabbitmq_disk_space_available_bytes | Disk space available in bytes |
-| rabbitmq_process_max_fds | Open file descriptors limit |
-| rabbitmq_process_max_tcp_sockets | Open TCP sockets limit |
-| rabbitmq_resident_memory_limit_bytes | Memory high watermark in bytes |
-| rabbitmq_disk_space_available_limit_bytes | Free disk space low watermark in bytes |
-| rabbitmq_connections | Connections currently open |
-| rabbitmq_channels | Channels currently open |
-| rabbitmq_consumers | Consumers currently connected |
-| rabbitmq_queues | Queues available |
-| rabbitmq_build_info | RabbitMQ & Erlang/OTP version info |
-| rabbitmq_identity_info | RabbitMQ node & cluster identity info |
+All metrics are in alphabetical order.
+
+### Global Counters
+
+These were introduced to address an inherent flaw with the existing counters when
+metrics are aggregated (the default behaviour). When connections or channels
+terminate, their metrics get garbage collected (meaning that they disappear
+after a while). All counters that aggregate metrics across connections and
+channels decrease, since the sum is now lower, and that in turn results in the
+rate & irate functions in Prometheus returning nonsensical values (e.g. 4Mil msg/s).
+This problem is made worse by Streams, since such values **may** be real, but
+we can't trust them, which is the worst place to be in. The global counters fix
+this, and also introduce per-protocol as well as per-protocol AND queue type
+metrics, which is something that many of you have requested for a while now.
+
+<!--
+To generate these:
+ 1. From within the rabbitmq-server repository, run the following command:
+ make run-broker
+ 2. In vim, position the cursor where you want the metrics importing, then run:
+ r !curl -s localhost:15692/metrics | grep 'HELP rabbitmq_global_'
+ 3. Also in vim, select all formatted lines and run:
+ sort
+ 4. Still in vim, build & run a macro that re-formats all other metrics
+ 5. Lastly, visual select all lines in vim and run the following command:
+ Tabularize /|
+-->
+
+| Metric | Description |
+| --- | --- |
+| rabbitmq_global_messages_acknowledged_total | Total number of messages acknowledged by consumers |
+| rabbitmq_global_messages_confirmed_total | Total number of messages confirmed to publishers |
+| rabbitmq_global_messages_delivered_consume_auto_ack_total | Total number of messages delivered to consumers using basic.consume with automatic acknowledgment |
+| rabbitmq_global_messages_delivered_consume_manual_ack_total | Total number of messages delivered to consumers using basic.consume with manual acknowledgment |
+| rabbitmq_global_messages_delivered_get_auto_ack_total | Total number of messages delivered to consumers using basic.get with automatic acknowledgment |
+| rabbitmq_global_messages_delivered_get_manual_ack_total | Total number of messages delivered to consumers using basic.get with manual acknowledgment |
+| rabbitmq_global_messages_delivered_total | Total number of messages delivered to consumers |
+| rabbitmq_global_messages_get_empty_total | Total number of times basic.get operations fetched no message |
+| rabbitmq_global_messages_received_confirm_total | Total number of messages received from publishers expecting confirmations |
+| rabbitmq_global_messages_received_total | Total number of messages received from publishers |
+| rabbitmq_global_messages_redelivered_total | Total number of messages redelivered to consumers |
+| rabbitmq_global_messages_routed_total | Total number of messages routed to queues or streams |
+| rabbitmq_global_messages_unroutable_dropped_total | Total number of messages published as non-mandatory into an exchange and dropped as unroutable |
+| rabbitmq_global_messages_unroutable_returned_total | Total number of messages published as mandatory into an exchange and returned to the publisher as unroutable |
+| rabbitmq_global_publishers | Publishers currently connected |
+| rabbitmq_global_consumers | Consumers currently connected |
+
+#### Stream global counters
+
+These metrics are specific to the stream protocol.
+
+| Metric | Description |
+| --- | --- |
+| stream_error_stream_does_not_exist_total | Total number of commands rejected with stream does not exist error |
+| stream_error_subscription_id_already_exists_total | Total number of commands failed with subscription id already exists |
+| stream_error_subscription_id_does_not_exist_total | Total number of commands failed with subscription id does not exist |
+| stream_error_stream_already_exists_total | Total number of commands failed with stream already exists |
+| stream_error_stream_not_available_total | Total number of commands failed with stream not available |
+| stream_error_sasl_mechanism_not_supported_total | Total number of commands failed with sasl mechanism not supported |
+| stream_error_authentication_failure_total | Total number of commands failed with authentication failure |
+| stream_error_sasl_error_total | Total number of commands failed with sasl error |
+| stream_error_sasl_challenge_total | Total number of commands failed with sasl challenge |
+| stream_error_sasl_authentication_failure_loopback_total | Total number of commands failed with sasl authentication failure loopback |
+| stream_error_vhost_access_failure_total | Total number of commands failed with vhost access failure |
+| stream_error_unknown_frame_total | Total number of commands failed with unknown frame |
+| stream_error_frame_too_large_total | Total number of commands failed with frame too large |
+| stream_error_internal_error_total | Total number of commands failed with internal error |
+| stream_error_access_refused_total | Total number of commands failed with access refused |
+| stream_error_precondition_failed_total | Total number of commands failed with precondition failed |
+| stream_error_publisher_does_not_exist_total | Total number of commands failed with publisher does not exist |
+
+
+### Generic
+
+| Metric | Description |
+| --- | --- |
+| rabbitmq_build_info | RabbitMQ & Erlang/OTP version info |
+| rabbitmq_consumer_prefetch | Limit of unacknowledged messages for each consumer |
+| rabbitmq_consumers | Consumers currently connected |
+| rabbitmq_disk_space_available_bytes | Disk space available in bytes |
+| rabbitmq_disk_space_available_limit_bytes | Free disk space low watermark in bytes |
+| rabbitmq_identity_info | RabbitMQ node & cluster identity info |
+| rabbitmq_process_max_fds | Open file descriptors limit |
+| rabbitmq_process_max_tcp_sockets | Open TCP sockets limit |
+| rabbitmq_process_open_fds | Open file descriptors |
+| rabbitmq_process_open_tcp_sockets | Open TCP sockets |
+| rabbitmq_process_resident_memory_bytes | Memory used in bytes |
+| rabbitmq_resident_memory_limit_bytes | Memory high watermark in bytes |
### Connections
| Metric | Description |
| --- | --- |
+| rabbitmq_connection_channels | Channels on a connection |
| rabbitmq_connection_incoming_bytes_total | Total number of bytes received on a connection |
-| rabbitmq_connection_outgoing_bytes_total | Total number of bytes sent on a connection |
-| rabbitmq_connection_process_reductions_total | Total number of connection process reductions |
| rabbitmq_connection_incoming_packets_total | Total number of packets received on a connection |
+| rabbitmq_connection_outgoing_bytes_total | Total number of bytes sent on a connection |
| rabbitmq_connection_outgoing_packets_total | Total number of packets sent on a connection |
| rabbitmq_connection_pending_packets | Number of packets waiting to be sent on a connection |
-| rabbitmq_connection_channels | Channels on a connection |
+| rabbitmq_connection_process_reductions_total | Total number of connection process reductions |
+| rabbitmq_connections | Connections currently open |
+| rabbitmq_connections_closed_total | Total number of connections closed or terminated |
+| rabbitmq_connections_opened_total | Total number of connections opened |
### Channels
| Metric | Description |
| --- | --- |
-| rabbitmq_channel_consumers | Consumers on a channel |
-| rabbitmq_channel_messages_unacked | Delivered but not yet acknowledged messages |
-| rabbitmq_channel_messages_unconfirmed | Published but not yet confirmed messages |
-| rabbitmq_channel_messages_uncommitted | Messages received in a transaction but not yet committed |
| rabbitmq_channel_acks_uncommitted | Message acknowledgements in a transaction not yet committed |
-| rabbitmq_channel_messages_published_total | Total number of messages published into an exchange on a channel |
-| rabbitmq_channel_messages_confirmed_total | Total number of messages published into an exchange and confirmed on the channel |
-| rabbitmq_channel_messages_unroutable_returned_total | Total number of messages published as mandatory into an exchange and returned to the publisher as unroutable |
-| rabbitmq_channel_messages_unroutable_dropped_total | Total number of messages published as non-mandatory into an exchange and dropped as unroutable |
-| rabbitmq_channel_process_reductions_total | Total number of channel process reductions |
+| rabbitmq_channel_consumers | Consumers on a channel |
| rabbitmq_channel_get_ack_total | Total number of messages fetched with basic.get in manual acknowledgement mode |
+| rabbitmq_channel_get_empty_total | Total number of times basic.get operations fetched no message |
| rabbitmq_channel_get_total | Total number of messages fetched with basic.get in automatic acknowledgement mode |
+| rabbitmq_channel_messages_acked_total | Total number of messages acknowledged by consumers |
+| rabbitmq_channel_messages_confirmed_total | Total number of messages published into an exchange and confirmed on the channel |
| rabbitmq_channel_messages_delivered_ack_total | Total number of messages delivered to consumers in manual acknowledgement mode |
| rabbitmq_channel_messages_delivered_total | Total number of messages delivered to consumers in automatic acknowledgement mode |
+| rabbitmq_channel_messages_published_total | Total number of messages published into an exchange on a channel |
| rabbitmq_channel_messages_redelivered_total | Total number of messages redelivered to consumers |
-| rabbitmq_channel_messages_acked_total | Total number of messages acknowledged by consumers |
-| rabbitmq_channel_get_empty_total | Total number of times basic.get operations fetched no message |
+| rabbitmq_channel_messages_unacked | Delivered but not yet acknowledged messages |
+| rabbitmq_channel_messages_uncommitted | Messages received in a transaction but not yet committed |
+| rabbitmq_channel_messages_unconfirmed | Published but not yet confirmed messages |
+| rabbitmq_channel_messages_unroutable_dropped_total | Total number of messages published as non-mandatory into an exchange and dropped as unroutable |
+| rabbitmq_channel_messages_unroutable_returned_total | Total number of messages published as mandatory into an exchange and returned to the publisher as unroutable |
+| rabbitmq_channel_prefetch | Total limit of unacknowledged messages for all consumers on a channel |
+| rabbitmq_channel_process_reductions_total | Total number of channel process reductions |
+| rabbitmq_channels | Channels currently open |
+| rabbitmq_channels_closed_total | Total number of channels closed |
+| rabbitmq_channels_opened_total | Total number of channels opened |
### Queues
-| Metric | Description |
-| --- | --- |
-| rabbitmq_queue_messages_published_total | Total number of messages published to queues |
-| rabbitmq_queue_messages_ready | Messages ready to be delivered to consumers |
-| rabbitmq_queue_messages_unacked | Messages delivered to consumers but not yet acknowledged |
-| rabbitmq_queue_messages | Sum of ready and unacknowledged messages - total queue depth |
-| rabbitmq_queue_process_reductions_total | Total number of queue process reductions |
-| rabbitmq_queue_consumers | Consumers on a queue |
-| rabbitmq_queue_consumer_utilisation | Consumer utilisation |
-| rabbitmq_queue_process_memory_bytes | Memory in bytes used by the Erlang queue process |
-| rabbitmq_queue_messages_ram | Ready and unacknowledged messages stored in memory |
-| rabbitmq_queue_messages_ram_bytes | Size of ready and unacknowledged messages stored in memory |
-| rabbitmq_queue_messages_ready_ram | Ready messages stored in memory |
-| rabbitmq_queue_messages_unacked_ram | Unacknowledged messages stored in memory |
-| rabbitmq_queue_messages_persistent | Persistent messages |
-| rabbitmq_queue_messages_persistent_bytes | Size in bytes of persistent messages |
-| rabbitmq_queue_messages_bytes | Size in bytes of ready and unacknowledged messages |
-| rabbitmq_queue_messages_ready_bytes | Size in bytes of ready messages |
-| rabbitmq_queue_messages_unacked_bytes | Size in bytes of all unacknowledged messages |
-| rabbitmq_queue_messages_paged_out | Messages paged out to disk |
-| rabbitmq_queue_messages_paged_out_bytes | Size in bytes of messages paged out to disk |
-| rabbitmq_queue_disk_reads_total | Total number of times queue read messages from disk |
-| rabbitmq_queue_disk_writes_total | Total number of times queue wrote messages to disk |
+| Metric | Description |
+| --- | --- |
+| rabbitmq_queue_consumer_utilisation | Consumer utilisation |
+| rabbitmq_queue_consumers | Consumers on a queue |
+| rabbitmq_queue_disk_reads_total | Total number of times queue read messages from disk |
+| rabbitmq_queue_disk_writes_total | Total number of times queue wrote messages to disk |
+| rabbitmq_queue_messages | Sum of ready and unacknowledged messages - total queue depth |
+| rabbitmq_queue_messages_bytes | Size in bytes of ready and unacknowledged messages |
+| rabbitmq_queue_messages_paged_out | Messages paged out to disk |
+| rabbitmq_queue_messages_paged_out_bytes | Size in bytes of messages paged out to disk |
+| rabbitmq_queue_messages_persistent | Persistent messages |
+| rabbitmq_queue_messages_persistent_bytes | Size in bytes of persistent messages |
+| rabbitmq_queue_messages_published_total | Total number of messages published to queues |
+| rabbitmq_queue_messages_ram | Ready and unacknowledged messages stored in memory |
+| rabbitmq_queue_messages_ram_bytes | Size of ready and unacknowledged messages stored in memory |
+| rabbitmq_queue_messages_ready | Messages ready to be delivered to consumers |
+| rabbitmq_queue_messages_ready_bytes | Size in bytes of ready messages |
+| rabbitmq_queue_messages_ready_ram | Ready messages stored in memory |
+| rabbitmq_queue_messages_unacked | Messages delivered to consumers but not yet acknowledged |
+| rabbitmq_queue_messages_unacked_bytes | Size in bytes of all unacknowledged messages |
+| rabbitmq_queue_messages_unacked_ram | Unacknowledged messages stored in memory |
+| rabbitmq_queue_process_memory_bytes | Memory in bytes used by the Erlang queue process |
+| rabbitmq_queue_process_reductions_total | Total number of queue process reductions |
+| rabbitmq_queues | Queues available |
+| rabbitmq_queues_created_total | Total number of queues created |
+| rabbitmq_queues_declared_total | Total number of queues declared |
+| rabbitmq_queues_deleted_total | Total number of queues deleted |
@@ -120,59 +187,58 @@
| Metric | Description |
| --- | --- |
-| rabbitmq_erlang_processes_used | Erlang processes used |
-| rabbitmq_erlang_gc_runs_total | Total number of Erlang garbage collector runs |
| rabbitmq_erlang_gc_reclaimed_bytes_total | Total number of bytes of memory reclaimed by Erlang garbage collector |
-| rabbitmq_erlang_scheduler_context_switches_total | Total number of Erlang scheduler context switches |
+| rabbitmq_erlang_gc_runs_total | Total number of Erlang garbage collector runs |
+| rabbitmq_erlang_net_ticktime_seconds | Inter-node heartbeat interval in seconds |
| rabbitmq_erlang_processes_limit | Erlang processes limit |
+| rabbitmq_erlang_processes_used | Erlang processes used |
+| rabbitmq_erlang_scheduler_context_switches_total | Total number of Erlang scheduler context switches |
| rabbitmq_erlang_scheduler_run_queue | Erlang scheduler run queue |
-| rabbitmq_erlang_net_ticktime_seconds | Inter-node heartbeat interval in seconds |
| rabbitmq_erlang_uptime_seconds | Node uptime |
-
### Disk IO
| Metric | Description |
| --- | --- |
-| rabbitmq_io_read_ops_total | Total number of I/O read operations |
-| rabbitmq_io_read_bytes_total | Total number of I/O bytes read |
-| rabbitmq_io_write_ops_total | Total number of I/O write operations |
-| rabbitmq_io_write_bytes_total | Total number of I/O bytes written |
-| rabbitmq_io_sync_ops_total | Total number of I/O sync operations |
-| rabbitmq_io_seek_ops_total | Total number of I/O seek operations |
| rabbitmq_io_open_attempt_ops_total | Total number of file open attempts |
+| rabbitmq_io_open_attempt_time_seconds_total | Total file open attempts time |
+| rabbitmq_io_read_bytes_total | Total number of I/O bytes read |
+| rabbitmq_io_read_ops_total | Total number of I/O read operations |
+| rabbitmq_io_read_time_seconds_total | Total I/O read time |
| rabbitmq_io_reopen_ops_total | Total number of times files have been reopened |
-| rabbitmq_schema_db_ram_tx_total | Total number of Schema DB memory transactions |
-| rabbitmq_schema_db_disk_tx_total | Total number of Schema DB disk transactions |
+| rabbitmq_io_seek_ops_total | Total number of I/O seek operations |
+| rabbitmq_io_seek_time_seconds_total | Total I/O seek time |
+| rabbitmq_io_sync_ops_total | Total number of I/O sync operations |
+| rabbitmq_io_sync_time_seconds_total | Total I/O sync time |
+| rabbitmq_io_write_bytes_total | Total number of I/O bytes written |
+| rabbitmq_io_write_ops_total | Total number of I/O write operations |
+| rabbitmq_io_write_time_seconds_total | Total I/O write time |
| rabbitmq_msg_store_read_total | Total number of Message Store read operations |
| rabbitmq_msg_store_write_total | Total number of Message Store write operations |
+| rabbitmq_queue_index_journal_write_ops_total | Total number of Queue Index Journal write operations |
| rabbitmq_queue_index_read_ops_total | Total number of Queue Index read operations |
| rabbitmq_queue_index_write_ops_total | Total number of Queue Index write operations |
-| rabbitmq_queue_index_journal_write_ops_total | Total number of Queue Index Journal write operations |
-| rabbitmq_io_read_time_seconds_total | Total I/O read time |
-| rabbitmq_io_write_time_seconds_total | Total I/O write time |
-| rabbitmq_io_sync_time_seconds_total | Total I/O sync time |
-| rabbitmq_io_seek_time_seconds_total | Total I/O seek time |
-| rabbitmq_io_open_attempt_time_seconds_total | Total file open attempts time |
+| rabbitmq_schema_db_disk_tx_total | Total number of Schema DB disk transactions |
+| rabbitmq_schema_db_ram_tx_total | Total number of Schema DB memory transactions |
### Raft
| Metric | Description |
| --- | --- |
-| rabbitmq_raft_term_total | Current Raft term number |
-| rabbitmq_raft_log_snapshot_index | Raft log snapshot index |
-| rabbitmq_raft_log_last_applied_index | Raft log last applied index |
+| rabbitmq_raft_entry_commit_latency_seconds | Time taken for an entry to be committed |
| rabbitmq_raft_log_commit_index | Raft log commit index |
+| rabbitmq_raft_log_last_applied_index | Raft log last applied index |
| rabbitmq_raft_log_last_written_index | Raft log last written index |
-| rabbitmq_raft_entry_commit_latency_seconds | Time taken for an entry to be committed |
+| rabbitmq_raft_log_snapshot_index | Raft log snapshot index |
+| rabbitmq_raft_term_total | Current Raft term number |
## Telemetry
| Metric | Description |
| --- | --- |
+| telemetry_scrape_duration_seconds | Scrape duration |
| telemetry_scrape_encoded_size_bytes | Scrape size, encoded |
| telemetry_scrape_size_bytes | Scrape size, not encoded |
-| telemetry_scrape_duration_seconds | Scrape duration |
## Erlang
@@ -180,81 +246,81 @@
| Metric | Description |
| --- | --- |
+| erlang_mnesia_committed_transactions | Number of committed transactions |
+| erlang_mnesia_failed_transactions | Number of failed (i.e. aborted) transactions |
| erlang_mnesia_held_locks | Number of held locks |
| erlang_mnesia_lock_queue | Number of transactions waiting for a lock |
-| erlang_mnesia_transaction_participants | Number of participant transactions |
-| erlang_mnesia_transaction_coordinators | Number of coordinator transactions |
-| erlang_mnesia_failed_transactions | Number of failed (i.e. aborted) transactions |
-| erlang_mnesia_committed_transactions | Number of committed transactions |
| erlang_mnesia_logged_transactions | Number of transactions logged |
| erlang_mnesia_restarted_transactions | Total number of transaction restarts |
+| erlang_mnesia_transaction_coordinators | Number of coordinator transactions |
+| erlang_mnesia_transaction_participants | Number of participant transactions |
### VM
| Metric | Description |
| --- | --- |
-| erlang_vm_dist_recv_bytes | Number of bytes received by the socket. |
-| erlang_vm_dist_recv_cnt | Number of packets received by the socket. |
-| erlang_vm_dist_recv_max_bytes | Size of the largest packet, in bytes, received by the socket. |
-| erlang_vm_dist_recv_avg_bytes | Average size of packets, in bytes, received by the socket. |
-| erlang_vm_dist_recv_dvi_bytes | Average packet size deviation, in bytes, received by the socket. |
-| erlang_vm_dist_send_bytes | Number of bytes sent from the socket. |
-| erlang_vm_dist_send_cnt | Number of packets sent from the socket. |
-| erlang_vm_dist_send_max_bytes | Size of the largest packet, in bytes, sent from the socket. |
-| erlang_vm_dist_send_avg_bytes | Average size of packets, in bytes, sent from the socket. |
-| erlang_vm_dist_send_pend_bytes | Number of bytes waiting to be sent by the socket. |
+| erlang_vm_allocators | Allocated (carriers_size) and used (blocks_size) memory for the different allocators in the VM. See erts_alloc(3). |
+| erlang_vm_atom_count | The number of atoms currently existing at the local node. |
+| erlang_vm_atom_limit | The maximum number of simultaneously existing atoms at the local node. |
+| erlang_vm_dirty_cpu_schedulers | The number of dirty CPU scheduler threads used by the emulator. |
+| erlang_vm_dirty_cpu_schedulers_online | The number of dirty CPU scheduler threads online. |
+| erlang_vm_dirty_io_schedulers | The number of dirty I/O scheduler threads used by the emulator. |
+| erlang_vm_dist_node_queue_size_bytes | The number of bytes in the output distribution queue. This queue sits between the Erlang code and the port driver. |
+| erlang_vm_dist_node_state | The current state of the distribution link. The state is represented as a numerical value where `pending=1', `up_pending=2' and `up=3'. |
| erlang_vm_dist_port_input_bytes | The total number of bytes read from the port. |
-| erlang_vm_dist_port_output_bytes | The total number of bytes written to the port. |
| erlang_vm_dist_port_memory_bytes | The total number of bytes allocated for this port by the runtime system. The port itself can have allocated memory that is not included. |
+| erlang_vm_dist_port_output_bytes | The total number of bytes written to the port. |
| erlang_vm_dist_port_queue_size_bytes | The total number of bytes queued by the port using the ERTS driver queue implementation. |
-| erlang_vm_dist_proc_memory_bytes | The size in bytes of the process. This includes call stack, heap, and internal structures. |
| erlang_vm_dist_proc_heap_size_words | The size in words of the youngest heap generation of the process. This generation includes the process stack. This information is highly implementation-dependent, and can change if the implementation changes. |
-| erlang_vm_dist_proc_min_heap_size_words | The minimum heap size for the process. |
-| erlang_vm_dist_proc_min_bin_vheap_size_words | The minimum binary virtual heap size for the process. |
-| erlang_vm_dist_proc_stack_size_words | The stack size, in words, of the process. |
-| erlang_vm_dist_proc_total_heap_size_words | The total size, in words, of all heap fragments of the process. This includes the process stack and any unreceived messages that are considered to be part of the heap. |
+| erlang_vm_dist_proc_memory_bytes | The size in bytes of the process. This includes call stack, heap, and internal structures. |
| erlang_vm_dist_proc_message_queue_len | The number of messages currently in the message queue of the process. |
+| erlang_vm_dist_proc_min_bin_vheap_size_words | The minimum binary virtual heap size for the process. |
+| erlang_vm_dist_proc_min_heap_size_words | The minimum heap size for the process. |
| erlang_vm_dist_proc_reductions | The number of reductions executed by the process. |
+| erlang_vm_dist_proc_stack_size_words | The stack size, in words, of the process. |
| erlang_vm_dist_proc_status | The current status of the distribution process. The status is represented as a numerical value where `exiting=1', `suspended=2', `runnable=3', `garbage_collecting=4', `running=5' and `waiting=6'. |
-| erlang_vm_dist_node_state | The current state of the distribution link. The state is represented as a numerical value where `pending=1', `up_pending=2' and `up=3'. |
-| erlang_vm_dist_node_queue_size_bytes | The number of bytes in the output distribution queue. This queue sits between the Erlang code and the port driver. |
+| erlang_vm_dist_proc_total_heap_size_words | The total size, in words, of all heap fragments of the process. This includes the process stack and any unreceived messages that are considered to be part of the heap. |
+| erlang_vm_dist_recv_avg_bytes | Average size of packets, in bytes, received by the socket. |
+| erlang_vm_dist_recv_bytes | Number of bytes received by the socket. |
+| erlang_vm_dist_recv_cnt | Number of packets received by the socket. |
+| erlang_vm_dist_recv_dvi_bytes | Average packet size deviation, in bytes, received by the socket. |
+| erlang_vm_dist_recv_max_bytes | Size of the largest packet, in bytes, received by the socket. |
+| erlang_vm_dist_send_avg_bytes | Average size of packets, in bytes, sent from the socket. |
+| erlang_vm_dist_send_bytes | Number of bytes sent from the socket. |
+| erlang_vm_dist_send_cnt | Number of packets sent from the socket. |
+| erlang_vm_dist_send_max_bytes | Size of the largest packet, in bytes, sent from the socket. |
+| erlang_vm_dist_send_pend_bytes | Number of bytes waiting to be sent by the socket. |
+| erlang_vm_ets_limit | The maximum number of ETS tables allowed. |
+| erlang_vm_logical_processors | The detected number of logical processors configured in the system. |
+| erlang_vm_logical_processors_available | The detected number of logical processors available to the Erlang runtime system. |
+| erlang_vm_logical_processors_online | The detected number of logical processors online on the system. |
| erlang_vm_memory_atom_bytes_total | The total amount of memory currently allocated for atoms. This memory is part of the memory presented as system memory. |
| erlang_vm_memory_bytes_total | The total amount of memory currently allocated. This is the same as the sum of the memory size for processes and system. |
| erlang_vm_memory_dets_tables | Erlang VM DETS Tables count. |
| erlang_vm_memory_ets_tables | Erlang VM ETS Tables count. |
| erlang_vm_memory_processes_bytes_total | The total amount of memory currently allocated for the Erlang processes. |
| erlang_vm_memory_system_bytes_total | The total amount of memory currently allocated for the emulator that is not directly related to any Erlang process. Memory presented as processes is not included in this memory. |
+| erlang_vm_port_count | The number of ports currently existing at the local node. |
+| erlang_vm_port_limit | The maximum number of simultaneously existing ports at the local node. |
+| erlang_vm_process_count | The number of processes currently existing at the local node. |
+| erlang_vm_process_limit | The maximum number of simultaneously existing processes at the local node. |
+| erlang_vm_schedulers | The number of scheduler threads used by the emulator. |
+| erlang_vm_schedulers_online | The number of schedulers online. |
+| erlang_vm_smp_support | 1 if the emulator has been compiled with SMP support, otherwise 0. |
| erlang_vm_statistics_bytes_output_total | Total number of bytes output to ports. |
| erlang_vm_statistics_bytes_received_total | Total number of bytes received through ports. |
| erlang_vm_statistics_context_switches | Total number of context switches since the system started. |
| erlang_vm_statistics_dirty_cpu_run_queue_length | Length of the dirty CPU run-queue. |
| erlang_vm_statistics_dirty_io_run_queue_length | Length of the dirty IO run-queue. |
-| erlang_vm_statistics_garbage_collection_number_of_gcs | Garbage collection: number of GCs. |
| erlang_vm_statistics_garbage_collection_bytes_reclaimed | Garbage collection: bytes reclaimed. |
+| erlang_vm_statistics_garbage_collection_number_of_gcs | Garbage collection: number of GCs. |
| erlang_vm_statistics_garbage_collection_words_reclaimed | Garbage collection: words reclaimed. |
| erlang_vm_statistics_reductions_total | Total reductions. |
| erlang_vm_statistics_run_queues_length_total | Length of normal run-queues. |
-| erlang_vm_statistics_wallclock_time_milliseconds | Information about wall clock. Same as erlang_vm_statistics_runtime_milliseconds except that real time is measured. |
| erlang_vm_statistics_runtime_milliseconds | The sum of the runtime for all threads in the Erlang runtime system. Can be greater than wall clock time. |
| erlang_vm_statistics_wallclock_time_milliseconds | Information about wall clock. Same as erlang_vm_statistics_runtime_milliseconds except that real time is measured. |
-| erlang_vm_dirty_cpu_schedulers | The number of scheduler dirty CPU scheduler threads used by the emulator. |
-| erlang_vm_dirty_cpu_schedulers_online | The number of dirty CPU scheduler threads online. |
-| erlang_vm_dirty_io_schedulers | The number of scheduler dirty I/O scheduler threads used by the emulator. |
-| erlang_vm_ets_limit | The maximum number of ETS tables allowed. |
-| erlang_vm_logical_processors | The detected number of logical processors configured in the system. |
-| erlang_vm_logical_processors_available | The detected number of logical processors available to the Erlang runtime system. |
-| erlang_vm_logical_processors_online | The detected number of logical processors online on the system. |
-| erlang_vm_port_count | The number of ports currently existing at the local node. |
-| erlang_vm_port_limit | The maximum number of simultaneously existing ports at the local node. |
-| erlang_vm_process_count | The number of processes currently existing at the local node. |
-| erlang_vm_process_limit | The maximum number of simultaneously existing processes at the local node. |
-| erlang_vm_schedulers | The number of scheduler threads used by the emulator. |
-| erlang_vm_schedulers_online | The number of schedulers online. |
-| erlang_vm_smp_support | 1 if the emulator has been compiled with SMP support, otherwise 0. |
-| erlang_vm_threads | 1 if the emulator has been compiled with thread support, otherwise 0. |
| erlang_vm_thread_pool_size | The number of async threads in the async thread pool used for asynchronous driver calls. |
+| erlang_vm_threads | 1 if the emulator has been compiled with thread support, otherwise 0. |
| erlang_vm_time_correction | 1 if time correction is enabled, otherwise 0. |
-| erlang_vm_atom_count | The number of atom currently existing at the local node. |
-| erlang_vm_atom_limit | The maximum number of simultaneously existing atom at the local node. |
-| erlang_vm_allocators | Allocated (carriers_size) and used (blocks_size) memory for the different allocators in the VM. See erts_alloc(3). |
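
For orientation, a scrape of the plugin's endpoint renders each of the entries listed above in the standard Prometheus text exposition format, roughly like this (values are illustrative only; actual label sets depend on the metric group and the endpoint used):

```
# HELP rabbitmq_connections Connections currently open
# TYPE rabbitmq_connections gauge
rabbitmq_connections 0
# HELP rabbitmq_global_messages_received_total Total number of messages received from publishers
# TYPE rabbitmq_global_messages_received_total counter
rabbitmq_global_messages_received_total 0
```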
diff --git a/deps/rabbitmq_prometheus/priv/schema/rabbitmq_prometheus.schema b/deps/rabbitmq_prometheus/priv/schema/rabbitmq_prometheus.schema
index bdef14782b..0ba11713f6 100644
--- a/deps/rabbitmq_prometheus/priv/schema/rabbitmq_prometheus.schema
+++ b/deps/rabbitmq_prometheus/priv/schema/rabbitmq_prometheus.schema
@@ -62,52 +62,53 @@
[{datatype, integer}]}.
{mapping, "prometheus.ssl.ip", "rabbitmq_prometheus.ssl_config.ip",
[{datatype, string}, {validators, ["is_ip"]}]}.
-{mapping, "prometheus.ssl.certfile", "rabbitmq_prometheus.ssl_config.certfile",
+
+{mapping, "prometheus.ssl.certfile", "rabbitmq_prometheus.ssl_config.ssl_opts.certfile",
[{datatype, string}, {validators, ["file_accessible"]}]}.
-{mapping, "prometheus.ssl.keyfile", "rabbitmq_prometheus.ssl_config.keyfile",
+{mapping, "prometheus.ssl.keyfile", "rabbitmq_prometheus.ssl_config.ssl_opts.keyfile",
[{datatype, string}, {validators, ["file_accessible"]}]}.
-{mapping, "prometheus.ssl.cacertfile", "rabbitmq_prometheus.ssl_config.cacertfile",
+{mapping, "prometheus.ssl.cacertfile", "rabbitmq_prometheus.ssl_config.ssl_opts.cacertfile",
[{datatype, string}, {validators, ["file_accessible"]}]}.
-{mapping, "prometheus.ssl.password", "rabbitmq_prometheus.ssl_config.password",
+{mapping, "prometheus.ssl.password", "rabbitmq_prometheus.ssl_config.ssl_opts.password",
[{datatype, string}]}.
-{mapping, "prometheus.ssl.verify", "rabbitmq_prometheus.ssl_config.verify", [
+{mapping, "prometheus.ssl.verify", "rabbitmq_prometheus.ssl_config.ssl_opts.verify", [
{datatype, {enum, [verify_peer, verify_none]}}]}.
-{mapping, "prometheus.ssl.fail_if_no_peer_cert", "rabbitmq_prometheus.ssl_config.fail_if_no_peer_cert", [
+{mapping, "prometheus.ssl.fail_if_no_peer_cert", "rabbitmq_prometheus.ssl_config.ssl_opts.fail_if_no_peer_cert", [
{datatype, {enum, [true, false]}}]}.
-{mapping, "prometheus.ssl.honor_cipher_order", "rabbitmq_prometheus.ssl_config.honor_cipher_order",
+{mapping, "prometheus.ssl.honor_cipher_order", "rabbitmq_prometheus.ssl_config.ssl_opts.honor_cipher_order",
[{datatype, {enum, [true, false]}}]}.
-{mapping, "prometheus.ssl.honor_ecc_order", "rabbitmq_prometheus.ssl_config.honor_ecc_order",
+{mapping, "prometheus.ssl.honor_ecc_order", "rabbitmq_prometheus.ssl_config.ssl_opts.honor_ecc_order",
[{datatype, {enum, [true, false]}}]}.
-{mapping, "prometheus.ssl.reuse_sessions", "rabbitmq_prometheus.ssl_config.reuse_sessions",
+{mapping, "prometheus.ssl.reuse_sessions", "rabbitmq_prometheus.ssl_config.ssl_opts.reuse_sessions",
[{datatype, {enum, [true, false]}}]}.
-{mapping, "prometheus.ssl.secure_renegotiate", "rabbitmq_prometheus.ssl_config.secure_renegotiate",
+{mapping, "prometheus.ssl.secure_renegotiate", "rabbitmq_prometheus.ssl_config.ssl_opts.secure_renegotiate",
[{datatype, {enum, [true, false]}}]}.
-{mapping, "prometheus.ssl.client_renegotiation", "rabbitmq_prometheus.ssl_config.client_renegotiation",
+{mapping, "prometheus.ssl.client_renegotiation", "rabbitmq_prometheus.ssl_config.ssl_opts.client_renegotiation",
[{datatype, {enum, [true, false]}}]}.
-{mapping, "prometheus.ssl.depth", "rabbitmq_prometheus.ssl_config.depth",
+{mapping, "prometheus.ssl.depth", "rabbitmq_prometheus.ssl_config.ssl_opts.depth",
[{datatype, integer}, {validators, ["byte"]}]}.
-{mapping, "prometheus.ssl.versions.$version", "rabbitmq_prometheus.ssl_config.versions",
+{mapping, "prometheus.ssl.versions.$version", "rabbitmq_prometheus.ssl_config.ssl_opts.versions",
[{datatype, atom}]}.
-{translation, "rabbitmq_prometheus.ssl_config.versions",
+{translation, "rabbitmq_prometheus.ssl_config.ssl_opts.versions",
fun(Conf) ->
Settings = cuttlefish_variable:filter_by_prefix("prometheus.ssl.versions", Conf),
[V || {_, V} <- Settings]
end}.
-{mapping, "prometheus.ssl.ciphers.$cipher", "rabbitmq_prometheus.ssl_config.ciphers",
+{mapping, "prometheus.ssl.ciphers.$cipher", "rabbitmq_prometheus.ssl_config.ssl_opts.ciphers",
[{datatype, string}]}.
-{translation, "rabbitmq_prometheus.ssl_config.ciphers",
+{translation, "rabbitmq_prometheus.ssl_config.ssl_opts.ciphers",
fun(Conf) ->
Settings = cuttlefish_variable:filter_by_prefix("prometheus.ssl.ciphers", Conf),
lists:reverse([V || {_, V} <- Settings])
@@ -125,3 +126,5 @@ end}.
[{datatype, integer}, {validators, ["non_negative_integer"]}]}.
{mapping, "prometheus.ssl.max_keepalive", "rabbitmq_prometheus.ssl_config.cowboy_opts.max_keepalive",
[{datatype, integer}, {validators, ["non_negative_integer"]}]}.
+
+{mapping, "prometheus.filter_aggregated_queue_metrics_pattern", "rabbitmq_prometheus.filter_aggregated_queue_metrics_pattern", [{datatype, string}]}. \ No newline at end of file
diff --git a/deps/rabbitmq_prometheus/rabbitmq-components.mk b/deps/rabbitmq_prometheus/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/rabbitmq_prometheus/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Define default goal to `all` because this file defines some targets
-# before the inclusion of erlang.mk leading to the wrong target becoming
-# the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
-
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to checkout branches which match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch or fallback to `stable` or `master` whichever was the
-# base of the topic branch.
-
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
-
-# Third-party dependencies version pinning.
-#
-# We do that in this file, which is copied in all projects, to ensure
-# all projects use the same versions. It avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# Erlang.mk does not rebuild dependencies by default, once they were
-# compiled once, except for those listed in the `$(FORCE_REBUILD)`
-# variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this eases
-# the work on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project
-# repository URL, if it's a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name is replaced by the
-# target's properties:
-# eg. rabbitmq-common is replaced by rabbitmq-codegen
-# eg. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fallback to RabbitMQ
-# upstream which is GitHub.
-
-# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following pattern:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
-
-# Macro to replace both the project's name (eg. "rabbit_common") and
-# repository name (eg. "rabbitmq-common") by the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespaces in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level's one.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is a coincidence that we found a
-# `rabbitmq-components.mk` up upper directories.
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
diff --git a/deps/rabbitmq_prometheus/src/collectors/prometheus_rabbitmq_alarm_metrics_collector.erl b/deps/rabbitmq_prometheus/src/collectors/prometheus_rabbitmq_alarm_metrics_collector.erl
new file mode 100644
index 0000000000..b750aa0f6a
--- /dev/null
+++ b/deps/rabbitmq_prometheus/src/collectors/prometheus_rabbitmq_alarm_metrics_collector.erl
@@ -0,0 +1,80 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+-module(prometheus_rabbitmq_alarm_metrics_collector).
+
+-export([register/0, deregister_cleanup/1, collect_mf/2]).
+
+-import(prometheus_model_helpers, [create_mf/4, untyped_metric/1]).
+
+-include_lib("prometheus/include/prometheus.hrl").
+
+-behaviour(prometheus_collector).
+
+-define(METRIC_NAME_PREFIX, "rabbitmq_alarms_").
+
+%%====================================================================
+%% Collector API
+%%====================================================================
+
+register() ->
+ ok = prometheus_registry:register_collector(?MODULE).
+
+deregister_cleanup(_) ->
+ ok.
+
+-spec collect_mf(_Registry, Callback) -> ok
+ when _Registry :: prometheus_registry:registry(),
+ Callback :: prometheus_collector:callback().
+collect_mf(_Registry, Callback) ->
+ try
+ case rabbit_alarm:get_local_alarms(500) %% TODO: figure out timeout
+ of
+ Alarms when is_list(Alarms) ->
+ ActiveAlarms =
+ lists:foldl(fun ({{resource_limit, disk, _}, _}, Acc) ->
+ maps:put(disk_limit, 1, Acc);
+ ({{resource_limit, memory, _}, _}, Acc) ->
+ maps:put(memory_limit, 1, Acc);
+ ({file_descriptor_limit, _}, Acc) ->
+ maps:put(file_descriptor_limit, 1, Acc)
+ end,
+ #{},
+ Alarms),
+
+ Callback(create_mf(?METRIC_NAME(<<"file_descriptor_limit">>),
+ <<"is 1 if file descriptor limit alarm is in effect">>,
+ untyped,
+ [untyped_metric(maps:get(file_descriptor_limit,
+ ActiveAlarms,
+ 0))])),
+ Callback(create_mf(?METRIC_NAME(<<"free_disk_space_watermark">>),
+ <<"is 1 if free disk space watermark alarm is in effect">>,
+ untyped,
+ [untyped_metric(maps:get(disk_limit, ActiveAlarms, 0))])),
+ Callback(create_mf(?METRIC_NAME(<<"memory_used_watermark">>),
+ <<"is 1 if VM memory watermark alarm is in effect">>,
+ untyped,
+ [untyped_metric(maps:get(memory_limit, ActiveAlarms, 0))])),
+ ok;
+ Error ->
+ rabbit_log:error("alarm_metrics_collector failed to emit metrics: "
+                             "rabbit_alarm:get_local_alarms returned ~p",
+ [Error]),
+ %% We are not going to render any alarm metrics here.
+ %% Breaks continuity but at least doesn't crash the
+ %% whole scraping endpoint
+ ok
+ end
+ catch
+ exit:{timeout, _} ->
+ rabbit_log:error("alarm_metrics_collector failed to emit metrics: "
+                             "rabbit_alarm:get_local_alarms timed out"),
+ %% We are not going to render any alarm metrics here.
+ %% Breaks continuity but at least doesn't crash the
+ %% whole scraping endpoint
+ ok
+ end.
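
The new collector registers three untyped metrics that read 0 or 1 under the `rabbitmq_alarms_` prefix. With no alarms in effect, a scrape would include output along these lines (illustrative; the HELP strings come from the module above):

```
# HELP rabbitmq_alarms_file_descriptor_limit is 1 if file descriptor limit alarm is in effect
# TYPE rabbitmq_alarms_file_descriptor_limit untyped
rabbitmq_alarms_file_descriptor_limit 0
# HELP rabbitmq_alarms_free_disk_space_watermark is 1 if free disk space watermark alarm is in effect
# TYPE rabbitmq_alarms_free_disk_space_watermark untyped
rabbitmq_alarms_free_disk_space_watermark 0
# HELP rabbitmq_alarms_memory_used_watermark is 1 if VM memory watermark alarm is in effect
# TYPE rabbitmq_alarms_memory_used_watermark untyped
rabbitmq_alarms_memory_used_watermark 0
```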
diff --git a/deps/rabbitmq_prometheus/src/collectors/prometheus_rabbitmq_core_metrics_collector.erl b/deps/rabbitmq_prometheus/src/collectors/prometheus_rabbitmq_core_metrics_collector.erl
index 255260627a..7c31b71b92 100644
--- a/deps/rabbitmq_prometheus/src/collectors/prometheus_rabbitmq_core_metrics_collector.erl
+++ b/deps/rabbitmq_prometheus/src/collectors/prometheus_rabbitmq_core_metrics_collector.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(prometheus_rabbitmq_core_metrics_collector).
-export([register/0,
@@ -20,13 +20,16 @@
-include_lib("rabbit_common/include/rabbit.hrl").
-behaviour(prometheus_collector).
-
-%% Because all metrics are from RabbitMQ's perspective,
-%% cached for up to 5 seconds by default (configurable),
-%% we prepend rabbitmq_ to all metrics emitted by this collector.
-%% Some metrics are for Erlang (erlang_), Mnesia (schema_db_) or the System (io_),
+%% We prepend either rabbitmq_ or rabbitmq_detailed_ to all metrics emitted by this collector.
+%% There are also some metrics for Erlang (erlang_), Mnesia (schema_db_) or the System (io_),
%% as observed by RabbitMQ.
--define(METRIC_NAME_PREFIX, "rabbitmq_").
+
+%% Used by `/metrics` and `/metrics/per-object`.
+-define(METRIC_NAME_PREFIX, <<"rabbitmq_">>).
+
+%% Used by the `/metrics/detailed` endpoint.
+-define(DETAILED_METRIC_NAME_PREFIX, <<"rabbitmq_detailed_">>).
+-define(CLUSTER_METRIC_NAME_PREFIX, <<"rabbitmq_cluster_">>).
%% ==The source of these metrics can be found in the rabbit_core_metrics module==
%% The relevant files are:
@@ -52,37 +55,8 @@
-define(MICROSECOND, 1000000).
-define(METRICS_RAW, [
- {channel_metrics, [
- {2, undefined, channel_consumers, gauge, "Consumers on a channel", consumer_count},
- {2, undefined, channel_messages_unacked, gauge, "Delivered but not yet acknowledged messages", messages_unacknowledged},
- {2, undefined, channel_messages_unconfirmed, gauge, "Published but not yet confirmed messages", messages_unconfirmed},
- {2, undefined, channel_messages_uncommitted, gauge, "Messages received in a transaction but not yet committed", messages_uncommitted},
- {2, undefined, channel_acks_uncommitted, gauge, "Message acknowledgements in a transaction not yet committed", acks_uncommitted},
- {2, undefined, consumer_prefetch, gauge, "Limit of unacknowledged messages for each consumer", prefetch_count},
- {2, undefined, channel_prefetch, gauge, "Total limit of unacknowledged messages for all consumers on a channel", global_prefetch_count}
- ]},
-
- {channel_exchange_metrics, [
- {2, undefined, channel_messages_published_total, counter, "Total number of messages published into an exchange on a channel"},
- {3, undefined, channel_messages_confirmed_total, counter, "Total number of messages published into an exchange and confirmed on the channel"},
- {4, undefined, channel_messages_unroutable_returned_total, counter, "Total number of messages published as mandatory into an exchange and returned to the publisher as unroutable"},
- {5, undefined, channel_messages_unroutable_dropped_total, counter, "Total number of messages published as non-mandatory into an exchange and dropped as unroutable"}
- ]},
-
- {channel_process_metrics, [
- {2, undefined, channel_process_reductions_total, counter, "Total number of channel process reductions"}
- ]},
-
- {channel_queue_metrics, [
- {2, undefined, channel_get_ack_total, counter, "Total number of messages fetched with basic.get in manual acknowledgement mode"},
- {3, undefined, channel_get_total, counter, "Total number of messages fetched with basic.get in automatic acknowledgement mode"},
- {4, undefined, channel_messages_delivered_ack_total, counter, "Total number of messages delivered to consumers in manual acknowledgement mode"},
- {5, undefined, channel_messages_delivered_total, counter, "Total number of messages delivered to consumers in automatic acknowledgement mode"},
- {6, undefined, channel_messages_redelivered_total, counter, "Total number of messages redelivered to consumers"},
- {7, undefined, channel_messages_acked_total, counter, "Total number of messages acknowledged by consumers"},
- {8, undefined, channel_get_empty_total, counter, "Total number of times basic.get operations fetched no message"}
- ]},
+%%% Those are global, i.e. they contain no reference to queue/vhost/channel
{connection_churn_metrics, [
{2, undefined, connections_opened_total, counter, "Total number of connections opened"},
{3, undefined, connections_closed_total, counter, "Total number of connections closed or terminated"},
@@ -92,24 +66,6 @@
{7, undefined, queues_created_total, counter, "Total number of queues created"},
{8, undefined, queues_deleted_total, counter, "Total number of queues deleted"}
]},
-
- {connection_coarse_metrics, [
- {2, undefined, connection_incoming_bytes_total, counter, "Total number of bytes received on a connection"},
- {3, undefined, connection_outgoing_bytes_total, counter, "Total number of bytes sent on a connection"},
- {4, undefined, connection_process_reductions_total, counter, "Total number of connection process reductions"}
- ]},
-
- {connection_metrics, [
- {2, undefined, connection_incoming_packets_total, counter, "Total number of packets received on a connection", recv_cnt},
- {2, undefined, connection_outgoing_packets_total, counter, "Total number of packets sent on a connection", send_cnt},
- {2, undefined, connection_pending_packets, gauge, "Number of packets waiting to be sent on a connection", send_pend},
- {2, undefined, connection_channels, gauge, "Channels on a connection", channels}
- ]},
-
- {channel_queue_exchange_metrics, [
- {2, undefined, queue_messages_published_total, counter, "Total number of messages published to queues"}
- ]},
-
{node_coarse_metrics, [
{2, undefined, process_open_fds, gauge, "Open file descriptors", fd_used},
{2, undefined, process_open_tcp_sockets, gauge, "Open TCP sockets", sockets_used},
@@ -120,7 +76,6 @@
{2, undefined, erlang_gc_reclaimed_bytes_total, counter, "Total number of bytes of memory reclaimed by Erlang garbage collector", gc_bytes_reclaimed},
{2, undefined, erlang_scheduler_context_switches_total, counter, "Total number of Erlang scheduler context switches", context_switches}
]},
-
{node_metrics, [
{2, undefined, process_max_fds, gauge, "Open file descriptors limit", fd_total},
{2, undefined, process_max_tcp_sockets, gauge, "Open TCP sockets limit", sockets_total},
@@ -164,6 +119,19 @@
{7, ?MILLISECOND, raft_entry_commit_latency_seconds, gauge, "Time taken for a log entry to be committed"}
]},
+ {auth_attempt_metrics, [
+ {2, undefined, auth_attempts_total, counter, "Total number of authorization attempts"},
+ {3, undefined, auth_attempts_succeeded_total, counter, "Total number of successful authentication attempts"},
+ {4, undefined, auth_attempts_failed_total, counter, "Total number of failed authentication attempts"}
+ ]},
+
+ {auth_attempt_detailed_metrics, [
+ {2, undefined, auth_attempts_detailed_total, counter, "Total number of authorization attempts with source info"},
+ {3, undefined, auth_attempts_detailed_succeeded_total, counter, "Total number of successful authorization attempts with source info"},
+ {4, undefined, auth_attempts_detailed_failed_total, counter, "Total number of failed authorization attempts with source info"}
+ ]},
+
+%%% These metrics reference only a queue name. This is the only group where filtering (e.g. by vhost) makes sense.
{queue_coarse_metrics, [
{2, undefined, queue_messages_ready, gauge, "Messages ready to be delivered to consumers"},
{3, undefined, queue_messages_unacked, gauge, "Messages delivered to consumers but not yet acknowledged"},
@@ -171,9 +139,14 @@
{5, undefined, queue_process_reductions_total, counter, "Total number of queue process reductions"}
]},
+ {queue_consumer_count, [
+ {2, undefined, queue_consumers, gauge, "Consumers on a queue", consumers}
+ ]},
+
{queue_metrics, [
{2, undefined, queue_consumers, gauge, "Consumers on a queue", consumers},
- {2, undefined, queue_consumer_utilisation, gauge, "Consumer utilisation", consumer_utilisation},
+ {2, undefined, queue_consumer_capacity, gauge, "Consumer capacity", consumer_capacity},
+ {2, undefined, queue_consumer_utilisation, gauge, "Same as consumer capacity", consumer_utilisation},
{2, undefined, queue_process_memory_bytes, gauge, "Memory in bytes used by the Erlang queue process", memory},
{2, undefined, queue_messages_ram, gauge, "Ready and unacknowledged messages stored in memory", messages_ram},
{2, undefined, queue_messages_ram_bytes, gauge, "Size of ready and unacknowledged messages stored in memory", message_bytes_ram},
@@ -190,18 +163,69 @@
{2, undefined, queue_disk_writes_total, counter, "Total number of times queue wrote messages to disk", disk_writes}
]},
- {auth_attempt_metrics, [
- {2, undefined, auth_attempts_total, counter, "Total number of authorization attempts on a node"},
- {3, undefined, auth_attempts_succeeded_total, counter, "Total number of successful authorization attempts on a node"},
- {4, undefined, auth_attempts_failed_total, counter, "Total number of failed authorization attempts on a node"}
+%%% Metrics that reference a channel. Some of them also carry
+%%% a queue name, but filtering on it makes no sense here,
+%%% as the queue is not the object of interest.
+ {channel_metrics, [
+ {2, undefined, channel_consumers, gauge, "Consumers on a channel", consumer_count},
+ {2, undefined, channel_messages_unacked, gauge, "Delivered but not yet acknowledged messages", messages_unacknowledged},
+ {2, undefined, channel_messages_unconfirmed, gauge, "Published but not yet confirmed messages", messages_unconfirmed},
+ {2, undefined, channel_messages_uncommitted, gauge, "Messages received in a transaction but not yet committed", messages_uncommitted},
+ {2, undefined, channel_acks_uncommitted, gauge, "Message acknowledgements in a transaction not yet committed", acks_uncommitted},
+ {2, undefined, consumer_prefetch, gauge, "Limit of unacknowledged messages for each consumer", prefetch_count},
+ {2, undefined, channel_prefetch, gauge, "Total limit of unacknowledged messages for all consumers on a channel", global_prefetch_count}
]},
- {auth_attempt_detailed_metrics, [
- {2, undefined, auth_attempts_total, counter, "Total number of authorization attempts on a node"},
- {3, undefined, auth_attempts_succeeded_total, counter, "Total number of successful authorization attempts on a node"},
- {4, undefined, auth_attempts_failed_total, counter, "Total number of failed authorization attempts on a node"}
+ {channel_exchange_metrics, [
+ {2, undefined, channel_messages_published_total, counter, "Total number of messages published into an exchange on a channel"},
+ {3, undefined, channel_messages_confirmed_total, counter, "Total number of messages published into an exchange and confirmed on the channel"},
+ {4, undefined, channel_messages_unroutable_returned_total, counter, "Total number of messages published as mandatory into an exchange and returned to the publisher as unroutable"},
+ {5, undefined, channel_messages_unroutable_dropped_total, counter, "Total number of messages published as non-mandatory into an exchange and dropped as unroutable"}
+ ]},
+
+ {channel_process_metrics, [
+ {2, undefined, channel_process_reductions_total, counter, "Total number of channel process reductions"}
+ ]},
+
+ {channel_queue_metrics, [
+ {2, undefined, channel_get_ack_total, counter, "Total number of messages fetched with basic.get in manual acknowledgement mode"},
+ {3, undefined, channel_get_total, counter, "Total number of messages fetched with basic.get in automatic acknowledgement mode"},
+ {4, undefined, channel_messages_delivered_ack_total, counter, "Total number of messages delivered to consumers in manual acknowledgement mode"},
+ {5, undefined, channel_messages_delivered_total, counter, "Total number of messages delivered to consumers in automatic acknowledgement mode"},
+ {6, undefined, channel_messages_redelivered_total, counter, "Total number of messages redelivered to consumers"},
+ {7, undefined, channel_messages_acked_total, counter, "Total number of messages acknowledged by consumers"},
+ {8, undefined, channel_get_empty_total, counter, "Total number of times basic.get operations fetched no message"}
+ ]},
+
+ {connection_coarse_metrics, [
+ {2, undefined, connection_incoming_bytes_total, counter, "Total number of bytes received on a connection"},
+ {3, undefined, connection_outgoing_bytes_total, counter, "Total number of bytes sent on a connection"},
+ {4, undefined, connection_process_reductions_total, counter, "Total number of connection process reductions"}
+ ]},
+
+ {connection_metrics, [
+ {2, undefined, connection_incoming_packets_total, counter, "Total number of packets received on a connection", recv_cnt},
+ {2, undefined, connection_outgoing_packets_total, counter, "Total number of packets sent on a connection", send_cnt},
+ {2, undefined, connection_pending_packets, gauge, "Number of packets waiting to be sent on a connection", send_pend},
+ {2, undefined, connection_channels, gauge, "Channels on a connection", channels}
+ ]},
+
+ {channel_queue_exchange_metrics, [
+ {2, undefined, queue_messages_published_total, counter, "Total number of messages published to queues"}
]}
+]).
+%% Metrics that can only be requested through `/metrics/detailed`
+-define(METRICS_CLUSTER,[
+ {vhost_status, [
+ {2, undefined, vhost_status, gauge, "Whether a given vhost is running"}
+ ]},
+ {exchange_bindings, [
+ {2, undefined, exchange_bindings, gauge, "Number of bindings for an exchange. This value is cluster-wide."}
+ ]},
+ {exchange_names, [
+ {2, undefined, exchange_name, gauge, "Enumerates exchanges without any additional info. This value is cluster-wide. A cheaper alternative to `exchange_bindings`"}
+ ]}
]).
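+%% A usage sketch based on the handler changes and HTTP test suite further down in
+%% this diff: on the `/metrics/detailed` endpoint nothing is rendered unless metric
+%% families are requested explicitly, and families from ?METRICS_CLUSTER come out
+%% with a `rabbitmq_cluster_` name prefix (?METRICS_RAW families with `rabbitmq_detailed_`),
+%% e.g.
+%%   GET /metrics/detailed?family=vhost_status          -> rabbitmq_cluster_vhost_status
+%%   GET /metrics/detailed?family=queue_consumer_count  -> rabbitmq_detailed_queue_consumers
+%%   GET /metrics/detailed?vhost=vhost-1&family=queue_coarse_metrics
+%% (multiple `family` and `vhost` query parameters can be combined).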
-define(TOTALS, [
@@ -221,26 +245,54 @@ register() ->
deregister_cleanup(_) -> ok.
+collect_mf('detailed', Callback) ->
+ collect(true, ?DETAILED_METRIC_NAME_PREFIX, vhosts_filter_from_pdict(), queues_filter_from_pdict(), enabled_mfs_from_pdict(?METRICS_RAW), Callback),
+ collect(true, ?CLUSTER_METRIC_NAME_PREFIX, vhosts_filter_from_pdict(), queues_filter_from_pdict(), enabled_mfs_from_pdict(?METRICS_CLUSTER), Callback),
+ %% identity is here to enable filtering on a cluster name (as already happens in existing dashboards)
+ emit_identity_info(Callback),
+ ok;
+collect_mf('per-object', Callback) ->
+ collect(true, ?METRIC_NAME_PREFIX, false, queues_filter_from_pdict(), ?METRICS_RAW, Callback),
+ totals(Callback),
+ emit_identity_info(Callback),
+ ok;
collect_mf(_Registry, Callback) ->
- {ok, PerObjectMetrics} = application:get_env(rabbitmq_prometheus, return_per_object_metrics),
+ PerObjectMetrics = application:get_env(rabbitmq_prometheus, return_per_object_metrics, false),
+ collect(PerObjectMetrics, ?METRIC_NAME_PREFIX, false, queues_filter_from_pdict(), ?METRICS_RAW, Callback),
+ totals(Callback),
+ emit_identity_info(Callback),
+ ok.
+
+collect(PerObjectMetrics, Prefix, VHostsFilter, QueuesFilter, IncludedMFs, Callback) ->
[begin
- Data = get_data(Table, PerObjectMetrics),
- mf(Callback, Contents, Data)
- end || {Table, Contents} <- ?METRICS_RAW, needs_processing(PerObjectMetrics, Table)],
+ Data = get_data(Table, PerObjectMetrics, VHostsFilter, QueuesFilter),
+ mf(Callback, Prefix, Contents, Data)
+ end || {Table, Contents} <- IncludedMFs, not mutually_exclusive_mf(PerObjectMetrics, Table, IncludedMFs)].
+
+totals(Callback) ->
[begin
Size = ets:info(Table, size),
mf_totals(Callback, Name, Type, Help, Size)
end || {Table, Name, Type, Help} <- ?TOTALS],
+ ok.
+
+emit_identity_info(Callback) ->
add_metric_family(build_info(), Callback),
add_metric_family(identity_info(), Callback),
ok.
-needs_processing(false, auth_attempt_detailed_metrics) ->
- %% When per object metrics are disabled the detailed authentication attempt metrics
- %% create duplicates. Totals are carried on `auth_attempt_metrics`
- false;
-needs_processing(_, _) ->
- true.
+%% Aggregated `auth_attempt_detailed_metrics` and
+%% `auth_attempt_metrics` are the same numbers. The former is just
+%% more computationally intensive.
+mutually_exclusive_mf(false, auth_attempt_detailed_metrics, _) ->
+ true;
+%% `queue_consumer_count` is a strict subset of queue metrics. They
+%% read from the same table, but `queue_consumer_count` skips a lot of
+%% `proplists:get_value/2` calls.
+mutually_exclusive_mf(_, queue_consumer_count, MFs) ->
+ lists:keymember(queue_metrics, 1, MFs);
+mutually_exclusive_mf(_, _, _) ->
+ false.
build_info() ->
ProductInfo = rabbit:product_info(),
@@ -283,16 +335,18 @@ identity_info() ->
[{
[
{rabbitmq_node, node()},
- {rabbitmq_cluster, rabbit_nodes:cluster_name()}
+ {rabbitmq_cluster, rabbit_nodes:cluster_name()},
+ {rabbitmq_cluster_permanent_id, rabbit_nodes:persistent_cluster_id()}
],
1
}]
}.
add_metric_family({Name, Type, Help, Metrics}, Callback) ->
- Callback(create_mf(?METRIC_NAME(Name), Help, Type, Metrics)).
+ MN = <<?METRIC_NAME_PREFIX/binary, (prometheus_model_helpers:metric_name(Name))/binary>>,
+ Callback(create_mf(MN, Help, Type, Metrics)).
-mf(Callback, Contents, Data) ->
+mf(Callback, Prefix, Contents, Data) ->
[begin
Fun = case Conversion of
undefined ->
@@ -302,7 +356,7 @@ mf(Callback, Contents, Data) ->
end,
Callback(
create_mf(
- ?METRIC_NAME(Name),
+ <<Prefix/binary, (prometheus_model_helpers:metric_name(Name))/binary>>,
Help,
catch_boolean(Type),
?MODULE,
@@ -319,7 +373,7 @@ mf(Callback, Contents, Data) ->
end,
Callback(
create_mf(
- ?METRIC_NAME(Name),
+ <<Prefix/binary, (prometheus_model_helpers:metric_name(Name))/binary>>,
Help,
catch_boolean(Type),
?MODULE,
@@ -331,7 +385,7 @@ mf(Callback, Contents, Data) ->
mf_totals(Callback, Name, Type, Help, Size) ->
Callback(
create_mf(
- ?METRIC_NAME(Name),
+ <<?METRIC_NAME_PREFIX/binary, (prometheus_model_helpers:metric_name(Name))/binary>>,
Help,
catch_boolean(Type),
Size
@@ -344,15 +398,27 @@ collect_metrics(_, {Type, Fun, Items}) ->
labels(Item) ->
label(element(1, Item)).
+label(L) when is_binary(L) ->
+ L;
+label(M) when is_map(M) ->
+ maps:fold(fun (K, V, Acc = <<>>) ->
+ <<Acc/binary, K/binary, "=\"", V/binary, "\"">>;
+ (K, V, Acc) ->
+ <<Acc/binary, ",", K/binary, "=\"", V/binary, "\"">>
+ end, <<>>, M);
label(#resource{virtual_host = VHost, kind = exchange, name = Name}) ->
- [{vhost, VHost}, {exchange, Name}];
+ <<"vhost=\"", VHost/binary, "\",exchange=\"", Name/binary, "\"">>;
label(#resource{virtual_host = VHost, kind = queue, name = Name}) ->
- [{vhost, VHost}, {queue, Name}];
+ <<"vhost=\"", VHost/binary, "\",queue=\"", Name/binary, "\"">>;
label({P, {#resource{virtual_host = QVHost, kind = queue, name = QName},
#resource{virtual_host = EVHost, kind = exchange, name = EName}}}) when is_pid(P) ->
%% channel_queue_exchange_metrics {channel_id, {queue_id, exchange_id}}
- [{channel, P}, {queue_vhost, QVHost}, {queue, QName},
- {exchange_vhost, EVHost}, {exchange, EName}];
+ <<"channel=\"", (iolist_to_binary(pid_to_list(P)))/binary, "\",",
+ "queue_vhost=\"", QVHost/binary, "\",",
+ "queue=\"", QName/binary, "\",",
+ "exchange_vhost=\"", EVHost/binary, "\",",
+ "exchange=\"", EName/binary, "\""
+ >>;
label({RemoteAddress, Username, Protocol}) when is_binary(RemoteAddress), is_binary(Username),
is_atom(Protocol) ->
lists:filter(fun({_, V}) ->
@@ -360,13 +426,17 @@ label({RemoteAddress, Username, Protocol}) when is_binary(RemoteAddress), is_bin
end, [{remote_address, RemoteAddress}, {username, Username},
{protocol, atom_to_binary(Protocol, utf8)}]);
label({I1, I2}) ->
- label(I1) ++ label(I2);
+ case {label(I1), label(I2)} of
+ {<<>>, L} -> L;
+ {L, <<>>} -> L;
+ {L1, L2} -> <<L1/binary, ",", L2/binary>>
+ end;
label(P) when is_pid(P) ->
- [{channel, P}];
+ <<"channel=\"", (iolist_to_binary(pid_to_list(P)))/binary, "\"">>;
label(A) when is_atom(A) ->
case is_protocol(A) of
- true -> [{protocol, atom_to_binary(A, utf8)}];
- false -> []
+ true -> <<"protocol=\"", (atom_to_binary(A, utf8))/binary, "\"">>;
+ false -> <<>>
end.
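+%% A couple of illustrative values produced by the pre-rendered label clauses above
+%% (worked out from the clauses themselves, with made-up resource names):
+%%   label(#resource{virtual_host = <<"/">>, kind = queue, name = <<"q1">>})
+%%       =:= <<"vhost=\"/\",queue=\"q1\"">>
+%%   label(#{<<"vhost">> => <<"v1">>}) =:= <<"vhost=\"v1\"">>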
is_protocol(P) ->
@@ -412,7 +482,7 @@ emit_gauge_metric_if_defined(Labels, Value) ->
gauge_metric(Labels, Value)
end.
-get_data(connection_metrics = Table, false) ->
+get_data(connection_metrics = Table, false, _, _) ->
{Table, A1, A2, A3, A4} = ets:foldl(fun({_, Props}, {T, A1, A2, A3, A4}) ->
{T,
sum(proplists:get_value(recv_cnt, Props), A1),
@@ -421,7 +491,7 @@ get_data(connection_metrics = Table, false) ->
sum(proplists:get_value(channels, Props), A4)}
end, empty(Table), Table),
[{Table, [{recv_cnt, A1}, {send_cnt, A2}, {send_pend, A3}, {channels, A4}]}];
-get_data(channel_metrics = Table, false) ->
+get_data(channel_metrics = Table, false, _, _) ->
{Table, A1, A2, A3, A4, A5, A6, A7} =
ets:foldl(fun({_, Props}, {T, A1, A2, A3, A4, A5, A6, A7}) ->
{T,
@@ -436,28 +506,42 @@ get_data(channel_metrics = Table, false) ->
[{Table, [{consumer_count, A1}, {messages_unacknowledged, A2}, {messages_unconfirmed, A3},
{messages_uncommitted, A4}, {acks_uncommitted, A5}, {prefetch_count, A6},
{global_prefetch_count, A7}]}];
-get_data(queue_metrics = Table, false) ->
+get_data(queue_consumer_count = MF, false, VHostsFilter, QueuesFilter) ->
+ Table = queue_metrics, %% Real table name
+ {_, A1} = ets:foldl(fun
+ ({#resource{kind = queue, virtual_host = VHost}, _, _}, Acc) when is_map(VHostsFilter), map_get(VHost, VHostsFilter) == false ->
+ Acc;
+ ({#resource{kind = queue, name = Name}, Props, _}, {T, A1} = Acc)
+ when is_list(QueuesFilter) ->
+ case re:run(Name, QueuesFilter, [{capture, none}]) of
+ match ->
+ Acc;
+ nomatch ->
+ {T,
+ sum(proplists:get_value(consumers, Props), A1)
+ }
+ end;
+ ({_, Props, _}, {T, A1}) ->
+ {T,
+ sum(proplists:get_value(consumers, Props), A1)
+ }
+ end, empty(MF), Table),
+ [{Table, [{consumers, A1}]}];
+get_data(queue_metrics = Table, false, VHostsFilter, QueuesFilter) ->
{Table, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16} =
- ets:foldl(fun({_, Props, _}, {T, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10,
- A11, A12, A13, A14, A15, A16}) ->
- {T,
- sum(proplists:get_value(consumers, Props), A1),
- sum(proplists:get_value(consumer_utilisation, Props), A2),
- sum(proplists:get_value(memory, Props), A3),
- sum(proplists:get_value(messages_ram, Props), A4),
- sum(proplists:get_value(message_bytes_ram, Props), A5),
- sum(proplists:get_value(messages_ready_ram, Props), A6),
- sum(proplists:get_value(messages_unacknowledged_ram, Props), A7),
- sum(proplists:get_value(messages_persistent, Props), A8),
- sum(proplists:get_value(message_bytes_persistent, Props), A9),
- sum(proplists:get_value(message_bytes, Props), A10),
- sum(proplists:get_value(message_bytes_ready, Props), A11),
- sum(proplists:get_value(message_bytes_unacknowledged, Props), A12),
- sum(proplists:get_value(messages_paged_out, Props), A13),
- sum(proplists:get_value(message_bytes_paged_out, Props), A14),
- sum(proplists:get_value(disk_reads, Props), A15),
- sum(proplists:get_value(disk_writes, Props), A16)
- }
+ ets:foldl(fun
+ ({#resource{kind = queue, virtual_host = VHost}, _, _}, Acc) when is_map(VHostsFilter), map_get(VHost, VHostsFilter) == false ->
+ Acc;
+ ({#resource{kind = queue, name = Name}, Props, _}, Acc)
+ when is_list(QueuesFilter) ->
+ case re:run(Name, QueuesFilter, [{capture, none}]) of
+ match ->
+ Acc;
+ nomatch ->
+ sum_queue_metrics(Props, Acc)
+ end;
+ ({_, Props, _}, Acc) ->
+ sum_queue_metrics(Props, Acc)
end, empty(Table), Table),
[{Table, [{consumers, A1}, {consumer_utilisation, A2}, {memory, A3}, {messages_ram, A4},
{message_bytes_ram, A5}, {messages_ready_ram, A6},
@@ -466,14 +550,26 @@ get_data(queue_metrics = Table, false) ->
{message_bytes_ready, A11}, {message_bytes_unacknowledged, A12},
{messages_paged_out, A13}, {message_bytes_paged_out, A14},
{disk_reads, A15}, {disk_writes, A16}]}];
-get_data(Table, false) when Table == channel_exchange_metrics;
+get_data(Table, false, VHostsFilter, QueuesFilter) when Table == channel_exchange_metrics;
Table == queue_coarse_metrics;
Table == channel_queue_metrics;
Table == connection_coarse_metrics;
Table == channel_queue_exchange_metrics;
Table == ra_metrics;
Table == channel_process_metrics ->
- Result = ets:foldl(fun({_, V1}, {T, A1}) ->
+ Result = ets:foldl(fun
+ %% For queue_coarse_metrics
+ ({#resource{kind = queue, virtual_host = VHost}, _, _, _, _}, Acc) when is_map(VHostsFilter), map_get(VHost, VHostsFilter) == false ->
+ Acc;
+ ({#resource{kind = queue, name = Name}, V1, V2, V3, V4}, {T, A1, A2, A3, A4} = Acc)
+ when is_list(QueuesFilter) ->
+ case re:run(Name, QueuesFilter, [{capture, none}]) of
+ match ->
+ Acc;
+ nomatch ->
+ {T, V1 + A1, V2 + A2, V3 + A3, V4 + A4}
+ end;
+ ({_, V1}, {T, A1}) ->
{T, V1 + A1};
({_, V1, _}, {T, A1}) ->
{T, V1 + A1};
@@ -499,9 +595,88 @@ get_data(Table, false) when Table == channel_exchange_metrics;
_ ->
[Result]
end;
-get_data(Table, _) ->
+get_data(queue_coarse_metrics = Table, true, VHostsFilter, _) when is_map(VHostsFilter) ->
+ ets:foldl(fun
+ ({#resource{kind = queue, virtual_host = VHost}, _, _, _, _} = Row, Acc) when map_get(VHost, VHostsFilter) ->
+ [Row|Acc];
+ (_, Acc) ->
+ Acc
+ end, [], Table);
+get_data(MF, true, VHostsFilter, _) when is_map(VHostsFilter), MF == queue_metrics orelse MF == queue_consumer_count ->
+ Table = queue_metrics,
+ ets:foldl(fun
+ ({#resource{kind = queue, virtual_host = VHost}, _, _} = Row, Acc) when map_get(VHost, VHostsFilter) ->
+ [Row|Acc];
+ (_, Acc) ->
+ Acc
+ end, [], Table);
+get_data(queue_consumer_count, true, _, _) ->
+ ets:tab2list(queue_metrics);
+get_data(vhost_status, _, _, _) ->
+ [ { #{<<"vhost">> => VHost},
+ case rabbit_vhost_sup_sup:is_vhost_alive(VHost) of
+ true -> 1;
+ false -> 0
+ end}
+ || VHost <- rabbit_vhost:list() ];
+get_data(exchange_bindings, _, _, _) ->
+ Exchanges = lists:foldl(fun
+ (#exchange{internal = true}, Acc) ->
+ Acc;
+ (#exchange{name = #resource{name = <<>>}}, Acc) ->
+ Acc;
+ (#exchange{name = EName, type = EType}, Acc) ->
+ maps:put(EName, #{type => atom_to_binary(EType), binding_count => 0}, Acc)
+ end, #{}, rabbit_exchange:list()),
+ WithCount = ets:foldl(
+ fun (#route{binding = #binding{source = EName}}, Acc) ->
+ case maps:is_key(EName, Acc) of
+ false -> Acc;
+ true ->
+ maps:update_with(EName, fun (R = #{binding_count := Cnt}) ->
+ R#{binding_count => Cnt + 1}
+ end, Acc)
+ end
+ end, Exchanges, rabbit_route),
+ maps:fold(fun(#resource{virtual_host = VHost, name = Name}, #{type := Type, binding_count := Bindings}, Acc) ->
+ [{<<"vhost=\"", VHost/binary, "\",exchange=\"", Name/binary, "\",type=\"", Type/binary, "\"">>,
+ Bindings}|Acc]
+ end, [], WithCount);
+get_data(exchange_names, _, _, _) ->
+ lists:foldl(fun
+ (#exchange{internal = true}, Acc) ->
+ Acc;
+ (#exchange{name = #resource{name = <<>>}}, Acc) ->
+ Acc;
+ (#exchange{name = #resource{virtual_host = VHost, name = Name}, type = EType}, Acc) ->
+ Label = <<"vhost=\"", VHost/binary, "\",exchange=\"", Name/binary, "\",type=\"", (atom_to_binary(EType))/binary, "\"">>,
+ [{Label, 1}|Acc]
+ end, [], rabbit_exchange:list());
+get_data(Table, _, _, _) ->
ets:tab2list(Table).
+
+sum_queue_metrics(Props, {T, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11,
+ A12, A13, A14, A15, A16}) ->
+ {T,
+ sum(proplists:get_value(consumers, Props), A1),
+ sum(proplists:get_value(consumer_utilisation, Props), A2),
+ sum(proplists:get_value(memory, Props), A3),
+ sum(proplists:get_value(messages_ram, Props), A4),
+ sum(proplists:get_value(message_bytes_ram, Props), A5),
+ sum(proplists:get_value(messages_ready_ram, Props), A6),
+ sum(proplists:get_value(messages_unacknowledged_ram, Props), A7),
+ sum(proplists:get_value(messages_persistent, Props), A8),
+ sum(proplists:get_value(message_bytes_persistent, Props), A9),
+ sum(proplists:get_value(message_bytes, Props), A10),
+ sum(proplists:get_value(message_bytes_ready, Props), A11),
+ sum(proplists:get_value(message_bytes_unacknowledged, Props), A12),
+ sum(proplists:get_value(messages_paged_out, Props), A13),
+ sum(proplists:get_value(message_bytes_paged_out, Props), A14),
+ sum(proplists:get_value(disk_reads, Props), A15),
+ sum(proplists:get_value(disk_writes, Props), A16)
+ }.
+
division(0, 0) ->
0;
division(A, B) ->
@@ -510,7 +685,7 @@ division(A, B) ->
accumulate_count_and_sum(Value, {Count, Sum}) ->
{Count + 1, Sum + Value}.
-empty(T) when T == channel_queue_exchange_metrics; T == channel_process_metrics ->
+empty(T) when T == channel_queue_exchange_metrics; T == channel_process_metrics; T == queue_consumer_count ->
{T, 0};
empty(T) when T == connection_coarse_metrics; T == auth_attempt_metrics; T == auth_attempt_detailed_metrics ->
{T, 0, 0, 0};
@@ -529,3 +704,31 @@ sum('', B) ->
B;
sum(A, B) ->
A + B.
+
+enabled_mfs_from_pdict(AllMFs) ->
+ case get(prometheus_mf_filter) of
+ undefined ->
+ [];
+ MFNames ->
+ MFNameSet = sets:from_list(MFNames),
+ [ MF || MF = {Table, _} <- AllMFs, sets:is_element(Table, MFNameSet) ]
+ end.
+
+vhosts_filter_from_pdict() ->
+ case get(prometheus_vhost_filter) of
+ undefined ->
+ false;
+ L ->
+            %% Having both excluded and included vhosts in this map makes some guards easier (or even possible).
+ All = maps:from_list([ {VHost, false} || VHost <- rabbit_vhost:list()]),
+ Enabled = maps:from_list([ {VHost, true} || VHost <- L ]),
+ maps:merge(All, Enabled)
+ end.
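+%% For instance (an illustrative sketch with made-up vhost names): with vhosts
+%% <<"/">>, <<"a">> and <<"b">> defined and only "a" requested via ?vhost=a,
+%% the resulting filter map is #{<<"/">> => false, <<"a">> => true, <<"b">> => false}.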
+
+queues_filter_from_pdict() ->
+ case get(prometheus_queue_filter) of
+ undefined ->
+ false;
+ Pattern ->
+ Pattern
+ end.
diff --git a/deps/rabbitmq_prometheus/src/collectors/prometheus_rabbitmq_global_metrics_collector.erl b/deps/rabbitmq_prometheus/src/collectors/prometheus_rabbitmq_global_metrics_collector.erl
new file mode 100644
index 0000000000..b13dfa0bd8
--- /dev/null
+++ b/deps/rabbitmq_prometheus/src/collectors/prometheus_rabbitmq_global_metrics_collector.erl
@@ -0,0 +1,50 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+-module(prometheus_rabbitmq_global_metrics_collector).
+
+-behaviour(prometheus_collector).
+-include_lib("prometheus/include/prometheus.hrl").
+
+-export([register/0,
+ deregister_cleanup/1,
+ collect_mf/2]).
+
+-import(prometheus_model_helpers, [create_mf/4,
+ counter_metric/2]).
+
+%% This exposes the new global metrics, which are instrumented directly and bypass the RabbitMQ Core metrics entirely.
+%% The short-term plan is to start building the new metrics subsystem alongside the RabbitMQ Core metrics.
+%% The long-term plan is to replace RabbitMQ Core metrics with this new approach.
+%%
+-define(METRIC_NAME_PREFIX, "rabbitmq_global_").
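+%% With this prefix the exported families come out as e.g.
+%% rabbitmq_global_messages_received_total or rabbitmq_global_publishers
+%% (both asserted in the HTTP test suite further down in this diff).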
+
+%%====================================================================
+%% Collector API
+%%====================================================================
+
+register() ->
+ ok = prometheus_registry:register_collector(?MODULE).
+
+deregister_cleanup(_) -> ok.
+
+collect_mf(_Registry, Callback) ->
+ _ = maps:fold(
+ fun (Name, #{type := Type, help := Help, values := Values}, Acc) ->
+ Callback(
+ create_mf(?METRIC_NAME(Name),
+ Help,
+ Type,
+ maps:to_list(Values))),
+ Acc
+ end,
+ ok,
+ rabbit_global_counters:prometheus_format()
+ ).
+
+%% ===================================================================
+%% Private functions
+%% ===================================================================
diff --git a/deps/rabbitmq_prometheus/src/rabbit_prometheus_app.erl b/deps/rabbitmq_prometheus/src/rabbit_prometheus_app.erl
index 4aa2934b7e..c5450c23be 100644
--- a/deps/rabbitmq_prometheus/src/rabbit_prometheus_app.erl
+++ b/deps/rabbitmq_prometheus/src/rabbit_prometheus_app.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_prometheus_app).
@@ -74,14 +74,7 @@ has_configured_listener(Key) ->
end.
get_tls_listener() ->
- {ok, Listener0} = application:get_env(rabbitmq_prometheus, ssl_config),
- case proplists:get_value(cowboy_opts, Listener0) of
- undefined ->
- [{ssl, true}, {ssl_opts, Listener0}];
- CowboyOpts ->
- Listener1 = lists:keydelete(cowboy_opts, 1, Listener0),
- [{ssl, true}, {ssl_opts, Listener1}, {cowboy_opts, CowboyOpts}]
- end.
+ [{ssl, true} | application:get_env(rabbitmq_prometheus, ssl_config, [])].
get_tcp_listener() ->
application:get_env(rabbitmq_prometheus, tcp_config, []).
@@ -111,8 +104,9 @@ ensure_port_and_protocol(tcp, Protocol, Listener) ->
do_ensure_port_and_protocol(?DEFAULT_PORT, Protocol, Listener).
do_ensure_port_and_protocol(Port, Protocol, Listener) ->
- %% include default port if it's not provided in the config
- %% as Cowboy won't start if the port is missing
+ %% Include default port if it's not provided in the config
+ %% as Cowboy won't start if the port is missing.
+ %% Protocol is displayed in mgmt UI and CLI output.
M0 = maps:from_list(Listener),
M1 = maps:merge(#{port => Port, protocol => Protocol}, M0),
{ok, maps:to_list(M1)}.
diff --git a/deps/rabbitmq_prometheus/src/rabbit_prometheus_dispatcher.erl b/deps/rabbitmq_prometheus/src/rabbit_prometheus_dispatcher.erl
index 910123c005..45f5bb738d 100644
--- a/deps/rabbitmq_prometheus/src/rabbit_prometheus_dispatcher.erl
+++ b/deps/rabbitmq_prometheus/src/rabbit_prometheus_dispatcher.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_prometheus_dispatcher).
@@ -13,7 +13,23 @@
build_dispatcher() ->
{ok, _} = application:ensure_all_started(prometheus),
- prometheus_registry:register_collectors([prometheus_rabbitmq_core_metrics_collector]),
+ prometheus_registry:register_collectors([
+ prometheus_rabbitmq_core_metrics_collector,
+ prometheus_rabbitmq_global_metrics_collector,
+ prometheus_rabbitmq_alarm_metrics_collector]),
+ prometheus_registry:register_collectors('per-object', [
+ prometheus_vm_system_info_collector,
+ prometheus_vm_dist_collector,
+ prometheus_vm_memory_collector,
+ prometheus_mnesia_collector,
+ prometheus_vm_statistics_collector,
+ prometheus_vm_msacc_collector,
+ prometheus_rabbitmq_core_metrics_collector,
+ prometheus_rabbitmq_global_metrics_collector
+ ]),
+ prometheus_registry:register_collectors('detailed', [
+ prometheus_rabbitmq_core_metrics_collector
+ ]),
rabbit_prometheus_handler:setup(),
cowboy_router:compile([{'_', dispatcher()}]).
diff --git a/deps/rabbitmq_prometheus/src/rabbit_prometheus_handler.erl b/deps/rabbitmq_prometheus/src/rabbit_prometheus_handler.erl
index bb7a98150d..f7f4f11720 100644
--- a/deps/rabbitmq_prometheus/src/rabbit_prometheus_handler.erl
+++ b/deps/rabbitmq_prometheus/src/rabbit_prometheus_handler.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_prometheus_handler).
@@ -31,20 +31,23 @@ is_authorized(ReqData, Context) ->
{true, ReqData, Context}.
setup() ->
- TelemetryRegistry = telemetry_registry(),
+ setup_metrics(telemetry_registry()),
+ setup_metrics('per-object'),
+ setup_metrics('detailed').
+setup_metrics(Registry) ->
ScrapeDuration = [{name, ?SCRAPE_DURATION},
{help, "Scrape duration"},
{labels, ["registry", "content_type"]},
- {registry, TelemetryRegistry}],
+ {registry, Registry}],
ScrapeSize = [{name, ?SCRAPE_SIZE},
{help, "Scrape size, not encoded"},
{labels, ["registry", "content_type"]},
- {registry, TelemetryRegistry}],
+ {registry, Registry}],
ScrapeEncodedSize = [{name, ?SCRAPE_ENCODED_SIZE},
{help, "Scrape size, encoded"},
{labels, ["registry", "content_type", "encoding"]},
- {registry, TelemetryRegistry}],
+ {registry, Registry}],
prometheus_summary:declare(ScrapeDuration),
prometheus_summary:declare(ScrapeSize),
@@ -65,6 +68,7 @@ gen_response(<<"GET">>, Request) ->
false ->
cowboy_req:reply(404, #{}, <<"Unknown Registry">>, Request);
Registry ->
+ put_filtering_options_into_process_dictionary(Request),
gen_metrics_response(Registry, Request)
end;
gen_response(_, Request) ->
@@ -106,14 +110,12 @@ format_metrics(Request, Registry) ->
encode_format(ContentType, binary_to_list(Encoding), Scrape, Registry).
render_format(ContentType, Registry) ->
- TelemetryRegistry = telemetry_registry(),
-
Scrape = prometheus_summary:observe_duration(
- TelemetryRegistry,
+ Registry,
?SCRAPE_DURATION,
[Registry, ContentType],
fun () -> prometheus_text_format:format(Registry) end),
- prometheus_summary:observe(TelemetryRegistry,
+ prometheus_summary:observe(Registry,
?SCRAPE_SIZE,
[Registry, ContentType],
iolist_size(Scrape)),
@@ -146,4 +148,49 @@ encode_format(ContentType, Encoding, Scrape, Registry) ->
encode_format_("gzip", Scrape) ->
zlib:gzip(Scrape);
encode_format_("identity", Scrape) ->
- Scrape.
+ Scrape.
+
+%% It's not easy to pass this information in a pure way (it would require changing prometheus.erl).
+put_filtering_options_into_process_dictionary(Request) ->
+ #{vhost := VHosts, family := Families} = cowboy_req:match_qs([{vhost, [], undefined}, {family, [], undefined}], Request),
+ case parse_vhosts(VHosts) of
+ Vs when is_list(Vs) ->
+ put(prometheus_vhost_filter, Vs);
+ _ -> ok
+ end,
+ case parse_metric_families(Families) of
+ Fs when is_list(Fs) ->
+ put(prometheus_mf_filter, Fs);
+ _ -> ok
+ end,
+ case application:get_env(rabbitmq_prometheus, filter_aggregated_queue_metrics_pattern, undefined) of
+ undefined -> ok;
+ Pattern ->
+ {ok, CompiledPattern} = re:compile(Pattern),
+ put(prometheus_queue_filter, CompiledPattern)
+ end,
+ ok.
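+%% Unlike the vhost/family filters, the aggregated-queue-metrics filter comes from the
+%% application environment. A hypothetical advanced.config snippet (the key name is taken
+%% from the get_env/3 call above; the pattern itself is made up):
+%%   {rabbitmq_prometheus, [
+%%       {filter_aggregated_queue_metrics_pattern, "^temp-"}
+%%   ]}.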
+
+parse_vhosts(N) when is_binary(N) ->
+ parse_vhosts([N]);
+parse_vhosts(L) when is_list(L) ->
+ [ VHostName || VHostName <- L, rabbit_vhost:exists(VHostName)];
+parse_vhosts(_) ->
+ false.
+
+parse_metric_families(N) when is_binary(N) ->
+ parse_metric_families([N]);
+parse_metric_families([]) ->
+ [];
+parse_metric_families([B|Bs]) ->
+ %% binary_to_existing_atom() should be enough, as it's used for filtering things out.
+ %% Getting a full list of supported metrics would be harder.
+    %% NB: on the other hand, it would be nice to have validation. Implement it?
+ case catch erlang:binary_to_existing_atom(B) of
+ A when is_atom(A) ->
+ [A|parse_metric_families(Bs)];
+ _ ->
+ parse_metric_families(Bs)
+ end;
+parse_metric_families(_) ->
+ false.
diff --git a/deps/rabbitmq_prometheus/test/config_schema_SUITE.erl b/deps/rabbitmq_prometheus/test/config_schema_SUITE.erl
index 97719d9246..11b2813ff1 100644
--- a/deps/rabbitmq_prometheus/test/config_schema_SUITE.erl
+++ b/deps/rabbitmq_prometheus/test/config_schema_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(config_schema_SUITE).
diff --git a/deps/rabbitmq_prometheus/test/config_schema_SUITE_data/rabbitmq_prometheus.snippets b/deps/rabbitmq_prometheus/test/config_schema_SUITE_data/rabbitmq_prometheus.snippets
index 90b1b4c181..9e6d143503 100644
--- a/deps/rabbitmq_prometheus/test/config_schema_SUITE_data/rabbitmq_prometheus.snippets
+++ b/deps/rabbitmq_prometheus/test/config_schema_SUITE_data/rabbitmq_prometheus.snippets
@@ -144,12 +144,14 @@
{ssl_config,[
{ip, "192.168.1.2"},
{port,15691},
- {cacertfile,"test/config_schema_SUITE_data/certs/cacert.pem"},
- {certfile,"test/config_schema_SUITE_data/certs/cert.pem"},
- {keyfile,"test/config_schema_SUITE_data/certs/key.pem"},
- {verify, verify_none},
- {fail_if_no_peer_cert, false}
- ]}
+ {ssl_opts, [
+ {cacertfile,"test/config_schema_SUITE_data/certs/cacert.pem"},
+ {certfile,"test/config_schema_SUITE_data/certs/cert.pem"},
+ {keyfile,"test/config_schema_SUITE_data/certs/key.pem"},
+ {verify, verify_none},
+ {fail_if_no_peer_cert, false}
+ ]}
+ ]}
]}],
[rabbitmq_prometheus]},
@@ -184,31 +186,33 @@
{ssl_config,[
{ip, "192.168.1.2"},
{port,15691},
- {cacertfile,"test/config_schema_SUITE_data/certs/cacert.pem"},
- {certfile,"test/config_schema_SUITE_data/certs/cert.pem"},
- {keyfile,"test/config_schema_SUITE_data/certs/key.pem"},
+ {ssl_opts, [
+ {cacertfile,"test/config_schema_SUITE_data/certs/cacert.pem"},
+ {certfile,"test/config_schema_SUITE_data/certs/cert.pem"},
+ {keyfile,"test/config_schema_SUITE_data/certs/key.pem"},
- {verify, verify_peer},
- {fail_if_no_peer_cert, false},
+ {verify, verify_peer},
+ {fail_if_no_peer_cert, false},
- {honor_cipher_order, true},
- {honor_ecc_order, true},
- {client_renegotiation, false},
- {secure_renegotiate, true},
+ {honor_cipher_order, true},
+ {honor_ecc_order, true},
+ {client_renegotiation, false},
+ {secure_renegotiate, true},
- {versions,['tlsv1.2','tlsv1.1']},
- {ciphers, [
- "ECDHE-ECDSA-AES256-GCM-SHA384",
- "ECDHE-RSA-AES256-GCM-SHA384",
- "ECDHE-ECDSA-AES256-SHA384",
- "ECDHE-RSA-AES256-SHA384",
- "ECDH-ECDSA-AES256-GCM-SHA384",
- "ECDH-RSA-AES256-GCM-SHA384",
- "ECDH-ECDSA-AES256-SHA384",
- "ECDH-RSA-AES256-SHA384",
- "DHE-RSA-AES256-GCM-SHA384"
- ]}
- ]}
+ {versions,['tlsv1.2','tlsv1.1']},
+ {ciphers, [
+ "ECDHE-ECDSA-AES256-GCM-SHA384",
+ "ECDHE-RSA-AES256-GCM-SHA384",
+ "ECDHE-ECDSA-AES256-SHA384",
+ "ECDHE-RSA-AES256-SHA384",
+ "ECDH-ECDSA-AES256-GCM-SHA384",
+ "ECDH-RSA-AES256-GCM-SHA384",
+ "ECDH-ECDSA-AES256-SHA384",
+ "ECDH-RSA-AES256-SHA384",
+ "DHE-RSA-AES256-GCM-SHA384"
+ ]}
+ ]}
+ ]}
]}],
[rabbitmq_prometheus]},
diff --git a/deps/rabbitmq_prometheus/test/rabbit_prometheus_http_SUITE.erl b/deps/rabbitmq_prometheus/test/rabbit_prometheus_http_SUITE.erl
index 9ef4a43efa..c8be5fae28 100644
--- a/deps/rabbitmq_prometheus/test/rabbit_prometheus_http_SUITE.erl
+++ b/deps/rabbitmq_prometheus/test/rabbit_prometheus_http_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_prometheus_http_SUITE).
@@ -18,27 +18,46 @@ all() ->
[
{group, default_config},
{group, config_path},
- {group, config_port},
{group, aggregated_metrics},
{group, per_object_metrics},
- {group, commercial}
+ {group, per_object_endpoint_metrics},
+ {group, commercial},
+ {group, detailed_metrics}
].
groups() ->
[
{default_config, [], generic_tests()},
{config_path, [], generic_tests()},
- {config_port, [], generic_tests()},
{aggregated_metrics, [], [
aggregated_metrics_test,
- specific_erlang_metrics_present_test
+ specific_erlang_metrics_present_test,
+ global_metrics_present_test,
+ global_metrics_single_metric_family_test
]},
{per_object_metrics, [], [
- per_object_metrics_test,
+ globally_configure_per_object_metrics_test,
+ specific_erlang_metrics_present_test,
+ global_metrics_present_test,
+ global_metrics_single_metric_family_test
+ ]},
+ {per_object_endpoint_metrics, [], [
+ endpoint_per_object_metrics,
specific_erlang_metrics_present_test
]},
{commercial, [], [
build_info_product_test
+ ]},
+ {detailed_metrics, [], [
+ detailed_metrics_no_families_enabled_by_default,
+ queue_consumer_count_single_vhost_per_object_test,
+ queue_consumer_count_all_vhosts_per_object_test,
+ queue_coarse_metrics_per_object_test,
+ queue_metrics_per_object_test,
+ queue_consumer_count_and_queue_metrics_mutually_exclusive_test,
+ vhost_status_metric,
+ exchange_bindings_metric,
+ exchange_names_metric
]}
].
@@ -61,14 +80,89 @@ init_per_group(config_path, Config0) ->
PathConfig = {rabbitmq_prometheus, [{path, "/bunnieshop"}]},
Config1 = rabbit_ct_helpers:merge_app_env(Config0, PathConfig),
init_per_group(config_path, Config1, [{prometheus_path, "/bunnieshop"}]);
-init_per_group(config_port, Config0) ->
- PathConfig = {rabbitmq_prometheus, [{tcp_config, [{port, 15772}]}]},
- Config1 = rabbit_ct_helpers:merge_app_env(Config0, PathConfig),
- init_per_group(config_port, Config1, [{prometheus_port, 15772}]);
init_per_group(per_object_metrics, Config0) ->
PathConfig = {rabbitmq_prometheus, [{return_per_object_metrics, true}]},
Config1 = rabbit_ct_helpers:merge_app_env(Config0, PathConfig),
init_per_group(aggregated_metrics, Config1);
+init_per_group(per_object_endpoint_metrics, Config0) ->
+ PathConfig = {rabbitmq_prometheus, [
+ {return_per_object_metrics, false}
+ ]},
+ Config1 = rabbit_ct_helpers:merge_app_env(Config0, PathConfig),
+ init_per_group(aggregated_metrics, Config1);
+init_per_group(detailed_metrics, Config0) ->
+ StatsEnv = {rabbit, [{collect_statistics, coarse}, {collect_statistics_interval, 100}]},
+
+ Config1 = init_per_group(detailed_metrics, rabbit_ct_helpers:merge_app_env(Config0, StatsEnv), []),
+
+ rabbit_ct_broker_helpers:add_vhost(Config1, 0, <<"vhost-1">>, <<"guest">>),
+ rabbit_ct_broker_helpers:set_full_permissions(Config1, <<"vhost-1">>),
+ VHost1Conn = rabbit_ct_client_helpers:open_unmanaged_connection(Config1, 0, <<"vhost-1">>),
+ {ok, VHost1Ch} = amqp_connection:open_channel(VHost1Conn),
+
+ rabbit_ct_broker_helpers:add_vhost(Config1, 0, <<"vhost-2">>, <<"guest">>),
+ rabbit_ct_broker_helpers:set_full_permissions(Config1, <<"vhost-2">>),
+ VHost2Conn = rabbit_ct_client_helpers:open_unmanaged_connection(Config1, 0, <<"vhost-2">>),
+ {ok, VHost2Ch} = amqp_connection:open_channel(VHost2Conn),
+
+ DefaultCh = rabbit_ct_client_helpers:open_channel(Config1),
+
+ [
+ (fun () ->
+ QPart = case VHost of
+ <<"/">> -> <<"default">>;
+ _ -> VHost
+ end,
+ QName = << QPart/binary, "-", Q/binary>>,
+ #'queue.declare_ok'{} = amqp_channel:call(Ch,
+ #'queue.declare'{queue = QName,
+ durable = true
+
+ }),
+ lists:foreach( fun (_) ->
+ amqp_channel:cast(Ch,
+ #'basic.publish'{routing_key = QName},
+ #amqp_msg{payload = <<"msg">>})
+ end, lists:seq(1, MsgNum) ),
+ ExDirect = <<QName/binary, "-direct-exchange">>,
+ #'exchange.declare_ok'{} = amqp_channel:call(Ch, #'exchange.declare'{exchange = ExDirect}),
+ ExTopic = <<QName/binary, "-topic-exchange">>,
+ #'exchange.declare_ok'{} = amqp_channel:call(Ch, #'exchange.declare'{exchange = ExTopic, type = <<"topic">>}),
+ #'queue.bind_ok'{} = amqp_channel:call(Ch, #'queue.bind'{queue = QName, exchange = ExDirect, routing_key = QName}),
+ lists:foreach( fun (Idx) ->
+ #'queue.bind_ok'{} = amqp_channel:call(Ch, #'queue.bind'{queue = QName, exchange = ExTopic, routing_key = integer_to_binary(Idx)})
+ end, lists:seq(1, MsgNum) )
+ end)()
+ || {VHost, Ch, MsgNum} <- [{<<"/">>, DefaultCh, 3}, {<<"vhost-1">>, VHost1Ch, 7}, {<<"vhost-2">>, VHost2Ch, 11}],
+ Q <- [ <<"queue-with-messages">>, <<"queue-with-consumer">> ]
+ ],
+
+ DefaultConsumer = sleeping_consumer(),
+ #'basic.consume_ok'{consumer_tag = DefaultCTag} =
+ amqp_channel:subscribe(DefaultCh, #'basic.consume'{queue = <<"default-queue-with-consumer">>}, DefaultConsumer),
+
+ VHost1Consumer = sleeping_consumer(),
+ #'basic.consume_ok'{consumer_tag = VHost1CTag} =
+ amqp_channel:subscribe(VHost1Ch, #'basic.consume'{queue = <<"vhost-1-queue-with-consumer">>}, VHost1Consumer),
+
+ VHost2Consumer = sleeping_consumer(),
+ #'basic.consume_ok'{consumer_tag = VHost2CTag} =
+ amqp_channel:subscribe(VHost2Ch, #'basic.consume'{queue = <<"vhost-2-queue-with-consumer">>}, VHost2Consumer),
+
+ timer:sleep(1000),
+
+ Config1 ++ [ {default_consumer_pid, DefaultConsumer}
+ , {default_consumer_ctag, DefaultCTag}
+ , {default_channel, DefaultCh}
+ , {vhost1_consumer_pid, VHost1Consumer}
+ , {vhost1_consumer_ctag, VHost1CTag}
+ , {vhost1_channel, VHost1Ch}
+ , {vhost1_conn, VHost1Conn}
+ , {vhost2_consumer_pid, VHost2Consumer}
+ , {vhost2_consumer_ctag, VHost2CTag}
+ , {vhost2_channel, VHost2Ch}
+ , {vhost2_conn, VHost2Conn}
+ ];
init_per_group(aggregated_metrics, Config0) ->
Config1 = rabbit_ct_helpers:merge_app_env(
Config0,
@@ -122,6 +216,28 @@ end_per_group(aggregated_metrics, Config) ->
amqp_channel:call(Ch, #'queue.delete'{queue = ?config(queue_name, Config)}),
rabbit_ct_client_helpers:close_channel(Ch),
end_per_group_(Config);
+
+end_per_group(detailed_metrics, Config) ->
+ DefaultCh = ?config(default_channel, Config),
+ amqp_channel:call(DefaultCh, #'basic.cancel'{consumer_tag = ?config(default_consumer_ctag, Config)}),
+ ?config(default_consumer_pid, Config) ! stop,
+ rabbit_ct_client_helpers:close_channel(DefaultCh),
+
+ VHost1Ch = ?config(vhost1_channel, Config),
+ amqp_channel:call(VHost1Ch, #'basic.cancel'{consumer_tag = ?config(vhost1_consumer_ctag, Config)}),
+ ?config(vhost1_consumer_pid, Config) ! stop,
+ amqp_channel:close(VHost1Ch),
+ amqp_connection:close(?config(vhost1_conn, Config)),
+
+ VHost2Ch = ?config(vhost2_channel, Config),
+ amqp_channel:call(VHost2Ch, #'basic.cancel'{consumer_tag = ?config(vhost2_consumer_ctag, Config)}),
+ ?config(vhost2_consumer_pid, Config) ! stop,
+ amqp_channel:close(VHost2Ch),
+ amqp_connection:close(?config(vhost2_conn, Config)),
+
+ %% Delete queues?
+ end_per_group_(Config);
+
end_per_group(_, Config) ->
end_per_group_(Config).
@@ -158,7 +274,7 @@ get_test(Config) ->
{_Headers, Body} = http_get_with_pal(Config, [], 200),
%% Check that the body looks like a valid response
?assertEqual(match, re:run(Body, "TYPE", [{capture, none}])),
- Port = proplists:get_value(prometheus_port, Config, 15692),
+ Port = rabbit_mgmt_test_util:config_port(Config, tcp_port_prometheus),
URI = lists:flatten(io_lib:format("http://localhost:~p/metricsooops", [Port])),
{ok, {{_, CodeAct, _}, _, _}} = httpc:request(get, {URI, []}, ?HTTPC_OPTS, []),
?assertMatch(404, CodeAct).
@@ -206,6 +322,8 @@ aggregated_metrics_test(Config) ->
?assertEqual(match, re:run(Body, "^rabbitmq_raft_term_total ", [{capture, none}, multiline])),
?assertEqual(match, re:run(Body, "^rabbitmq_queue_messages_ready ", [{capture, none}, multiline])),
?assertEqual(match, re:run(Body, "^rabbitmq_queue_consumers ", [{capture, none}, multiline])),
+ ?assertEqual(match, re:run(Body, "TYPE rabbitmq_auth_attempts_total", [{capture, none}, multiline])),
+ ?assertEqual(nomatch, re:run(Body, "TYPE rabbitmq_auth_attempts_detailed_total", [{capture, none}, multiline])),
%% Check the first metric value in each ETS table that requires converting
?assertEqual(match, re:run(Body, "^rabbitmq_erlang_uptime_seconds ", [{capture, none}, multiline])),
?assertEqual(match, re:run(Body, "^rabbitmq_io_read_time_seconds_total ", [{capture, none}, multiline])),
@@ -214,8 +332,14 @@ aggregated_metrics_test(Config) ->
%% Check raft_entry_commit_latency_seconds because we are aggregating it
?assertEqual(match, re:run(Body, "^rabbitmq_raft_entry_commit_latency_seconds ", [{capture, none}, multiline])).
-per_object_metrics_test(Config) ->
- {_Headers, Body} = http_get_with_pal(Config, [], 200),
+endpoint_per_object_metrics(Config) ->
+ per_object_metrics_test(Config, "/metrics/per-object").
+
+globally_configure_per_object_metrics_test(Config) ->
+ per_object_metrics_test(Config, "/metrics").
+
+per_object_metrics_test(Config, Path) ->
+ {_Headers, Body} = http_get_with_pal(Config, Path, [], 200),
?assertEqual(match, re:run(Body, "^# TYPE", [{capture, none}, multiline])),
?assertEqual(match, re:run(Body, "^# HELP", [{capture, none}, multiline])),
?assertEqual(match, re:run(Body, ?config(queue_name, Config), [{capture, none}])),
@@ -234,6 +358,8 @@ per_object_metrics_test(Config) ->
?assertEqual(match, re:run(Body, "^rabbitmq_raft_term_total{", [{capture, none}, multiline])),
?assertEqual(match, re:run(Body, "^rabbitmq_queue_messages_ready{", [{capture, none}, multiline])),
?assertEqual(match, re:run(Body, "^rabbitmq_queue_consumers{", [{capture, none}, multiline])),
+ ?assertEqual(match, re:run(Body, "TYPE rabbitmq_auth_attempts_total", [{capture, none}, multiline])),
+ ?assertEqual(match, re:run(Body, "TYPE rabbitmq_auth_attempts_detailed_total", [{capture, none}, multiline])),
%% Check the first metric value in each ETS table that requires converting
?assertEqual(match, re:run(Body, "^rabbitmq_erlang_uptime_seconds ", [{capture, none}, multiline])),
?assertEqual(match, re:run(Body, "^rabbitmq_io_read_time_seconds_total ", [{capture, none}, multiline])),
@@ -260,15 +386,170 @@ identity_info_test(Config) ->
{_Headers, Body} = http_get_with_pal(Config, [], 200),
?assertEqual(match, re:run(Body, "^rabbitmq_identity_info{", [{capture, none}, multiline])),
?assertEqual(match, re:run(Body, "rabbitmq_node=", [{capture, none}])),
- ?assertEqual(match, re:run(Body, "rabbitmq_cluster=", [{capture, none}])).
+ ?assertEqual(match, re:run(Body, "rabbitmq_cluster=", [{capture, none}])),
+ ?assertEqual(match, re:run(Body, "rabbitmq_cluster_permanent_id=", [{capture, none}])).
specific_erlang_metrics_present_test(Config) ->
{_Headers, Body} = http_get_with_pal(Config, [], 200),
?assertEqual(match, re:run(Body, "^erlang_vm_dist_node_queue_size_bytes{", [{capture, none}, multiline])).
+global_metrics_present_test(Config) ->
+ {_Headers, Body} = http_get_with_pal(Config, [], 200),
+ ?assertEqual(match, re:run(Body, "^rabbitmq_global_messages_received_total{", [{capture, none}, multiline])),
+ ?assertEqual(match, re:run(Body, "^rabbitmq_global_messages_received_confirm_total{", [{capture, none}, multiline])),
+ ?assertEqual(match, re:run(Body, "^rabbitmq_global_messages_routed_total{", [{capture, none}, multiline])),
+ ?assertEqual(match, re:run(Body, "^rabbitmq_global_messages_unroutable_dropped_total{", [{capture, none}, multiline])),
+ ?assertEqual(match, re:run(Body, "^rabbitmq_global_messages_unroutable_returned_total{", [{capture, none}, multiline])),
+ ?assertEqual(match, re:run(Body, "^rabbitmq_global_messages_confirmed_total{", [{capture, none}, multiline])),
+ ?assertEqual(match, re:run(Body, "^rabbitmq_global_messages_delivered_total{", [{capture, none}, multiline])),
+ ?assertEqual(match, re:run(Body, "^rabbitmq_global_messages_delivered_consume_manual_ack_total{", [{capture, none}, multiline])),
+ ?assertEqual(match, re:run(Body, "^rabbitmq_global_messages_delivered_consume_auto_ack_total{", [{capture, none}, multiline])),
+ ?assertEqual(match, re:run(Body, "^rabbitmq_global_messages_delivered_get_manual_ack_total{", [{capture, none}, multiline])),
+ ?assertEqual(match, re:run(Body, "^rabbitmq_global_messages_delivered_get_auto_ack_total{", [{capture, none}, multiline])),
+ ?assertEqual(match, re:run(Body, "^rabbitmq_global_messages_get_empty_total{", [{capture, none}, multiline])),
+ ?assertEqual(match, re:run(Body, "^rabbitmq_global_messages_redelivered_total{", [{capture, none}, multiline])),
+ ?assertEqual(match, re:run(Body, "^rabbitmq_global_messages_acknowledged_total{", [{capture, none}, multiline])),
+ ?assertEqual(match, re:run(Body, "^rabbitmq_global_publishers{", [{capture, none}, multiline])),
+ ?assertEqual(match, re:run(Body, "^rabbitmq_global_consumers{", [{capture, none}, multiline])).
+
+global_metrics_single_metric_family_test(Config) ->
+ {_Headers, Body} = http_get_with_pal(Config, [], 200),
+ {match, MetricFamilyMatches} = re:run(Body, "TYPE rabbitmq_global_messages_acknowledged_total", [global]),
+ ?assertEqual(1, length(MetricFamilyMatches)).
+
+queue_consumer_count_single_vhost_per_object_test(Config) ->
+ {_, Body} = http_get_with_pal(Config, "/metrics/detailed?vhost=vhost-1&family=queue_consumer_count&per-object=1", [], 200),
+
+    %% There should be exactly 2 metrics returned (2 queues in that vhost, and `queue_consumer_count` exposes only a single metric)
+ ?assertEqual(#{rabbitmq_detailed_queue_consumers =>
+ #{#{queue => "vhost-1-queue-with-consumer",vhost => "vhost-1"} => [1],
+ #{queue => "vhost-1-queue-with-messages",vhost => "vhost-1"} => [0]}},
+ parse_response(Body)),
+ ok.
+
+queue_consumer_count_all_vhosts_per_object_test(Config) ->
+ Expected = #{rabbitmq_detailed_queue_consumers =>
+ #{#{queue => "vhost-1-queue-with-consumer",vhost => "vhost-1"} => [1],
+ #{queue => "vhost-1-queue-with-messages",vhost => "vhost-1"} => [0],
+ #{queue => "vhost-2-queue-with-consumer",vhost => "vhost-2"} => [1],
+ #{queue => "vhost-2-queue-with-messages",vhost => "vhost-2"} => [0],
+ #{queue => "default-queue-with-consumer",vhost => "/"} => [1],
+ #{queue => "default-queue-with-messages",vhost => "/"} => [0]}},
+
+ %% No vhost given, all should be returned
+ {_, Body1} = http_get_with_pal(Config, "/metrics/detailed?family=queue_consumer_count&per-object=1", [], 200),
+ ?assertEqual(Expected, parse_response(Body1)),
+
+ %% Both vhosts are listed explicitly
+ {_, Body2} = http_get_with_pal(Config, "/metrics/detailed?vhost=vhost-1&vhost=vhost-2&vhost=%2f&family=queue_consumer_count&per-object=1", [], 200),
+ ?assertEqual(Expected, parse_response(Body2)),
+ ok.
+
+queue_coarse_metrics_per_object_test(Config) ->
+ Expected1 = #{#{queue => "vhost-1-queue-with-consumer", vhost => "vhost-1"} => [7],
+ #{queue => "vhost-1-queue-with-messages", vhost => "vhost-1"} => [7]},
+ Expected2 = #{#{queue => "vhost-2-queue-with-consumer", vhost => "vhost-2"} => [11],
+ #{queue => "vhost-2-queue-with-messages", vhost => "vhost-2"} => [11]},
+ ExpectedD = #{#{queue => "default-queue-with-consumer", vhost => "/"} => [3],
+ #{queue => "default-queue-with-messages", vhost => "/"} => [3]},
+
+ {_, Body1} = http_get_with_pal(Config, "/metrics/detailed?vhost=vhost-1&family=queue_coarse_metrics", [], 200),
+ ?assertEqual(Expected1,
+ map_get(rabbitmq_detailed_queue_messages, parse_response(Body1))),
+
+ {_, Body2} = http_get_with_pal(Config, "/metrics/detailed?family=queue_coarse_metrics", [], 200),
+ ?assertEqual(lists:foldl(fun maps:merge/2, #{}, [Expected1, Expected2, ExpectedD]),
+ map_get(rabbitmq_detailed_queue_messages, parse_response(Body2))),
+
+ {_, Body3} = http_get_with_pal(Config, "/metrics/detailed?vhost=vhost-1&vhost=vhost-2&family=queue_coarse_metrics", [], 200),
+ ?assertEqual(lists:foldl(fun maps:merge/2, #{}, [Expected1, Expected2]),
+ map_get(rabbitmq_detailed_queue_messages, parse_response(Body3))),
+ ok.
+
+queue_metrics_per_object_test(Config) ->
+ Expected1 = #{#{queue => "vhost-1-queue-with-consumer", vhost => "vhost-1"} => [7],
+ #{queue => "vhost-1-queue-with-messages", vhost => "vhost-1"} => [7]},
+ Expected2 = #{#{queue => "vhost-2-queue-with-consumer", vhost => "vhost-2"} => [11],
+ #{queue => "vhost-2-queue-with-messages", vhost => "vhost-2"} => [11]},
+ ExpectedD = #{#{queue => "default-queue-with-consumer", vhost => "/"} => [3],
+ #{queue => "default-queue-with-messages", vhost => "/"} => [3]},
+ {_, Body1} = http_get_with_pal(Config, "/metrics/detailed?vhost=vhost-1&family=queue_metrics", [], 200),
+ ?assertEqual(Expected1,
+ map_get(rabbitmq_detailed_queue_messages_ram, parse_response(Body1))),
+
+ {_, Body2} = http_get_with_pal(Config, "/metrics/detailed?family=queue_metrics", [], 200),
+ ?assertEqual(lists:foldl(fun maps:merge/2, #{}, [Expected1, Expected2, ExpectedD]),
+ map_get(rabbitmq_detailed_queue_messages_ram, parse_response(Body2))),
+
+ {_, Body3} = http_get_with_pal(Config, "/metrics/detailed?vhost=vhost-1&vhost=vhost-2&family=queue_metrics", [], 200),
+ ?assertEqual(lists:foldl(fun maps:merge/2, #{}, [Expected1, Expected2]),
+ map_get(rabbitmq_detailed_queue_messages_ram, parse_response(Body3))),
+ ok.
+
+queue_consumer_count_and_queue_metrics_mutually_exclusive_test(Config) ->
+ {_, Body1} = http_get_with_pal(Config, "/metrics/detailed?vhost=vhost-1&family=queue_consumer_count&family=queue_metrics", [], 200),
+ ?assertEqual(#{#{queue => "vhost-1-queue-with-consumer", vhost => "vhost-1"} => [1],
+ #{queue => "vhost-1-queue-with-messages", vhost => "vhost-1"} => [0]},
+ map_get(rabbitmq_detailed_queue_consumers, parse_response(Body1))),
+
+ ok.
+
+detailed_metrics_no_families_enabled_by_default(Config) ->
+ {_, Body} = http_get_with_pal(Config, "/metrics/detailed", [], 200),
+ ?assertEqual(#{}, parse_response(Body)),
+ ok.
+
+vhost_status_metric(Config) ->
+ {_, Body1} = http_get_with_pal(Config, "/metrics/detailed?family=vhost_status", [], 200),
+ Expected = #{rabbitmq_cluster_vhost_status =>
+ #{#{vhost => "vhost-1"} => [1],
+ #{vhost => "vhost-2"} => [1],
+ #{vhost => "/"} => [1]}},
+ ?assertEqual(Expected, parse_response(Body1)),
+ ok.
+
+exchange_bindings_metric(Config) ->
+ {_, Body1} = http_get_with_pal(Config, "/metrics/detailed?family=exchange_bindings", [], 200),
+
+ Bindings = map_get(rabbitmq_cluster_exchange_bindings, parse_response(Body1)),
+ ?assertEqual([11], map_get(#{vhost=>"vhost-2",exchange=>"vhost-2-queue-with-messages-topic-exchange",type=>"topic"}, Bindings)),
+ ?assertEqual([1], map_get(#{vhost=>"vhost-2",exchange=>"vhost-2-queue-with-messages-direct-exchange",type=>"direct"}, Bindings)),
+ ok.
+
+exchange_names_metric(Config) ->
+ {_, Body1} = http_get_with_pal(Config, "/metrics/detailed?family=exchange_names", [], 200),
+
+ Names = maps:filter(
+ fun
+ (#{exchange := [$a, $m, $q|_]}, _) ->
+ false;
+ (_, _) ->
+ true
+ end,
+ map_get(rabbitmq_cluster_exchange_name, parse_response(Body1))),
+
+ ?assertEqual(#{ #{vhost=>"vhost-2",exchange=>"vhost-2-queue-with-messages-topic-exchange",type=>"topic"} => [1],
+ #{vhost=>"vhost-2",exchange=>"vhost-2-queue-with-messages-direct-exchange",type=>"direct"} => [1],
+ #{vhost=>"vhost-1",exchange=>"vhost-1-queue-with-messages-topic-exchange",type=>"topic"} => [1],
+ #{vhost=>"vhost-1",exchange=>"vhost-1-queue-with-messages-direct-exchange",type=>"direct"} => [1],
+ #{vhost=>"/",exchange=>"default-queue-with-messages-topic-exchange",type=>"topic"} => [1],
+ #{vhost=>"/",exchange=>"default-queue-with-messages-direct-exchange",type=>"direct"} => [1],
+ #{vhost=>"vhost-2",exchange=>"vhost-2-queue-with-consumer-topic-exchange",type=>"topic"} => [1],
+ #{vhost=>"vhost-2",exchange=>"vhost-2-queue-with-consumer-direct-exchange",type=>"direct"} => [1],
+ #{vhost=>"vhost-1",exchange=>"vhost-1-queue-with-consumer-topic-exchange",type=>"topic"} => [1],
+ #{vhost=>"vhost-1",exchange=>"vhost-1-queue-with-consumer-direct-exchange",type=>"direct"} => [1],
+ #{vhost=>"/",exchange=>"default-queue-with-consumer-topic-exchange",type=>"topic"} => [1],
+ #{vhost=>"/",exchange=>"default-queue-with-consumer-direct-exchange",type=>"direct"} => [1]
+ }, Names),
+ ok.
+
+
http_get(Config, ReqHeaders, CodeExp) ->
Path = proplists:get_value(prometheus_path, Config, "/metrics"),
- Port = proplists:get_value(prometheus_port, Config, 15692),
+ http_get(Config, Path, ReqHeaders, CodeExp).
+
+http_get(Config, Path, ReqHeaders, CodeExp) ->
+ Port = rabbit_mgmt_test_util:config_port(Config, tcp_port_prometheus),
URI = lists:flatten(io_lib:format("http://localhost:~p~s", [Port, Path])),
{ok, {{_HTTP, CodeAct, _}, Headers, Body}} =
httpc:request(get, {URI, ReqHeaders}, ?HTTPC_OPTS, []),
@@ -276,7 +557,52 @@ http_get(Config, ReqHeaders, CodeExp) ->
{Headers, Body}.
http_get_with_pal(Config, ReqHeaders, CodeExp) ->
- {Headers, Body} = http_get(Config, ReqHeaders, CodeExp),
+ Path = proplists:get_value(prometheus_path, Config, "/metrics"),
+ http_get_with_pal(Config, Path, ReqHeaders, CodeExp).
+
+http_get_with_pal(Config, Path, ReqHeaders, CodeExp) ->
+ {Headers, Body} = http_get(Config, Path, ReqHeaders, CodeExp),
    %% Print and log response body - it makes it easier to find why a match failed
ct:pal(Body),
{Headers, Body}.
+
+parse_response(Body) ->
+ Lines = string:split(Body, "\n", all),
+ Metrics = [ parse_metric(L)
+ || L = [C|_] <- Lines, C /= $#
+ ],
+ lists:foldl(fun ({Metric, Label, Value}, MetricMap) ->
+ case re:run(atom_to_list(Metric), "^(telemetry|rabbitmq_identity_info|rabbitmq_build_info)", [{capture, none}]) of
+ match ->
+ MetricMap;
+ _ ->
+ OldLabelMap = maps:get(Metric, MetricMap, #{}),
+ OldValues = maps:get(Label, OldLabelMap, []),
+ NewValues = [Value|OldValues],
+ NewLabelMap = maps:put(Label, NewValues, OldLabelMap),
+ maps:put(Metric, NewLabelMap, MetricMap)
+ end
+ end, #{}, Metrics).
+
+parse_metric(M) ->
+ case string:lexemes(M, "{}" ) of
+ [Metric, Label, Value] ->
+ {list_to_atom(Metric), parse_label(Label), parse_value(string:trim(Value))};
+ _ ->
+ [Metric, Value] = string:split(M, " "),
+ {list_to_atom(Metric), undefined, parse_value(string:trim(Value))}
+ end.
+
+parse_label(L) ->
+ Parts = string:split(L, ",", all),
+ maps:from_list([ parse_kv(P) || P <- Parts ]).
+
+parse_kv(KV) ->
+ [K, V] = string:split(KV, "="),
+ {list_to_atom(K), string:trim(V, both, [$"])}.
+
+parse_value(V) ->
+ case lists:all(fun (C) -> C >= $0 andalso C =< $9 end, V) of
+ true -> list_to_integer(V);
+ _ -> V
+ end.
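The helpers above reduce a Prometheus text-format body to a nested map of metric name => (label map => list of samples), which is the shape the detailed-metrics assertions earlier in this suite match against. A minimal illustration of that transformation, using a sample line built from the labels asserted above (illustrative only, not part of the change):

    # comment lines (HELP/TYPE) are skipped, as are telemetry/identity/build-info metrics
    rabbitmq_detailed_queue_consumers{vhost="vhost-1",queue="vhost-1-queue-with-consumer"} 1

parse_response/1 parses the label set into a map with atom keys and unquoted string values, converts the all-digit sample value to an integer, and accumulates:

    #{rabbitmq_detailed_queue_consumers =>
          #{#{vhost => "vhost-1", queue => "vhost-1-queue-with-consumer"} => [1]}}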
diff --git a/deps/rabbitmq_random_exchange/BUILD.bazel b/deps/rabbitmq_random_exchange/BUILD.bazel
new file mode 100644
index 0000000000..4baa0fd962
--- /dev/null
+++ b/deps/rabbitmq_random_exchange/BUILD.bazel
@@ -0,0 +1,34 @@
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze")
+load(
+ "//:rabbitmq.bzl",
+ "RABBITMQ_DIALYZER_OPTS",
+ "rabbitmq_lib",
+)
+
+APP_NAME = "rabbitmq_random_exchange"
+
+APP_DESCRIPTION = "RabbitMQ Random Exchange"
+
+DEPS = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+]
+
+RUNTIME_DEPS = [
+ "//deps/rabbit:bazel_erlang_lib",
+]
+
+rabbitmq_lib(
+ app_description = APP_DESCRIPTION,
+ app_name = APP_NAME,
+ runtime_deps = RUNTIME_DEPS,
+ deps = DEPS,
+)
+
+xref(tags = ["xref"])
+
+dialyze(
+ dialyzer_opts = RABBITMQ_DIALYZER_OPTS,
+ plt = "//:base_plt",
+ tags = ["dialyze"],
+)
diff --git a/deps/rabbitmq_random_exchange/CONTRIBUTING.md b/deps/rabbitmq_random_exchange/CONTRIBUTING.md
index 23a92fef9c..9722e973fb 100644
--- a/deps/rabbitmq_random_exchange/CONTRIBUTING.md
+++ b/deps/rabbitmq_random_exchange/CONTRIBUTING.md
@@ -13,7 +13,7 @@ The process is fairly standard:
* Create a branch with a descriptive name in the relevant repositories
* Make your changes, run tests, commit with a [descriptive message](https://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
  * Submit pull requests with an explanation of what has been changed and **why**
- * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Submit a filled out and signed [Contributor Agreement](https://cla.pivotal.io/) if needed (see below)
* Be patient. We will get to your pull request eventually
If what you are going to work on is a substantial change, please first ask the core team
@@ -28,7 +28,7 @@ See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
## Contributor Agreement
If you want to contribute a non-trivial change, please submit a signed copy of our
-[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+[Contributor Agreement](https://cla.pivotal.io/) around the time
you submit your pull request. This will make it much easier (in some cases, possible)
for the RabbitMQ team at Pivotal to merge your contribution.
diff --git a/deps/rabbitmq_random_exchange/Makefile b/deps/rabbitmq_random_exchange/Makefile
index 3502c5656a..3804919948 100644
--- a/deps/rabbitmq_random_exchange/Makefile
+++ b/deps/rabbitmq_random_exchange/Makefile
@@ -13,5 +13,5 @@ DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
ERLANG_MK_COMMIT = rabbitmq-tmp
-include rabbitmq-components.mk
-include erlang.mk
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
diff --git a/deps/rabbitmq_random_exchange/erlang.mk b/deps/rabbitmq_random_exchange/erlang.mk
deleted file mode 100644
index fce4be0b0a..0000000000
--- a/deps/rabbitmq_random_exchange/erlang.mk
+++ /dev/null
@@ -1,7808 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
-ERLANG_MK_WITHOUT =
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
-
-# Adding erlang.mk to make Erlang scripts that call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
-
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
-
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another a key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simplify writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating Github-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = erlang library for JSON Web Token
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple non intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elasticsearch's REST interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = redis erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += esh_mk
-pkg_esh_mk_name = esh_mk
-pkg_esh_mk_description = esh template engine plugin for erlang.mk
-pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
-pkg_esh_mk_fetch = git
-pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
-pkg_esh_mk_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple Erlang testing library that provides TAP-compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = TOML language erlang parser
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = Implementation of the FIX protocol (http://fixprotocol.org/).
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - an Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = The guards and for Erlang parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and WebSocket client written in Erlang.
-pkg_gun_homepage = http://ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang irc client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSON Schema Erlang) is an implementation of a JSON Schema validator for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding and decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library for efficiently decoding and encoding JSON, written in C.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = erlang driver for rethinkdb
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = library to handle mimetypes
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang Oauth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transformer for using new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between record and proplist
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = automatic Erlang node discovery via redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = pipelined erlang redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang Rest Client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy as nif for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an ID generator for message services.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elastic Search
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX style broken HTML parser in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = transit format for erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU based collation Erlang module
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtension for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = a simple erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler svn repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based yaml loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = ZAB protocol implemented in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
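# For illustration only: a query against the index above goes through the
# pkg_print macro, so a search for one of the listed packages, e.g.
#
#   $ make search q=poolboy
#
# would print roughly:
#
#   App name: poolboy
#   Description: A hunky Erlang worker pool factory
#   Home page: https://github.com/devinus/poolboy
#   Fetch with: git
#   Repository: https://github.com/devinus/poolboy
#   Commit: master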
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
-
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
-
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# They both use the core_dep_plugin macro.
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
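# For illustration only, with hypothetical dependency names: each entry in
# DEP_EARLY_PLUGINS is routed through core_dep_plugin, e.g.
#
#   DEP_EARLY_PLUGINS = some_dep             # -include $(DEPS_DIR)/some_dep/early-plugins.mk
#   DEP_EARLY_PLUGINS = some_dep/mk/early.mk # -include $(DEPS_DIR)/some_dep/mk/early.mk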
-
-# Query functions.
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
-
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
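# For illustration only, with hypothetical dep definitions: given
#
#   DEPS = cowlib mydep
#   dep_cowlib = git https://github.com/ninenines/cowlib 2.12.1
#   dep_mydep  = hex 1.0.0 real_hex_package_name
#
# the query functions above would resolve to
#
#   $(call query_fetch_method,cowlib) -> git
#   $(call query_repo,cowlib)         -> https://github.com/ninenines/cowlib
#   $(call query_version,cowlib)      -> 2.12.1
#   $(call query_repo,mydep)          -> https://hex.pm/packages/real_hex_package_name
#   $(call query_version,mydep)       -> 1.0.0
#   $(call query_extra,mydep)         -> package-name=real_hex_package_name
#
# Entries without a dep_* override fall back to the pkg_* index above.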
-
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly we don't want to include it here,
-# otherwise it'll be treated both as an app and as a top-level project.
-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
-
-export NO_AUTOPATCH
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
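# For illustration only: dep_verbose follows the usual erlang.mk verbosity
# switch, so something like
#
#   $ make deps V=2
#
# would run dependency commands under `set -x`, while the default prints the
# short " DEP <name> (<commit>)" line defined above.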
-
-# Optimization: don't recompile deps unless truly necessary.
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create an ebin directory for all apps to make sure Erlang recognizes them
-# as proper OTP applications when using -include_lib. This is a temporary
-# fix; a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
-
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies after they have been compiled
-# once. If a developer is working on the top-level project and on some
-# dependencies at the same time, they may want to change this behavior.
-# There are two solutions:
-# 1. Set `FULL=1` so that all dependencies are visited and
-# recursively recompiled if necessary.
-# 2. Set `FORCE_REBUILD=` to the specific list of dependencies that
-# should be recompiled (instead of the whole set).
-
-FORCE_REBUILD ?=
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
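# For illustration only: both knobs are meant to be set on the command line,
# e.g.
#
#   $ make FULL=1                        # revisit every dependency on each build
#   $ make FORCE_REBUILD="cowlib ranch"  # force-rebuild only the listed deps
#
# (the dependency names above are placeholders).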
-
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
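
The SKIP_DEPS guard above turns the deps target into a no-op, which can shorten iterative builds when all dependencies are known to be up to date, for example:

    # build without visiting $(DEPS_DIR) at all
    make SKIP_DEPS=1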
-
-# Deps related targets.
-
-# @todo rename GNUmakefile and makefile into Makefile first, if they exist
-# While the Makefile could also be named GNUmakefile or makefile,
-# in practice only Makefile has been needed so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
-# if given. Do it for all 3 possible Makefile file names.
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
-
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-# Hex only has a package version. No need to look in the Erlang.mk packages.
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
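
Each dep_fetch_* define above is selected through the dep_fetch helper, typically from the first word of the corresponding dep_* variable in the project Makefile. A hypothetical declaration (the repository URL and version numbers are only examples):

    DEPS = cowboy jsx
    # git: <method> <repository> <commit, tag or branch>
    dep_cowboy = git https://github.com/ninenines/cowboy 2.9.0
    # hex: <method> <version> [<package name, when it differs from the app name>]
    dep_jsx = hex 3.1.0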
-
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility with older Erlang.mk configuration formats.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
-
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
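
As the NO_AUTOPATCH filter in dep_target shows, autopatching can be disabled per dependency. A minimal sketch with an illustrative name:

    # leave this dependency's own build system untouched
    NO_AUTOPATCH = my_custom_dep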
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
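
All of the settings above are ?= defaults, so a project Makefile that includes erlang.mk can override them. A sketch with made-up module names:

    # drop -Werror but keep warnings and debug_info
    ERLC_OPTS = +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
    # compile parse transforms and behaviours before the modules that use them
    COMPILE_FIRST = my_parse_transform my_behaviour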
-
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
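
The app_file templates above are rendered from project-level variables. A hypothetical top-level Makefile might set, for instance:

    PROJECT = my_app
    PROJECT_DESCRIPTION = Example application
    PROJECT_VERSION = 1.2.3
    # becomes the {env, ...} entry of the generated .app file
    PROJECT_ENV = [{pool_size, 10}]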
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- Output0 = [
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ],
- Output = case "é" of
- [233] -> unicode:characters_to_binary(Output0);
- _ -> Output0
- end,
- ok = file:write_file("$(1)", Output),
- halt()
-endef
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
- echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
-
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
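
These defaults can be extended from the project Makefile in the same way as the main build options; the dependency names below are illustrative:

    # extra dependencies fetched only for test builds
    TEST_DEPS = meck proper
    # additional test-only compiler flags
    TEST_ERLC_OPTS += +bin_opt_info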
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
- @:
-
-test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
-test_erlc_verbose_2 = set -x;
-test_erlc_verbose = $(test_erlc_verbose_$(V))
-
-define compile_test_erl
- $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(1)
-endef
-
-ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
-$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
- $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want to fail due to
-# warnings when used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
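
Since the recipe simply renders compat_rebar_config, regenerating the file is a single command, which can be handy when a project is also consumed by rebar users:

    # write a rebar-compatible view of DEPS and ERLC_OPTS to ./rebar.config
    make rebar.config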
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
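
These targets only exist when asciideck appears in DEPS or DOC_DEPS (see the guard above). A project could enable them with, for example:

    DOC_DEPS = asciideck

after which `make docs` (which includes asciidoc) builds the user guide and the man pages, provided the doc/src/ sources exist.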
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
- " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
- " list-templates List available templates"
-
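The help text above maps to invocations along these lines (application and module names are placeholders):

    # create a brand new OTP application skeleton in the current directory
    make -f erlang.mk bootstrap
    # add an application under $(APPS_DIR) of an existing project
    make new-app in=my_backend
    # generate a module from one of the tpl_* templates defined below
    make new t=gen_server n=my_worker in=my_backend
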
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
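
SP is only consulted when rendering templates: when it is set, WS becomes that many spaces instead of a tab, and the value is recorded in the generated Makefile (see bs_Makefile above). For example, a hypothetical bootstrap using 2-space indentation:

    make -f erlang.mk bootstrap SP=2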
-
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
-
-list-templates:
- $(verbose) @echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
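
All four settings above are ?= defaults. A project that builds a standalone port program rather than a shared library might override them as follows (the output name is illustrative):

    # produce an executable instead of a shared object
    C_SRC_TYPE = executable
    C_SRC_OUTPUT = $(CURDIR)/priv/my_port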
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code. The "gcc" MSYS2 package also doesn't.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
-	"The CI_OTP variable must list the Erlang versions to test." \
-	"For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
-
-# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifdef CONCUERROR_TESTS
-
-.PHONY: concuerror distclean-concuerror
-
-# Configuration
-
-CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
-CONCUERROR_OPTS ?=
-
-# Core targets.
-
-check:: concuerror
-
-ifndef KEEP_LOGS
-distclean:: distclean-concuerror
-endif
-
-# Plugin-specific targets.
-
-$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
- $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
- $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
-
-$(CONCUERROR_LOGS_DIR):
- $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
-
-define concuerror_html_report
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="utf-8">
-<title>Concuerror HTML report</title>
-</head>
-<body>
-<h1>Concuerror HTML report</h1>
-<p>Generated on $(concuerror_date)</p>
-<ul>
-$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
-</ul>
-</body>
-</html>
-endef
-
-concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
- $(eval concuerror_date := $(shell date))
- $(eval concuerror_targets := $^)
- $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
-
-define concuerror_target
-.PHONY: concuerror-$1-$2
-
-concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
- $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
- --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
- -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
- $$(CONCUERROR_OPTS) -m $1 -t $2
-endef
-
-$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
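As an illustration (the module and test names below are hypothetical), each module:test entry in CONCUERROR_TESTS expands into its own target via the rule above:

    CONCUERROR_TESTS = my_mod:my_test
    # generates the target concuerror-my_mod-my_test, which runs Concuerror
    # with "-m my_mod -t my_test" and writes its output to
    # $(CONCUERROR_LOGS_DIR)/concuerror-my_mod-my_test.txt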
-
-distclean-concuerror:
- $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
-
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ct apps-ct distclean-ct
-
-# Configuration.
-
-CT_OPTS ?=
-
-ifneq ($(wildcard $(TEST_DIR)),)
-ifndef CT_SUITES
-CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
-endif
-endif
-CT_SUITES ?=
-CT_LOGS_DIR ?= $(CURDIR)/logs
-
-# Core targets.
-
-tests:: ct
-
-ifndef KEEP_LOGS
-distclean:: distclean-ct
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Common_test targets:" \
- " ct Run all the common_test suites for this project" \
- "" \
- "All your common_test suites have their associated targets." \
-	"A suite named http_SUITE can be run using the ct-http target."
-
-# Plugin-specific targets.
-
-CT_RUN = ct_run \
- -no_auto_compile \
- -noinput \
- -pa $(CURDIR)/ebin $(TEST_DIR) \
- -dir $(TEST_DIR) \
- -logdir $(CT_LOGS_DIR)
-
-ifeq ($(CT_SUITES),)
-ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
-else
-# We do not run tests if we are in an apps/* directory that has no test directory.
-ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
-ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
-endif
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-define ct_app_target
-apps-ct-$1: test-build
- $$(MAKE) -C $1 ct IS_APP=1
-endef
-
-$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
-
-apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
-endif
-
-ifdef t
-ifeq (,$(findstring :,$t))
-CT_EXTRA = -group $t
-else
-t_words = $(subst :, ,$t)
-CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
-endif
-else
-ifdef c
-CT_EXTRA = -case $c
-else
-CT_EXTRA =
-endif
-endif
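As an illustration (the group and case names below are hypothetical), the t and c variables handled above narrow what a ct-SUITE target runs:

    # make ct-http t=my_group            adds "-group my_group"
    # make ct-http t=my_group:my_case    adds "-group my_group -case my_case"
    # make ct-http c=my_case             adds "-case my_case"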
-
-define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
-endef
-
-$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
-
-distclean-ct:
- $(gen_verbose) rm -rf $(CT_LOGS_DIR)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: plt distclean-plt dialyze
-
-# Configuration.
-
-DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
-export DIALYZER_PLT
-
-PLT_APPS ?=
-DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-DIALYZER_PLT_OPTS ?=
-
-# Core targets.
-
-check:: dialyze
-
-distclean:: distclean-plt
-
-help::
- $(verbose) printf "%s\n" "" \
- "Dialyzer targets:" \
- " plt Build a PLT file for this project" \
- " dialyze Analyze the project using Dialyzer"
-
-# Plugin-specific targets.
-
-define filter_opts.erl
- Opts = init:get_plain_arguments(),
- {Filtered, _} = lists:foldl(fun
- (O, {Os, true}) -> {[O|Os], false};
- (O = "-D", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-I", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-pa", {Os, _}) -> {[O|Os], true};
- (_, Acc) -> Acc
- end, {[], false}, Opts),
- io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
- halt().
-endef
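A sketch of what filter_opts.erl does (the option values below are hypothetical): it keeps only the -D, -I and -pa options, and their arguments, from the ERLC_OPTS passed after -extra, so Dialyzer only sees flags it understands:

    # input  (ERLC_OPTS): +debug_info -DTEST -I include +warn_export_vars
    # output (printed)  : -DTEST -I include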
-
-# DIALYZER_PLT is a variable understood directly by Dialyzer.
-#
-# We append the path to erts at the end of the PLT. This works
-# because the PLT file is in the external term format and the
-# function binary_to_term/1 ignores any trailing data.
-$(DIALYZER_PLT): deps app
- $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
- while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
- $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
- erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
- $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
-
-plt: $(DIALYZER_PLT)
-
-distclean-plt:
- $(gen_verbose) rm -f $(DIALYZER_PLT)
-
-ifneq ($(wildcard $(DIALYZER_PLT)),)
-dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
- $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
- grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
- rm $(DIALYZER_PLT); \
- $(MAKE) plt; \
- fi
-else
-dialyze: $(DIALYZER_PLT)
-endif
- $(verbose) dialyzer --no_native `$(ERL) \
- -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
- -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
- " escript Build an executable escript archive" \
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
- $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: eunit apps-eunit
-
-# Configuration
-
-EUNIT_OPTS ?=
-EUNIT_ERL_OPTS ?=
-
-# Core targets.
-
-tests:: eunit
-
-help::
- $(verbose) printf "%s\n" "" \
- "EUnit targets:" \
- " eunit Run all the EUnit tests for this project"
-
-# Plugin-specific targets.
-
-define eunit.erl
- $(call cover.erl)
- CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
- ok -> ok;
- error -> halt(2)
- end,
- CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
- halt()
-endef
-
-EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
-else
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
-endif
-else
-EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
-EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
-
-EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
- $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
-
-eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
-ifneq ($(wildcard src/ $(TEST_DIR)),)
- $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-apps-eunit: test-build
- $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
- [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
- exit $$eunit_retcode
-endif
-endif
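For illustration (the module and function names below are hypothetical), the t variable handled above selects what EUnit runs:

    # make eunit t=my_mod          runs eunit:test(['my_mod'], ...)
    # make eunit t=my_mod:my_test  runs eunit:test(fun my_mod:my_test/0, ...)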
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper)
-.PHONY: proper
-
-# Targets.
-
-tests:: proper
-
-define proper_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- Module = fun(M) ->
- [true] =:= lists:usort([
- case atom_to_list(F) of
- "prop_" ++ _ ->
- io:format("Testing ~p:~p/0~n", [M, F]),
- proper:quickcheck(M:F(), nocolors);
- _ ->
- true
- end
- || {F, 0} <- M:module_info(exports)])
- end,
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]);
- module -> Module($(2));
- function -> proper:quickcheck($(2), nocolors)
- end,
- CoverExport("$(COVER_DATA_DIR)/proper.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-proper: test-build cover-data-dir
- $(verbose) $(call erlang,$(call proper_check.erl,module,$(t)))
-else
-proper: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)()))
-endif
-else
-proper: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Verbosity.
-
-proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
-proto_verbose = $(proto_verbose_$(V))
-
-# Core targets.
-
-ifneq ($(wildcard src/),)
-ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
-PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
-ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
-
-ifeq ($(PROTO_FILES),)
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
- $(verbose) :
-else
-# Rebuild proto files when the Makefile changes.
-# We exclude $(PROJECT).d to avoid a circular dependency.
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(PROTO_FILES); \
- fi
- $(verbose) touch $@
-
-$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
-endif
-
-ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
-define compile_proto.erl
- [begin
- protobuffs_compile:generate_source(F, [
- {output_include_dir, "./include"},
- {output_src_dir, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-else
-define compile_proto.erl
- [begin
- gpb_compile:file(F, [
- {include_as_lib, true},
- {module_name_suffix, "_pb"},
- {o_hrl, "./include"},
- {o_erl, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-endif
-
-ifneq ($(PROTO_FILES),)
-$(PROJECT).d:: $(PROTO_FILES)
- $(verbose) mkdir -p ebin/ include/
- $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
-endif
-endif
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
-
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
-	" shell Run an Erlang shell with SHELL_OPTS or a reasonable default"
-
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
-
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
-	"reST sources and the 'conf.py' file are expected in the directory pointed to by" \
-	"SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists the formats to build (only" \
-	"the 'html' format is generated by default); the target directory can be set by" \
- 'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
- "Additional Sphinx options can be set in SPHINX_OPTS."
-
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
-
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
-.PHONY: triq
-
-# Targets.
-
-tests:: triq
-
-define triq_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
- module -> triq:check($(2));
- function -> triq:check($(2))
- end,
- CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-triq: test-build cover-data-dir
- $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
-else
-triq: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
-endif
-else
-triq: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
-	' xref Run Xrefr using $$XREF_CONFIG as the config file if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-COVER_REPORT_DIR ?= cover
-COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
-
-ifdef COVER
-COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
-COVER_DEPS ?=
-endif
-
-# Code coverage for Common Test.
-
-ifdef COVER
-ifdef CT_RUN
-ifneq ($(wildcard $(TEST_DIR)),)
-test-build:: $(TEST_DIR)/ct.cover.spec
-
-$(TEST_DIR)/ct.cover.spec: cover-data-dir
- $(gen_verbose) printf "%s\n" \
- "{incl_app, '$(PROJECT)', details}." \
- "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
- $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
- $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
-
-CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
-endif
-endif
-endif
-
-# Code coverage for other tools.
-
-ifdef COVER
-define cover.erl
- CoverSetup = fun() ->
- Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
- $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
- $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
- end
- end || Dir <- Dirs]
- end,
- CoverExport = fun(Filename) -> cover:export(Filename) end,
-endef
-else
-define cover.erl
- CoverSetup = fun() -> ok end,
- CoverExport = fun(_) -> ok end,
-endef
-endif
-
-# Core targets
-
-ifdef COVER
-ifneq ($(COVER_REPORT_DIR),)
-tests::
- $(verbose) $(MAKE) --no-print-directory cover-report
-endif
-
-cover-data-dir: | $(COVER_DATA_DIR)
-
-$(COVER_DATA_DIR):
- $(verbose) mkdir -p $(COVER_DATA_DIR)
-else
-cover-data-dir:
-endif
-
-clean:: coverdata-clean
-
-ifneq ($(COVER_REPORT_DIR),)
-distclean:: cover-report-clean
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Cover targets:" \
-	" cover-report Generate an HTML coverage report from previously collected" \
- " cover data." \
- " all.coverdata Merge all coverdata files into all.coverdata." \
- "" \
- "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
-	"target tests additionally generates an HTML coverage report from the combined" \
- "coverdata files from each of these testing tools. HTML reports can be disabled" \
- "by setting COVER_REPORT_DIR to empty."
-
-# Plugin specific targets
-
-COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
-
-.PHONY: coverdata-clean
-coverdata-clean:
- $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
-
-# Merge all coverdata files into one.
-define cover_export.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
-endef
-
-all.coverdata: $(COVERDATA) cover-data-dir
- $(gen_verbose) $(call erlang,$(cover_export.erl))
-
-# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
-# empty if you want the coverdata files but not the HTML report.
-ifneq ($(COVER_REPORT_DIR),)
-
-.PHONY: cover-report-clean cover-report
-
-cover-report-clean:
- $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
-ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
- $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
-endif
-
-ifeq ($(COVERDATA),)
-cover-report:
-else
-
-# Modules which include eunit.hrl always contain one line without coverage
-# because eunit defines test/0 which is never called. We compensate for this.
-EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
- grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
- | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
-
-define cover_report.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- Ms = cover:imported_modules(),
- [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
- ++ ".COVER.html", [html]) || M <- Ms],
- Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
- EunitHrlMods = [$(EUNIT_HRL_MODS)],
- Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
- true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
- TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
- TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
- Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
- TotalPerc = Perc(TotalY, TotalN),
- {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
- io:format(F, "<!DOCTYPE html><html>~n"
- "<head><meta charset=\"UTF-8\">~n"
- "<title>Coverage report</title></head>~n"
- "<body>~n", []),
- io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
- io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
- [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
- "<td>~p%</td></tr>~n",
- [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
- How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
- Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
- io:format(F, "</table>~n"
- "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
- "</body></html>", [How, Date]),
- halt().
-endef
-
-cover-report:
- $(verbose) mkdir -p $(COVER_REPORT_DIR)
- $(gen_verbose) $(call erlang,$(cover_report.erl))
-
-endif
-endif # ifneq ($(COVER_REPORT_DIR),)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
-
-endif
-endif
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
-
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
-
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are also included
-# regardless of the type of dependencies requested.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
-
-# Allow fetch-deps and $(DEP_TYPES) to be used to fetch multiple types of
-# dependencies with a single target.
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
-
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
diff --git a/deps/rabbitmq_random_exchange/rabbitmq-components.mk b/deps/rabbitmq_random_exchange/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/rabbitmq_random_exchange/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Set the default goal to `all` because this file defines some targets
-# before erlang.mk is included, which would otherwise cause the wrong
-# target to become the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
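A hedged example of the fallback above (the describe output is invented): the sed pipeline normalizes git-describe output into a version string:

    # "v3.9.0-12-gdeadbee"  becomes  PROJECT_VERSION = 3.9.0+12.gdeadbee
    # "rabbitmq_v0_0_0"     becomes  PROJECT_VERSION = 0.0.0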
-
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to check out branches which match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch or fall back to `stable` or `master`, whichever was the
-# base of the topic branch (see the example after the list below).
-
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
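Example referenced above (the branch names are hypothetical): each dep_* entry lists the refs that dep_fetch_git_rmq, defined further below, tries in order.

    # With current_rmq_ref = my-topic-branch and base_rmq_ref = master,
    # dep_rabbit checks out the first ref that exists in rabbitmq-server:
    # my-topic-branch, then master, then the hard-coded master fallback.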
-
-# Third-party dependencies version pinning.
-#
-# We do this in this file, which is copied into all projects, to ensure
-# they all use the same versions. This avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# By default, Erlang.mk does not rebuild dependencies once they have
-# been compiled, except for those listed in the `$(FORCE_REBUILD)`
-# variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this makes it
-# easier to work on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project
-# repository URL, if it's a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name are replaced by the
-# target's properties:
-# eg. rabbitmq-common is replaced by rabbitmq-codegen
-# eg. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fall back to the
-# RabbitMQ upstream on GitHub.
-
-# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following pattern:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
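A sketch of the substitution this macro is meant to produce (the URLs are hypothetical):

    # $(call subst_repo_name,rabbitmq-common,rabbitmq-codegen,https://github.com/rabbitmq/rabbitmq-common.git)
    # => https://github.com/rabbitmq/rabbitmq-codegen.git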
-
-# Macro to replace both the project's name (eg. "rabbit_common") and
-# repository name (eg. "rabbitmq-common") by the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespace in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level's one.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is a coincidence that we found a
-# `rabbitmq-components.mk` in upper directories.
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
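The two checks above guess whether this Makefile is being driven from inside the umbrella. A sketch of the layouts they match (directory names below are illustrative):

rabbitmq-server/                      <- umbrella root, holds the shared rabbitmq-components.mk
├── rabbitmq-components.mk
└── deps/                             <- adopted as $(DEPS_DIR) when either check succeeds
    └── some_component/               <- case 1: ".." is a directory named "deps"
        └── deps/some_dep/            <- case 2: "../../.." is that same top-level "deps"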
diff --git a/deps/rabbitmq_random_exchange/src/rabbit_exchange_type_random.erl b/deps/rabbitmq_random_exchange/src/rabbit_exchange_type_random.erl
index 0c30664cd4..724ce6c239 100644
--- a/deps/rabbitmq_random_exchange/src/rabbit_exchange_type_random.erl
+++ b/deps/rabbitmq_random_exchange/src/rabbit_exchange_type_random.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_exchange_type_random).
diff --git a/deps/rabbitmq_recent_history_exchange/BUILD.bazel b/deps/rabbitmq_recent_history_exchange/BUILD.bazel
new file mode 100644
index 0000000000..2ceb015e2a
--- /dev/null
+++ b/deps/rabbitmq_recent_history_exchange/BUILD.bazel
@@ -0,0 +1,53 @@
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze")
+load(
+ "//:rabbitmq.bzl",
+ "RABBITMQ_DIALYZER_OPTS",
+ "assert_suites",
+ "broker_for_integration_suites",
+ "rabbitmq_integration_suite",
+ "rabbitmq_lib",
+)
+
+APP_NAME = "rabbitmq_recent_history_exchange"
+
+APP_DESCRIPTION = "RabbitMQ Recent History Exchange"
+
+DEPS = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+]
+
+RUNTIME_DEPS = [
+ "//deps/rabbit:bazel_erlang_lib",
+]
+
+rabbitmq_lib(
+ app_description = APP_DESCRIPTION,
+ app_name = APP_NAME,
+ runtime_deps = RUNTIME_DEPS,
+ deps = DEPS,
+)
+
+xref(tags = ["xref"])
+
+dialyze(
+ dialyzer_opts = RABBITMQ_DIALYZER_OPTS,
+ plt = "//:base_plt",
+ tags = ["dialyze"],
+)
+
+broker_for_integration_suites()
+
+PACKAGE = "deps/rabbitmq_recent_history_exchange"
+
+suites = [
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "system_SUITE",
+ ),
+]
+
+assert_suites(
+ suites,
+ glob(["test/**/*_SUITE.erl"]),
+)
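With this BUILD file in place the plugin can be exercised through Bazel. The full label set depends on the macros in //:rabbitmq.bzl, but the integration suite declared above is addressed by its name; a sketch, assuming standard Bazel label conventions:

# run from the repository root
bazel test //deps/rabbitmq_recent_history_exchange:system_SUITE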
diff --git a/deps/rabbitmq_recent_history_exchange/CONTRIBUTING.md b/deps/rabbitmq_recent_history_exchange/CONTRIBUTING.md
index 23a92fef9c..9722e973fb 100644
--- a/deps/rabbitmq_recent_history_exchange/CONTRIBUTING.md
+++ b/deps/rabbitmq_recent_history_exchange/CONTRIBUTING.md
@@ -13,7 +13,7 @@ The process is fairly standard:
* Create a branch with a descriptive name in the relevant repositories
* Make your changes, run tests, commit with a [descriptive message](https://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
 * Submit pull requests with an explanation of what has been changed and **why**
- * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Submit a filled out and signed [Contributor Agreement](https://cla.pivotal.io/) if needed (see below)
* Be patient. We will get to your pull request eventually
If what you are going to work on is a substantial change, please first ask the core team
@@ -28,7 +28,7 @@ See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
## Contributor Agreement
If you want to contribute a non-trivial change, please submit a signed copy of our
-[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+[Contributor Agreement](https://cla.pivotal.io/) around the time
you submit your pull request. This will make it much easier (in some cases, possible)
for the RabbitMQ team at Pivotal to merge your contribution.
diff --git a/deps/rabbitmq_recent_history_exchange/Makefile b/deps/rabbitmq_recent_history_exchange/Makefile
index 98cafb7442..e0eeabcecf 100644
--- a/deps/rabbitmq_recent_history_exchange/Makefile
+++ b/deps/rabbitmq_recent_history_exchange/Makefile
@@ -17,5 +17,5 @@ DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
ERLANG_MK_COMMIT = rabbitmq-tmp
-include rabbitmq-components.mk
-include erlang.mk
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
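The include paths change because the plugin now sits in the umbrella's deps/ directory and reuses the single rabbitmq-components.mk and erlang.mk at the repository root, which is also why the vendored per-plugin erlang.mk is deleted just below. A quick sanity check, assuming that layout:

# hypothetical check, run from deps/rabbitmq_recent_history_exchange/
ls ../../rabbitmq-components.mk ../../erlang.mk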
diff --git a/deps/rabbitmq_recent_history_exchange/erlang.mk b/deps/rabbitmq_recent_history_exchange/erlang.mk
deleted file mode 100644
index fce4be0b0a..0000000000
--- a/deps/rabbitmq_recent_history_exchange/erlang.mk
+++ /dev/null
@@ -1,7808 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
-ERLANG_MK_WITHOUT =
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
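The V variable above controls how much of each recipe is shown; the behaviour follows directly from the definitions:

# illustrative invocations
make          # V=0: recipes hidden behind "@", gen_verbose prints terse " GEN <target>" lines
make V=1      # verbose_1/gen_verbose_1 are undefined, so the "@" wrappers drop away and recipes are echoed
make V=2      # recipes are prefixed with "set -x;", tracing each shell command
make V=3      # the shell itself runs with -x, tracing everything it executes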
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
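A tiny standalone sketch of the two helpers above (inputs are hypothetical):

# save as demo.mk and run `make -f demo.mk` (recipe lines are tab-indented)
empty :=
space := $(empty) $(empty)
comma := ,
comma_list = $(subst $(space),$(comma),$(strip $(1)))
escape_dquotes = $(subst ",\",$1)

demo:
	@echo '$(call comma_list,a b   c)'        # prints a,b,c
	@echo '$(call escape_dquotes,say "hi")'   # prints say \"hi\"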
-
-# Adding erlang.mk to make Erlang scripts that call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
-
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
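The erlang-mk target above re-bootstraps the bundled erlang.mk from ERLANG_MK_REPO. Illustrative invocations (the tag is only an example; the plugin Makefile earlier in this diff pins ERLANG_MK_REPO to rabbitmq/erlang.mk and ERLANG_MK_COMMIT to rabbitmq-tmp):

make erlang-mk                               # shallow-clone the default repo and rebuild erlang.mk in place
make erlang-mk ERLANG_MK_COMMIT=2019.07.01   # full clone, then check out the given tag or commit first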
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_HIPE)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
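The kerl integration above lets a project pin the Erlang/OTP it builds with. A sketch of how it is driven (version strings are examples):

make ERLANG_OTP=24.2       # builds that OTP via kerl into $(KERL_INSTALL_DIR) on first use, then prepends it to PATH
make LATEST_ERLANG_OTP=1   # reuses the newest non-rc, non-master build already present under $(KERL_INSTALL_DIR)
make ERLANG_HIPE=22.3      # same idea for a HiPE-enabled build, installed with a "-native" suffix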
-
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simplify writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating Github-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = erlang library for JSON Web Token
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple non intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is a pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = redis erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += esh_mk
-pkg_esh_mk_name = esh_mk
-pkg_esh_mk_description = esh template engine plugin for erlang.mk
-pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
-pkg_esh_mk_fetch = git
-pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
-pkg_esh_mk_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = TOML language erlang parser
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = http://fixprotocol.org/ implementation.
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - an Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = The guards and for Erlang parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
-pkg_gun_homepage = http://ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing (aka scaffolding) tool for Erlang, like rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang irc client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library for efficiently decoding and encoding JSON, written in C.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-Erlang, open-source implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = erlang driver for rethinkdb
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = library to handle mimetypes
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang Oauth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transformer for using new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between record and proplist
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = automatic Erlang node discovery via redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = pipelined erlang redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang Rest Client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy as nif for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an id generator for message service.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elastic Search
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX style broken HTML parser in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = transit format for erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU based collation Erlang module
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = a simple erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler svn repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based yaml loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = ZAB protocol implementation in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
-
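A brief usage sketch of the search target defined above: q is lowercased and matched as a substring against each package's name and description, and without q every PACKAGES entry is printed through pkg_print. The query below is only an example.

# List every package whose name or description mentions "pool"
make search q=pool

# Print the whole package index
make search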
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
-
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
-
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# They both use the core_dep_plugin macro.
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
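A hedged sketch of how the DEP_EARLY_PLUGINS expansion above resolves (the dependency name is hypothetical, not one defined in this file): an entry without a slash pulls in that dependency's early-plugins.mk, while an entry containing a path is included as-is relative to DEPS_DIR; when the entry names the current PROJECT, core_dep_plugin includes the file from the project tree instead.

# DEP_EARLY_PLUGINS = my_dep           ->  -include $(DEPS_DIR)/my_dep/early-plugins.mk
# DEP_EARLY_PLUGINS = my_dep/mk/ci.mk  ->  -include $(DEPS_DIR)/my_dep/mk/ci.mk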
-
-# Query functions.
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
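A worked sketch of the query functions above. The dep_cowboy line is a hypothetical user-supplied dependency; the ranch values come straight from the pkg_ranch_* entries earlier in this index.

# dep_cowboy = git https://github.com/ninenines/cowboy 2.9.0
#   $(call query_fetch_method,cowboy)  ->  git
#   $(call query_repo,cowboy)          ->  https://github.com/ninenines/cowboy
#   $(call query_version,cowboy)       ->  2.9.0
#
# No dep_ranch is defined, so the pkg_ranch_* defaults apply:
#   $(call query_repo,ranch)           ->  https://github.com/ninenines/ranch
#   $(call query_version,ranch)        ->  1.2.1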
-
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
-
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly we don't want to include it here
-# otherwise it'll be treated both as an app and as a top-level project.
-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
-
-export NO_AUTOPATCH
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
-
-# Optimization: don't recompile deps unless truly necessary.
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create ebin directory for all apps to make sure Erlang recognizes them
-# as proper OTP applications when using -include_lib. This is a temporary
-# fix; a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
-
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies after they were compiled
-# once. If a developer is working on the top-level project and some
-# dependencies at the same time, he may want to change this behavior.
-# There are two solutions:
-# 1. Set `FULL=1` so that all dependencies are visited and
-# recursively recompiled if necessary.
-# 2. Set `FORCE_REBUILD=` to the specific list of dependencies that
-# should be recompiled (instead of the whole set).
-
-FORCE_REBUILD ?=
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
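A usage sketch for the two rebuild switches described in the comment above; the dependency names are placeholders.

# Visit every dependency on this run and recompile it if necessary
make FULL=1

# Recompile only the listed dependencies, even if their ebin/dep_built marker exists
make FORCE_REBUILD="cowboy ranch"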
-
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
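And a usage sketch for the SKIP_DEPS guard above: when it is set, deps:: has no prerequisites and no recipe, so this target neither fetches nor rebuilds anything.

# Skip dependency handling for this invocation (deps:: becomes a no-op)
make SKIP_DEPS=1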
-
-# Deps related targets.
-
-# @todo rename GNUmakefile and makefile into Makefile first, if they exist
-# While the Makefile could also be named GNUmakefile or makefile,
-# in practice only Makefile has been needed so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
-# if given. Do it for all 3 possible Makefile file names.
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
-
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-# Hex only has a package version. No need to look in the Erlang.mk packages.
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
-
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility purposes with older Erlang.mk configuration.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
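-
-# The fetch methods above consume the dep_* specifications declared in the
-# project Makefile: the first word selects the fetch method and the
-# remaining words are passed to it. A typical (illustrative) setup:
-#
-#   DEPS = cowboy lager
-#   dep_cowboy = git https://github.com/ninenines/cowboy 2.9.0
-#   dep_lager = hex 3.9.2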
-
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
-
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- Output0 = [
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ],
- Output = case "é" of
- [233] -> unicode:characters_to_binary(Output0);
- _ -> Output0
- end,
- ok = file:write_file("$(1)", Output),
- halt()
-endef
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
- echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
-
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
- @:
-
-test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
-test_erlc_verbose_2 = set -x;
-test_erlc_verbose = $(test_erlc_verbose_$(V))
-
-define compile_test_erl
- $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(1)
-endef
-
-ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
-$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
- $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want the build to fail
-# on warnings when this project is used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
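-
-# With a single git dependency declared as, say,
-# `dep_cowboy = git https://github.com/ninenines/cowboy 2.9.0` and the
-# default ERLC_OPTS, `make rebar.config` renders something close to:
-#
-#   {deps, [
-#   {cowboy,".*",{git,"https://github.com/ninenines/cowboy","2.9.0"}}
-#   ]}.
-#   {erl_opts, [debug_info,warn_export_vars,warn_shadow_vars,warn_obsolete_guard]}.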
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
- " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
- " list-templates List available templates"
-
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
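-
-# For example, to bootstrap a new project whose generated files are
-# indented with 2 spaces instead of tabs:
-#
-#   make -f erlang.mk bootstrap SP=2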
-
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
-
-list-templates:
- $(verbose) @echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
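-
-# C_SRC_TYPE selects what gets built from c_src/: the default `shared`
-# links a NIF-style shared library, while any other value (typically
-# `executable`) links a standalone port program instead, e.g.:
-#
-#   C_SRC_TYPE = executable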
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code. The "gcc" MSYS2 package also doesn't.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
- "The CI_OTP variable must be defined with the Erlang versions" \
- "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
-
-# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifdef CONCUERROR_TESTS
-
-.PHONY: concuerror distclean-concuerror
-
-# Configuration
-
-CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
-CONCUERROR_OPTS ?=
-
-# Core targets.
-
-check:: concuerror
-
-ifndef KEEP_LOGS
-distclean:: distclean-concuerror
-endif
-
-# Plugin-specific targets.
-
-$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
- $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
- $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
-
-$(CONCUERROR_LOGS_DIR):
- $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
-
-define concuerror_html_report
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="utf-8">
-<title>Concuerror HTML report</title>
-</head>
-<body>
-<h1>Concuerror HTML report</h1>
-<p>Generated on $(concuerror_date)</p>
-<ul>
-$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
-</ul>
-</body>
-</html>
-endef
-
-concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
- $(eval concuerror_date := $(shell date))
- $(eval concuerror_targets := $^)
- $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
-
-define concuerror_target
-.PHONY: concuerror-$1-$2
-
-concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
- $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
- --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
- -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
- $$(CONCUERROR_OPTS) -m $1 -t $2
-endef
-
-$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
-
-distclean-concuerror:
- $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
-
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ct apps-ct distclean-ct
-
-# Configuration.
-
-CT_OPTS ?=
-
-ifneq ($(wildcard $(TEST_DIR)),)
-ifndef CT_SUITES
-CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
-endif
-endif
-CT_SUITES ?=
-CT_LOGS_DIR ?= $(CURDIR)/logs
-
-# Core targets.
-
-tests:: ct
-
-ifndef KEEP_LOGS
-distclean:: distclean-ct
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Common_test targets:" \
- " ct Run all the common_test suites for this project" \
- "" \
- "All your common_test suites have their associated targets." \
- "A suite named http_SUITE can be ran using the ct-http target."
-
-# Plugin-specific targets.
-
-CT_RUN = ct_run \
- -no_auto_compile \
- -noinput \
- -pa $(CURDIR)/ebin $(TEST_DIR) \
- -dir $(TEST_DIR) \
- -logdir $(CT_LOGS_DIR)
-
-ifeq ($(CT_SUITES),)
-ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
-else
-# We do not run tests when inside an apps/* application that has no test directory.
-ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
-ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
-endif
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-define ct_app_target
-apps-ct-$1: test-build
- $$(MAKE) -C $1 ct IS_APP=1
-endef
-
-$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
-
-apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
-endif
-
-ifdef t
-ifeq (,$(findstring :,$t))
-CT_EXTRA = -group $t
-else
-t_words = $(subst :, ,$t)
-CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
-endif
-else
-ifdef c
-CT_EXTRA = -case $c
-else
-CT_EXTRA =
-endif
-endif
-
-define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
-endef
-
-$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
-
-distclean-ct:
- $(gen_verbose) rm -rf $(CT_LOGS_DIR)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: plt distclean-plt dialyze
-
-# Configuration.
-
-DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
-export DIALYZER_PLT
-
-PLT_APPS ?=
-DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-DIALYZER_PLT_OPTS ?=
-
-# Core targets.
-
-check:: dialyze
-
-distclean:: distclean-plt
-
-help::
- $(verbose) printf "%s\n" "" \
- "Dialyzer targets:" \
- " plt Build a PLT file for this project" \
- " dialyze Analyze the project using Dialyzer"
-
-# Plugin-specific targets.
-
-define filter_opts.erl
- Opts = init:get_plain_arguments(),
- {Filtered, _} = lists:foldl(fun
- (O, {Os, true}) -> {[O|Os], false};
- (O = "-D", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-I", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-pa", {Os, _}) -> {[O|Os], true};
- (_, Acc) -> Acc
- end, {[], false}, Opts),
- io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
- halt().
-endef
-
-# DIALYZER_PLT is a variable understood directly by Dialyzer.
-#
-# We append the path to erts at the end of the PLT. This works
-# because the PLT file is in the external term format and the
-# function binary_to_term/1 ignores any trailing data.
-$(DIALYZER_PLT): deps app
- $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
- while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
- $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
- erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
- $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
-
-plt: $(DIALYZER_PLT)
-
-distclean-plt:
- $(gen_verbose) rm -f $(DIALYZER_PLT)
-
-ifneq ($(wildcard $(DIALYZER_PLT)),)
-dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
- $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
- grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
- rm $(DIALYZER_PLT); \
- $(MAKE) plt; \
- fi
-else
-dialyze: $(DIALYZER_PLT)
-endif
- $(verbose) dialyzer --no_native `$(ERL) \
- -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
- -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
- " escript Build an executable escript archive" \
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
- $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: eunit apps-eunit
-
-# Configuration
-
-EUNIT_OPTS ?=
-EUNIT_ERL_OPTS ?=
-
-# Core targets.
-
-tests:: eunit
-
-help::
- $(verbose) printf "%s\n" "" \
- "EUnit targets:" \
- " eunit Run all the EUnit tests for this project"
-
-# Plugin-specific targets.
-
-define eunit.erl
- $(call cover.erl)
- CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
- ok -> ok;
- error -> halt(2)
- end,
- CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
- halt()
-endef
-
-EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
-else
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
-endif
-else
-EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
-EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
-
-EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
- $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
-
-eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
-ifneq ($(wildcard src/ $(TEST_DIR)),)
- $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-apps-eunit: test-build
- $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
- [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
- exit $$eunit_retcode
-endif
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper)
-.PHONY: proper
-
-# Targets.
-
-tests:: proper
-
-define proper_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- Module = fun(M) ->
- [true] =:= lists:usort([
- case atom_to_list(F) of
- "prop_" ++ _ ->
- io:format("Testing ~p:~p/0~n", [M, F]),
- proper:quickcheck(M:F(), nocolors);
- _ ->
- true
- end
- || {F, 0} <- M:module_info(exports)])
- end,
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]);
- module -> Module($(2));
- function -> proper:quickcheck($(2), nocolors)
- end,
- CoverExport("$(COVER_DATA_DIR)/proper.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-proper: test-build cover-data-dir
- $(verbose) $(call erlang,$(call proper_check.erl,module,$(t)))
-else
-proper: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)()))
-endif
-else
-proper: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Verbosity.
-
-proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
-proto_verbose = $(proto_verbose_$(V))
-
-# Core targets.
-
-ifneq ($(wildcard src/),)
-ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
-PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
-ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
-
-ifeq ($(PROTO_FILES),)
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
- $(verbose) :
-else
-# Rebuild proto files when the Makefile changes.
-# We exclude $(PROJECT).d to avoid a circular dependency.
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(PROTO_FILES); \
- fi
- $(verbose) touch $@
-
-$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
-endif
-
-ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
-define compile_proto.erl
- [begin
- protobuffs_compile:generate_source(F, [
- {output_include_dir, "./include"},
- {output_src_dir, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-else
-define compile_proto.erl
- [begin
- gpb_compile:file(F, [
- {include_as_lib, true},
- {module_name_suffix, "_pb"},
- {o_hrl, "./include"},
- {o_erl, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-endif
-
-ifneq ($(PROTO_FILES),)
-$(PROJECT).d:: $(PROTO_FILES)
- $(verbose) mkdir -p ebin/ include/
- $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
-endif
-endif
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
-
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
- " shell Run an erlang shell with SHELL_OPTS or reasonable default"
-
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
-
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
- "ReST sources and 'conf.py' file are expected in directory pointed by" \
- "SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists formats to build (only" \
- "'html' format is generated by default); target directory can be specified by" \
- 'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
- "Additional Sphinx options can be set in SPHINX_OPTS."
-
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
-
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
-.PHONY: triq
-
-# Targets.
-
-tests:: triq
-
-define triq_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
- module -> triq:check($(2));
- function -> triq:check($(2))
- end,
- CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-triq: test-build cover-data-dir
- $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
-else
-triq: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
-endif
-else
-triq: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
- ' xref Run Xrefr using $$XREF_CONFIG as the config file if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-COVER_REPORT_DIR ?= cover
-COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
-
-ifdef COVER
-COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
-COVER_DEPS ?=
-endif
-
-# Code coverage for Common Test.
-
-ifdef COVER
-ifdef CT_RUN
-ifneq ($(wildcard $(TEST_DIR)),)
-test-build:: $(TEST_DIR)/ct.cover.spec
-
-$(TEST_DIR)/ct.cover.spec: cover-data-dir
- $(gen_verbose) printf "%s\n" \
- "{incl_app, '$(PROJECT)', details}." \
- "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
- $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
- $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
-
-CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
-endif
-endif
-endif
-
-# Code coverage for other tools.
-
-ifdef COVER
-define cover.erl
- CoverSetup = fun() ->
- Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
- $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
- $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
- end
- end || Dir <- Dirs]
- end,
- CoverExport = fun(Filename) -> cover:export(Filename) end,
-endef
-else
-define cover.erl
- CoverSetup = fun() -> ok end,
- CoverExport = fun(_) -> ok end,
-endef
-endif
-
-# Core targets
-
-ifdef COVER
-ifneq ($(COVER_REPORT_DIR),)
-tests::
- $(verbose) $(MAKE) --no-print-directory cover-report
-endif
-
-cover-data-dir: | $(COVER_DATA_DIR)
-
-$(COVER_DATA_DIR):
- $(verbose) mkdir -p $(COVER_DATA_DIR)
-else
-cover-data-dir:
-endif
-
-clean:: coverdata-clean
-
-ifneq ($(COVER_REPORT_DIR),)
-distclean:: cover-report-clean
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Cover targets:" \
- " cover-report Generate a HTML coverage report from previously collected" \
- " cover data." \
- " all.coverdata Merge all coverdata files into all.coverdata." \
- "" \
- "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
- "target tests additionally generates a HTML coverage report from the combined" \
- "coverdata files from each of these testing tools. HTML reports can be disabled" \
- "by setting COVER_REPORT_DIR to empty."
-
-# Plugin specific targets
-
-COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
-
-.PHONY: coverdata-clean
-coverdata-clean:
- $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
-
-# Merge all coverdata files into one.
-define cover_export.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
-endef
-
-all.coverdata: $(COVERDATA) cover-data-dir
- $(gen_verbose) $(call erlang,$(cover_export.erl))
-
-# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
-# empty if you want the coverdata files but not the HTML report.
-ifneq ($(COVER_REPORT_DIR),)
-
-.PHONY: cover-report-clean cover-report
-
-cover-report-clean:
- $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
-ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
- $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
-endif
-
-ifeq ($(COVERDATA),)
-cover-report:
-else
-
-# Modules which include eunit.hrl always contain one line without coverage
-# because eunit defines test/0 which is never called. We compensate for this.
-EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
- grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
- | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
-
-define cover_report.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- Ms = cover:imported_modules(),
- [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
- ++ ".COVER.html", [html]) || M <- Ms],
- Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
- EunitHrlMods = [$(EUNIT_HRL_MODS)],
- Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
- true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
- TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
- TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
- Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
- TotalPerc = Perc(TotalY, TotalN),
- {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
- io:format(F, "<!DOCTYPE html><html>~n"
- "<head><meta charset=\"UTF-8\">~n"
- "<title>Coverage report</title></head>~n"
- "<body>~n", []),
- io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
- io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
- [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
- "<td>~p%</td></tr>~n",
- [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
- How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
- Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
- io:format(F, "</table>~n"
- "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
- "</body></html>", [How, Date]),
- halt().
-endef
-
-cover-report:
- $(verbose) mkdir -p $(COVER_REPORT_DIR)
- $(gen_verbose) $(call erlang,$(cover_report.erl))
-
-endif
-endif # ifneq ($(COVER_REPORT_DIR),)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
-
-endif
-endif
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
-
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
-
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are also included no
-# matter the type of requested dependencies.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
-
-# Allow the use of fetch-deps and $(DEP_TYPES) to fetch multiple types of
-# dependencies with a single target.
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
-
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
diff --git a/deps/rabbitmq_recent_history_exchange/include/rabbit_recent_history.hrl b/deps/rabbitmq_recent_history_exchange/include/rabbit_recent_history.hrl
index f143955733..e9cf9beeb0 100644
--- a/deps/rabbitmq_recent_history_exchange/include/rabbit_recent_history.hrl
+++ b/deps/rabbitmq_recent_history_exchange/include/rabbit_recent_history.hrl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-define(KEEP_NB, 20).
diff --git a/deps/rabbitmq_recent_history_exchange/rabbitmq-components.mk b/deps/rabbitmq_recent_history_exchange/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/rabbitmq_recent_history_exchange/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Define the default goal as `all` because this file defines some targets
-# before the inclusion of erlang.mk, which would otherwise make the wrong
-# target the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
-
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to checkout branches which match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch or fall back to `stable` or `master`, whichever was the
-# base of the topic branch.
-
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
-
-# Third-party dependencies version pinning.
-#
-# We do that in this file, which is copied in all projects, to ensure
-# all projects use the same versions. It avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# Erlang.mk does not rebuild dependencies by default, once they were
-# compiled once, except for those listed in the `$(FORCE_REBUILD)`
-# variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this makes it
-# easier to work on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project
-# repository URL, if it's a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name are replaced by the
-# target's properties:
-# eg. rabbitmq-common is replaced by rabbitmq-codegen
-# eg. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fall back to the
-# RabbitMQ upstream on GitHub.
-
-# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following pattern:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
-
-# Macro to replace both the project's name (eg. "rabbit_common") and
-# repository name (eg. "rabbitmq-common") by the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespaces in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level's one.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is a coincidence that we found a
-# `rabbitmq-components.mk` in upper directories.
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
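
The `dep_fetch_git_rmq` macro removed above clones a dependency from the repository that sits next to the current clone's remote and only falls back to the upstream RabbitMQ repository when that fails. A minimal shell sketch of that behaviour follows; the dependency name, URLs and refs are illustrative placeholders, not values taken from the build system.

```bash
#!/usr/bin/env bash
# Sketch of the clone-with-fallback logic in the removed dep_fetch_git_rmq macro.
# All names, URLs and refs below are illustrative placeholders.

dep=rabbitmq_sharding
deps_dir=deps
# URL derived from the current clone's own remote (after repo-name substitution):
fetch_url1="git@github.com:me/rabbitmq-sharding.git"
# Upstream fallback:
fetch_url2="https://github.com/rabbitmq/rabbitmq-sharding.git"
push_upstream="git@github.com:rabbitmq/rabbitmq-sharding.git"
refs="v3.9.x master"   # candidate refs; the first one that checks out wins

# Prefer the repository next to the current remote; fall back to upstream.
if [ "$fetch_url1" != "$fetch_url2" ] && git clone -q -n -- "$fetch_url1" "$deps_dir/$dep"; then
    fetch_url="$fetch_url1"; push_url="$fetch_url1"
elif git clone -q -n -- "$fetch_url2" "$deps_dir/$dep"; then
    fetch_url="$fetch_url2"; push_url="$push_upstream"
else
    echo "error: unable to clone $dep" >&2; exit 1
fi

cd "$deps_dir/$dep" || exit 1
for ref in $refs; do
    git checkout -q "$ref" >/dev/null 2>&1 && { checked_out=1; break; }
done
if [ -z "${checked_out:-}" ]; then
    echo "error: no valid pathspec among: $refs" >&2; exit 1
fi

# Keep a distinct push URL, as the macro does.
[ "$fetch_url" = "$push_url" ] || git remote set-url --push origin "$push_url"
```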
diff --git a/deps/rabbitmq_recent_history_exchange/src/rabbit_exchange_type_recent_history.erl b/deps/rabbitmq_recent_history_exchange/src/rabbit_exchange_type_recent_history.erl
index 7758f72184..e4068ebb77 100644
--- a/deps/rabbitmq_recent_history_exchange/src/rabbit_exchange_type_recent_history.erl
+++ b/deps/rabbitmq_recent_history_exchange/src/rabbit_exchange_type_recent_history.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
-module(rabbit_exchange_type_recent_history).
@@ -127,7 +127,7 @@ setup_schema() ->
{record_name, cached},
{type, set}]),
mnesia:add_table_copy(?RH_TABLE, node(), ram_copies),
- mnesia:wait_for_tables([?RH_TABLE], 30000),
+ rabbit_table:wait([?RH_TABLE]),
ok.
disable_plugin() ->
diff --git a/deps/rabbitmq_recent_history_exchange/test/system_SUITE.erl b/deps/rabbitmq_recent_history_exchange/test/system_SUITE.erl
index f14b4e549f..4649a672ff 100644
--- a/deps/rabbitmq_recent_history_exchange/test/system_SUITE.erl
+++ b/deps/rabbitmq_recent_history_exchange/test/system_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
-module(system_SUITE).
diff --git a/deps/rabbitmq_sharding/BUILD.bazel b/deps/rabbitmq_sharding/BUILD.bazel
new file mode 100644
index 0000000000..0b2165c306
--- /dev/null
+++ b/deps/rabbitmq_sharding/BUILD.bazel
@@ -0,0 +1,56 @@
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze")
+load(
+ "//:rabbitmq.bzl",
+ "RABBITMQ_DIALYZER_OPTS",
+ "assert_suites",
+ "broker_for_integration_suites",
+ "rabbitmq_integration_suite",
+ "rabbitmq_lib",
+)
+
+APP_NAME = "rabbitmq_sharding"
+
+APP_DESCRIPTION = "RabbitMQ Sharding Plugin"
+
+DEPS = [
+ "//deps/rabbit:bazel_erlang_lib",
+ "//deps/rabbit_common:bazel_erlang_lib",
+]
+
+rabbitmq_lib(
+ app_description = APP_DESCRIPTION,
+ app_name = APP_NAME,
+ deps = DEPS,
+)
+
+xref(tags = ["xref"])
+
+dialyze(
+ dialyzer_opts = RABBITMQ_DIALYZER_OPTS,
+ plt = "//:base_plt",
+ tags = ["dialyze"],
+)
+
+broker_for_integration_suites()
+
+PACKAGE = "deps/rabbitmq_sharding"
+
+suites = [
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "rabbit_hash_exchange_SUITE",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "rabbit_sharding_SUITE",
+ deps = [
+ "//deps/rabbit:bazel_erlang_lib",
+ ],
+ ),
+]
+
+assert_suites(
+ suites,
+ glob(["test/**/*_SUITE.erl"]),
+)
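
Assuming the conventions used by the other Bazel-built plugins in this tree, the suites declared in this new `BUILD.bazel` file should be runnable as ordinary Bazel test targets; the invocations below are hypothetical and simply follow standard Bazel target naming (package path plus each suite's `name` attribute).

```bash
# Hypothetical invocations, assuming default Bazel target naming.
bazel test //deps/rabbitmq_sharding:rabbit_hash_exchange_SUITE
bazel test //deps/rabbitmq_sharding:rabbit_sharding_SUITE
```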
diff --git a/deps/rabbitmq_sharding/CONTRIBUTING.md b/deps/rabbitmq_sharding/CONTRIBUTING.md
index 23a92fef9c..9722e973fb 100644
--- a/deps/rabbitmq_sharding/CONTRIBUTING.md
+++ b/deps/rabbitmq_sharding/CONTRIBUTING.md
@@ -13,7 +13,7 @@ The process is fairly standard:
* Create a branch with a descriptive name in the relevant repositories
* Make your changes, run tests, commit with a [descriptive message](https://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
* Submit pull requests with an explanation what has been changed and **why**
- * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Submit a filled out and signed [Contributor Agreement](https://cla.pivotal.io/) if needed (see below)
* Be patient. We will get to your pull request eventually
If what you are going to work on is a substantial change, please first ask the core team
@@ -28,7 +28,7 @@ See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
## Contributor Agreement
If you want to contribute a non-trivial change, please submit a signed copy of our
-[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+[Contributor Agreement](https://cla.pivotal.io/) around the time
you submit your pull request. This will make it much easier (in some cases, possible)
for the RabbitMQ team at Pivotal to merge your contribution.
diff --git a/deps/rabbitmq_sharding/Makefile b/deps/rabbitmq_sharding/Makefile
index 5d77580517..544033327e 100644
--- a/deps/rabbitmq_sharding/Makefile
+++ b/deps/rabbitmq_sharding/Makefile
@@ -17,5 +17,5 @@ DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
ERLANG_MK_COMMIT = rabbitmq-tmp
-include rabbitmq-components.mk
-include erlang.mk
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
diff --git a/deps/rabbitmq_sharding/README.md b/deps/rabbitmq_sharding/README.md
index 2be6b44231..f3e49b9e0e 100644
--- a/deps/rabbitmq_sharding/README.md
+++ b/deps/rabbitmq_sharding/README.md
@@ -36,6 +36,15 @@ Message distribution between shards (partitioning) is achieved
with a custom exchange type that distributes messages by applying
a hashing function to the routing key.
+## Sharding and Queue Replication
+
+Sharding performed by this plugin makes sense for **non-replicated classic queues** only.
+
+Combining sharding with a replicated queue type, e.g. [quorum queues](https://www.rabbitmq.com/quorum-queues.html) or
+(**deprecated**) mirrored classic queues, will negate most or all of the benefits offered
+by this plugin.
+
+Do not use this plugin with quorum queues. Avoid classic mirrored queues in general.
## Messages Distribution Between Shards (Partitioning)
@@ -67,7 +76,7 @@ of the number of shards (covered below) is not needed or desired, consider using
instead of this plugin.
-## Auto-scaling
+## Auto-scaling When Nodes are Added
One of the main properties of this plugin is that when a new node
is added to the RabbitMQ cluster, then the plugin will automatically create
@@ -115,18 +124,10 @@ queues in an uneven way.
## Load Distribution and Consumer Balancing
-As of RabbitMQ 3.8.1, the plugin is no longer affected by the queue master locator policy when using mirrored queues. Please read below if you use a previous version.
-
-This plugin can be affected by [queue master locator policy used](https://www.rabbitmq.com/ha.html) in
-the cluster as well as client connection load balancing strategy.
-
-"Minimum masters" is a queue master locator that is most in line with the goals of
-this plugin.
+Shard queue declaration by this plugin ignores the queue master locator policy, if any.
-For load balancers, the "least connections" strategy is more likely to produce an even distribution compared
-to round robin and other strategies.
-### How Evenly Will Messages Be Distributed?
+## How Evenly Will Messages Be Distributed?
As with many data distribution approaches based on a hashing function,
even distribution between shards depends on the distribution (variability) of inputs,
@@ -135,37 +136,18 @@ the more even will message distribution between shareds be. If all messages had
the same routing key, they would all end up on the same shard.
+## Installation
-## Installing ##
+This plugin ships with modern versions of RabbitMQ.
+Like all plugins, it [must be enabled](https://www.rabbitmq.com/plugins.html) before it can be used:
-### RabbitMQ 3.6.0 or later
-
-As of RabbitMQ `3.6.0` this plugin is included into the RabbitMQ distribution.
-
-Like any other [RabbitMQ plugin](https://www.rabbitmq.com/plugins.html) it has to be enabled before it can be used:
-
-```bash
-rabbitmq-plugins enable rabbitmq_sharding
-```
-
-You'd probably want to also enable the Consistent Hash Exchange
-plugin, too.
-
-### With Earlier Versions
-
-Install the corresponding .ez files from our
-[Community Plugins archive](https://www.rabbitmq.com/community-plugins/).
-
-Then run the following command:
-
-```bash
+``` bash
+# this might require sudo
rabbitmq-plugins enable rabbitmq_sharding
```
-You'd probably want to also enable the Consistent Hash Exchange
-plugin, too.
-## Usage ##
+## Usage
Once the plugin is installed you can define an exchange as sharded by
setting up a policy that matches the exchange name. For example if we
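
For illustration only, a policy of the kind this usage section describes might be declared roughly as follows; the policy name, exchange name pattern and parameter values here are assumptions rather than values taken from this diff.

```bash
# Hypothetical example: treat exchanges whose names start with "shard." as sharded.
rabbitmqctl set_policy images-shard "^shard\." '{"shards-per-node": 2, "routing-key": "1234"}'
```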
diff --git a/deps/rabbitmq_sharding/erlang.mk b/deps/rabbitmq_sharding/erlang.mk
deleted file mode 100644
index fce4be0b0a..0000000000
--- a/deps/rabbitmq_sharding/erlang.mk
+++ /dev/null
@@ -1,7808 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
-ERLANG_MK_WITHOUT =
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
-
-# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
-
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
-
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of statc BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another a key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstact time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour D NS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simply writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating Github-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = erlang library for JSON Web Token
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple non intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = redis erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += esh_mk
-pkg_esh_mk_name = esh_mk
-pkg_esh_mk_description = esh template engine plugin for erlang.mk
-pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
-pkg_esh_mk_fetch = git
-pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
-pkg_esh_mk_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = TOML language erlang parser
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = http://fixprotocol.org/ implementation.
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - a Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = The guards and for Erlang parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
-pkg_gun_homepage = http://ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang irc client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library, written in C, for efficiently decoding and encoding JSON.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = erlang driver for rethinkdb
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = library to handle mimetypes
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang Oauth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transformer for using new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with a convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between record and proplist
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = automatic Erlang node discovery via redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = pipelined erlang redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang Rest Client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy as nif for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an ID generator for message services.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elastic Search
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX style broken HTML parser in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = transit format for erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU based collation Erlang module
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtension for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = a simple erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler svn repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired by rebar xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based yaml loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = ZAB protocol implementation in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
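
A minimal usage sketch for the search target above, assuming the usual erlang.mk command-line conventions (the invocations are illustrative, not taken from this file):

    make search q=pool    # list packages whose name or description mentions "pool"
    make search           # print every entry in PACKAGES

The match is case-insensitive (both sides are lowercased via core_lc) and runs against the package name and description, with each hit printed through the pkg_print template.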
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
-
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
-
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# They both use the core_dep_plugin macro.
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
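
A brief sketch of how DEP_EARLY_PLUGINS entries resolve through the core_dep_plugin macro above (the dependency names are hypothetical):

    DEP_EARLY_PLUGINS = mydep                # includes $(DEPS_DIR)/mydep/early-plugins.mk
    DEP_EARLY_PLUGINS = mydep/mk/early.mk    # entry contains "/": includes $(DEPS_DIR)/mydep/mk/early.mk

When the named dependency is the project itself, the file is included from the project tree instead; otherwise the included file is given the dependency's directory as a prerequisite, so the dependency can be fetched before the plugin is loaded.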
-
-# Query functions.
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
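-
-# Illustrative expansions, assuming a dependency named "cowlib" is defined:
-# $(call query_name,cowlib)          -> cowlib
-# $(call query_repo,cowlib)          -> the repository URL from dep_cowlib
-# $(call query_version,cowlib)       -> the pinned commit, tag or version
-# $(call query_absolute_path,cowlib) -> $(DEPS_DIR)/cowlib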
-
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
-
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly we don't want to include it here,
-# otherwise it would be treated both as an app and as the top-level project.
-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
-
-export NO_AUTOPATCH
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
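-
-# Illustrative: with the verbosity convention above, V=0 prints short
-# "DEP <name>" lines while V=2 echoes the full shell commands (set -x),
-# e.g. "make V=2".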
-
-# Optimization: don't recompile deps unless truly necessary.
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create an ebin directory for all apps to make sure Erlang recognizes them
-# as proper OTP applications when using -include_lib. This is a temporary
-# fix; a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the top level: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
-
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies after they have been compiled
-# once. Developers working on the top-level project and on some of its
-# dependencies at the same time may want to change this behavior.
-# There are two solutions (see the illustrative usage further below):
-# 1. Set `FULL=1` so that all dependencies are visited and
-#    recursively recompiled if necessary.
-# 2. Set `FORCE_REBUILD=` to the specific list of dependencies that
-#    should be recompiled (instead of the whole set).
-
-FORCE_REBUILD ?=
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
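-
-# Illustrative usage of the two options described above (comment only):
-# make FULL=1                          # revisit all dependencies
-# make FORCE_REBUILD="ranch cowlib"    # only force these (example names)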
-
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
-
-# Deps related targets.
-
-# @todo rename GNUmakefile and makefile into Makefile first, if they exist
-# While the Makefile could also be named GNUmakefile or makefile,
-# in practice only Makefile has been needed so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
-# if given. Do it for all 3 possible Makefile file names.
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
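-
-# The sed replacement in dep_autopatch_erlang_mk above effectively turns
-#   include erlang.mk
-# in the dependency's Makefile into
-#   include $(if $(ERLANG_MK_FILENAME),$(ERLANG_MK_FILENAME),erlang.mk)
-# so the parent project's copy of Erlang.mk is reused when available.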
-
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-# Hex dependencies only carry a package version, so there is no need to look
-# them up in the Erlang.mk package index.
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
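-
-# Illustrative hex dependency specification (comment only; "jsx" and the
-# version are assumed example values):
-# DEPS += jsx
-# dep_jsx = hex 3.1.0
-# which fetches https://repo.hex.pm/tarballs/jsx-3.1.0.tar via the rule above.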
-
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility with older Erlang.mk configurations.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
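-
-# The deprecated legacy format handled above looks like (example values):
-# dep_mydep = https://github.com/example/mydep master
-# i.e. a repository URL optionally followed by a commit, branch or tag.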
-
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
-
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- Output0 = [
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ],
- Output = case "é" of
- [233] -> unicode:characters_to_binary(Output0);
- _ -> Output0
- end,
- ok = file:write_file("$(1)", Output),
- halt()
-endef
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
- echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
-
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
- @:
-
-test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
-test_erlc_verbose_2 = set -x;
-test_erlc_verbose = $(test_erlc_verbose_$(V))
-
-define compile_test_erl
- $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(1)
-endef
-
-ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
-$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
- $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want to fail due to
-# warnings when used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
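-
-# Running "make rebar.config" renders the template above so the project can
-# also be consumed by rebar. With the default ERLC_OPTS the generated file
-# looks roughly like this (illustrative dependency values):
-# {deps, [
-#     {cowlib,".*",{git,"https://github.com/ninenines/cowlib","2.12.0"}}
-# ]}.
-# {erl_opts, [debug_info,warn_export_vars,warn_shadow_vars,warn_obsolete_guard]}.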
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
- " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
- " list-templates List available templates"
-
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
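-
-# Illustrative: passing SP on the command line when bootstrapping, e.g.
-# "make bootstrap SP=2", makes the generated templates use two spaces for
-# indentation instead of the default tab; WS above holds that whitespace.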
-
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
-
-list-templates:
- $(verbose) @echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code. The "gcc" MSYS2 package also doesn't.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
-		"The CI_OTP variable must be defined with the Erlang versions" \
-		"that are to be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
-
-# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifdef CONCUERROR_TESTS
-
-.PHONY: concuerror distclean-concuerror
-
-# Configuration
-
-CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
-CONCUERROR_OPTS ?=
-
-# Core targets.
-
-check:: concuerror
-
-ifndef KEEP_LOGS
-distclean:: distclean-concuerror
-endif
-
-# Plugin-specific targets.
-
-$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
- $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
- $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
-
-$(CONCUERROR_LOGS_DIR):
- $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
-
-define concuerror_html_report
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="utf-8">
-<title>Concuerror HTML report</title>
-</head>
-<body>
-<h1>Concuerror HTML report</h1>
-<p>Generated on $(concuerror_date)</p>
-<ul>
-$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
-</ul>
-</body>
-</html>
-endef
-
-concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
- $(eval concuerror_date := $(shell date))
- $(eval concuerror_targets := $^)
- $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
-
-define concuerror_target
-.PHONY: concuerror-$1-$2
-
-concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
- $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
- --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
- -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
- $$(CONCUERROR_OPTS) -m $1 -t $2
-endef
-
-$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
-
-distclean-concuerror:
- $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
-
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ct apps-ct distclean-ct
-
-# Configuration.
-
-CT_OPTS ?=
-
-ifneq ($(wildcard $(TEST_DIR)),)
-ifndef CT_SUITES
-CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
-endif
-endif
-CT_SUITES ?=
-CT_LOGS_DIR ?= $(CURDIR)/logs
-
-# Core targets.
-
-tests:: ct
-
-ifndef KEEP_LOGS
-distclean:: distclean-ct
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Common_test targets:" \
- " ct Run all the common_test suites for this project" \
- "" \
-		"Each of your common_test suites has an associated target." \
-		"A suite named http_SUITE can be run using the ct-http target."
-
-# Plugin-specific targets.
-
-CT_RUN = ct_run \
- -no_auto_compile \
- -noinput \
- -pa $(CURDIR)/ebin $(TEST_DIR) \
- -dir $(TEST_DIR) \
- -logdir $(CT_LOGS_DIR)
-
-ifeq ($(CT_SUITES),)
-ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
-else
-# We do not run tests if we are in an apps/* directory with no test directory.
-ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
-ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
-endif
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-define ct_app_target
-apps-ct-$1: test-build
- $$(MAKE) -C $1 ct IS_APP=1
-endef
-
-$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
-
-apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
-endif
-
-ifdef t
-ifeq (,$(findstring :,$t))
-CT_EXTRA = -group $t
-else
-t_words = $(subst :, ,$t)
-CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
-endif
-else
-ifdef c
-CT_EXTRA = -case $c
-else
-CT_EXTRA =
-endif
-endif
-
-define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
-endef
-
-$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
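A hedged usage sketch of the per-suite targets and the t/c selection above; the suite, group and case names are hypothetical:

    make ct-http                      # run http_SUITE only
    make ct-http t=admin              # restrict the run to the 'admin' group
    make ct-http t=admin:get_info     # one group, one test case
    make ct-http c=get_info           # one test case, selected via -case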
-
-distclean-ct:
- $(gen_verbose) rm -rf $(CT_LOGS_DIR)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: plt distclean-plt dialyze
-
-# Configuration.
-
-DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
-export DIALYZER_PLT
-
-PLT_APPS ?=
-DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-DIALYZER_PLT_OPTS ?=
-
-# Core targets.
-
-check:: dialyze
-
-distclean:: distclean-plt
-
-help::
- $(verbose) printf "%s\n" "" \
- "Dialyzer targets:" \
- " plt Build a PLT file for this project" \
- " dialyze Analyze the project using Dialyzer"
-
-# Plugin-specific targets.
-
-define filter_opts.erl
- Opts = init:get_plain_arguments(),
- {Filtered, _} = lists:foldl(fun
- (O, {Os, true}) -> {[O|Os], false};
- (O = "-D", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-I", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-pa", {Os, _}) -> {[O|Os], true};
- (_, Acc) -> Acc
- end, {[], false}, Opts),
- io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
- halt().
-endef
-
-# DIALYZER_PLT is a variable understood directly by Dialyzer.
-#
-# We append the path to erts at the end of the PLT. This works
-# because the PLT file is in the external term format and the
-# function binary_to_term/1 ignores any trailing data.
-$(DIALYZER_PLT): deps app
- $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
- while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
- $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
- erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
- $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
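A minimal sketch of the trailing-data trick described above. The target name is hypothetical and not part of erlang.mk; it only reuses the $(ERL) and tail idioms of the surrounding rules. Per the comment above, binary_to_term/1 still succeeds on the whole file, and the appended erts path is the last line, which is exactly what the dialyze target below inspects with tail -n1.

    plt-erts-path: $(DIALYZER_PLT)
    	$(verbose) $(ERL) -eval '{ok, Bin} = file:read_file("$(call core_native_path,$(DIALYZER_PLT))"), _ = binary_to_term(Bin), halt().'
    	$(verbose) tail -n1 $(DIALYZER_PLT)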
-
-plt: $(DIALYZER_PLT)
-
-distclean-plt:
- $(gen_verbose) rm -f $(DIALYZER_PLT)
-
-ifneq ($(wildcard $(DIALYZER_PLT)),)
-dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
- $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
- grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
- rm $(DIALYZER_PLT); \
- $(MAKE) plt; \
- fi
-else
-dialyze: $(DIALYZER_PLT)
-endif
- $(verbose) dialyzer --no_native `$(ERL) \
- -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
- -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
- " escript Build an executable escript archive" \
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
- $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: eunit apps-eunit
-
-# Configuration
-
-EUNIT_OPTS ?=
-EUNIT_ERL_OPTS ?=
-
-# Core targets.
-
-tests:: eunit
-
-help::
- $(verbose) printf "%s\n" "" \
- "EUnit targets:" \
- " eunit Run all the EUnit tests for this project"
-
-# Plugin-specific targets.
-
-define eunit.erl
- $(call cover.erl)
- CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
- ok -> ok;
- error -> halt(2)
- end,
- CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
- halt()
-endef
-
-EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
-else
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
-endif
-else
-EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
-EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
-
-EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
- $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
-
-eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
-ifneq ($(wildcard src/ $(TEST_DIR)),)
- $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-apps-eunit: test-build
- $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
- [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
- exit $$eunit_retcode
-endif
-endif
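A hedged usage sketch of the t selection above; the module and generator names are hypothetical:

    make eunit                         # all modules from ebin/ and $(TEST_DIR)
    make eunit t=my_module             # eunit:test(['my_module'], ...)
    make eunit t=my_module:my_gen_     # eunit:test(fun my_module:my_gen_/0, ...)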
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper)
-.PHONY: proper
-
-# Targets.
-
-tests:: proper
-
-define proper_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- Module = fun(M) ->
- [true] =:= lists:usort([
- case atom_to_list(F) of
- "prop_" ++ _ ->
- io:format("Testing ~p:~p/0~n", [M, F]),
- proper:quickcheck(M:F(), nocolors);
- _ ->
- true
- end
- || {F, 0} <- M:module_info(exports)])
- end,
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]);
- module -> Module($(2));
- function -> proper:quickcheck($(2), nocolors)
- end,
- CoverExport("$(COVER_DATA_DIR)/proper.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-proper: test-build cover-data-dir
- $(verbose) $(call erlang,$(call proper_check.erl,module,$(t)))
-else
-proper: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)()))
-endif
-else
-proper: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Verbosity.
-
-proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
-proto_verbose = $(proto_verbose_$(V))
-
-# Core targets.
-
-ifneq ($(wildcard src/),)
-ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
-PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
-ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
-
-ifeq ($(PROTO_FILES),)
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
- $(verbose) :
-else
-# Rebuild proto files when the Makefile changes.
-# We exclude $(PROJECT).d to avoid a circular dependency.
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(PROTO_FILES); \
- fi
- $(verbose) touch $@
-
-$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
-endif
-
-ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
-define compile_proto.erl
- [begin
- protobuffs_compile:generate_source(F, [
- {output_include_dir, "./include"},
- {output_src_dir, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-else
-define compile_proto.erl
- [begin
- gpb_compile:file(F, [
- {include_as_lib, true},
- {module_name_suffix, "_pb"},
- {o_hrl, "./include"},
- {o_erl, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-endif
-
-ifneq ($(PROTO_FILES),)
-$(PROJECT).d:: $(PROTO_FILES)
- $(verbose) mkdir -p ebin/ include/
- $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
-endif
-endif
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
-
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
-		"  shell              Run an Erlang shell with SHELL_OPTS or a reasonable default"
-
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
-
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
-		"ReST sources and the 'conf.py' file are expected in the directory pointed to by" \
-		"SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists the formats to build (only" \
-		"the 'html' format is generated by default); the target directory can be specified by" \
- 'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
- "Additional Sphinx options can be set in SPHINX_OPTS."
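A hedged configuration sketch using only the variables described in the help text above; the output paths and the extra option are hypothetical:

    SPHINX_SOURCE = doc
    SPHINX_FORMATS = html man
    sphinx_html_output = output/html
    sphinx_man_output = output/man
    SPHINX_OPTS = -W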
-
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
-
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
-.PHONY: triq
-
-# Targets.
-
-tests:: triq
-
-define triq_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
- module -> triq:check($(2));
- function -> triq:check($(2))
- end,
- CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-triq: test-build cover-data-dir
- $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
-else
-triq: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
-endif
-else
-triq: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
- ' xref Run Xrefr using $$XREF_CONFIG as config file if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-COVER_REPORT_DIR ?= cover
-COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
-
-ifdef COVER
-COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
-COVER_DEPS ?=
-endif
-
-# Code coverage for Common Test.
-
-ifdef COVER
-ifdef CT_RUN
-ifneq ($(wildcard $(TEST_DIR)),)
-test-build:: $(TEST_DIR)/ct.cover.spec
-
-$(TEST_DIR)/ct.cover.spec: cover-data-dir
- $(gen_verbose) printf "%s\n" \
- "{incl_app, '$(PROJECT)', details}." \
- "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
- $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
- $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
-
-CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
-endif
-endif
-endif
-
-# Code coverage for other tools.
-
-ifdef COVER
-define cover.erl
- CoverSetup = fun() ->
- Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
- $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
- $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
- end
- end || Dir <- Dirs]
- end,
- CoverExport = fun(Filename) -> cover:export(Filename) end,
-endef
-else
-define cover.erl
- CoverSetup = fun() -> ok end,
- CoverExport = fun(_) -> ok end,
-endef
-endif
-
-# Core targets
-
-ifdef COVER
-ifneq ($(COVER_REPORT_DIR),)
-tests::
- $(verbose) $(MAKE) --no-print-directory cover-report
-endif
-
-cover-data-dir: | $(COVER_DATA_DIR)
-
-$(COVER_DATA_DIR):
- $(verbose) mkdir -p $(COVER_DATA_DIR)
-else
-cover-data-dir:
-endif
-
-clean:: coverdata-clean
-
-ifneq ($(COVER_REPORT_DIR),)
-distclean:: cover-report-clean
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Cover targets:" \
-		"  cover-report  Generate an HTML coverage report from previously collected" \
- " cover data." \
- " all.coverdata Merge all coverdata files into all.coverdata." \
- "" \
-		"If COVER=1 is set, coverage data is generated by the eunit and ct targets. The" \
-		"tests target additionally generates an HTML coverage report from the combined" \
- "coverdata files from each of these testing tools. HTML reports can be disabled" \
- "by setting COVER_REPORT_DIR to empty."
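A hedged usage sketch based on the help text above; the dependency name is hypothetical:

    make tests COVER=1                       # eunit + ct with coverage, then cover-report
    make ct COVER=1 COVER_DEPS=cowlib        # also instrument a dependency's ebin
    make tests COVER=1 COVER_REPORT_DIR=     # keep the coverdata files, skip the HTML report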
-
-# Plugin specific targets
-
-COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
-
-.PHONY: coverdata-clean
-coverdata-clean:
- $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
-
-# Merge all coverdata files into one.
-define cover_export.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
-endef
-
-all.coverdata: $(COVERDATA) cover-data-dir
- $(gen_verbose) $(call erlang,$(cover_export.erl))
-
-# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
-# empty if you want the coverdata files but not the HTML report.
-ifneq ($(COVER_REPORT_DIR),)
-
-.PHONY: cover-report-clean cover-report
-
-cover-report-clean:
- $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
-ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
- $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
-endif
-
-ifeq ($(COVERDATA),)
-cover-report:
-else
-
-# Modules which include eunit.hrl always contain one line without coverage
-# because eunit defines test/0 which is never called. We compensate for this.
-EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
- grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
- | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
-
-define cover_report.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- Ms = cover:imported_modules(),
- [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
- ++ ".COVER.html", [html]) || M <- Ms],
- Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
- EunitHrlMods = [$(EUNIT_HRL_MODS)],
- Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
- true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
- TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
- TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
- Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
- TotalPerc = Perc(TotalY, TotalN),
- {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
- io:format(F, "<!DOCTYPE html><html>~n"
- "<head><meta charset=\"UTF-8\">~n"
- "<title>Coverage report</title></head>~n"
- "<body>~n", []),
- io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
- io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
- [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
- "<td>~p%</td></tr>~n",
- [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
- How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
- Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
- io:format(F, "</table>~n"
- "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
- "</body></html>", [How, Date]),
- halt().
-endef
-
-cover-report:
- $(verbose) mkdir -p $(COVER_REPORT_DIR)
- $(gen_verbose) $(call erlang,$(cover_report.erl))
-
-endif
-endif # ifneq ($(COVER_REPORT_DIR),)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
-
-endif
-endif
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
-
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
-
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are also included no
-# matter which type of dependencies is requested.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
-
-# Allow fetch-deps to be combined with $(DEP_TYPES) to fetch multiple
-# types of dependencies with a single target.
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
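A hedged usage sketch of the DEP_TYPES handling above: listing extra dependency types makes a single fetch-deps (or list-deps) run include them as well.

    # In the project Makefile:
    DEP_TYPES = doc test

    # Or as a one-off run:
    make fetch-deps DEP_TYPES="doc test"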
-
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
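A hedged usage sketch of the query targets generated above: QUERY selects which query_* fields are reported, one line per dependency, prefixed with the querying project's name:

    make query-deps                           # default fields: name fetch_method repo version
    make query-deps QUERY="name repo"
    make query-test-deps QUERY="name version"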
diff --git a/deps/rabbitmq_sharding/rabbitmq-components.mk b/deps/rabbitmq_sharding/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/rabbitmq_sharding/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Define default goal to `all` because this file defines some targets
-# before the inclusion of erlang.mk leading to the wrong target becoming
-# the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
-
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to check out branches which match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch or fall back to `stable` or `master`, whichever was the
-# base of the topic branch.
-
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
-
-# Third-party dependencies version pinning.
-#
-# We do that in this file, which is copied in all projects, to ensure
-# all projects use the same versions. It avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# By default, Erlang.mk does not rebuild dependencies once they have
-# been compiled, except for those listed in the `$(FORCE_REBUILD)`
-# variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this makes it
-# easier to work on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project
-# repository URL, if it's a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name are replaced by the
-# target's properties:
-# eg. rabbitmq-common is replaced by rabbitmq-codegen
-# eg. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fall back to the
-# RabbitMQ upstream on GitHub.
-
-# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following patterns:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
-
-# Macro to replace both the project's name (eg. "rabbit_common") and
-# repository name (eg. "rabbitmq-common") by the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespace in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
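# A worked expansion of dep_rmq_repo with hypothetical values, assuming
# dep_rabbitmq_codegen is declared with the repository name rabbitmq-codegen:
# if the current project is rabbit_common, cloned from
# https://example.com/forks/rabbitmq-common.git, then
#
#   $(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),rabbitmq_codegen)
#
# expands to https://example.com/forks/rabbitmq-codegen.git: subst_repo_name
# swaps the repository name (rabbitmq-common -> rabbitmq-codegen) and, where
# it appears, the project name (rabbit_common -> rabbitmq_codegen) inside the
# URL. If cloning that fork fails, dep_fetch_git_rmq below falls back to the
# upstream GitHub URL.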
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level project's deps directory.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is a coincidence that we found a
-# `rabbitmq-components.mk` in upper directories.
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
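For reference, the two directory layouts that the DEPS_DIR override above is meant to catch look roughly like this (paths are illustrative):

    umbrella/deps/my_plugin/            # $(abspath ..)       resolves to umbrella/deps
    umbrella/deps/my_plugin/deps/dep/   # $(abspath ../../..) resolves to umbrella/deps

In both cases the guessed directory is named `deps` and its parent contains a rabbitmq-components.mk, so DEPS_DIR is pointed at the umbrella's deps directory and `make distclean` is disabled to avoid deleting it.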
diff --git a/deps/rabbitmq_sharding/src/rabbit_sharding_exchange_decorator.erl b/deps/rabbitmq_sharding/src/rabbit_sharding_exchange_decorator.erl
index e802570e55..84a12896c1 100644
--- a/deps/rabbitmq_sharding/src/rabbit_sharding_exchange_decorator.erl
+++ b/deps/rabbitmq_sharding/src/rabbit_sharding_exchange_decorator.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_sharding_exchange_decorator).
diff --git a/deps/rabbitmq_sharding/src/rabbit_sharding_exchange_type_modulus_hash.erl b/deps/rabbitmq_sharding/src/rabbit_sharding_exchange_type_modulus_hash.erl
index 0509972757..9424964f2e 100644
--- a/deps/rabbitmq_sharding/src/rabbit_sharding_exchange_type_modulus_hash.erl
+++ b/deps/rabbitmq_sharding/src/rabbit_sharding_exchange_type_modulus_hash.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_sharding_exchange_type_modulus_hash).
diff --git a/deps/rabbitmq_sharding/src/rabbit_sharding_interceptor.erl b/deps/rabbitmq_sharding/src/rabbit_sharding_interceptor.erl
index 4acba78827..a385b76c53 100644
--- a/deps/rabbitmq_sharding/src/rabbit_sharding_interceptor.erl
+++ b/deps/rabbitmq_sharding/src/rabbit_sharding_interceptor.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_sharding_interceptor).
diff --git a/deps/rabbitmq_sharding/src/rabbit_sharding_policy_validator.erl b/deps/rabbitmq_sharding/src/rabbit_sharding_policy_validator.erl
index da07765849..f4439d3bd8 100644
--- a/deps/rabbitmq_sharding/src/rabbit_sharding_policy_validator.erl
+++ b/deps/rabbitmq_sharding/src/rabbit_sharding_policy_validator.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_sharding_policy_validator).
diff --git a/deps/rabbitmq_sharding/src/rabbit_sharding_shard.erl b/deps/rabbitmq_sharding/src/rabbit_sharding_shard.erl
index 8c2365db88..16a4d1ccd7 100644
--- a/deps/rabbitmq_sharding/src/rabbit_sharding_shard.erl
+++ b/deps/rabbitmq_sharding/src/rabbit_sharding_shard.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_sharding_shard).
@@ -91,7 +91,7 @@ declare_queue(XName, Durable, N, Node) ->
catch
_Error:Reason ->
rabbit_log:error("sharding failed to declare queue for exchange ~p"
- " - soft error:~n~p~n",
+ " - soft error:~n~p",
[exchange_bin(XName), Reason]),
error
end.
diff --git a/deps/rabbitmq_sharding/src/rabbit_sharding_util.erl b/deps/rabbitmq_sharding/src/rabbit_sharding_util.erl
index cb5f719c65..4d5b7aea2f 100644
--- a/deps/rabbitmq_sharding/src/rabbit_sharding_util.erl
+++ b/deps/rabbitmq_sharding/src/rabbit_sharding_util.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_sharding_util).
diff --git a/deps/rabbitmq_sharding/test/src/rabbit_hash_exchange_SUITE.erl b/deps/rabbitmq_sharding/test/rabbit_hash_exchange_SUITE.erl
index 66ce3daa4c..6235a7e910 100644
--- a/deps/rabbitmq_sharding/test/src/rabbit_hash_exchange_SUITE.erl
+++ b/deps/rabbitmq_sharding/test/rabbit_hash_exchange_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_hash_exchange_SUITE).
diff --git a/deps/rabbitmq_sharding/test/src/rabbit_sharding_SUITE.erl b/deps/rabbitmq_sharding/test/rabbit_sharding_SUITE.erl
index 65f96a4e5d..3d1ab24eb7 100644
--- a/deps/rabbitmq_sharding/test/src/rabbit_sharding_SUITE.erl
+++ b/deps/rabbitmq_sharding/test/rabbit_sharding_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_sharding_SUITE).
diff --git a/deps/rabbitmq_shovel/BUILD.bazel b/deps/rabbitmq_shovel/BUILD.bazel
new file mode 100644
index 0000000000..69563d09f5
--- /dev/null
+++ b/deps/rabbitmq_shovel/BUILD.bazel
@@ -0,0 +1,163 @@
+load("@bazel-erlang//:bazel_erlang_lib.bzl", "erlc")
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze")
+load("//:rabbitmq_home.bzl", "rabbitmq_home")
+load("//:rabbitmq_run.bzl", "rabbitmq_run")
+load(
+ "//:rabbitmq.bzl",
+ "RABBITMQ_DIALYZER_OPTS",
+ "RABBITMQ_TEST_ERLC_OPTS",
+ "assert_suites",
+ "rabbitmq_integration_suite",
+ "rabbitmq_lib",
+ "rabbitmq_suite",
+)
+
+APP_NAME = "rabbitmq_shovel"
+
+APP_DESCRIPTION = "Data Shovel for RabbitMQ"
+
+APP_MODULE = "rabbit_shovel"
+
+APP_ENV = """[
+ {defaults, [
+ {prefetch_count, 1000},
+ {ack_mode, on_confirm},
+ {publish_fields, []},
+ {publish_properties, []},
+ {reconnect_delay, 5}
+ ]}
+ ]"""
+
+EXTRA_APPS = [
+ "crypto",
+]
+
+BUILD_DEPS = [
+ "//deps/rabbitmq_cli:rabbitmqctl",
+]
+
+DEPS = [
+ "//deps/amqp_client:bazel_erlang_lib",
+ "//deps/rabbit_common:bazel_erlang_lib",
+]
+
+RUNTIME_DEPS = [
+ "//deps/amqp10_client:bazel_erlang_lib",
+ "//deps/rabbit:bazel_erlang_lib",
+]
+
+rabbitmq_lib(
+ app_description = APP_DESCRIPTION,
+ app_env = APP_ENV,
+ app_module = APP_MODULE,
+ app_name = APP_NAME,
+ build_deps = BUILD_DEPS,
+ extra_apps = EXTRA_APPS,
+ first_srcs = [
+ "src/rabbit_shovel_behaviour.erl",
+ ],
+ runtime_deps = RUNTIME_DEPS,
+ deps = DEPS,
+)
+
+xref(tags = ["xref"])
+
+dialyze(
+ dialyzer_opts = RABBITMQ_DIALYZER_OPTS,
+ plt = "//:base_plt",
+ tags = ["dialyze"],
+)
+
+rabbitmq_home(
+ name = "broker-for-tests-home",
+ plugins = [
+ "//deps/rabbit:bazel_erlang_lib",
+ "//deps/rabbitmq_amqp1_0:bazel_erlang_lib",
+ ":bazel_erlang_lib",
+ ],
+)
+
+rabbitmq_run(
+ name = "rabbitmq-for-tests-run",
+ home = ":broker-for-tests-home",
+)
+
+erlc(
+ name = "shovel_test_utils",
+ testonly = True,
+ srcs = [
+ "test/shovel_test_utils.erl",
+ ],
+ dest = "test",
+ erlc_opts = RABBITMQ_TEST_ERLC_OPTS,
+)
+
+PACKAGE = "deps/rabbitmq_shovel"
+
+suites = [
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "amqp10_dynamic_SUITE",
+ additional_beam = [
+ ":shovel_test_utils",
+ ],
+ flaky = True,
+ ),
+ rabbitmq_suite(
+ name = "amqp10_shovel_SUITE",
+ size = "small",
+ runtime_deps = [
+ "@meck//:bazel_erlang_lib",
+ ],
+ deps = [
+ "//deps/amqp10_common:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "amqp10_SUITE",
+ ),
+ rabbitmq_suite(
+ name = "config_SUITE",
+ size = "small",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "configuration_SUITE",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "delete_shovel_command_SUITE",
+ additional_beam = [
+ ":shovel_test_utils",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "dynamic_SUITE",
+ additional_beam = [
+ ":shovel_test_utils",
+ ],
+ flaky = True,
+ ),
+ rabbitmq_suite(
+ name = "parameters_SUITE",
+ size = "medium",
+ deps = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "shovel_status_command_SUITE",
+ additional_beam = [
+ ":shovel_test_utils",
+ ],
+ ),
+]
+
+assert_suites(
+ suites,
+ glob(["test/**/*_SUITE.erl"]),
+)
diff --git a/deps/rabbitmq_shovel/Makefile b/deps/rabbitmq_shovel/Makefile
index 06ad5fe759..493372cb80 100644
--- a/deps/rabbitmq_shovel/Makefile
+++ b/deps/rabbitmq_shovel/Makefile
@@ -36,5 +36,5 @@ dep_elvis_mk = git https://github.com/inaka/elvis.mk.git master
ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
ERLANG_MK_COMMIT = rabbitmq-tmp
-include rabbitmq-components.mk
-include erlang.mk
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
diff --git a/deps/rabbitmq_shovel/erlang.mk b/deps/rabbitmq_shovel/erlang.mk
deleted file mode 100644
index fce4be0b0a..0000000000
--- a/deps/rabbitmq_shovel/erlang.mk
+++ /dev/null
@@ -1,7808 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
-ERLANG_MK_WITHOUT =
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
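# Illustrative effect of the verbosity switch above (commands are examples only):
#
#   make            # V=0: recipes are silenced; generated files print " GEN <target>"
#   make V=1        # commands are echoed as they run
#   make V=2        # each recipe is additionally prefixed with "set -x;"
#   make V=3        # the whole $(SHELL) runs with -x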
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
-
-# Adding erlang.mk to make Erlang scripts that call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
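# A hypothetical use of the helper above, inside a recipe:
#
#   $(verbose) $(call erlang,halt(0))
#
# This runs $(ERL) with -eval "halt(0)" plus the plain argument "erlang.mk";
# escape_dquotes and the newline substitution let multi-line, double-quoted
# Erlang snippets survive being passed on the command line.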
-
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
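# Illustrative expansions of the helpers above (values are examples only):
#
#   $(call comma_list,a b c)    -> a,b,c
#   $(call core_lc,FooBar)      -> foobar
#   $(call core_eq,abc,abc)     -> non-empty (equal); $(call core_eq,abc,abd) -> empty
#   $(call core_http_get,out.tgz,https://example.com/x.tgz)
#                               -> curl -Lfso out.tgz https://example.com/x.tgz with V=0
#                                  (the -s is dropped when V is non-zero)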
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
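# A hypothetical use of the kerl target templates above: with a version string
# such as 24.2 (any ref kerl can build from $(OTP_GIT) would do),
#
#   $(eval $(call kerl_otp_target,24.2))
#
# defines $(KERL_INSTALL_DIR)/24.2 as a target that builds that Erlang/OTP
# release with kerl and installs it only if the directory is missing; the
# ERLANG_OTP block further below evaluates exactly this template and prepends
# the resulting bin directory to PATH.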
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_HIPE)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
-
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simplify writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating Github-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = erlang library for JSON Web Token
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple, non-intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is a pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's SpiderMonkey JavaScript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elasticsearch's REST interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = redis erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += esh_mk
-pkg_esh_mk_name = esh_mk
-pkg_esh_mk_description = esh template engine plugin for erlang.mk
-pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
-pkg_esh_mk_fetch = git
-pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
-pkg_esh_mk_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = TOML language erlang parser
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = FIX protocol (http://fixprotocol.org/) implementation.
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - an Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = The guards and for Erlang parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
-pkg_gun_homepage = http://ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman.
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang irc client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSON Schema Erlang) is an implementation of a JSON Schema validator for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding and decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library for efficiently decoding and encoding JSON, written in C.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-Erlang, open-source implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = erlang driver for rethinkdb
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = library to handle mimetypes
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang OAuth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transformer for using new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between record and proplist
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = automatic Erlang node discovery via redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = pipelined erlang redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang REST Client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy compression as a NIF for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an ID generator for message services.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elastic Search
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX-style parser for broken HTML, written in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = transit format for erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU-based collation module for Erlang
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtension for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = a simple erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler svn repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired by rebar xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based yaml loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = ZAB protocol implemented in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
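-
-# Usage sketch (the query string below is only an example):
-#   make search q=websocket
-# prints every package whose name or description contains "websocket";
-# without q, all packages are listed.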
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
-
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
-
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# Both early and regular plugins are loaded through the core_dep_plugin macro.
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
-
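-# Usage sketch (dependency and file names are illustrative): a bare name
-# loads $(DEPS_DIR)/<name>/early-plugins.mk, while a path is resolved
-# relative to $(DEPS_DIR):
-#   DEP_EARLY_PLUGINS = my_build_dep
-#   DEP_EARLY_PLUGINS += other_dep/mk/early-plugins.mk
-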
-# Query functions.
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
-
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
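-
-# Resolution example (the dependency line itself is illustrative): given
-#   dep_cowboy = git https://github.com/ninenines/cowboy 2.9.0
-# query_fetch_method returns git, query_repo the repository URL and
-# query_version 2.9.0 (words 1, 2 and 3 of $(dep_cowboy)).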
-
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly we don't want to include it here,
-# otherwise it'll be treated both as an app and as the top-level project.
-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
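-
-# Net effect (with default paths, as a sketch): if ERL_LIBS was unset it is
-# exported as $(CURDIR)/apps:$(CURDIR)/deps; otherwise those two directories
-# are appended, unless one of them is already listed.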
-
-export NO_AUTOPATCH
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
-
-# Optimization: don't recompile deps unless truly necessary.
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create an ebin directory for all apps to make sure Erlang recognizes them
-# as proper OTP applications when using -include_lib. This is a temporary
-# fix; a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
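-
-# Illustration (application names assumed): with apps/frontend and
-# apps/backend present and LOCAL_DEPS = backend in the top-level Makefile,
-# only apps/backend is built here; without LOCAL_DEPS, both are.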
-
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies once they have been compiled.
-# A developer working on the top-level project and on some of its
-# dependencies at the same time may want to change this behavior.
-# There are two solutions (see the usage sketch further below):
-# 1. Set `FULL=1` so that all dependencies are visited and
-#    recursively recompiled if necessary.
-# 2. Set `FORCE_REBUILD=` to the specific list of dependencies that
-#    should be recompiled (instead of the whole set).
-
-FORCE_REBUILD ?=
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
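-
-# Usage sketch (the dependency names are illustrative):
-#   make FULL=1
-#   make FORCE_REBUILD="cowlib ranch"
-# The first form revisits every dependency; the second forces only the
-# listed ones to be rebuilt even if they were already built.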
-
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
-
-# Deps related targets.
-
-# @todo rename GNUmakefile and makefile into Makefile first, if they exist
-# While the Makefile could also be named GNUmakefile or makefile,
-# in practice only Makefile has been needed so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
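-
-# For instance, a hypothetical dependency Makefile like
-#   compile:
-#   	rebar compile
-# is routed through dep_autopatch2, because a non-comment line mentions
-# rebar. The same applies when rebar.lock exists, when the Makefile
-# includes a ../*.mk file, or when another *.mk file in the dependency
-# mentions rebar.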
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
-# if given. Do it for all 3 possible Makefile file names.
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
-
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-# Hex only has a package version. No need to look in the Erlang.mk packages.
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
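-
-# Example (package and version are illustrative): dep_jsx = hex 3.1.0
-# downloads https://repo.hex.pm/tarballs/jsx-3.1.0.tar and unpacks its
-# contents.tar.gz into $(DEPS_DIR)/jsx.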
-
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility with older Erlang.mk configurations.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
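-
-# The deprecated format looks like this (names are hypothetical):
-#   dep_mylib = https://github.com/example/mylib master
-# i.e. repository URL first, then an optional commit (defaulting to
-# master), with no explicit fetch method.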
-
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
-
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
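-
-# Rendered result, roughly (values assumed: a freshly bootstrapped my_app
-# with no dependencies and src/my_app_app.erl present):
-#   {application, 'my_app', [
-#       {description, "New project"},
-#       {vsn, "0.1.0"},
-#       {modules, ['my_app_app','my_app_sup']},
-#       {registered, [my_app_sup]},
-#       {applications, [kernel,stdlib]},
-#       {mod, {my_app_app, []}},
-#       {env, []}
-#   ]}.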
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
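-
-# Layout sketch (file name assumed): asn1/MyProtocol.asn1 is compiled with
-# +noobj, then the generated files typically end up as src/MyProtocol.erl,
-# include/MyProtocol.hrl and include/MyProtocol.asn1db before the regular
-# Erlang sources are built.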
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- Output0 = [
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ],
- Output = case "é" of
- [233] -> unicode:characters_to_binary(Output0);
- _ -> Output0
- end,
- ok = file:write_file("$(1)", Output),
- halt()
-endef
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
- echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
-
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
- @:
-
-test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
-test_erlc_verbose_2 = set -x;
-test_erlc_verbose = $(test_erlc_verbose_$(V))
-
-define compile_test_erl
- $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(1)
-endef
-
-ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
-$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
- $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want to fail due to
-# warnings when used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
-
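-# With the default ERLC_OPTS and, say, DEPS = cowboy declared as
-# dep_cowboy = git https://github.com/ninenines/cowboy 2.9.0 (illustrative),
-# the generated file resembles:
-#   {deps, [
-#       {cowboy,".*",{git,"https://github.com/ninenines/cowboy","2.9.0"}}
-#   ]}.
-#   {erl_opts, [debug_info,warn_export_vars,warn_shadow_vars,warn_obsolete_guard]}.
-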
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
- " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
- " list-templates List available templates"
-
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
-
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
-
-list-templates:
- $(verbose) @echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code. The "gcc" MSYS2 package also doesn't.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
- "The CI_OTP variable must be defined with the Erlang versions" \
- "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
-
-# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifdef CONCUERROR_TESTS
-
-.PHONY: concuerror distclean-concuerror
-
-# Configuration
-
-CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
-CONCUERROR_OPTS ?=
-
-# Core targets.
-
-check:: concuerror
-
-ifndef KEEP_LOGS
-distclean:: distclean-concuerror
-endif
-
-# Plugin-specific targets.
-
-$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
- $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
- $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
-
-$(CONCUERROR_LOGS_DIR):
- $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
-
-define concuerror_html_report
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="utf-8">
-<title>Concuerror HTML report</title>
-</head>
-<body>
-<h1>Concuerror HTML report</h1>
-<p>Generated on $(concuerror_date)</p>
-<ul>
-$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
-</ul>
-</body>
-</html>
-endef
-
-concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
- $(eval concuerror_date := $(shell date))
- $(eval concuerror_targets := $^)
- $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
-
-define concuerror_target
-.PHONY: concuerror-$1-$2
-
-concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
- $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
- --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
- -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
- $$(CONCUERROR_OPTS) -m $1 -t $2
-endef
-
-$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
-
-distclean-concuerror:
- $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
-
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ct apps-ct distclean-ct
-
-# Configuration.
-
-CT_OPTS ?=
-
-ifneq ($(wildcard $(TEST_DIR)),)
-ifndef CT_SUITES
-CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
-endif
-endif
-CT_SUITES ?=
-CT_LOGS_DIR ?= $(CURDIR)/logs
-
-# Core targets.
-
-tests:: ct
-
-ifndef KEEP_LOGS
-distclean:: distclean-ct
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Common_test targets:" \
- " ct Run all the common_test suites for this project" \
- "" \
- "All your common_test suites have their associated targets." \
- "A suite named http_SUITE can be run using the ct-http target."
-
-# Plugin-specific targets.
-
-CT_RUN = ct_run \
- -no_auto_compile \
- -noinput \
- -pa $(CURDIR)/ebin $(TEST_DIR) \
- -dir $(TEST_DIR) \
- -logdir $(CT_LOGS_DIR)
-
-ifeq ($(CT_SUITES),)
-ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
-else
-# We do not run tests if we are in an apps/* directory with no test directory.
-ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
-ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
-endif
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-define ct_app_target
-apps-ct-$1: test-build
- $$(MAKE) -C $1 ct IS_APP=1
-endef
-
-$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
-
-apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
-endif
-
-ifdef t
-ifeq (,$(findstring :,$t))
-CT_EXTRA = -group $t
-else
-t_words = $(subst :, ,$t)
-CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
-endif
-else
-ifdef c
-CT_EXTRA = -case $c
-else
-CT_EXTRA =
-endif
-endif
-
-define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
-endef
-
-$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
-
-distclean-ct:
- $(gen_verbose) rm -rf $(CT_LOGS_DIR)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: plt distclean-plt dialyze
-
-# Configuration.
-
-DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
-export DIALYZER_PLT
-
-PLT_APPS ?=
-DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-DIALYZER_PLT_OPTS ?=
-
-# Core targets.
-
-check:: dialyze
-
-distclean:: distclean-plt
-
-help::
- $(verbose) printf "%s\n" "" \
- "Dialyzer targets:" \
- " plt Build a PLT file for this project" \
- " dialyze Analyze the project using Dialyzer"
-
-# Plugin-specific targets.
-
-define filter_opts.erl
- Opts = init:get_plain_arguments(),
- {Filtered, _} = lists:foldl(fun
- (O, {Os, true}) -> {[O|Os], false};
- (O = "-D", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-I", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-pa", {Os, _}) -> {[O|Os], true};
- (_, Acc) -> Acc
- end, {[], false}, Opts),
- io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
- halt().
-endef
-
-# DIALYZER_PLT is a variable understood directly by Dialyzer.
-#
-# We append the path to erts at the end of the PLT. This works
-# because the PLT file is in the external term format and the
-# function binary_to_term/1 ignores any trailing data.
-$(DIALYZER_PLT): deps app
- $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
- while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
- $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
- erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
- $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
-
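As a minimal illustration of the trailing-data trick described above (not part of erlang.mk; the erts path below is made up), binary_to_term/1 decodes the first complete term and ignores any bytes that follow, which is why appending the erts path to the PLT file leaves it readable:

    %% Erlang shell sketch: trailing bytes after a complete
    %% external-term-format term are ignored by binary_to_term/1.
    Plt = term_to_binary({fake_plt, contents}),
    Tagged = <<Plt/binary, "\n/usr/local/lib/erlang/lib/erts-12.2\n">>,
    {fake_plt, contents} = binary_to_term(Tagged).
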
-plt: $(DIALYZER_PLT)
-
-distclean-plt:
- $(gen_verbose) rm -f $(DIALYZER_PLT)
-
-ifneq ($(wildcard $(DIALYZER_PLT)),)
-dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
- $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
- grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
- rm $(DIALYZER_PLT); \
- $(MAKE) plt; \
- fi
-else
-dialyze: $(DIALYZER_PLT)
-endif
- $(verbose) dialyzer --no_native `$(ERL) \
- -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
- -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
- " escript Build an executable escript archive" \
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
- $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: eunit apps-eunit
-
-# Configuration
-
-EUNIT_OPTS ?=
-EUNIT_ERL_OPTS ?=
-
-# Core targets.
-
-tests:: eunit
-
-help::
- $(verbose) printf "%s\n" "" \
- "EUnit targets:" \
- " eunit Run all the EUnit tests for this project"
-
-# Plugin-specific targets.
-
-define eunit.erl
- $(call cover.erl)
- CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
- ok -> ok;
- error -> halt(2)
- end,
- CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
- halt()
-endef
-
-EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
-else
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
-endif
-else
-EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
-EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
-
-EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
- $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
-
-eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
-ifneq ($(wildcard src/ $(TEST_DIR)),)
- $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-apps-eunit: test-build
- $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
- [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
- exit $$eunit_retcode
-endif
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper)
-.PHONY: proper
-
-# Targets.
-
-tests:: proper
-
-define proper_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- Module = fun(M) ->
- [true] =:= lists:usort([
- case atom_to_list(F) of
- "prop_" ++ _ ->
- io:format("Testing ~p:~p/0~n", [M, F]),
- proper:quickcheck(M:F(), nocolors);
- _ ->
- true
- end
- || {F, 0} <- M:module_info(exports)])
- end,
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]);
- module -> Module($(2));
- function -> proper:quickcheck($(2), nocolors)
- end,
- CoverExport("$(COVER_DATA_DIR)/proper.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-proper: test-build cover-data-dir
- $(verbose) $(call erlang,$(call proper_check.erl,module,$(t)))
-else
-proper: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)()))
-endif
-else
-proper: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Verbosity.
-
-proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
-proto_verbose = $(proto_verbose_$(V))
-
-# Core targets.
-
-ifneq ($(wildcard src/),)
-ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
-PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
-ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
-
-ifeq ($(PROTO_FILES),)
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
- $(verbose) :
-else
-# Rebuild proto files when the Makefile changes.
-# We exclude $(PROJECT).d to avoid a circular dependency.
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(PROTO_FILES); \
- fi
- $(verbose) touch $@
-
-$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
-endif
-
-ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
-define compile_proto.erl
- [begin
- protobuffs_compile:generate_source(F, [
- {output_include_dir, "./include"},
- {output_src_dir, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-else
-define compile_proto.erl
- [begin
- gpb_compile:file(F, [
- {include_as_lib, true},
- {module_name_suffix, "_pb"},
- {o_hrl, "./include"},
- {o_erl, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-endif
-
-ifneq ($(PROTO_FILES),)
-$(PROJECT).d:: $(PROTO_FILES)
- $(verbose) mkdir -p ebin/ include/
- $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
-endif
-endif
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
-
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
- " shell Run an Erlang shell with SHELL_OPTS or a reasonable default"
-
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
-
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
- "ReST sources and the 'conf.py' file are expected in the directory pointed to by" \
- "SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists formats to build (only" \
- "'html' format is generated by default); target directory can be specified by" \
- 'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
- "Additional Sphinx options can be set in SPHINX_OPTS."
-
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
-
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
-.PHONY: triq
-
-# Targets.
-
-tests:: triq
-
-define triq_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
- module -> triq:check($(2));
- function -> triq:check($(2))
- end,
- CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-triq: test-build cover-data-dir
- $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
-else
-triq: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
-endif
-else
-triq: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
- ' xref Run Xrefr using $$XREF_CONFIG as config file if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-COVER_REPORT_DIR ?= cover
-COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
-
-ifdef COVER
-COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
-COVER_DEPS ?=
-endif
-
-# Code coverage for Common Test.
-
-ifdef COVER
-ifdef CT_RUN
-ifneq ($(wildcard $(TEST_DIR)),)
-test-build:: $(TEST_DIR)/ct.cover.spec
-
-$(TEST_DIR)/ct.cover.spec: cover-data-dir
- $(gen_verbose) printf "%s\n" \
- "{incl_app, '$(PROJECT)', details}." \
- "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
- $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
- $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
-
-CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
-endif
-endif
-endif
-
-# Code coverage for other tools.
-
-ifdef COVER
-define cover.erl
- CoverSetup = fun() ->
- Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
- $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
- $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
- end
- end || Dir <- Dirs]
- end,
- CoverExport = fun(Filename) -> cover:export(Filename) end,
-endef
-else
-define cover.erl
- CoverSetup = fun() -> ok end,
- CoverExport = fun(_) -> ok end,
-endef
-endif
-
-# Core targets
-
-ifdef COVER
-ifneq ($(COVER_REPORT_DIR),)
-tests::
- $(verbose) $(MAKE) --no-print-directory cover-report
-endif
-
-cover-data-dir: | $(COVER_DATA_DIR)
-
-$(COVER_DATA_DIR):
- $(verbose) mkdir -p $(COVER_DATA_DIR)
-else
-cover-data-dir:
-endif
-
-clean:: coverdata-clean
-
-ifneq ($(COVER_REPORT_DIR),)
-distclean:: cover-report-clean
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Cover targets:" \
- " cover-report Generate an HTML coverage report from previously collected" \
- " cover data." \
- " all.coverdata Merge all coverdata files into all.coverdata." \
- "" \
- "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
- "target tests additionally generates an HTML coverage report from the combined" \
- "coverdata files from each of these testing tools. HTML reports can be disabled" \
- "by setting COVER_REPORT_DIR to empty."
-
-# Plugin specific targets
-
-COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
-
-.PHONY: coverdata-clean
-coverdata-clean:
- $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
-
-# Merge all coverdata files into one.
-define cover_export.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
-endef
-
-all.coverdata: $(COVERDATA) cover-data-dir
- $(gen_verbose) $(call erlang,$(cover_export.erl))
-
-# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
-# empty if you want the coverdata files but not the HTML report.
-ifneq ($(COVER_REPORT_DIR),)
-
-.PHONY: cover-report-clean cover-report
-
-cover-report-clean:
- $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
-ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
- $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
-endif
-
-ifeq ($(COVERDATA),)
-cover-report:
-else
-
-# Modules which include eunit.hrl always contain one line without coverage
-# because eunit defines test/0 which is never called. We compensate for this.
-EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
- grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
- | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
-
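To make the compensation above concrete, here is a hedged Erlang sketch (the module name my_mod and the counts are hypothetical) of the adjustment that cover_report.erl below applies to each {Covered, NotCovered} pair:

    %% One never-executed line (eunit's generated test/0) is subtracted
    %% from the NotCovered count for modules that include eunit.hrl.
    Report = [{my_mod, {10, 3}}],
    EunitHrlMods = [my_mod],
    [{my_mod, {10, 2}}] =
        [{M, {Y, case lists:member(M, EunitHrlMods) of
                     true -> N - 1;
                     false -> N
                 end}} || {M, {Y, N}} <- Report].
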
-define cover_report.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- Ms = cover:imported_modules(),
- [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
- ++ ".COVER.html", [html]) || M <- Ms],
- Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
- EunitHrlMods = [$(EUNIT_HRL_MODS)],
- Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
- true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
- TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
- TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
- Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
- TotalPerc = Perc(TotalY, TotalN),
- {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
- io:format(F, "<!DOCTYPE html><html>~n"
- "<head><meta charset=\"UTF-8\">~n"
- "<title>Coverage report</title></head>~n"
- "<body>~n", []),
- io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
- io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
- [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
- "<td>~p%</td></tr>~n",
- [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
- How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
- Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
- io:format(F, "</table>~n"
- "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
- "</body></html>", [How, Date]),
- halt().
-endef
-
-cover-report:
- $(verbose) mkdir -p $(COVER_REPORT_DIR)
- $(gen_verbose) $(call erlang,$(cover_report.erl))
-
-endif
-endif # ifneq ($(COVER_REPORT_DIR),)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
-
-endif
-endif
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
-
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
-
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are also included no
-# matter the type of requested dependencies.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
-
-# Allow using fetch-deps and $(DEP_TYPES) to fetch multiple types of
-# dependencies with a single target.
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
-
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
diff --git a/deps/rabbitmq_shovel/include/logging.hrl b/deps/rabbitmq_shovel/include/logging.hrl
new file mode 100644
index 0000000000..f619c34471
--- /dev/null
+++ b/deps/rabbitmq_shovel/include/logging.hrl
@@ -0,0 +1,3 @@
+-include_lib("rabbit_common/include/logging.hrl").
+
+-define(RMQLOG_DOMAIN_SHOVEL, ?DEFINE_RMQLOG_DOMAIN(shovel)).
diff --git a/deps/rabbitmq_shovel/include/rabbit_shovel.hrl b/deps/rabbitmq_shovel/include/rabbit_shovel.hrl
index 49d41d1e8f..5c6e18ffaa 100644
--- a/deps/rabbitmq_shovel/include/rabbit_shovel.hrl
+++ b/deps/rabbitmq_shovel/include/rabbit_shovel.hrl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-record(endpoint,
diff --git a/deps/rabbitmq_shovel/rabbitmq-components.mk b/deps/rabbitmq_shovel/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/rabbitmq_shovel/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Define the default goal as `all` because this file defines some targets
-# before the inclusion of erlang.mk, leading to the wrong target becoming
-# the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
-
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to checkout branches which match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch or fallback to `stable` or `master` whichever was the
-# base of the topic branch.
-
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
-
-# Third-party dependencies version pinning.
-#
-# We do that in this file, which is copied in all projects, to ensure
-# all projects use the same versions. It avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# Erlang.mk does not rebuild dependencies by default, once they were
-# compiled once, except for those listed in the `$(FORCE_REBUILD)`
-# variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this eases
-# the work on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project
-# repository URL, if it's a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name is replaced by the
-# target's properties:
-# eg. rabbitmq-common is replaced by rabbitmq-codegen
-# eg. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fallback to RabbitMQ
-# upstream which is GitHub.
-
-# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following pattern:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
-
-# Macro to replace both the project's name (eg. "rabbit_common") and
-# repository name (eg. "rabbitmq-common") by the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespaces in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level's one.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is a coincidence that we found a
-# `rabbitmq-components.mk` up upper directories.
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
diff --git a/deps/rabbitmq_shovel/src/Elixir.RabbitMQ.CLI.Ctl.Commands.DeleteShovelCommand.erl b/deps/rabbitmq_shovel/src/Elixir.RabbitMQ.CLI.Ctl.Commands.DeleteShovelCommand.erl
index 59f2a8a5e9..f9618662e2 100644
--- a/deps/rabbitmq_shovel/src/Elixir.RabbitMQ.CLI.Ctl.Commands.DeleteShovelCommand.erl
+++ b/deps/rabbitmq_shovel/src/Elixir.RabbitMQ.CLI.Ctl.Commands.DeleteShovelCommand.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module('Elixir.RabbitMQ.CLI.Ctl.Commands.DeleteShovelCommand').
@@ -11,6 +11,11 @@
-behaviour('Elixir.RabbitMQ.CLI.CommandBehaviour').
+-ignore_xref([
+ {'Elixir.RabbitMQ.CLI.DefaultOutput', output, 1},
+ {'Elixir.RabbitMQ.CLI.Core.Helpers', cli_acting_user, 0}
+]).
+
-export([
usage/0,
usage_additional/0,
diff --git a/deps/rabbitmq_shovel/src/Elixir.RabbitMQ.CLI.Ctl.Commands.RestartShovelCommand.erl b/deps/rabbitmq_shovel/src/Elixir.RabbitMQ.CLI.Ctl.Commands.RestartShovelCommand.erl
index e6e64443ad..1f8c937b48 100644
--- a/deps/rabbitmq_shovel/src/Elixir.RabbitMQ.CLI.Ctl.Commands.RestartShovelCommand.erl
+++ b/deps/rabbitmq_shovel/src/Elixir.RabbitMQ.CLI.Ctl.Commands.RestartShovelCommand.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module('Elixir.RabbitMQ.CLI.Ctl.Commands.RestartShovelCommand').
@@ -11,6 +11,8 @@
-behaviour('Elixir.RabbitMQ.CLI.CommandBehaviour').
+-ignore_xref({'Elixir.RabbitMQ.CLI.DefaultOutput', output, 1}).
+
-export([
usage/0,
usage_additional/0,
diff --git a/deps/rabbitmq_shovel/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ShovelStatusCommand.erl b/deps/rabbitmq_shovel/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ShovelStatusCommand.erl
index 4f007762fb..3622548c62 100644
--- a/deps/rabbitmq_shovel/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ShovelStatusCommand.erl
+++ b/deps/rabbitmq_shovel/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ShovelStatusCommand.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module('Elixir.RabbitMQ.CLI.Ctl.Commands.ShovelStatusCommand').
@@ -11,6 +11,8 @@
-behaviour('Elixir.RabbitMQ.CLI.CommandBehaviour').
+-ignore_xref({'Elixir.RabbitMQ.CLI.DefaultOutput', output, 1}).
+
-export([
usage/0,
usage_doc_guides/0,
diff --git a/deps/rabbitmq_shovel/src/rabbit_amqp091_shovel.erl b/deps/rabbitmq_shovel/src/rabbit_amqp091_shovel.erl
index 44fc12183c..1f50abd05c 100644
--- a/deps/rabbitmq_shovel/src/rabbit_amqp091_shovel.erl
+++ b/deps/rabbitmq_shovel/src/rabbit_amqp091_shovel.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_amqp091_shovel).
@@ -41,12 +41,15 @@ parse(_Name, {source, Source}) ->
?DEFAULT_PREFETCH)),
Queue = parse_parameter(queue, fun parse_binary/1,
proplists:get_value(queue, Source)),
+ %% TODO parse
+ CArgs = proplists:get_value(consumer_args, Source, []),
#{module => ?MODULE,
uris => proplists:get_value(uris, Source),
resource_decl => decl_fun(Source),
queue => Queue,
delete_after => proplists:get_value(delete_after, Source, never),
- prefetch_count => Prefetch};
+ prefetch_count => Prefetch,
+ consumer_args => CArgs};
parse(Name, {destination, Dest}) ->
PubProp = proplists:get_value(publish_properties, Dest, []),
PropsFun = try_make_parse_publish(publish_properties, PubProp),
@@ -73,7 +76,8 @@ init_source(Conf = #{ack_mode := AckMode,
source := #{queue := Queue,
current := {Conn, Chan, _},
prefetch_count := Prefetch,
- resource_decl := Decl} = Src}) ->
+ resource_decl := Decl,
+ consumer_args := Args} = Src}) ->
Decl(Conn, Chan),
NoAck = AckMode =:= no_ack,
@@ -92,7 +96,8 @@ init_source(Conf = #{ack_mode := AckMode,
end,
#'basic.consume_ok'{} =
amqp_channel:subscribe(Chan, #'basic.consume'{queue = Queue,
- no_ack = NoAck}, self()),
+ no_ack = NoAck,
+ arguments = Args}, self()),
Conf#{source => Src#{remaining => Remaining,
remaining_unacked => Remaining}}.
@@ -338,11 +343,11 @@ do_make_conn_and_chan(URIs, ShovelName) ->
log_connection_failure(Reason, URI, {VHost, Name} = _ShovelName) ->
rabbit_log:error(
- "Shovel '~s' in vhost '~s' failed to connect (URI: ~s): ~s~n",
+ "Shovel '~s' in vhost '~s' failed to connect (URI: ~s): ~s",
[Name, VHost, amqp_uri:remove_credentials(URI), human_readable_connection_error(Reason)]);
log_connection_failure(Reason, URI, ShovelName) ->
rabbit_log:error(
- "Shovel '~s' failed to connect (URI: ~s): ~s~n",
+ "Shovel '~s' failed to connect (URI: ~s): ~s",
[ShovelName, amqp_uri:remove_credentials(URI), human_readable_connection_error(Reason)]).
human_readable_connection_error({auth_failure, Msg}) ->
diff --git a/deps/rabbitmq_shovel/src/rabbit_amqp10_shovel.erl b/deps/rabbitmq_shovel/src/rabbit_amqp10_shovel.erl
index 17e5fbba08..73b47ca809 100644
--- a/deps/rabbitmq_shovel/src/rabbit_amqp10_shovel.erl
+++ b/deps/rabbitmq_shovel/src/rabbit_amqp10_shovel.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_amqp10_shovel).
@@ -36,7 +36,7 @@
-import(rabbit_misc, [pget/2, pget/3]).
-import(rabbit_data_coercion, [to_binary/1]).
--define(INFO(Text, Args), error_logger:info_msg(Text, Args)).
+-define(INFO(Text, Args), rabbit_log_shovel:info(Text, Args)).
-define(LINK_CREDIT_TIMEOUT, 5000).
-type state() :: rabbit_shovel_behaviour:state().
@@ -66,7 +66,8 @@ parse(_Name, {source, Conf}) ->
uris => Uris,
prefetch_count => pget(prefetch_count, Conf, 1000),
delete_after => pget(delete_after, Conf, never),
- source_address => pget(source_address, Conf)}.
+ source_address => pget(source_address, Conf),
+ consumer_args => pget(consumer_args, Conf, [])}.
-spec connect_source(state()) -> state().
connect_source(State = #{name := Name,
@@ -182,7 +183,7 @@ handle_source({amqp10_event, {connection, Conn, opened}},
handle_source({amqp10_event, {connection, Conn, {closed, Why}}},
#{source := #{current := #{conn := Conn}},
name := Name}) ->
- ?INFO("Shovel ~s source connection closed. Reason: ~p~n", [Name, Why]),
+ ?INFO("Shovel ~s source connection closed. Reason: ~p", [Name, Why]),
{stop, {inbound_conn_closed, Why}};
handle_source({amqp10_event, {session, Sess, begun}},
State = #{source := #{current := #{session := Sess}}}) ->
@@ -215,9 +216,8 @@ handle_dest({amqp10_disposition, {Result, Tag}},
{#{Tag := IncomingTag}, rejected} ->
{1, rabbit_shovel_behaviour:nack(IncomingTag, false, State1)};
_ -> % not found - this should ideally not happen
- error_logger:warning_msg("Shovel ~s amqp10 destination "
- "disposition tag not found: ~p~n",
- [Name, Tag]),
+ rabbit_log_shovel:warning("Shovel ~s amqp10 destination disposition tag not found: ~p",
+ [Name, Tag]),
{0, State1}
end,
rabbit_shovel_behaviour:decr_remaining(Decr, State);
@@ -227,7 +227,7 @@ handle_dest({amqp10_event, {connection, Conn, opened}},
handle_dest({amqp10_event, {connection, Conn, {closed, Why}}},
#{name := Name,
dest := #{current := #{conn := Conn}}}) ->
- ?INFO("Shovel ~s destination connection closed. Reason: ~p~n", [Name, Why]),
+ ?INFO("Shovel ~s destination connection closed. Reason: ~p", [Name, Why]),
{stop, {outbound_conn_died, Why}};
handle_dest({amqp10_event, {session, Sess, begun}},
State = #{dest := #{current := #{session := Sess}}}) ->
diff --git a/deps/rabbitmq_shovel/src/rabbit_log_shovel.erl b/deps/rabbitmq_shovel/src/rabbit_log_shovel.erl
new file mode 100644
index 0000000000..1e2b326c4a
--- /dev/null
+++ b/deps/rabbitmq_shovel/src/rabbit_log_shovel.erl
@@ -0,0 +1,107 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+%% @doc Compatibility module for the old Lager-based logging API.
+-module(rabbit_log_shovel).
+
+-export([debug/1, debug/2, debug/3,
+ info/1, info/2, info/3,
+ notice/1, notice/2, notice/3,
+ warning/1, warning/2, warning/3,
+ error/1, error/2, error/3,
+ critical/1, critical/2, critical/3,
+ alert/1, alert/2, alert/3,
+ emergency/1, emergency/2, emergency/3,
+ none/1, none/2, none/3]).
+
+-include("logging.hrl").
+
+-compile({no_auto_import, [error/2, error/3]}).
+
+%%----------------------------------------------------------------------------
+
+-spec debug(string()) -> 'ok'.
+-spec debug(string(), [any()]) -> 'ok'.
+-spec debug(pid() | [tuple()], string(), [any()]) -> 'ok'.
+-spec info(string()) -> 'ok'.
+-spec info(string(), [any()]) -> 'ok'.
+-spec info(pid() | [tuple()], string(), [any()]) -> 'ok'.
+-spec notice(string()) -> 'ok'.
+-spec notice(string(), [any()]) -> 'ok'.
+-spec notice(pid() | [tuple()], string(), [any()]) -> 'ok'.
+-spec warning(string()) -> 'ok'.
+-spec warning(string(), [any()]) -> 'ok'.
+-spec warning(pid() | [tuple()], string(), [any()]) -> 'ok'.
+-spec error(string()) -> 'ok'.
+-spec error(string(), [any()]) -> 'ok'.
+-spec error(pid() | [tuple()], string(), [any()]) -> 'ok'.
+-spec critical(string()) -> 'ok'.
+-spec critical(string(), [any()]) -> 'ok'.
+-spec critical(pid() | [tuple()], string(), [any()]) -> 'ok'.
+-spec alert(string()) -> 'ok'.
+-spec alert(string(), [any()]) -> 'ok'.
+-spec alert(pid() | [tuple()], string(), [any()]) -> 'ok'.
+-spec emergency(string()) -> 'ok'.
+-spec emergency(string(), [any()]) -> 'ok'.
+-spec emergency(pid() | [tuple()], string(), [any()]) -> 'ok'.
+-spec none(string()) -> 'ok'.
+-spec none(string(), [any()]) -> 'ok'.
+-spec none(pid() | [tuple()], string(), [any()]) -> 'ok'.
+
+%%----------------------------------------------------------------------------
+
+debug(Format) -> debug(Format, []).
+debug(Format, Args) -> debug(self(), Format, Args).
+debug(Pid, Format, Args) ->
+ logger:debug(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_SHOVEL}).
+
+info(Format) -> info(Format, []).
+info(Format, Args) -> info(self(), Format, Args).
+info(Pid, Format, Args) ->
+ logger:info(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_SHOVEL}).
+
+notice(Format) -> notice(Format, []).
+notice(Format, Args) -> notice(self(), Format, Args).
+notice(Pid, Format, Args) ->
+ logger:notice(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_SHOVEL}).
+
+warning(Format) -> warning(Format, []).
+warning(Format, Args) -> warning(self(), Format, Args).
+warning(Pid, Format, Args) ->
+ logger:warning(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_SHOVEL}).
+
+error(Format) -> error(Format, []).
+error(Format, Args) -> error(self(), Format, Args).
+error(Pid, Format, Args) ->
+ logger:error(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_SHOVEL}).
+
+critical(Format) -> critical(Format, []).
+critical(Format, Args) -> critical(self(), Format, Args).
+critical(Pid, Format, Args) ->
+ logger:critical(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_SHOVEL}).
+
+alert(Format) -> alert(Format, []).
+alert(Format, Args) -> alert(self(), Format, Args).
+alert(Pid, Format, Args) ->
+ logger:alert(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_SHOVEL}).
+
+emergency(Format) -> emergency(Format, []).
+emergency(Format, Args) -> emergency(self(), Format, Args).
+emergency(Pid, Format, Args) ->
+ logger:emergency(Format, Args, #{pid => Pid,
+ domain => ?RMQLOG_DOMAIN_SHOVEL}).
+
+none(_Format) -> ok.
+none(_Format, _Args) -> ok.
+none(_Pid, _Format, _Args) -> ok.
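This new module preserves the call shape of the old error_logger/lager-based helpers while delegating to the standard OTP logger, attaching the Shovel log domain (?RMQLOG_DOMAIN_SHOVEL from logging.hrl) so Shovel events can be filtered or routed separately. The call sites changed elsewhere in this diff reduce to the following equivalence (sketch):

    %% what callers such as rabbit_amqp10_shovel now do
    rabbit_log_shovel:info("Shovel ~s source connection closed. Reason: ~p",
                           [Name, Why]),
    %% which the module turns into a plain logger call with domain metadata
    logger:info("Shovel ~s source connection closed. Reason: ~p", [Name, Why],
                #{pid => self(), domain => ?RMQLOG_DOMAIN_SHOVEL}).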
diff --git a/deps/rabbitmq_shovel/src/rabbit_shovel.erl b/deps/rabbitmq_shovel/src/rabbit_shovel.erl
index 190a9e2051..61035b0f89 100644
--- a/deps/rabbitmq_shovel/src/rabbit_shovel.erl
+++ b/deps/rabbitmq_shovel/src/rabbit_shovel.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_shovel).
diff --git a/deps/rabbitmq_shovel/src/rabbit_shovel_behaviour.erl b/deps/rabbitmq_shovel/src/rabbit_shovel_behaviour.erl
index 00cfe9b0ae..9ff6d4082c 100644
--- a/deps/rabbitmq_shovel/src/rabbit_shovel_behaviour.erl
+++ b/deps/rabbitmq_shovel/src/rabbit_shovel_behaviour.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_shovel_behaviour).
@@ -48,7 +48,7 @@
ack_mode => ack_mode(),
atom() => term()}.
--export_type([state/0, source_config/0, dest_config/0, uri/0]).
+-export_type([state/0, source_config/0, dest_config/0, uri/0, tag/0]).
-callback parse(binary(), {source | destination, Conf :: proplists:proplist()}) ->
source_config() | dest_config().
@@ -166,7 +166,7 @@ decr_remaining(N, State = #{source := #{remaining := M} = Src,
case M > N of
true -> State#{source => Src#{remaining => M - N}};
false ->
- error_logger:info_msg("shutting down shovel ~s, none remaining ~p~n",
- [Name, State]),
+ rabbit_log_shovel:info("shutting down Shovel '~s', no messages left to transfer", [Name]),
+ rabbit_log_shovel:debug("shutting down Shovel '~s', no messages left to transfer. Shovel state: ~p", [Name, State]),
exit({shutdown, autodelete})
end.
diff --git a/deps/rabbitmq_shovel/src/rabbit_shovel_config.erl b/deps/rabbitmq_shovel/src/rabbit_shovel_config.erl
index 06678177d7..6b03e0bf1f 100644
--- a/deps/rabbitmq_shovel/src/rabbit_shovel_config.erl
+++ b/deps/rabbitmq_shovel/src/rabbit_shovel_config.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_shovel_config).
diff --git a/deps/rabbitmq_shovel/src/rabbit_shovel_dyn_worker_sup.erl b/deps/rabbitmq_shovel/src/rabbit_shovel_dyn_worker_sup.erl
index 00f97619d0..eb2cbe74df 100644
--- a/deps/rabbitmq_shovel/src/rabbit_shovel_dyn_worker_sup.erl
+++ b/deps/rabbitmq_shovel/src/rabbit_shovel_dyn_worker_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_shovel_dyn_worker_sup).
@@ -17,6 +17,18 @@
-define(SUPERVISOR, ?MODULE).
start_link(Name, Config) ->
+ ShovelParameter = rabbit_shovel_util:get_shovel_parameter(Name),
+ maybe_start_link(ShovelParameter, Name, Config).
+
+maybe_start_link(not_found, _Name, _Config) ->
+ %% See rabbitmq/rabbitmq-server#2655.
+ %% All dynamic shovels require that their associated parameter is present.
+ %% If not, this shovel has been deleted and stale child spec information
+ %% may still reside in the supervisor.
+ %%
+ %% We return 'ignore' to ensure that the child is not [re-]added in such case.
+ ignore;
+maybe_start_link(_, Name, Config) ->
supervisor2:start_link(?MODULE, [Name, Config]).
%%----------------------------------------------------------------------------
@@ -40,7 +52,6 @@ init([Name, Config0]) ->
%% reconnect-delay = 0 means "do not reconnect"
_ -> temporary
end,
-
{ok, {{one_for_one, 1, ?MAX_WAIT},
[{Name,
{rabbit_shovel_worker, start_link, [dynamic, Name, Config]},
diff --git a/deps/rabbitmq_shovel/src/rabbit_shovel_dyn_worker_sup_sup.erl b/deps/rabbitmq_shovel/src/rabbit_shovel_dyn_worker_sup_sup.erl
index 347b3d9d47..a448ab9856 100644
--- a/deps/rabbitmq_shovel/src/rabbit_shovel_dyn_worker_sup_sup.erl
+++ b/deps/rabbitmq_shovel/src/rabbit_shovel_dyn_worker_sup_sup.erl
@@ -2,15 +2,16 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_shovel_dyn_worker_sup_sup).
-behaviour(mirrored_supervisor).
--export([start_link/0, init/1, adjust/2, stop_child/1]).
+-export([start_link/0, init/1, adjust/2, stop_child/1, cleanup_specs/0]).
-import(rabbit_misc, [pget/2]).
+-import(rabbit_data_coercion, [to_map/1, to_list/1]).
-include("rabbit_shovel.hrl").
-include_lib("rabbit_common/include/rabbit.hrl").
@@ -35,27 +36,43 @@ adjust(Name, Def) ->
end,
start_child(Name, Def).
-start_child(Name, Def) ->
- case mirrored_supervisor:start_child(
+start_child({VHost, ShovelName} = Name, Def) ->
+ rabbit_log_shovel:debug("Asked to start a dynamic Shovel named '~s' in virtual host '~s'", [ShovelName, VHost]),
+ LockId = rabbit_shovel_locks:lock(Name),
+ cleanup_specs(),
+ rabbit_log_shovel:debug("Starting a mirrored supervisor named '~s' in virtual host '~s'", [ShovelName, VHost]),
+ Result = case mirrored_supervisor:start_child(
?SUPERVISOR,
- {Name, {rabbit_shovel_dyn_worker_sup, start_link, [Name, Def]},
+ {Name, {rabbit_shovel_dyn_worker_sup, start_link, [Name, obfuscated_uris_parameters(Def)]},
transient, ?WORKER_WAIT, worker, [rabbit_shovel_dyn_worker_sup]}) of
{ok, _Pid} -> ok;
{error, {already_started, _Pid}} -> ok
- end.
+ end,
+ %% release the lock if we managed to acquire one
+ rabbit_shovel_locks:unlock(LockId),
+ Result.
+
+obfuscated_uris_parameters(Def) when is_map(Def) ->
+ to_map(rabbit_shovel_parameters:obfuscate_uris_in_definition(to_list(Def)));
+obfuscated_uris_parameters(Def) when is_list(Def) ->
+ rabbit_shovel_parameters:obfuscate_uris_in_definition(Def).
child_exists(Name) ->
lists:any(fun ({N, _, _, _}) -> N =:= Name end,
mirrored_supervisor:which_children(?SUPERVISOR)).
-stop_child(Name) ->
- case get(shovel_worker_autodelete) of
+stop_child({VHost, ShovelName} = Name) ->
+ rabbit_log_shovel:debug("Asked to stop a dynamic Shovel named '~s' in virtual host '~s'", [ShovelName, VHost]),
+ LockId = rabbit_shovel_locks:lock(Name),
+ case get({shovel_worker_autodelete, Name}) of
true -> ok; %% [1]
- _ ->
+ _ ->
ok = mirrored_supervisor:terminate_child(?SUPERVISOR, Name),
ok = mirrored_supervisor:delete_child(?SUPERVISOR, Name),
rabbit_shovel_status:remove(Name)
- end.
+ end,
+ rabbit_shovel_locks:unlock(LockId),
+ ok.
%% [1] An autodeleting worker removes its own parameter, and thus ends
%% up here via the parameter callback. It is a transient worker that
@@ -63,7 +80,16 @@ stop_child(Name) ->
%% supervisor to stop us - and as usual if we call into our own
%% supervisor we risk deadlock.
%%
-%% See rabbit_shovel_worker:maybe_autodelete/1
+%% See rabbit_shovel_worker:terminate/2
+
+cleanup_specs() ->
+ SpecsSet = sets:from_list([element(1, S) || S <- mirrored_supervisor:which_children(?SUPERVISOR)]),
+ ParamsSet = sets:from_list(rabbit_runtime_parameters:list_component(<<"shovel">>)),
+ F = fun(Spec, ok) ->
+ _ = mirrored_supervisor:delete_child(?SUPERVISOR, Spec),
+ ok
+ end,
+ ok = sets:fold(F, ok, sets:subtract(SpecsSet, ParamsSet)).
%%----------------------------------------------------------------------------
diff --git a/deps/rabbitmq_shovel/src/rabbit_shovel_locks.erl b/deps/rabbitmq_shovel/src/rabbit_shovel_locks.erl
new file mode 100644
index 0000000000..4c1e438413
--- /dev/null
+++ b/deps/rabbitmq_shovel/src/rabbit_shovel_locks.erl
@@ -0,0 +1,32 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(rabbit_shovel_locks).
+
+-export([lock/1, unlock/1]).
+
+%%
+%% API
+%%
+
+lock(Name) ->
+ Nodes = rabbit_nodes:all_running(),
+ Retries = rabbit_nodes:lock_retries(),
+ %% try to acquire a lock to avoid duplicate starts
+ LockId = case global:set_lock({dynamic_shovel, Name}, Nodes, Retries) of
+ true -> Name;
+ false -> undefined
+ end,
+ LockId.
+
+unlock(LockId) ->
+ Nodes = rabbit_nodes:all_running(),
+ case LockId of
+ undefined -> ok;
+ Value -> global:del_lock({dynamic_shovel, Value}, Nodes)
+ end,
+ ok.
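lock/1 takes a global lock named after the shovel across all running nodes and returns the shovel name on success or undefined when the lock could not be acquired; unlock/1 is a no-op for undefined. The callers added in rabbit_shovel_dyn_worker_sup_sup follow this shape (sketch; do_start_or_stop/1 is a hypothetical placeholder for the mirrored-supervisor call made while the lock is held):

    LockId = rabbit_shovel_locks:lock({VHost, ShovelName}),
    Result = do_start_or_stop({VHost, ShovelName}),
    %% release the lock if one was acquired; no-op when LockId is 'undefined'
    ok = rabbit_shovel_locks:unlock(LockId),
    Result.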
diff --git a/deps/rabbitmq_shovel/src/rabbit_shovel_parameters.erl b/deps/rabbitmq_shovel/src/rabbit_shovel_parameters.erl
index df390c2b92..40316fb657 100644
--- a/deps/rabbitmq_shovel/src/rabbit_shovel_parameters.erl
+++ b/deps/rabbitmq_shovel/src/rabbit_shovel_parameters.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_shovel_parameters).
@@ -13,8 +13,9 @@
-export([validate/5, notify/5, notify_clear/4]).
-export([register/0, unregister/0, parse/3]).
+-export([obfuscate_uris_in_definition/1]).
--import(rabbit_misc, [pget/2, pget/3]).
+-import(rabbit_misc, [pget/2, pget/3, pset/3]).
-rabbit_boot_step({?MODULE,
[{description, "shovel parameters"},
@@ -82,6 +83,16 @@ validate_amqp091_src(Def) ->
ok
end].
+obfuscate_uris_in_definition(Def) ->
+ SrcURIs = get_uris(<<"src-uri">>, Def),
+ ObfuscatedSrcURIsDef = pset(<<"src-uri">>, obfuscate_uris(SrcURIs), Def),
+ DestURIs = get_uris(<<"dest-uri">>, Def),
+ ObfuscatedDef = pset(<<"dest-uri">>, obfuscate_uris(DestURIs), ObfuscatedSrcURIsDef),
+ ObfuscatedDef.
+
+obfuscate_uris(URIs) ->
+ [credentials_obfuscation:encrypt(URI) || URI <- URIs].
+
validate_amqp091_dest(Def) ->
[case pget2(<<"dest-exchange">>, <<"dest-queue">>, Def) of
zero -> ok;
@@ -116,12 +127,14 @@ amqp10_src_validation(_Def, User) ->
amqp091_src_validation(_Def, User) ->
[
- {<<"src-uri">>, validate_uri_fun(User), mandatory},
- {<<"src-exchange">>, fun rabbit_parameter_validation:binary/2,optional},
- {<<"src-exchange-key">>,fun rabbit_parameter_validation:binary/2,optional},
- {<<"src-queue">>, fun rabbit_parameter_validation:binary/2,optional},
- {<<"prefetch-count">>, fun rabbit_parameter_validation:number/2,optional},
- {<<"src-prefetch-count">>, fun rabbit_parameter_validation:number/2,optional},
+ {<<"src-uri">>, validate_uri_fun(User), mandatory},
+ {<<"src-exchange">>, fun rabbit_parameter_validation:binary/2, optional},
+ {<<"src-exchange-key">>, fun rabbit_parameter_validation:binary/2, optional},
+ {<<"src-queue">>, fun rabbit_parameter_validation:binary/2, optional},
+ {<<"src-queue-args">>, fun validate_queue_args/2, optional},
+ {<<"src-consumer-args">>, fun validate_consumer_args/2, optional},
+ {<<"prefetch-count">>, fun rabbit_parameter_validation:number/2, optional},
+ {<<"src-prefetch-count">>, fun rabbit_parameter_validation:number/2, optional},
%% a deprecated pre-3.7 setting
{<<"delete-after">>, fun validate_delete_after/2, optional},
%% currently used multi-protocol friend name, introduced in 3.7
@@ -150,7 +163,8 @@ amqp091_dest_validation(_Def, User) ->
[{<<"dest-uri">>, validate_uri_fun(User), mandatory},
{<<"dest-exchange">>, fun rabbit_parameter_validation:binary/2,optional},
{<<"dest-exchange-key">>,fun rabbit_parameter_validation:binary/2,optional},
- {<<"dest-queue">>, fun rabbit_parameter_validation:binary/2,optional},
+ {<<"dest-queue">>, fun rabbit_parameter_validation:amqp091_queue_name/2,optional},
+ {<<"dest-queue-args">>, fun validate_queue_args/2, optional},
{<<"add-forward-headers">>, fun rabbit_parameter_validation:boolean/2,optional},
{<<"add-timestamp-header">>, fun rabbit_parameter_validation:boolean/2,optional},
{<<"dest-add-forward-headers">>, fun rabbit_parameter_validation:boolean/2,optional},
@@ -206,6 +220,16 @@ validate_delete_after(Name, Term) ->
{error, "~s should be number, \"never\" or \"queue-length\", actually was "
"~p", [Name, Term]}.
+validate_queue_args(Name, Term0) ->
+ Term = rabbit_data_coercion:to_proplist(Term0),
+
+ rabbit_parameter_validation:proplist(Name, rabbit_amqqueue:declare_args(), Term).
+
+validate_consumer_args(Name, Term0) ->
+ Term = rabbit_data_coercion:to_proplist(Term0),
+
+ rabbit_parameter_validation:proplist(Name, rabbit_amqqueue:consume_args(), Term).
+
validate_amqp10_map(Name, Terms0) ->
Terms = rabbit_data_coercion:to_proplist(Terms0),
Str = fun rabbit_parameter_validation:binary/2,
@@ -266,7 +290,7 @@ parse_dest(VHostName, ClusterName, Def, SourceHeaders) ->
end.
parse_amqp10_dest({_VHost, _Name}, _ClusterName, Def, SourceHeaders) ->
- Uris = get_uris(<<"dest-uri">>, Def),
+ Uris = deobfuscated_uris(<<"dest-uri">>, Def),
Address = pget(<<"dest-address">>, Def),
Properties =
rabbit_data_coercion:to_proplist(
@@ -292,14 +316,15 @@ parse_amqp10_dest({_VHost, _Name}, _ClusterName, Def, SourceHeaders) ->
}.
parse_amqp091_dest({VHost, Name}, ClusterName, Def, SourceHeaders) ->
- DestURIs = get_uris(<<"dest-uri">>, Def),
- DestX = pget(<<"dest-exchange">>, Def, none),
- DestXKey = pget(<<"dest-exchange-key">>, Def, none),
- DestQ = pget(<<"dest-queue">>, Def, none),
+ DestURIs = deobfuscated_uris(<<"dest-uri">>, Def),
+ DestX = pget(<<"dest-exchange">>, Def, none),
+ DestXKey = pget(<<"dest-exchange-key">>, Def, none),
+ DestQ = pget(<<"dest-queue">>, Def, none),
+ DestQArgs = pget(<<"dest-queue-args">>, Def, #{}),
DestDeclFun = fun (Conn, _Ch) ->
case DestQ of
none -> ok;
- _ -> ensure_queue(Conn, DestQ)
+ _ -> ensure_queue(Conn, DestQ, rabbit_misc:to_amqp_table(DestQArgs))
end
end,
{X, Key} = case DestQ of
@@ -359,7 +384,7 @@ parse_amqp091_dest({VHost, Name}, ClusterName, Def, SourceHeaders) ->
}, Details).
parse_amqp10_source(Def) ->
- Uris = get_uris(<<"src-uri">>, Def),
+ Uris = deobfuscated_uris(<<"src-uri">>, Def),
Address = pget(<<"src-address">>, Def),
DeleteAfter = pget(<<"src-delete-after">>, Def, <<"never">>),
PrefetchCount = pget(<<"src-prefetch-count">>, Def, 1000),
@@ -368,13 +393,16 @@ parse_amqp10_source(Def) ->
uris => Uris,
source_address => Address,
delete_after => opt_b2a(DeleteAfter),
- prefetch_count => PrefetchCount}, Headers}.
+ prefetch_count => PrefetchCount,
+ consumer_args => []}, Headers}.
parse_amqp091_source(Def) ->
- SrcURIs = get_uris(<<"src-uri">>, Def),
- SrcX = pget(<<"src-exchange">>,Def, none),
- SrcXKey = pget(<<"src-exchange-key">>, Def, <<>>), %% [1]
- SrcQ = pget(<<"src-queue">>, Def, none),
+ SrcURIs = deobfuscated_uris(<<"src-uri">>, Def),
+ SrcX = pget(<<"src-exchange">>,Def, none),
+ SrcXKey = pget(<<"src-exchange-key">>, Def, <<>>), %% [1]
+ SrcQ = pget(<<"src-queue">>, Def, none),
+ SrcQArgs = pget(<<"src-queue-args">>, Def, #{}),
+ SrcCArgs = rabbit_misc:to_amqp_table(pget(<<"src-consumer-args">>, Def, [])),
{SrcDeclFun, Queue, DestHeaders} =
case SrcQ of
none -> {fun (_Conn, Ch) ->
@@ -385,7 +413,7 @@ parse_amqp091_source(Def) ->
end, <<>>, [{<<"src-exchange">>, SrcX},
{<<"src-exchange-key">>, SrcXKey}]};
_ -> {fun (Conn, _Ch) ->
- ensure_queue(Conn, SrcQ)
+ ensure_queue(Conn, SrcQ, rabbit_misc:to_amqp_table(SrcQArgs))
end, SrcQ, [{<<"src-queue">>, SrcQ}]}
end,
DeleteAfter = pget(<<"src-delete-after">>, Def,
@@ -402,7 +430,8 @@ parse_amqp091_source(Def) ->
resource_decl => SrcDeclFun,
queue => Queue,
delete_after => opt_b2a(DeleteAfter),
- prefetch_count => PrefetchCount
+ prefetch_count => PrefetchCount,
+ consumer_args => SrcCArgs
}, Details), DestHeaders}.
get_uris(Key, Def) ->
@@ -412,19 +441,25 @@ get_uris(Key, Def) ->
end,
[binary_to_list(URI) || URI <- URIs].
+deobfuscated_uris(Key, Def) ->
+ ObfuscatedURIs = pget(Key, Def),
+ URIs = [credentials_obfuscation:decrypt(ObfuscatedURI) || ObfuscatedURI <- ObfuscatedURIs],
+ [binary_to_list(URI) || URI <- URIs].
+
translate_ack_mode(<<"on-confirm">>) -> on_confirm;
translate_ack_mode(<<"on-publish">>) -> on_publish;
translate_ack_mode(<<"no-ack">>) -> no_ack.
-ensure_queue(Conn, Queue) ->
+ensure_queue(Conn, Queue, XArgs) ->
{ok, Ch} = amqp_connection:open_channel(Conn),
try
amqp_channel:call(Ch, #'queue.declare'{queue = Queue,
passive = true})
catch exit:{{shutdown, {server_initiated_close, ?NOT_FOUND, _Text}}, _} ->
{ok, Ch2} = amqp_connection:open_channel(Conn),
- amqp_channel:call(Ch2, #'queue.declare'{queue = Queue,
- durable = true}),
+ amqp_channel:call(Ch2, #'queue.declare'{queue = Queue,
+ durable = true,
+ arguments = XArgs}),
catch amqp_channel:close(Ch2)
after
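With these changes the definition stored for a dynamic shovel carries encrypted URIs: obfuscate_uris_in_definition/1 runs the src-uri and dest-uri values through credentials_obfuscation:encrypt/1, and parse/3 later recovers them via deobfuscated_uris/2, which calls credentials_obfuscation:decrypt/1. A round-trip sketch (assumes the credentials_obfuscation application is running with a secret set, as it is on a broker node):

    Def0 = [{<<"src-uri">>,  [<<"amqp://guest:guest@localhost">>]},
            {<<"dest-uri">>, [<<"amqp://guest:guest@remotehost">>]}],
    Def1 = rabbit_shovel_parameters:obfuscate_uris_in_definition(Def0),
    %% Def1 no longer exposes credentials in plain text; parse/3 (and the
    %% test suites) decrypt the values again before opening connections
    {<<"src-uri">>, _EncryptedURIs} = proplists:lookup(<<"src-uri">>, Def1).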
diff --git a/deps/rabbitmq_shovel/src/rabbit_shovel_status.erl b/deps/rabbitmq_shovel/src/rabbit_shovel_status.erl
index d4836bea81..16b68decfd 100644
--- a/deps/rabbitmq_shovel/src/rabbit_shovel_status.erl
+++ b/deps/rabbitmq_shovel/src/rabbit_shovel_status.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_shovel_status).
@@ -17,8 +17,9 @@
-define(SERVER, ?MODULE).
-define(ETS_NAME, ?MODULE).
+-define(CHECK_FREQUENCY, 60000).
--record(state, {}).
+-record(state, {timer}).
-record(entry, {name, type, info, timestamp}).
start_link() ->
@@ -39,7 +40,7 @@ lookup(Name) ->
init([]) ->
?ETS_NAME = ets:new(?ETS_NAME,
[named_table, {keypos, #entry.name}, private]),
- {ok, #state{}}.
+ {ok, ensure_timer(#state{})}.
handle_call(status, _From, State) ->
Entries = ets:tab2list(?ETS_NAME),
@@ -69,10 +70,14 @@ handle_cast({remove, Name}, State) ->
rabbit_event:notify(shovel_worker_removed, split_name(Name)),
{noreply, State}.
+handle_info(check, State) ->
+ rabbit_shovel_dyn_worker_sup_sup:cleanup_specs(),
+ {noreply, ensure_timer(State)};
handle_info(_Info, State) ->
{noreply, State}.
-terminate(_Reason, _State) ->
+terminate(_Reason, State) ->
+ rabbit_misc:stop_timer(State, #state.timer),
ok.
code_change(_OldVsn, State, _Extra) ->
@@ -86,3 +91,7 @@ split_status(Status) when is_atom(Status) -> [{status, Status}].
split_name({VHost, Name}) -> [{name, Name},
{vhost, VHost}];
split_name(Name) when is_atom(Name) -> [{name, Name}].
+
+ensure_timer(State0) ->
+ State1 = rabbit_misc:stop_timer(State0, #state.timer),
+ rabbit_misc:ensure_timer(State1, #state.timer, ?CHECK_FREQUENCY, check).
diff --git a/deps/rabbitmq_shovel/src/rabbit_shovel_sup.erl b/deps/rabbitmq_shovel/src/rabbit_shovel_sup.erl
index f04b4758a3..2e18a8db83 100644
--- a/deps/rabbitmq_shovel/src/rabbit_shovel_sup.erl
+++ b/deps/rabbitmq_shovel/src/rabbit_shovel_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_shovel_sup).
diff --git a/deps/rabbitmq_shovel/src/rabbit_shovel_util.erl b/deps/rabbitmq_shovel/src/rabbit_shovel_util.erl
index 509db7de00..ef2ce4f42f 100644
--- a/deps/rabbitmq_shovel/src/rabbit_shovel_util.erl
+++ b/deps/rabbitmq_shovel/src/rabbit_shovel_util.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_shovel_util).
@@ -10,7 +10,8 @@
-export([update_headers/5,
add_timestamp_header/1,
delete_shovel/3,
- restart_shovel/2]).
+ restart_shovel/2,
+ get_shovel_parameter/1]).
-include_lib("rabbit_common/include/rabbit_framing.hrl").
@@ -48,7 +49,13 @@ restart_shovel(VHost, Name) ->
not_found ->
{error, not_found};
_Obj ->
+ rabbit_log_shovel:info("Shovel '~s' in virtual host '~s' will be restarted", [Name, VHost]),
ok = rabbit_shovel_dyn_worker_sup_sup:stop_child({VHost, Name}),
{ok, _} = rabbit_shovel_dyn_worker_sup_sup:start_link(),
ok
end.
+
+get_shovel_parameter({VHost, ShovelName}) ->
+ rabbit_runtime_parameters:lookup(VHost, <<"shovel">>, ShovelName);
+get_shovel_parameter(ShovelName) ->
+ rabbit_runtime_parameters:lookup(<<"/">>, <<"shovel">>, ShovelName).
diff --git a/deps/rabbitmq_shovel/src/rabbit_shovel_worker.erl b/deps/rabbitmq_shovel/src/rabbit_shovel_worker.erl
index 919db25910..dfee63c2f4 100644
--- a/deps/rabbitmq_shovel/src/rabbit_shovel_worker.erl
+++ b/deps/rabbitmq_shovel/src/rabbit_shovel_worker.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_shovel_worker).
@@ -27,6 +27,18 @@
%% [2] Counts down until we stop publishing in on-confirm mode
start_link(Type, Name, Config) ->
+ ShovelParameter = rabbit_shovel_util:get_shovel_parameter(Name),
+ maybe_start_link(ShovelParameter, Type, Name, Config).
+
+maybe_start_link(not_found, dynamic, _Name, _Config) ->
+ %% See rabbitmq/rabbitmq-server#2655.
+ %% All dynamic shovels require that their associated parameter is present.
+ %% If not, this shovel has been deleted and stale child spec information
+ %% may still reside in the supervisor.
+ %%
+ %% We return 'ignore' to ensure that the child is not [re-]added in such case.
+ ignore;
+maybe_start_link(_, Type, Name, Config) ->
ok = rabbit_shovel_status:report(Name, Type, starting),
gen_server2:start_link(?MODULE, [Type, Name, Config], []).
@@ -120,15 +132,16 @@ handle_info(Msg, State = #state{config = Config, name = Name}) ->
{noreply, State#state{config = Config1}}
end.
-terminate({shutdown, autodelete}, State = #state{name = {VHost, Name},
+terminate({shutdown, autodelete}, State = #state{name = Name,
type = dynamic}) ->
+ {VHost, ShovelName} = Name,
rabbit_log_shovel:info("Shovel '~s' is stopping (it was configured to autodelete and transfer is completed)",
- [human_readable_name({VHost, Name})]),
+ [human_readable_name(Name)]),
close_connections(State),
%% See rabbit_shovel_dyn_worker_sup_sup:stop_child/1
- put(shovel_worker_autodelete, true),
- _ = rabbit_runtime_parameters:clear(VHost, <<"shovel">>, Name, ?SHOVEL_USER),
- rabbit_shovel_status:remove({VHost, Name}),
+ put({shovel_worker_autodelete, Name}, true),
+ _ = rabbit_runtime_parameters:clear(VHost, <<"shovel">>, ShovelName, ?SHOVEL_USER),
+ rabbit_shovel_status:remove(Name),
ok;
terminate(shutdown, State) ->
close_connections(State),
@@ -166,6 +179,14 @@ terminate({shutdown, restart}, State = #state{name = Name}) ->
{terminated, "needed a restart"}),
close_connections(State),
ok;
+terminate({{shutdown, {server_initiated_close, Code, Reason}}, _}, State = #state{name = Name}) ->
+ rabbit_log_shovel:error("Shovel ~s is stopping: one of its connections closed "
+ "with code ~b, reason: ~s",
+ [human_readable_name(Name), Code, Reason]),
+ rabbit_shovel_status:report(State#state.name, State#state.type,
+ {terminated, "needed a restart"}),
+ close_connections(State),
+ ok;
terminate(Reason, State = #state{name = Name}) ->
rabbit_log_shovel:error("Shovel ~s is stopping, reason: ~p", [human_readable_name(Name), Reason]),
rabbit_shovel_status:report(State#state.name, State#state.type,
diff --git a/deps/rabbitmq_shovel/src/rabbit_shovel_worker_sup.erl b/deps/rabbitmq_shovel/src/rabbit_shovel_worker_sup.erl
index afa8f7987e..f409bf0aff 100644
--- a/deps/rabbitmq_shovel/src/rabbit_shovel_worker_sup.erl
+++ b/deps/rabbitmq_shovel/src/rabbit_shovel_worker_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_shovel_worker_sup).
diff --git a/deps/rabbitmq_shovel/test/amqp10_SUITE.erl b/deps/rabbitmq_shovel/test/amqp10_SUITE.erl
index 37cbcb6c56..eeea656c34 100644
--- a/deps/rabbitmq_shovel/test/amqp10_SUITE.erl
+++ b/deps/rabbitmq_shovel/test/amqp10_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(amqp10_SUITE).
diff --git a/deps/rabbitmq_shovel/test/amqp10_dynamic_SUITE.erl b/deps/rabbitmq_shovel/test/amqp10_dynamic_SUITE.erl
index c8375ead1a..12f5fad782 100644
--- a/deps/rabbitmq_shovel/test/amqp10_dynamic_SUITE.erl
+++ b/deps/rabbitmq_shovel/test/amqp10_dynamic_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(amqp10_dynamic_SUITE).
@@ -45,9 +45,7 @@ groups() ->
init_per_suite(Config0) ->
{ok, _} = application:ensure_all_started(amqp10_client),
rabbit_ct_helpers:log_environment(),
- Config = rabbit_ct_helpers:merge_app_env(Config0,
- [{lager, [{error_logger_hwm, 200}]}]),
- Config1 = rabbit_ct_helpers:set_config(Config, [
+ Config1 = rabbit_ct_helpers:set_config(Config0, [
{rmq_nodename_suffix, ?MODULE}
]),
rabbit_ct_helpers:run_setup_steps(Config1,
diff --git a/deps/rabbitmq_shovel/test/amqp10_shovel_SUITE.erl b/deps/rabbitmq_shovel/test/amqp10_shovel_SUITE.erl
index 8c52889ed6..955ba949c1 100644
--- a/deps/rabbitmq_shovel/test/amqp10_shovel_SUITE.erl
+++ b/deps/rabbitmq_shovel/test/amqp10_shovel_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(amqp10_shovel_SUITE).
diff --git a/deps/rabbitmq_shovel/test/config_SUITE.erl b/deps/rabbitmq_shovel/test/config_SUITE.erl
index d637ad1048..2f0e2f8cdd 100644
--- a/deps/rabbitmq_shovel/test/config_SUITE.erl
+++ b/deps/rabbitmq_shovel/test/config_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(config_SUITE).
diff --git a/deps/rabbitmq_shovel/test/configuration_SUITE.erl b/deps/rabbitmq_shovel/test/configuration_SUITE.erl
index 03f287c85d..0218af7677 100644
--- a/deps/rabbitmq_shovel/test/configuration_SUITE.erl
+++ b/deps/rabbitmq_shovel/test/configuration_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(configuration_SUITE).
diff --git a/deps/rabbitmq_shovel/test/delete_shovel_command_SUITE.erl b/deps/rabbitmq_shovel/test/delete_shovel_command_SUITE.erl
index 6af7a39c65..6176a49dd3 100644
--- a/deps/rabbitmq_shovel/test/delete_shovel_command_SUITE.erl
+++ b/deps/rabbitmq_shovel/test/delete_shovel_command_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(delete_shovel_command_SUITE).
diff --git a/deps/rabbitmq_shovel/test/dynamic_SUITE.erl b/deps/rabbitmq_shovel/test/dynamic_SUITE.erl
index 5311af3c6d..52e7e034a0 100644
--- a/deps/rabbitmq_shovel/test/dynamic_SUITE.erl
+++ b/deps/rabbitmq_shovel/test/dynamic_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(dynamic_SUITE).
@@ -14,12 +14,14 @@
all() ->
[
- {group, non_parallel_tests}
+ {group, core_tests},
+ {group, quorum_queue_tests},
+ {group, stream_queue_tests}
].
groups() ->
[
- {non_parallel_tests, [], [
+ {core_tests, [], [
simple,
set_properties_using_proplist,
set_properties_using_map,
@@ -33,6 +35,14 @@ groups() ->
validation,
security_validation,
get_connection_name
+ ]},
+
+ {quorum_queue_tests, [], [
+ quorum_queues
+ ]},
+
+ {stream_queue_tests, [], [
+ stream_queues
]}
].
@@ -54,6 +64,16 @@ end_per_suite(Config) ->
rabbit_ct_client_helpers:teardown_steps() ++
rabbit_ct_broker_helpers:teardown_steps()).
+init_per_group(quorum_queue_tests, Config) ->
+ case rabbit_ct_helpers:is_mixed_versions() of
+ false -> Config;
+ _ -> {skip, "quorum queue tests are skipped in mixed mode"}
+ end;
+init_per_group(stream_queue_tests, Config) ->
+ case rabbit_ct_helpers:is_mixed_versions() of
+ false -> Config;
+ _ -> {skip, "stream queue tests are skipped in mixed mode"}
+ end;
init_per_group(_, Config) ->
Config.
@@ -80,6 +100,34 @@ simple(Config) ->
publish_expect(Ch, <<>>, <<"src">>, <<"dest">>, <<"hello">>)
end).
+quorum_queues(Config) ->
+ with_ch(Config,
+ fun (Ch) ->
+ shovel_test_utils:set_param(
+ Config,
+ <<"test">>, [
+ {<<"src-queue">>, <<"src">>},
+ {<<"dest-queue">>, <<"dest">>},
+ {<<"src-queue-args">>, #{<<"x-queue-type">> => <<"quorum">>}},
+ {<<"dest-queue-args">>, #{<<"x-queue-type">> => <<"quorum">>}}
+ ]),
+ publish_expect(Ch, <<>>, <<"src">>, <<"dest">>, <<"hello">>)
+ end).
+
+stream_queues(Config) ->
+ with_ch(Config,
+ fun (Ch) ->
+ shovel_test_utils:set_param(
+ Config,
+ <<"test">>, [
+ {<<"src-queue">>, <<"src">>},
+ {<<"dest-queue">>, <<"dest">>},
+ {<<"src-queue-args">>, #{<<"x-queue-type">> => <<"stream">>}},
+ {<<"src-consumer-args">>, #{<<"x-stream-offset">> => <<"first">>}}
+ ]),
+ publish_expect(Ch, <<>>, <<"src">>, <<"dest">>, <<"hello">>)
+ end).
+
set_properties_using_map(Config) ->
with_ch(Config,
fun (Ch) ->
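The two new groups exercise the definition keys added in rabbit_shovel_parameters: src-queue-args and dest-queue-args become queue.declare arguments (so a shovel can declare quorum or stream queues), and src-consumer-args becomes the basic.consume arguments table (needed to pass x-stream-offset when consuming from a stream). A sketch of how those keys flow, using the values from the tests above (the bare record expressions assume the usual rabbit_framing includes):

    QArgs = rabbit_misc:to_amqp_table(#{<<"x-queue-type">> => <<"stream">>}),
    CArgs = rabbit_misc:to_amqp_table(#{<<"x-stream-offset">> => <<"first">>}),
    %% ensure_queue/3 declares a missing source queue with QArgs ...
    #'queue.declare'{queue = <<"src">>, durable = true, arguments = QArgs},
    %% ... and init_source/1 subscribes with CArgs
    #'basic.consume'{queue = <<"src">>, no_ack = false, arguments = CArgs}.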
diff --git a/deps/rabbitmq_shovel/test/parameters_SUITE.erl b/deps/rabbitmq_shovel/test/parameters_SUITE.erl
index 516b1bd190..ee40614983 100644
--- a/deps/rabbitmq_shovel/test/parameters_SUITE.erl
+++ b/deps/rabbitmq_shovel/test/parameters_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(parameters_SUITE).
@@ -43,9 +43,18 @@ groups() ->
%% -------------------------------------------------------------------
init_per_suite(Config) ->
+ {ok, _} = application:ensure_all_started(credentials_obfuscation),
+ Secret = crypto:strong_rand_bytes(128),
+ ok = credentials_obfuscation:set_secret(Secret),
Config.
end_per_suite(Config) ->
+ case application:stop(credentials_obfuscation) of
+ ok ->
+ ok;
+ {error, {not_started, credentials_obfuscation}} ->
+ ok
+ end,
Config.
init_per_group(_, Config) ->
@@ -54,9 +63,11 @@ init_per_group(_, Config) ->
end_per_group(_, Config) ->
Config.
-init_per_testcase(_Testcase, Config) -> Config.
+init_per_testcase(_Testcase, Config) ->
+ Config.
-end_per_testcase(_Testcase, Config) -> Config.
+end_per_testcase(_Testcase, Config) ->
+ Config.
%% -------------------------------------------------------------------
@@ -140,8 +151,9 @@ parse_amqp091_empty_proplists(_Config) ->
test_parse_amqp091(Params) ->
+ ObfuscatedParams = rabbit_shovel_parameters:obfuscate_uris_in_definition(Params),
{ok, Result} = rabbit_shovel_parameters:parse({"vhost", "name"},
- "my-cluster", Params),
+ "my-cluster", ObfuscatedParams),
#{ack_mode := on_publish,
name := "name",
reconnect_delay := 1001,
@@ -165,8 +177,9 @@ test_parse_amqp091(Params) ->
ok.
test_parse_amqp091_with_blank_proprties(Params) ->
+ ObfuscatedParams = rabbit_shovel_parameters:obfuscate_uris_in_definition(Params),
{ok, Result} = rabbit_shovel_parameters:parse({"vhost", "name"},
- "my-cluster", Params),
+ "my-cluster", ObfuscatedParams),
#{ack_mode := on_publish,
name := "name",
reconnect_delay := 1001,
@@ -229,7 +242,7 @@ parse_amqp10(_Config) ->
<<"message-ann-value">>}]},
{<<"dest-properties">>, [{<<"user_id">>, <<"some-user">>}]}
],
-
+ ObfuscatedParams = rabbit_shovel_parameters:obfuscate_uris_in_definition(Params),
?assertMatch(
{ok, #{name := "my_shovel",
ack_mode := on_publish,
@@ -252,7 +265,7 @@ parse_amqp10(_Config) ->
}
}},
rabbit_shovel_parameters:parse({"vhost", "my_shovel"}, "my-cluster",
- Params)),
+ ObfuscatedParams)),
ok.
parse_amqp10_minimal(_Config) ->
@@ -266,6 +279,7 @@ parse_amqp10_minimal(_Config) ->
{<<"dest-uri">>, <<"amqp://remotehost:5672">>},
{<<"dest-address">>, <<"a-dest-queue">>}
],
+ ObfuscatedParams = rabbit_shovel_parameters:obfuscate_uris_in_definition(Params),
?assertMatch(
{ok, #{name := "my_shovel",
ack_mode := on_confirm,
@@ -281,7 +295,7 @@ parse_amqp10_minimal(_Config) ->
}
}},
rabbit_shovel_parameters:parse({"vhost", "my_shovel"}, "my-cluster",
- Params)),
+ ObfuscatedParams)),
ok.
validate_amqp10(_Config) ->
diff --git a/deps/rabbitmq_shovel/test/shovel_status_command_SUITE.erl b/deps/rabbitmq_shovel/test/shovel_status_command_SUITE.erl
index 667822b20c..b2fc593f43 100644
--- a/deps/rabbitmq_shovel/test/shovel_status_command_SUITE.erl
+++ b/deps/rabbitmq_shovel/test/shovel_status_command_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(shovel_status_command_SUITE).
diff --git a/deps/rabbitmq_shovel/test/shovel_test_utils.erl b/deps/rabbitmq_shovel/test/shovel_test_utils.erl
index 6e49ada8a1..7e14d78552 100644
--- a/deps/rabbitmq_shovel/test/shovel_test_utils.erl
+++ b/deps/rabbitmq_shovel/test/shovel_test_utils.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(shovel_test_utils).
diff --git a/deps/rabbitmq_shovel_management/BUILD.bazel b/deps/rabbitmq_shovel_management/BUILD.bazel
new file mode 100644
index 0000000000..a466c0115c
--- /dev/null
+++ b/deps/rabbitmq_shovel_management/BUILD.bazel
@@ -0,0 +1,74 @@
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze")
+load("//:rabbitmq_home.bzl", "rabbitmq_home")
+load("//:rabbitmq_run.bzl", "rabbitmq_run")
+load(
+ "//:rabbitmq.bzl",
+ "RABBITMQ_DIALYZER_OPTS",
+ "assert_suites",
+ "rabbitmq_integration_suite",
+ "rabbitmq_lib",
+)
+
+APP_NAME = "rabbitmq_shovel_management"
+
+APP_DESCRIPTION = "Management extension for the Shovel plugin"
+
+BUILD_DEPS = [
+ "//deps/amqp_client:bazel_erlang_lib",
+ "//deps/rabbitmq_management_agent:bazel_erlang_lib",
+]
+
+DEPS = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+ "//deps/rabbitmq_management:bazel_erlang_lib",
+]
+
+RUNTIME_DEPS = [
+ "//deps/rabbit:bazel_erlang_lib",
+ "//deps/rabbitmq_shovel:bazel_erlang_lib",
+]
+
+rabbitmq_lib(
+ app_description = APP_DESCRIPTION,
+ app_name = APP_NAME,
+ build_deps = BUILD_DEPS,
+ runtime_deps = RUNTIME_DEPS,
+ deps = DEPS,
+)
+
+xref(tags = ["xref"])
+
+dialyze(
+ dialyzer_opts = RABBITMQ_DIALYZER_OPTS,
+ plt = "//:base_plt",
+ tags = ["dialyze"],
+)
+
+rabbitmq_home(
+ name = "broker-for-tests-home",
+ plugins = [
+ "//deps/rabbit:bazel_erlang_lib",
+ "//deps/rabbitmq_amqp1_0:bazel_erlang_lib",
+ ":bazel_erlang_lib",
+ ],
+)
+
+rabbitmq_run(
+ name = "rabbitmq-for-tests-run",
+ home = ":broker-for-tests-home",
+)
+
+PACKAGE = "deps/rabbitmq_shovel_management"
+
+suites = [
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "http_SUITE",
+ ),
+]
+
+assert_suites(
+ suites,
+ glob(["test/**/*_SUITE.erl"]),
+)
diff --git a/deps/rabbitmq_shovel_management/Makefile b/deps/rabbitmq_shovel_management/Makefile
index 0a8740dc6e..ddd55a891a 100644
--- a/deps/rabbitmq_shovel_management/Makefile
+++ b/deps/rabbitmq_shovel_management/Makefile
@@ -17,5 +17,5 @@ DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
ERLANG_MK_COMMIT = rabbitmq-tmp
-include rabbitmq-components.mk
-include erlang.mk
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
diff --git a/deps/rabbitmq_shovel_management/erlang.mk b/deps/rabbitmq_shovel_management/erlang.mk
deleted file mode 100644
index fce4be0b0a..0000000000
--- a/deps/rabbitmq_shovel_management/erlang.mk
+++ /dev/null
@@ -1,7808 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
-ERLANG_MK_WITHOUT =
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
-
-# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
-
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
-
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of statc BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another a key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstact time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simply writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating Github-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = erlang library for JSON Web Token
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple non intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = redis erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += esh_mk
-pkg_esh_mk_name = esh_mk
-pkg_esh_mk_description = esh template engine plugin for erlang.mk
-pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
-pkg_esh_mk_fetch = git
-pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
-pkg_esh_mk_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = TOML language erlang parser
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = http://fixprotocol.org/ implementation.
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - an Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = The guards and for Erlang parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
-pkg_gun_homepage = http://ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang irc client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding and decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library for efficiently decoding and encoding JSON, written in C.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = erlang driver for rethinkdb
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = library to handle mimetypes
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang Oauth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transformer for using new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between record and proplist
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = automatic Erlang node discovery via redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = pipelined erlang redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang Rest Client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy as nif for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an id generator for message service.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elastic Search
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX style broken HTML parser in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = transit format for erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU based collation Erlang module
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = a simple erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler svn repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based yaml loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = ZAB protocol implementation in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
-
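# Usage sketch (not part of the removed file; assumes a project that includes
# this erlang.mk): the package index above can be queried from the command
# line, for example:
#
#   make search q=pool
#
# which runs pkg_print for every package whose name or description contains
# "pool" (case-insensitively), printing its app name, description, home page,
# fetch method, repository and commit. Without q, every package is listed.
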
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
-
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
-
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# They both use the core_dep_plugin macro.
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
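
# Usage sketch (hypothetical plugin name, not declared in this file): a
# project Makefile can request a dependency's early plugins like so:
#
#   BUILD_DEPS = my_plugin
#   DEP_EARLY_PLUGINS = my_plugin
#
# which expands, via core_dep_plugin, to
#   -include $(DEPS_DIR)/my_plugin/early-plugins.mk
# A value containing a slash (e.g. my_plugin/mk/early.mk) is included as that
# exact path under $(DEPS_DIR) instead.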
-
-# Query functions.
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
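
# Illustrative sketch (hypothetical dependency, not declared in this file;
# assumes the git fetch method is defined elsewhere in this file): given a
# project Makefile containing
#
#   DEPS = cowlib
#   dep_cowlib = git https://github.com/ninenines/cowlib 2.12.1
#
# the query functions above resolve roughly as follows:
#
#   query_fetch_method  -> git                                  (word 1)
#   query_repo          -> https://github.com/ninenines/cowlib  (word 2)
#   query_version       -> 2.12.1                               (word 3)
#   query_absolute_path -> $(DEPS_DIR)/cowlib
#
# For a package taken from the index above (no dep_* override), the same
# functions fall back to the pkg_*_fetch, pkg_*_repo and pkg_*_commit values.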
-
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
-
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly we don't want to include it here,
-# otherwise it'll be treated both as an app and as a top-level project.
-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
-
-export NO_AUTOPATCH
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
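
# Usage sketch: these follow the usual erlang.mk V convention (V is assumed
# to default to 0 elsewhere in this file):
#
#   make deps       # prints one " DEP <name> (<commit>)" line per dependency
#   make deps V=2   # additionally traces the fetch commands via `set -x`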
-
-# Optimization: don't recompile deps unless truly necessary.
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create ebin directory for all apps to make sure Erlang recognizes them
-# as proper OTP applications when using -include_lib. This is a temporary
-# fix; a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
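
# Usage sketch (app names are placeholders): in an umbrella project with
# apps/app_a and apps/app_b, setting
#
#   LOCAL_DEPS = app_a
#
# at the top level restricts this target to compiling apps/app_a, while
# leaving LOCAL_DEPS unset compiles every directory under $(APPS_DIR).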
-
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies after they were compiled
-# once. If a developer is working on the top-level project and some
-# dependencies at the same time, he may want to change this behavior.
-# There are two solutions:
-# 1. Set `FULL=1` so that all dependencies are visited and
-# recursively recompiled if necessary.
-# 2. Set `FORCE_REBUILD=` to the specific list of dependencies that
-# should be recompiled (instead of the whole set).
-
-FORCE_REBUILD ?=
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
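
# Usage sketch (dependency names are placeholders): with the defaults above,
# a dependency whose ebin/dep_built marker exists is skipped. To force
# recompilation:
#
#   make FULL=1                          # revisit every dependency
#   make FORCE_REBUILD="cowlib ranch"    # rebuild only the listed deps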
-
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
-
-# Deps related targets.
-
-# @todo rename GNUmakefile and makefile into Makefile first, if they exist
-# While the Makefile could also be named GNUmakefile or makefile,
-# in practice only Makefile is needed so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
-# if given. Do it for all 3 possible Makefile file names.
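-# Concretely, the sed call below rewrites a dependency's
-#
-#     include erlang.mk
-#
-# into
-#
-#     include $(if $(ERLANG_MK_FILENAME),$(ERLANG_MK_FILENAME),erlang.mk)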
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
-
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-# Hex dependencies only specify a package version, so there is no need
-# to look them up in the Erlang.mk package index.
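-# An illustrative declaration (the version shown is an example):
-#
-#     DEPS = cowlib
-#     dep_cowlib = hex 2.12.1
-#
-# The optional third word overrides the Hex package name when it differs
-# from the Erlang application name.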
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
-
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility with older Erlang.mk dependency configurations.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
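-# The legacy format gives only the repository and an optional commit
-# (the repository URL is illustrative):
-#
-#     dep_my_dep = https://github.com/example/my_dep master
-#
-# whereas the current format names the fetch method explicitly:
-#
-#     dep_my_dep = git https://github.com/example/my_dep master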
-
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
-
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- Output0 = [
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ],
- Output = case "é" of
- [233] -> unicode:characters_to_binary(Output0);
- _ -> Output0
- end,
- ok = file:write_file("$(1)", Output),
- halt()
-endef
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
- echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
-
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
- @:
-
-test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
-test_erlc_verbose_2 = set -x;
-test_erlc_verbose = $(test_erlc_verbose_$(V))
-
-define compile_test_erl
- $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(1)
-endef
-
-ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
-$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
- $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want to fail due to
-# warnings when used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
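-# With the default ERLC_OPTS and a single git dependency, the generated
-# rebar.config looks roughly like this (the dependency is illustrative):
-#
-#     {deps, [
-#         {cowlib,".*",{git,"https://github.com/ninenines/cowlib","2.12.1"}}
-#     ]}.
-#     {erl_opts, [debug_info,warn_export_vars,warn_shadow_vars,warn_obsolete_guard]}.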
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
- " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
- " list-templates List available templates"
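-# A few illustrative invocations (application and module names are examples):
-#
-#     make bootstrap                      # skeleton OTP application in the current directory
-#     make new-app in=my_app              # new application under $(APPS_DIR)/my_app
-#     make new t=gen_server n=my_server   # render the gen_server template into src/my_server.erl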
-
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
-
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
-
-list-templates:
- $(verbose) @echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code. The "gcc" MSYS2 package also doesn't.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
- "The CI_OTP variable must be defined with the Erlang versions" \
- "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
-
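A minimal usage sketch for the CI plugin above (illustration only, not part of the deleted file; the OTP release names are hypothetical and must match what kerl can build):

# Run `make tests` against two OTP releases built and cached via kerl.
CI_OTP = OTP-23.3.4 OTP-24.2

include erlang.mk

# `make ci-prepare` builds any missing releases under $(KERL_INSTALL_DIR);
# `make ci` then runs the test suite once per release, labelling each
# common_test run with the release name via CT_OPTS="-label ...".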
-# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifdef CONCUERROR_TESTS
-
-.PHONY: concuerror distclean-concuerror
-
-# Configuration
-
-CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
-CONCUERROR_OPTS ?=
-
-# Core targets.
-
-check:: concuerror
-
-ifndef KEEP_LOGS
-distclean:: distclean-concuerror
-endif
-
-# Plugin-specific targets.
-
-$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
- $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
- $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
-
-$(CONCUERROR_LOGS_DIR):
- $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
-
-define concuerror_html_report
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="utf-8">
-<title>Concuerror HTML report</title>
-</head>
-<body>
-<h1>Concuerror HTML report</h1>
-<p>Generated on $(concuerror_date)</p>
-<ul>
-$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
-</ul>
-</body>
-</html>
-endef
-
-concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
- $(eval concuerror_date := $(shell date))
- $(eval concuerror_targets := $^)
- $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
-
-define concuerror_target
-.PHONY: concuerror-$1-$2
-
-concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
- $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
- --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
- -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
- $$(CONCUERROR_OPTS) -m $1 -t $2
-endef
-
-$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
-
-distclean-concuerror:
- $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
-
-endif
-
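A usage sketch for the Concuerror plugin (illustration only, not part of the deleted file; the module and test names are hypothetical, and --treat_as_normal is assumed to be a valid Concuerror flag):

# Each entry is a module:function pair exported by the test build.
CONCUERROR_TESTS = my_worker:race_free my_registry:no_deadlock
# Extra command line options passed to every Concuerror run (assumed flag).
CONCUERROR_OPTS = --treat_as_normal shutdown

include erlang.mk

# `make concuerror` clones and builds Concuerror on first use, runs each
# pair, and writes per-test logs plus logs/concuerror.html as an index.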
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ct apps-ct distclean-ct
-
-# Configuration.
-
-CT_OPTS ?=
-
-ifneq ($(wildcard $(TEST_DIR)),)
-ifndef CT_SUITES
-CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
-endif
-endif
-CT_SUITES ?=
-CT_LOGS_DIR ?= $(CURDIR)/logs
-
-# Core targets.
-
-tests:: ct
-
-ifndef KEEP_LOGS
-distclean:: distclean-ct
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Common_test targets:" \
- " ct Run all the common_test suites for this project" \
- "" \
- "All your common_test suites have their associated targets." \
- "A suite named http_SUITE can be run using the ct-http target."
-
-# Plugin-specific targets.
-
-CT_RUN = ct_run \
- -no_auto_compile \
- -noinput \
- -pa $(CURDIR)/ebin $(TEST_DIR) \
- -dir $(TEST_DIR) \
- -logdir $(CT_LOGS_DIR)
-
-ifeq ($(CT_SUITES),)
-ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
-else
-# We do not run tests if we are in an apps/* with no test directory.
-ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
-ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
-endif
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-define ct_app_target
-apps-ct-$1: test-build
- $$(MAKE) -C $1 ct IS_APP=1
-endef
-
-$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
-
-apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
-endif
-
-ifdef t
-ifeq (,$(findstring :,$t))
-CT_EXTRA = -group $t
-else
-t_words = $(subst :, ,$t)
-CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
-endif
-else
-ifdef c
-CT_EXTRA = -case $c
-else
-CT_EXTRA =
-endif
-endif
-
-define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
-endef
-
-$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
-
-distclean-ct:
- $(gen_verbose) rm -rf $(CT_LOGS_DIR)
-
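A few invocation examples for the common_test plugin (illustration only, not part of the deleted file; suite, group and case names are hypothetical):

# Run every *_SUITE found in $(TEST_DIR):
#   make ct
# Run a single suite through its generated target (here http_SUITE):
#   make ct-http
# Restrict the run to one group, or to one case inside a group:
#   make ct-http t=admin_users
#   make ct-http t=admin_users:login_succeeds
# Pass additional flags straight to ct_run:
#   make ct CT_OPTS="-repeat 3"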
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: plt distclean-plt dialyze
-
-# Configuration.
-
-DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
-export DIALYZER_PLT
-
-PLT_APPS ?=
-DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-DIALYZER_PLT_OPTS ?=
-
-# Core targets.
-
-check:: dialyze
-
-distclean:: distclean-plt
-
-help::
- $(verbose) printf "%s\n" "" \
- "Dialyzer targets:" \
- " plt Build a PLT file for this project" \
- " dialyze Analyze the project using Dialyzer"
-
-# Plugin-specific targets.
-
-define filter_opts.erl
- Opts = init:get_plain_arguments(),
- {Filtered, _} = lists:foldl(fun
- (O, {Os, true}) -> {[O|Os], false};
- (O = "-D", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-I", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-pa", {Os, _}) -> {[O|Os], true};
- (_, Acc) -> Acc
- end, {[], false}, Opts),
- io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
- halt().
-endef
-
-# DIALYZER_PLT is a variable understood directly by Dialyzer.
-#
-# We append the path to erts at the end of the PLT. This works
-# because the PLT file is in the external term format and the
-# function binary_to_term/1 ignores any trailing data.
-$(DIALYZER_PLT): deps app
- $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
- while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
- $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
- erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
- $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
-
-plt: $(DIALYZER_PLT)
-
-distclean-plt:
- $(gen_verbose) rm -f $(DIALYZER_PLT)
-
-ifneq ($(wildcard $(DIALYZER_PLT)),)
-dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
- $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
- grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
- rm $(DIALYZER_PLT); \
- $(MAKE) plt; \
- fi
-else
-dialyze: $(DIALYZER_PLT)
-endif
- $(verbose) dialyzer --no_native `$(ERL) \
- -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
- -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
-
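A configuration sketch for the Dialyzer plugin (illustration only, not part of the deleted file; the extra PLT applications are hypothetical for a given project):

# Extra OTP applications added to the PLT on top of erts/kernel/stdlib.
PLT_APPS = crypto ssl inets
# Keep error_handling and unmatched_returns warnings, drop race_conditions.
DIALYZER_OPTS = -Werror_handling -Wunmatched_returns

include erlang.mk

# `make plt` builds .$(PROJECT).plt and appends the erts lib_dir after the
# PLT term; since binary_to_term/1 ignores trailing bytes, `make dialyze`
# can compare that path against the running OTP and rebuild the PLT when
# the Erlang installation has changed.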
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
-
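A sketch of how the ErlyDTL plugin maps templates to modules (illustration only, not part of the deleted file; the template files are hypothetical):

# With the defaults (DTL_PATH = templates/, DTL_SUFFIX = _dtl):
#   templates/index.dtl        -> module index_dtl
# With DTL_FULL_PATH set, the relative path becomes part of the name:
#   templates/mail/welcome.dtl -> module mail_welcome_dtl
DTL_FULL_PATH = 1

include erlang.mk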
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
- " escript Build an executable escript archive" \
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
- $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
-
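A usage sketch for the escript plugin (illustration only, not part of the deleted file; the tool and module names are hypothetical):

# The archive is a zip of ebin/ plus each dependency's ebin/, prepended
# with the three header lines built from the variables below.
ESCRIPT_NAME = mytool
# escript will call mytool:main/1 when the file is executed.
ESCRIPT_EMU_ARGS = -escript main mytool

include erlang.mk

# `make escript` produces an executable file named `mytool`.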
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: eunit apps-eunit
-
-# Configuration
-
-EUNIT_OPTS ?=
-EUNIT_ERL_OPTS ?=
-
-# Core targets.
-
-tests:: eunit
-
-help::
- $(verbose) printf "%s\n" "" \
- "EUnit targets:" \
- " eunit Run all the EUnit tests for this project"
-
-# Plugin-specific targets.
-
-define eunit.erl
- $(call cover.erl)
- CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
- ok -> ok;
- error -> halt(2)
- end,
- CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
- halt()
-endef
-
-EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
-else
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
-endif
-else
-EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
-EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
-
-EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
- $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
-
-eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
-ifneq ($(wildcard src/ $(TEST_DIR)),)
- $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-apps-eunit: test-build
- $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
- [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
- exit $$eunit_retcode
-endif
-endif
-
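Invocation examples for the EUnit plugin (illustration only, not part of the deleted file; module and test names are hypothetical):

# Run every EUnit test found in the ebin/ modules and $(TEST_DIR):
#   make eunit
# Run the tests of a single module:
#   make eunit t=my_module
# Run a single test generator, called as fun my_module:timeout_test_/0:
#   make eunit t=my_module:timeout_test_
# Options are passed to eunit:test/2:
#   make eunit EUNIT_OPTS=verbose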
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper)
-.PHONY: proper
-
-# Targets.
-
-tests:: proper
-
-define proper_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- Module = fun(M) ->
- [true] =:= lists:usort([
- case atom_to_list(F) of
- "prop_" ++ _ ->
- io:format("Testing ~p:~p/0~n", [M, F]),
- proper:quickcheck(M:F(), nocolors);
- _ ->
- true
- end
- || {F, 0} <- M:module_info(exports)])
- end,
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]);
- module -> Module($(2));
- function -> proper:quickcheck($(2), nocolors)
- end,
- CoverExport("$(COVER_DATA_DIR)/proper.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-proper: test-build cover-data-dir
- $(verbose) $(call erlang,$(call proper_check.erl,module,$(t)))
-else
-proper: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)()))
-endif
-else
-proper: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
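Invocation examples for the PropEr plugin (illustration only, not part of the deleted file; module and property names are hypothetical):

# Check every exported prop_* property in the compiled modules:
#   make proper
# Check all properties of one module, or a single property:
#   make proper t=prop_my_codec
#   make proper t=prop_my_codec:prop_roundtrip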
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Verbosity.
-
-proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
-proto_verbose = $(proto_verbose_$(V))
-
-# Core targets.
-
-ifneq ($(wildcard src/),)
-ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
-PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
-ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
-
-ifeq ($(PROTO_FILES),)
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
- $(verbose) :
-else
-# Rebuild proto files when the Makefile changes.
-# We exclude $(PROJECT).d to avoid a circular dependency.
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(PROTO_FILES); \
- fi
- $(verbose) touch $@
-
-$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
-endif
-
-ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
-define compile_proto.erl
- [begin
- protobuffs_compile:generate_source(F, [
- {output_include_dir, "./include"},
- {output_src_dir, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-else
-define compile_proto.erl
- [begin
- gpb_compile:file(F, [
- {include_as_lib, true},
- {module_name_suffix, "_pb"},
- {o_hrl, "./include"},
- {o_erl, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-endif
-
-ifneq ($(PROTO_FILES),)
-$(PROJECT).d:: $(PROTO_FILES)
- $(verbose) mkdir -p ebin/ include/
- $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
-endif
-endif
-endif
-
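A configuration sketch for the protocol buffers plugin (illustration only, not part of the deleted file; it assumes gpb is resolvable as an erlang.mk dependency):

# Compile .proto files found among the sources with gpb instead of
# protobuffs; generated code lands in src/<name>_pb.erl and
# include/<name>_pb.hrl.
BUILD_DEPS += gpb

include erlang.mk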
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
-
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
-endif
-
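A usage sketch for the relx plugin (illustration only, not part of the deleted file; it assumes a relx.config already exists at the project root):

# Build the release but skip the tarball step.
RELX_TAR = 0

include erlang.mk

# `make rel` downloads the relx escript on first use, builds the release
# described in relx.config into $(RELX_OUTPUT_DIR) (_rel by default), and
# `make run` boots it via bin/<name>, adding `console` when
# extended_start_script is enabled in relx.config.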
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
- " shell Run an erlang shell with SHELL_OPTS or a reasonable default"
-
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
-
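A configuration sketch for the shell plugin (illustration only, not part of the deleted file; the extra dependency and the start expression are only examples):

# Extra applications fetched and built only for `make shell`.
SHELL_DEPS = observer_cli
dep_observer_cli = hex 1.5.4
# Start the project application once the shell is up.
SHELL_OPTS = -eval 'application:ensure_all_started($(PROJECT)).'

include erlang.mk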
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
- "ReST sources and the 'conf.py' file are expected in the directory pointed to" \
- "by SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists the formats to build" \
- "(only 'html' is generated by default); the target directory can be specified by" \
- 'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
- "Additional Sphinx options can be set in SPHINX_OPTS."
-
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
-
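A configuration sketch for the Sphinx plugin (illustration only, not part of the deleted file; the output locations are arbitrary):

# Build both HTML and man pages from the sources in doc/.
SPHINX_FORMATS = html man
sphinx_html_output = doc/output/html
sphinx_man_output = doc/output/man

include erlang.mk

# `make sphinx` (or `make docs`, when doc/conf.py exists) runs sphinx-build
# once per listed format.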
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
-.PHONY: triq
-
-# Targets.
-
-tests:: triq
-
-define triq_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
- module -> triq:check($(2));
- function -> triq:check($(2))
- end,
- CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-triq: test-build cover-data-dir
- $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
-else
-triq: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
-endif
-else
-triq: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
- ' xref Run Xrefr using $$XREF_CONFIG as config file if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-COVER_REPORT_DIR ?= cover
-COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
-
-ifdef COVER
-COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
-COVER_DEPS ?=
-endif
-
-# Code coverage for Common Test.
-
-ifdef COVER
-ifdef CT_RUN
-ifneq ($(wildcard $(TEST_DIR)),)
-test-build:: $(TEST_DIR)/ct.cover.spec
-
-$(TEST_DIR)/ct.cover.spec: cover-data-dir
- $(gen_verbose) printf "%s\n" \
- "{incl_app, '$(PROJECT)', details}." \
- "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
- $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
- $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
-
-CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
-endif
-endif
-endif
-
-# Code coverage for other tools.
-
-ifdef COVER
-define cover.erl
- CoverSetup = fun() ->
- Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
- $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
- $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
- end
- end || Dir <- Dirs]
- end,
- CoverExport = fun(Filename) -> cover:export(Filename) end,
-endef
-else
-define cover.erl
- CoverSetup = fun() -> ok end,
- CoverExport = fun(_) -> ok end,
-endef
-endif
-
-# Core targets
-
-ifdef COVER
-ifneq ($(COVER_REPORT_DIR),)
-tests::
- $(verbose) $(MAKE) --no-print-directory cover-report
-endif
-
-cover-data-dir: | $(COVER_DATA_DIR)
-
-$(COVER_DATA_DIR):
- $(verbose) mkdir -p $(COVER_DATA_DIR)
-else
-cover-data-dir:
-endif
-
-clean:: coverdata-clean
-
-ifneq ($(COVER_REPORT_DIR),)
-distclean:: cover-report-clean
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Cover targets:" \
- " cover-report Generate a HTML coverage report from previously collected" \
- " cover data." \
- " all.coverdata Merge all coverdata files into all.coverdata." \
- "" \
- "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
- "target tests additionally generates a HTML coverage report from the combined" \
- "coverdata files from each of these testing tools. HTML reports can be disabled" \
- "by setting COVER_REPORT_DIR to empty."
-
-# Plugin specific targets
-
-COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
-
-.PHONY: coverdata-clean
-coverdata-clean:
- $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
-
-# Merge all coverdata files into one.
-define cover_export.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
-endef
-
-all.coverdata: $(COVERDATA) cover-data-dir
- $(gen_verbose) $(call erlang,$(cover_export.erl))
-
-# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
-# empty if you want the coverdata files but not the HTML report.
-ifneq ($(COVER_REPORT_DIR),)
-
-.PHONY: cover-report-clean cover-report
-
-cover-report-clean:
- $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
-ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
- $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
-endif
-
-ifeq ($(COVERDATA),)
-cover-report:
-else
-
-# Modules which include eunit.hrl always contain one line without coverage
-# because eunit defines test/0 which is never called. We compensate for this.
-EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
- grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
- | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
-
-define cover_report.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- Ms = cover:imported_modules(),
- [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
- ++ ".COVER.html", [html]) || M <- Ms],
- Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
- EunitHrlMods = [$(EUNIT_HRL_MODS)],
- Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
- true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
- TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
- TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
- Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
- TotalPerc = Perc(TotalY, TotalN),
- {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
- io:format(F, "<!DOCTYPE html><html>~n"
- "<head><meta charset=\"UTF-8\">~n"
- "<title>Coverage report</title></head>~n"
- "<body>~n", []),
- io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
- io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
- [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
- "<td>~p%</td></tr>~n",
- [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
- How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
- Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
- io:format(F, "</table>~n"
- "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
- "</body></html>", [How, Date]),
- halt().
-endef
-
-cover-report:
- $(verbose) mkdir -p $(COVER_REPORT_DIR)
- $(gen_verbose) $(call erlang,$(cover_report.erl))
-
-endif
-endif # ifneq ($(COVER_REPORT_DIR),)
-
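Invocation examples for the cover plugin (illustration only, not part of the deleted file; the dependency name is only an example):

# Instrument ebin/ (plus COVER_APPS/COVER_DEPS), run the test suites and
# render cover/index.html from the collected .coverdata files:
#   make tests COVER=1 COVER_DEPS="rabbit_common"
# Merge previously collected files into a single one:
#   make all.coverdata
# Keep the coverdata but skip the HTML report:
#   make tests COVER=1 COVER_REPORT_DIR= COVER_DATA_DIR=cover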
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
-
-endif
-endif
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
-
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
-
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are also included no
-# matter the type of requested dependencies.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
-
-# Allow using fetch-deps and $(DEP_TYPES) to fetch multiple types of
-# dependencies with a single target.
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
-
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
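Invocation examples for the query targets above (illustration only, not part of the deleted file):

# Print one line per recursive dependency with the default fields
# (name fetch_method repo version), or a custom selection:
#   make query-deps
#   make query-deps QUERY="name repo"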
diff --git a/deps/rabbitmq_shovel_management/priv/www/js/shovel.js b/deps/rabbitmq_shovel_management/priv/www/js/shovel.js
index a4f85cb988..7aa174bc21 100644
--- a/deps/rabbitmq_shovel_management/priv/www/js/shovel.js
+++ b/deps/rabbitmq_shovel_management/priv/www/js/shovel.js
@@ -22,6 +22,10 @@ dispatcher_add(function(sammy) {
if (redirect !== undefined) {
delete this.params['redirect'];
}
+ var stream_offset = this.params['src-consumer-args-stream-offset'];
+ var src_consumer_args = {'x-stream-offset': stream_offset};
+ delete this.params['src-consumer-args-stream-offset'];
+ this.params['src-consumer-args'] = src_consumer_args;
put_parameter(this, [], num_keys, bool_keys, arrayable_keys);
if (redirect !== undefined) {
go_to(redirect);
diff --git a/deps/rabbitmq_shovel_management/rabbitmq-components.mk b/deps/rabbitmq_shovel_management/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/rabbitmq_shovel_management/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Define default goal to `all` because this file defines some targets
-# before the inclusion of erlang.mk leading to the wrong target becoming
-# the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
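A worked example of the git-describe fallback above (illustration only, not part of the deleted file): in a clean Git clone sitting two commits past the tag v3.9.11, the pipeline would behave roughly as follows.

#   git describe --dirty --abbrev=7 --tags --always --first-parent
#   => v3.9.11-2-gabc1234
#   => 3.9.11+2.gabc1234   (after the sed rewrites above)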
-
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to checkout branches which match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch or fallback to `stable` or `master` whichever was the
-# base of the topic branch.
-
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
-
-# Third-party dependencies version pinning.
-#
-# We do that in this file, which is copied in all projects, to ensure
-# all projects use the same versions. It avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# Erlang.mk does not rebuild dependencies by default once they have been
-# compiled, except for those listed in the `$(FORCE_REBUILD)`
-# variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this eases
-# the work on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project
-# repository URL, if it's a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name are replaced by the
-# target's properties:
-# eg. rabbitmq-common is replaced by rabbitmq-codegen
-# eg. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fallback to RabbitMQ
-# upstream which is GitHub.
-
-# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following pattern:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
-
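A worked expansion of the macro above (illustration only, not part of the deleted file):

#   $(call subst_repo_name,rabbitmq-common,rabbitmq-codegen,
#          https://github.com/rabbitmq/rabbitmq-common.git)
#   => https://github.com/rabbitmq/rabbitmq-codegen.git
# because the innermost patsubst rewrites the trailing "/rabbitmq-common.git".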
-# Macro to replace both the project's name (eg. "rabbit_common") and
-# repository name (eg. "rabbitmq-common") by the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespaces in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level's one.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is a coincidence that we found a
-# `rabbitmq-components.mk` in upper directories.
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
diff --git a/deps/rabbitmq_shovel_management/src/rabbit_shovel_mgmt.erl b/deps/rabbitmq_shovel_management/src/rabbit_shovel_mgmt.erl
index a843472e0f..9b4f4265ca 100644
--- a/deps/rabbitmq_shovel_management/src/rabbit_shovel_mgmt.erl
+++ b/deps/rabbitmq_shovel_management/src/rabbit_shovel_mgmt.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_shovel_mgmt).
diff --git a/deps/rabbitmq_shovel_management/test/http_SUITE.erl b/deps/rabbitmq_shovel_management/test/http_SUITE.erl
index 4248e90419..7cd9f3e049 100644
--- a/deps/rabbitmq_shovel_management/test/http_SUITE.erl
+++ b/deps/rabbitmq_shovel_management/test/http_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(http_SUITE).
@@ -166,6 +166,27 @@ shovels(Config) ->
'dest-queue' => <<"test2">>}}, ?CREATED)
|| V <- ["%2f", "v"]],
+ [http_put(Config, "/parameters/shovel/" ++ V ++ "/my-dynamic",
+ #{value => #{'src-protocol' => <<"amqp091">>,
+ 'src-uri' => <<"amqp://">>,
+ 'src-queue' => <<"test">>,
+ 'dest-protocol' => <<"amqp091">>,
+ 'dest-uri' => <<"amqp://">>,
+ 'dest-queue' => list_to_binary(
+ "test2qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq"
+ "test2qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq"
+ "test2qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq"
+ "test2qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq"
+ "test2qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq"
+ "test2qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq"
+ "test2qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq"
+ "test2qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq"
+ "test2qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq"
+ "test2qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq"
+ "test2qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq")}},
+ ?BAD_REQUEST)
+ || V <- ["%2f", "v"]],
+
?assertMatch([?StaticPattern, ?Dynamic1Pattern, ?Dynamic2Pattern],
http_get(Config, "/shovels", "guest", "guest", ?OK)),
?assertMatch([?Dynamic1Pattern],
diff --git a/deps/rabbitmq_stomp/BUILD.bazel b/deps/rabbitmq_stomp/BUILD.bazel
new file mode 100644
index 0000000000..e8c22be6a9
--- /dev/null
+++ b/deps/rabbitmq_stomp/BUILD.bazel
@@ -0,0 +1,166 @@
+load("@bazel-erlang//:bazel_erlang_lib.bzl", "erlc")
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze")
+load(
+ "//:rabbitmq.bzl",
+ "RABBITMQ_DIALYZER_OPTS",
+ "RABBITMQ_TEST_ERLC_OPTS",
+ "assert_suites",
+ "broker_for_integration_suites",
+ "rabbitmq_integration_suite",
+ "rabbitmq_lib",
+ "rabbitmq_suite",
+)
+
+APP_NAME = "rabbitmq_stomp"
+
+APP_DESCRIPTION = "RabbitMQ STOMP plugin"
+
+APP_MODULE = "rabbit_stomp"
+
+APP_ENV = """[
+ {default_user,
+ [{login, <<"guest">>},
+ {passcode, <<"guest">>}]},
+ {default_vhost, <<"/">>},
+ {default_topic_exchange, <<"amq.topic">>},
+ {default_nack_requeue, true},
+ {ssl_cert_login, false},
+ {implicit_connect, false},
+ {tcp_listeners, [61613]},
+ {ssl_listeners, []},
+ {num_tcp_acceptors, 10},
+ {num_ssl_acceptors, 10},
+ {tcp_listen_options, [{backlog, 128},
+ {nodelay, true}]},
+ %% see rabbitmq/rabbitmq-stomp#39
+ {trailing_lf, true},
+ %% see rabbitmq/rabbitmq-stomp#57
+ {hide_server_info, false},
+ {proxy_protocol, false}
+ ]"""
+
+BUILD_DEPS = [
+ "//deps/rabbitmq_cli:rabbitmqctl",
+]
+
+DEPS = [
+ "//deps/amqp_client:bazel_erlang_lib",
+ "//deps/rabbit_common:bazel_erlang_lib",
+ "@ranch//:bazel_erlang_lib",
+]
+
+RUNTIME_DEPS = [
+ "//deps/rabbit:bazel_erlang_lib",
+]
+
+rabbitmq_lib(
+ app_description = APP_DESCRIPTION,
+ app_env = APP_ENV,
+ app_module = APP_MODULE,
+ app_name = APP_NAME,
+ build_deps = BUILD_DEPS,
+ runtime_deps = RUNTIME_DEPS,
+ deps = DEPS,
+)
+
+xref(tags = ["xref"])
+
+dialyze(
+ dialyzer_opts = RABBITMQ_DIALYZER_OPTS,
+ plt = "//:base_plt",
+ tags = ["dialyze"],
+)
+
+broker_for_integration_suites()
+
+PACKAGE = "deps/rabbitmq_stomp"
+
+erlc(
+ name = "rabbit_stomp_client",
+ testonly = True,
+ srcs = [
+ "test/src/rabbit_stomp_client.erl",
+ ],
+ hdrs = glob([
+ "include/**/*.hrl",
+ "src/**/*.hrl",
+ ]),
+ dest = "test",
+ erlc_opts = RABBITMQ_TEST_ERLC_OPTS,
+ deps = [
+ ":test_bazel_erlang_lib",
+ ],
+)
+
+suites = [
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "command_SUITE",
+ size = "medium",
+ additional_beam = [
+ ":rabbit_stomp_client",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "config_schema_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "connections_SUITE",
+ size = "medium",
+ additional_beam = [
+ ":rabbit_stomp_client",
+ ],
+ ),
+ rabbitmq_suite(
+ name = "frame_SUITE",
+ size = "small",
+ deps = [
+ "//deps/amqp_client:bazel_erlang_lib",
+ "//deps/rabbit_common:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "proxy_protocol_SUITE",
+ size = "medium",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "python_SUITE",
+ flaky = True,
+ shard_count = 3,
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "system_SUITE",
+ size = "medium",
+ additional_beam = [
+ ":rabbit_stomp_client",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "topic_SUITE",
+ size = "medium",
+ additional_beam = [
+ ":rabbit_stomp_client",
+ ],
+ ),
+ rabbitmq_suite(
+ name = "util_SUITE",
+ size = "medium",
+ deps = [
+ "//deps/amqp_client:bazel_erlang_lib",
+ "//deps/rabbit_common:bazel_erlang_lib",
+ ],
+ ),
+]
+
+assert_suites(
+ suites,
+ glob(["test/**/*_SUITE.erl"]),
+)
diff --git a/deps/rabbitmq_stomp/Makefile b/deps/rabbitmq_stomp/Makefile
index a8a3e57c90..fa72edacaa 100644
--- a/deps/rabbitmq_stomp/Makefile
+++ b/deps/rabbitmq_stomp/Makefile
@@ -42,5 +42,5 @@ DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
ERLANG_MK_COMMIT = rabbitmq-tmp
-include rabbitmq-components.mk
-include erlang.mk
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
diff --git a/deps/rabbitmq_stomp/erlang.mk b/deps/rabbitmq_stomp/erlang.mk
deleted file mode 100644
index fce4be0b0a..0000000000
--- a/deps/rabbitmq_stomp/erlang.mk
+++ /dev/null
@@ -1,7808 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
-ERLANG_MK_WITHOUT =
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
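   (A brief usage sketch, not part of the diff: the V variable above controls how much of each recipe is echoed.)
   # V=0 (default): $(verbose) expands to @, so only the short " GEN " summaries print
   # V=1          : verbose_1 is unset, $(verbose) is empty, full commands are echoed
   # V=2          : $(verbose) expands to "set -x;", tracing every shell command
   # V=3          : the shell itself is additionally re-invoked with -x
   #   e.g.  make V=1 app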
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
-
-# Adding erlang.mk to make Erlang scripts that call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
-
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
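   (A minimal, assumed usage sketch of core_find as defined above; the variable name is hypothetical.)
   # list every .erl file or symlink under src/, dropping any path that contains a space
   #   SOURCES := $(call core_find,src/,*.erl)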
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
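   (A hedged usage example of the erlang-mk update target above; the fork URL and branch mirror the values set in deps/rabbitmq_stomp/Makefile earlier in this diff.)
   # update the vendored erlang.mk from the default upstream:
   #   make erlang-mk
   # or track a specific fork and branch:
   #   make erlang-mk ERLANG_MK_REPO=https://github.com/rabbitmq/erlang.mk.git ERLANG_MK_COMMIT=rabbitmq-tmp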
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
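   (A hedged invocation sketch for the kerl-based version selection above; the version name is an example, not taken from this diff.)
   # build via kerl (if not yet installed) and use a specific Erlang/OTP git ref for this project:
   #   make ERLANG_OTP=OTP-23.1
   # or pick the newest installed non-RC build under $(KERL_INSTALL_DIR):
   #   make LATEST_ERLANG_OTP=1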
-
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simplify writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating Github-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = erlang library for JSON Web Token
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple non intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's SpiderMonkey JavaScript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = redis erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += esh_mk
-pkg_esh_mk_name = esh_mk
-pkg_esh_mk_description = esh template engine plugin for erlang.mk
-pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
-pkg_esh_mk_fetch = git
-pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
-pkg_esh_mk_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple Erlang testing library that provides TAP-compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = TOML language erlang parser
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = http://fixprotocol.org/ implementation.
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - an Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = The guards and for Erlang parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
-pkg_gun_homepage = http://ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang irc client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library for efficiently decoding and encoding JSON, written in C.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = erlang driver for rethinkdb
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = library to handle mimetypes
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang Oauth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transformer for using new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between record and proplist
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = automatic Erlang node discovery via redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = pipelined erlang redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang Rest Client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy as nif for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an id generator for message service.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elastic Search
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX style broken HTML parser in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = transit format for erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU based collation Erlang module
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = a simple erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler svn repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based yaml loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = ZAB protocol implemented in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
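
As a usage note for the search target defined above, here is a minimal sketch of how it is typically invoked; the query string "pool" is only illustrative and does not come from this diff:

  # From a project that includes this erlang.mk:
  #   make search q=pool
  # The query is lowercased and matched against each package's
  # pkg_<name>_name and pkg_<name>_description; every hit is printed
  # via pkg_print with its name, description, homepage, fetch method,
  # repository and commit. Without q=, all packages are listed.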
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
-
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
-
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# Both early and regular plugins use the core_dep_plugin macro.
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
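
To make the early-plugin wiring above concrete, a hedged configuration sketch follows; the dependency names are invented for illustration and do not come from this file:

  # In the project Makefile, before including erlang.mk:
  #   DEP_EARLY_PLUGINS = my_dep other_dep/mk/early-plugins.mk
  # An entry without a slash includes $(DEPS_DIR)/my_dep/early-plugins.mk;
  # an entry with a slash includes $(DEPS_DIR)/other_dep/mk/early-plugins.mk.
  # In both cases core_dep_plugin also makes the included file depend on
  # the dependency's directory, so the dependency gets fetched first.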
-
-# Query functions.
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
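
As an illustration of how these query helpers resolve a dependency, here is a hedged sketch; the dep_cowboy line is an assumed example, and it relies on dep_fetch_git being defined elsewhere in erlang.mk:

  #   dep_cowboy = git https://github.com/ninenines/cowboy 2.9.0
  #   $(call query_fetch_method,cowboy) -> git
  #   $(call query_repo,cowboy)         -> https://github.com/ninenines/cowboy
  #   $(call query_version,cowboy)      -> 2.9.0
  # A name with no dep_* line falls back to the package index above,
  # i.e. pkg_<name>_fetch, pkg_<name>_repo and pkg_<name>_commit.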
-
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
-
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly we don't want to include it here,
-# otherwise it would be treated both as an app and as a top-level project.
-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
-
-export NO_AUTOPATCH
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
-
-# Optimization: don't recompile deps unless truly necessary.
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create an ebin directory for all apps to make sure Erlang recognizes them
-# as proper OTP applications when using -include_lib. This is a temporary
-# fix; a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
-
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies after they have been compiled
-# once. A developer working on the top-level project and on some of its
-# dependencies at the same time may want to change this behavior.
-# There are two ways to do so (illustrated below):
-#     1. Set `FULL=1` so that all dependencies are visited and
-#        recursively recompiled if necessary.
-#     2. Set `FORCE_REBUILD=` to the specific list of dependencies that
-#        should be recompiled (instead of the whole set).
-
-FORCE_REBUILD ?=
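-# Illustrative invocations (hypothetical dependency names):
-#   make FULL=1                        # revisit all dependencies and rebuild as needed
-#   make FORCE_REBUILD="cowlib ranch"  # force only these two deps to be rebuilt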
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
-
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
-
-# Deps related targets.
-
-# @todo rename GNUmakefile and makefile into Makefile first, if they exist
-# While the makefile could also be named GNUmakefile or makefile,
-# in practice only Makefile has been needed so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that loads the parent Erlang.mk
-# when ERLANG_MK_FILENAME is set. Do it for all 3 possible Makefile file names.
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
-
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
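-# A dependency using this fetch method is typically declared as follows in
-# the project Makefile (illustrative name, repository and ref):
-#   DEPS += cowboy
-#   dep_cowboy = git https://github.com/ninenines/cowboy 2.9.0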
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-# Hex dependencies only specify a package version, so there is no need to
-# look them up in the Erlang.mk package index.
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
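-# Illustrative declaration (hypothetical version); the optional third word is
-# the hex.pm package name, used when it differs from the application name:
-#   DEPS += cowlib
-#   dep_cowlib = hex 2.11.0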
-
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility with the older Erlang.mk configuration format.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
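-# The deprecated format handled above is simply "<repository> [<ref>]", e.g.
-# (illustrative):
-#   dep_cowboy = https://github.com/ninenines/cowboy 1.0.0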
-
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
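-# The user-facing variables above use ?=, so a project Makefile can override
-# them before including erlang.mk. Illustrative overrides (hypothetical
-# module names):
-#   ERLC_OPTS = +debug_info +warn_missing_spec
-#   COMPILE_FIRST = my_behaviour
-#   ERLC_EXCLUDE = my_generated_module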
-
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- Output0 = [
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ],
- Output = case "é" of
- [233] -> unicode:characters_to_binary(Output0);
- _ -> Output0
- end,
- ok = file:write_file("$(1)", Output),
- halt()
-endef
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
- echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
-
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
- @:
-
-test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
-test_erlc_verbose_2 = set -x;
-test_erlc_verbose = $(test_erlc_verbose_$(V))
-
-define compile_test_erl
- $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(1)
-endef
-
-ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
-$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
- $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want to fail due to
-# warnings when used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
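-# Rendering this template via the rebar.config target below produces
-# something like the following for a project with one git dependency and the
-# default ERLC_OPTS (illustrative output only):
-#   {deps, [
-#   {cowboy,".*",{git,"https://github.com/ninenines/cowboy","2.9.0"}}
-#   ]}.
-#   {erl_opts, [debug_info,warn_export_vars,warn_shadow_vars,warn_obsolete_guard]}.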
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
- " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
- " list-templates List available templates"
-
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
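-# Generated files are indented with tabs unless SP is set, in which case the
-# templates indent with that many spaces. Illustrative invocations
-# (hypothetical names):
-#   make bootstrap SP=4
-#   make new t=gen_server n=my_server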
-
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
-
-list-templates:
- $(verbose) @echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
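-# These ?= defaults can be overridden in the project Makefile, e.g. to build
-# a standalone port program instead of a shared library (illustrative values):
-#   C_SRC_TYPE = executable
-#   C_SRC_OUTPUT = $(CURDIR)/priv/my_port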
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code. The "gcc" MSYS2 package also doesn't.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
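A quick way to exercise the generated stub, assuming a NIF created with `make new-nif n=hello_nif` followed by `make` (the module name is hypothetical):

    1> hello_nif:hello(world).
    {hello,world}
    2> hello_nif:hello("not an atom").
    {error,badarg}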
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
- "The CI_OTP variable must be defined with the Erlang versions" \
- "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
-
-# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifdef CONCUERROR_TESTS
-
-.PHONY: concuerror distclean-concuerror
-
-# Configuration
-
-CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
-CONCUERROR_OPTS ?=
-
-# Core targets.
-
-check:: concuerror
-
-ifndef KEEP_LOGS
-distclean:: distclean-concuerror
-endif
-
-# Plugin-specific targets.
-
-$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
- $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
- $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
-
-$(CONCUERROR_LOGS_DIR):
- $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
-
-define concuerror_html_report
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="utf-8">
-<title>Concuerror HTML report</title>
-</head>
-<body>
-<h1>Concuerror HTML report</h1>
-<p>Generated on $(concuerror_date)</p>
-<ul>
-$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
-</ul>
-</body>
-</html>
-endef
-
-concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
- $(eval concuerror_date := $(shell date))
- $(eval concuerror_targets := $^)
- $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
-
-define concuerror_target
-.PHONY: concuerror-$1-$2
-
-concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
- $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
- --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
- -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
- $$(CONCUERROR_OPTS) -m $1 -t $2
-endef
-
-$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
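For reference, each CONCUERROR_TESTS entry has the form module:function, where the function is an exported test entry point of arity 0. A minimal sketch with hypothetical names: setting CONCUERROR_TESTS = demo_tests:race_test produces a concuerror-demo_tests-race_test target whose output ends up in $(CONCUERROR_LOGS_DIR)/concuerror-demo_tests-race_test.txt.

    %% demo_tests.erl (hypothetical), compiled by test-build.
    -module(demo_tests).
    -export([race_test/0]).

    %% Two processes send to the parent; Concuerror explores both delivery orders.
    race_test() ->
        Parent = self(),
        spawn(fun() -> Parent ! a end),
        spawn(fun() -> Parent ! b end),
        receive First -> First end.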
-
-distclean-concuerror:
- $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
-
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ct apps-ct distclean-ct
-
-# Configuration.
-
-CT_OPTS ?=
-
-ifneq ($(wildcard $(TEST_DIR)),)
-ifndef CT_SUITES
-CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
-endif
-endif
-CT_SUITES ?=
-CT_LOGS_DIR ?= $(CURDIR)/logs
-
-# Core targets.
-
-tests:: ct
-
-ifndef KEEP_LOGS
-distclean:: distclean-ct
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Common_test targets:" \
- " ct Run all the common_test suites for this project" \
- "" \
- "All your common_test suites have their associated targets." \
- "A suite named http_SUITE can be run using the ct-http target."
-
-# Plugin-specific targets.
-
-CT_RUN = ct_run \
- -no_auto_compile \
- -noinput \
- -pa $(CURDIR)/ebin $(TEST_DIR) \
- -dir $(TEST_DIR) \
- -logdir $(CT_LOGS_DIR)
-
-ifeq ($(CT_SUITES),)
-ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
-else
-# We do not run tests if we are in an apps/* application with no test directory.
-ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
-ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
-endif
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-define ct_app_target
-apps-ct-$1: test-build
- $$(MAKE) -C $1 ct IS_APP=1
-endef
-
-$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
-
-apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
-endif
-
-ifdef t
-ifeq (,$(findstring :,$t))
-CT_EXTRA = -group $t
-else
-t_words = $(subst :, ,$t)
-CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
-endif
-else
-ifdef c
-CT_EXTRA = -case $c
-else
-CT_EXTRA =
-endif
-endif
-
-define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
-endef
-
-$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
-
-distclean-ct:
- $(gen_verbose) rm -rf $(CT_LOGS_DIR)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: plt distclean-plt dialyze
-
-# Configuration.
-
-DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
-export DIALYZER_PLT
-
-PLT_APPS ?=
-DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-DIALYZER_PLT_OPTS ?=
-
-# Core targets.
-
-check:: dialyze
-
-distclean:: distclean-plt
-
-help::
- $(verbose) printf "%s\n" "" \
- "Dialyzer targets:" \
- " plt Build a PLT file for this project" \
- " dialyze Analyze the project using Dialyzer"
-
-# Plugin-specific targets.
-
-define filter_opts.erl
- Opts = init:get_plain_arguments(),
- {Filtered, _} = lists:foldl(fun
- (O, {Os, true}) -> {[O|Os], false};
- (O = "-D", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-I", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-pa", {Os, _}) -> {[O|Os], true};
- (_, Acc) -> Acc
- end, {[], false}, Opts),
- io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
- halt().
-endef
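The fold above keeps only the -D, -I and -pa options (plus the value following a bare -D, -I or -pa) from the plain arguments, so that macros and include paths from ERLC_OPTS reach Dialyzer. A standalone sketch of the same filtering, with a sample call in the trailing comment (module name hypothetical):

    -module(filter_opts_demo).
    -export([filter/1]).

    filter(Opts) ->
        {Filtered, _} = lists:foldl(fun
            (O, {Os, true}) -> {[O|Os], false};            %% value following -D/-I/-pa
            (O = "-D", {Os, _}) -> {[O|Os], true};
            (O = [$-, $D, _ | _], {Os, _}) -> {[O|Os], false};
            (O = "-I", {Os, _}) -> {[O|Os], true};
            (O = [$-, $I, _ | _], {Os, _}) -> {[O|Os], false};
            (O = "-pa", {Os, _}) -> {[O|Os], true};
            (_, Acc) -> Acc
        end, {[], false}, Opts),
        lists:reverse(Filtered).

    %% filter(["+debug_info", "-DTEST", "-I", "include", "-pa", "ebin"])
    %% returns ["-DTEST", "-I", "include", "-pa", "ebin"].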
-
-# DIALYZER_PLT is a variable understood directly by Dialyzer.
-#
-# We append the path to erts at the end of the PLT. This works
-# because the PLT file is in the external term format and the
-# function binary_to_term/1 ignores any trailing data.
-$(DIALYZER_PLT): deps app
- $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
- while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
- $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
- erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
- $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
-
-plt: $(DIALYZER_PLT)
-
-distclean-plt:
- $(gen_verbose) rm -f $(DIALYZER_PLT)
-
-ifneq ($(wildcard $(DIALYZER_PLT)),)
-dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
- $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
- grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
- rm $(DIALYZER_PLT); \
- $(MAKE) plt; \
- fi
-else
-dialyze: $(DIALYZER_PLT)
-endif
- $(verbose) dialyzer --no_native `$(ERL) \
- -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
- -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
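The check above relies on the erts path that was appended when the PLT was built: if the last line of the PLT no longer matches code:lib_dir(erts) of the running VM, the PLT is deleted and rebuilt. A rough Erlang equivalent of the tail/grep test, for illustration only (module name hypothetical):

    -module(plt_check_demo).
    -export([plt_is_current/1]).

    %% Compare the trailing erts path stored in the PLT with the running VM's erts.
    plt_is_current(PltPath) ->
        {ok, Bin} = file:read_file(PltPath),
        Chunks = binary:split(Bin, <<"\n">>, [global, trim_all]),
        lists:last(Chunks) =:= list_to_binary(code:lib_dir(erts)).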
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
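As an illustration of the naming scheme (paths and names hypothetical): with the defaults, templates/hello.dtl compiles to a module named hello_dtl; with DTL_FULL_PATH set, templates/users/index.dtl becomes users_index_dtl. A compiled template is then rendered roughly like this:

    1> {ok, Html} = hello_dtl:render([{name, "RabbitMQ"}]).
    2> io:format("~s~n", [Html]).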
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
- " escript Build an executable escript archive" \
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
- $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
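The resulting $(ESCRIPT_FILE) is simply those three header lines followed by the raw zip archive, so with the default settings it starts roughly like this (project name hypothetical):

    #!/usr/bin/env escript
    %% This is an -*- erlang -*- file
    %%! -escript main my_project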
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: eunit apps-eunit
-
-# Configuration
-
-EUNIT_OPTS ?=
-EUNIT_ERL_OPTS ?=
-
-# Core targets.
-
-tests:: eunit
-
-help::
- $(verbose) printf "%s\n" "" \
- "EUnit targets:" \
- " eunit Run all the EUnit tests for this project"
-
-# Plugin-specific targets.
-
-define eunit.erl
- $(call cover.erl)
- CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
- ok -> ok;
- error -> halt(2)
- end,
- CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
- halt()
-endef
-
-EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
-else
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
-endif
-else
-EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
-EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
-
-EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
- $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
-
-eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
-ifneq ($(wildcard src/ $(TEST_DIR)),)
- $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-apps-eunit: test-build
- $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
- [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
- exit $$eunit_retcode
-endif
-endif
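For reference, with a module like the one below under $(TEST_DIR), `make eunit` runs everything, `make eunit t=demo_tests` runs that single module, and `make eunit t=demo_tests:sanity_test` runs the one test function (all names hypothetical):

    %% $(TEST_DIR)/demo_tests.erl (hypothetical)
    -module(demo_tests).
    -include_lib("eunit/include/eunit.hrl").

    sanity_test() ->
        ?assertEqual(3, 1 + 2).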
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper)
-.PHONY: proper
-
-# Targets.
-
-tests:: proper
-
-define proper_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- Module = fun(M) ->
- [true] =:= lists:usort([
- case atom_to_list(F) of
- "prop_" ++ _ ->
- io:format("Testing ~p:~p/0~n", [M, F]),
- proper:quickcheck(M:F(), nocolors);
- _ ->
- true
- end
- || {F, 0} <- M:module_info(exports)])
- end,
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]);
- module -> Module($(2));
- function -> proper:quickcheck($(2), nocolors)
- end,
- CoverExport("$(COVER_DATA_DIR)/proper.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-proper: test-build cover-data-dir
- $(verbose) $(call erlang,$(call proper_check.erl,module,$(t)))
-else
-proper: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)()))
-endif
-else
-proper: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES)))
-endif
-endif
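For reference, the harness above runs every exported 0-arity prop_* function it finds in the compiled modules; a minimal property module looks like this (names hypothetical), and `make proper t=prop_demo` restricts the run to that one module:

    %% $(TEST_DIR)/prop_demo.erl (hypothetical)
    -module(prop_demo).
    -include_lib("proper/include/proper.hrl").
    -export([prop_reverse_twice/0]).

    prop_reverse_twice() ->
        ?FORALL(L, list(integer()),
                lists:reverse(lists:reverse(L)) =:= L).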
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Verbosity.
-
-proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
-proto_verbose = $(proto_verbose_$(V))
-
-# Core targets.
-
-ifneq ($(wildcard src/),)
-ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
-PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
-ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
-
-ifeq ($(PROTO_FILES),)
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
- $(verbose) :
-else
-# Rebuild proto files when the Makefile changes.
-# We exclude $(PROJECT).d to avoid a circular dependency.
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(PROTO_FILES); \
- fi
- $(verbose) touch $@
-
-$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
-endif
-
-ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
-define compile_proto.erl
- [begin
- protobuffs_compile:generate_source(F, [
- {output_include_dir, "./include"},
- {output_src_dir, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-else
-define compile_proto.erl
- [begin
- gpb_compile:file(F, [
- {include_as_lib, true},
- {module_name_suffix, "_pb"},
- {o_hrl, "./include"},
- {o_erl, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-endif
-
-ifneq ($(PROTO_FILES),)
-$(PROJECT).d:: $(PROTO_FILES)
- $(verbose) mkdir -p ebin/ include/
- $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
-endif
-endif
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
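As an illustration (names and versions hypothetical), given a relx.config like the one below, the snippet prints "myrel 1.2.3 1", and `make run` then boots the release with _rel/myrel/bin/myrel console:

    %% relx.config (hypothetical)
    {release, {myrel, "1.2.3"},
        [myapp, sasl]}.
    {extended_start_script, true}.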
-
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
- " shell Run an Erlang shell with SHELL_OPTS or a reasonable default"
-
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
-
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
- "ReST sources and the 'conf.py' file are expected in the directory pointed to by" \
- "SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists the formats to build (only the" \
- "'html' format is generated by default); the target directory can be specified by" \
- 'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
- "Additional Sphinx options can be set in SPHINX_OPTS."
-
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
-
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
-.PHONY: triq
-
-# Targets.
-
-tests:: triq
-
-define triq_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
- module -> triq:check($(2));
- function -> triq:check($(2))
- end,
- CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-triq: test-build cover-data-dir
- $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
-else
-triq: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
-endif
-else
-triq: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
- ' xref Run Xrefr using $$XREF_CONFIG as the config file, if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-COVER_REPORT_DIR ?= cover
-COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
-
-ifdef COVER
-COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
-COVER_DEPS ?=
-endif
-
-# Code coverage for Common Test.
-
-ifdef COVER
-ifdef CT_RUN
-ifneq ($(wildcard $(TEST_DIR)),)
-test-build:: $(TEST_DIR)/ct.cover.spec
-
-$(TEST_DIR)/ct.cover.spec: cover-data-dir
- $(gen_verbose) printf "%s\n" \
- "{incl_app, '$(PROJECT)', details}." \
- "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
- $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
- $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
-
-CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
-endif
-endif
-endif
-
-# Code coverage for other tools.
-
-ifdef COVER
-define cover.erl
- CoverSetup = fun() ->
- Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
- $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
- $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
- end
- end || Dir <- Dirs]
- end,
- CoverExport = fun(Filename) -> cover:export(Filename) end,
-endef
-else
-define cover.erl
- CoverSetup = fun() -> ok end,
- CoverExport = fun(_) -> ok end,
-endef
-endif
-
-# Core targets
-
-ifdef COVER
-ifneq ($(COVER_REPORT_DIR),)
-tests::
- $(verbose) $(MAKE) --no-print-directory cover-report
-endif
-
-cover-data-dir: | $(COVER_DATA_DIR)
-
-$(COVER_DATA_DIR):
- $(verbose) mkdir -p $(COVER_DATA_DIR)
-else
-cover-data-dir:
-endif
-
-clean:: coverdata-clean
-
-ifneq ($(COVER_REPORT_DIR),)
-distclean:: cover-report-clean
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Cover targets:" \
- " cover-report Generate an HTML coverage report from previously collected" \
- " cover data." \
- " all.coverdata Merge all coverdata files into all.coverdata." \
- "" \
- "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
- "target tests additionally generates an HTML coverage report from the combined" \
- "coverdata files from each of these testing tools. HTML reports can be disabled" \
- "by setting COVER_REPORT_DIR to empty."
-
-# Plugin specific targets
-
-COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
-
-.PHONY: coverdata-clean
-coverdata-clean:
- $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
-
-# Merge all coverdata files into one.
-define cover_export.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
-endef
-
-all.coverdata: $(COVERDATA) cover-data-dir
- $(gen_verbose) $(call erlang,$(cover_export.erl))
-
-# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
-# empty if you want the coverdata files but not the HTML report.
-ifneq ($(COVER_REPORT_DIR),)
-
-.PHONY: cover-report-clean cover-report
-
-cover-report-clean:
- $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
-ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
- $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
-endif
-
-ifeq ($(COVERDATA),)
-cover-report:
-else
-
-# Modules which include eunit.hrl always contain one line without coverage
-# because eunit defines test/0 which is never called. We compensate for this.
-EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
- grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
- | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
-
-define cover_report.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- Ms = cover:imported_modules(),
- [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
- ++ ".COVER.html", [html]) || M <- Ms],
- Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
- EunitHrlMods = [$(EUNIT_HRL_MODS)],
- Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
- true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
- TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
- TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
- Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
- TotalPerc = Perc(TotalY, TotalN),
- {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
- io:format(F, "<!DOCTYPE html><html>~n"
- "<head><meta charset=\"UTF-8\">~n"
- "<title>Coverage report</title></head>~n"
- "<body>~n", []),
- io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
- io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
- [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
- "<td>~p%</td></tr>~n",
- [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
- How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
- Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
- io:format(F, "</table>~n"
- "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
- "</body></html>", [How, Date]),
- halt().
-endef
-
-cover-report:
- $(verbose) mkdir -p $(COVER_REPORT_DIR)
- $(gen_verbose) $(call erlang,$(cover_report.erl))
-
-endif
-endif # ifneq ($(COVER_REPORT_DIR),)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
-
-endif
-endif
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
-
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
-
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are also included no
-# matter which type of dependencies is requested.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
-
-# Allow the use of fetch-deps and $(DEP_TYPES) to fetch multiple types of
-# dependencies with a single target.
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
-
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
diff --git a/deps/rabbitmq_stomp/include/rabbit_stomp.hrl b/deps/rabbitmq_stomp/include/rabbit_stomp.hrl
index 3d31535d14..8952a4883f 100644
--- a/deps/rabbitmq_stomp/include/rabbit_stomp.hrl
+++ b/deps/rabbitmq_stomp/include/rabbit_stomp.hrl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-record(stomp_configuration, {default_login,
diff --git a/deps/rabbitmq_stomp/include/rabbit_stomp_frame.hrl b/deps/rabbitmq_stomp/include/rabbit_stomp_frame.hrl
index 13b8b2e94c..bac0652141 100644
--- a/deps/rabbitmq_stomp/include/rabbit_stomp_frame.hrl
+++ b/deps/rabbitmq_stomp/include/rabbit_stomp_frame.hrl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-record(stomp_frame, {command, headers, body_iolist}).
diff --git a/deps/rabbitmq_stomp/include/rabbit_stomp_headers.hrl b/deps/rabbitmq_stomp/include/rabbit_stomp_headers.hrl
index 974b5825c8..a91189cb4a 100644
--- a/deps/rabbitmq_stomp/include/rabbit_stomp_headers.hrl
+++ b/deps/rabbitmq_stomp/include/rabbit_stomp_headers.hrl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-define(HEADER_ACCEPT_VERSION, "accept-version").
@@ -26,6 +26,7 @@
-define(HEADER_PASSCODE, "passcode").
-define(HEADER_PERSISTENT, "persistent").
-define(HEADER_PREFETCH_COUNT, "prefetch-count").
+-define(HEADER_X_STREAM_OFFSET, "x-stream-offset").
-define(HEADER_PRIORITY, "priority").
-define(HEADER_RECEIPT, "receipt").
-define(HEADER_REDELIVERED, "redelivered").
diff --git a/deps/rabbitmq_stomp/rabbitmq-components.mk b/deps/rabbitmq_stomp/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/rabbitmq_stomp/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Define the default goal as `all` because this file defines some targets
-# before the inclusion of erlang.mk, leading to the wrong target becoming
-# the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
-
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to check out branches which match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch or fall back to `stable` or `master`, whichever was the
-# base of the topic branch.
-
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
-
-# Third-party dependencies version pinning.
-#
-# We do that in this file, which is copied in all projects, to ensure
-# all projects use the same versions. It avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# Erlang.mk does not rebuild dependencies by default once they have
-# been compiled, except for those listed in the `$(FORCE_REBUILD)`
-# variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this eases
-# the work on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project
-# repository URL, if it's a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name are replaced by the
-# target's properties:
-# eg. rabbitmq-common is replaced by rabbitmq-codegen
-# eg. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fall back to the
-# RabbitMQ upstream, which is GitHub.
-
-# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following pattern:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
-
-# Macro to replace both the project's name (eg. "rabbit_common") and
-# repository name (eg. "rabbitmq-common") by the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespaces in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level's one.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is a coincidence that we found a
-# `rabbitmq-components.mk` up upper directories.
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
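
The makefile section removed above inferred each RabbitMQ component's clone URL from the current repository's "origin" remote, swapping the project and repository names (e.g. rabbit_common / rabbitmq-common become rabbitmq_codegen / rabbitmq-codegen) and falling back to the GitHub upstream when cloning from the guessed URL failed. A minimal Python sketch of the name-substitution step only; the function name and sample URL are illustrative, not part of the patch:

def subst_repo_name(old, new, url):
    # Mirror the three substitution cases from the removed makefile macro:
    # 1. /foo.git -> /bar.git   2. /foo -> /bar   3. /foo/ -> /bar/
    if url.endswith(f"/{old}.git"):
        return url[: -len(f"{old}.git")] + f"{new}.git"
    if url.endswith(f"/{old}"):
        return url[: -len(old)] + new
    return url.replace(f"/{old}/", f"/{new}/")

# e.g. infer a sibling component's URL from the current origin remote
origin = "git@github.com:rabbitmq/rabbitmq-common.git"
print(subst_repo_name("rabbitmq-common", "rabbitmq-codegen", origin))
# -> git@github.com:rabbitmq/rabbitmq-codegen.git
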
diff --git a/deps/rabbitmq_stomp/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ListStompConnectionsCommand.erl b/deps/rabbitmq_stomp/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ListStompConnectionsCommand.erl
index d26615e99f..34727130ab 100644
--- a/deps/rabbitmq_stomp/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ListStompConnectionsCommand.erl
+++ b/deps/rabbitmq_stomp/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ListStompConnectionsCommand.erl
@@ -1,17 +1,9 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at https://www.mozilla.org/MPL/
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
-module('Elixir.RabbitMQ.CLI.Ctl.Commands.ListStompConnectionsCommand').
@@ -19,6 +11,15 @@
-behaviour('Elixir.RabbitMQ.CLI.CommandBehaviour').
+-ignore_xref([
+ {'Elixir.RabbitMQ.CLI.DefaultOutput', output, 1},
+ {'Elixir.RabbitMQ.CLI.Core.Helpers', nodes_in_cluster, 1},
+ {'Elixir.RabbitMQ.CLI.Ctl.InfoKeys', prepare_info_keys, 1},
+ {'Elixir.RabbitMQ.CLI.Ctl.RpcStream', receive_list_items, 7},
+ {'Elixir.RabbitMQ.CLI.Ctl.InfoKeys', validate_info_keys, 2},
+ {'Elixir.Enum', join, 2}
+]).
+
-export([formatter/0,
scopes/0,
switches/0,
diff --git a/deps/rabbitmq_stomp/src/rabbit_stomp.erl b/deps/rabbitmq_stomp/src/rabbit_stomp.erl
index 449c2ef92f..513530e4ac 100644
--- a/deps/rabbitmq_stomp/src/rabbit_stomp.erl
+++ b/deps/rabbitmq_stomp/src/rabbit_stomp.erl
@@ -1,17 +1,8 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at https://www.mozilla.org/MPL/
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_stomp).
@@ -97,7 +88,7 @@ parse_default_user([{passcode, Passcode} | Rest], Configuration) ->
default_passcode = Passcode});
parse_default_user([Unknown | Rest], Configuration) ->
rabbit_log:warning("rabbit_stomp: ignoring invalid default_user "
- "configuration option: ~p~n", [Unknown]),
+ "configuration option: ~p", [Unknown]),
parse_default_user(Rest, Configuration).
report_configuration(#stomp_configuration{
@@ -107,25 +98,27 @@ report_configuration(#stomp_configuration{
case Login of
undefined -> ok;
_ -> rabbit_log:info("rabbit_stomp: default user '~s' "
- "enabled~n", [Login])
+ "enabled", [Login])
end,
case ImplicitConnect of
- true -> rabbit_log:info("rabbit_stomp: implicit connect enabled~n");
+ true -> rabbit_log:info("rabbit_stomp: implicit connect enabled");
false -> ok
end,
case SSLCertLogin of
- true -> rabbit_log:info("rabbit_stomp: ssl_cert_login enabled~n");
+ true -> rabbit_log:info("rabbit_stomp: ssl_cert_login enabled");
false -> ok
end,
ok.
list() ->
- [Client
- || {_, ListSupPid, _, _} <- supervisor2:which_children(rabbit_stomp_sup),
- {_, RanchSup, supervisor, _} <- supervisor2:which_children(ListSupPid),
- {ranch_conns_sup, ConnSup, _, _} <- supervisor:which_children(RanchSup),
- {_, CliSup, _, _} <- supervisor:which_children(ConnSup),
- {rabbit_stomp_reader, Client, _, _} <- supervisor:which_children(CliSup)].
+ [Client ||
+ {_, ListSup, _, _} <- supervisor2:which_children(rabbit_stomp_sup),
+ {_, RanchEmbeddedSup, supervisor, _} <- supervisor2:which_children(ListSup),
+ {{ranch_listener_sup, _}, RanchListSup, _, _} <- supervisor:which_children(RanchEmbeddedSup),
+ {ranch_conns_sup_sup, RanchConnsSup, supervisor, _} <- supervisor2:which_children(RanchListSup),
+ {_, RanchConnSup, supervisor, _} <- supervisor2:which_children(RanchConnsSup),
+ {_, StompClientSup, supervisor, _} <- supervisor2:which_children(RanchConnSup),
+ {rabbit_stomp_reader, Client, _, _} <- supervisor:which_children(StompClientSup)].
diff --git a/deps/rabbitmq_stomp/src/rabbit_stomp_client_sup.erl b/deps/rabbitmq_stomp/src/rabbit_stomp_client_sup.erl
index d40e00f811..6548351a4f 100644
--- a/deps/rabbitmq_stomp/src/rabbit_stomp_client_sup.erl
+++ b/deps/rabbitmq_stomp/src/rabbit_stomp_client_sup.erl
@@ -1,17 +1,8 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at https://www.mozilla.org/MPL/
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_stomp_client_sup).
@@ -20,9 +11,9 @@
-include_lib("rabbit_common/include/rabbit.hrl").
--export([start_link/4, init/1]).
+-export([start_link/3, init/1]).
-start_link(Ref, _Sock, _Transport, Configuration) ->
+start_link(Ref, _Transport, Configuration) ->
{ok, SupPid} = supervisor2:start_link(?MODULE, []),
{ok, HelperPid} =
supervisor2:start_child(SupPid,
diff --git a/deps/rabbitmq_stomp/src/rabbit_stomp_connection_info.erl b/deps/rabbitmq_stomp/src/rabbit_stomp_connection_info.erl
index e1562796e3..3d4d46585a 100644
--- a/deps/rabbitmq_stomp/src/rabbit_stomp_connection_info.erl
+++ b/deps/rabbitmq_stomp/src/rabbit_stomp_connection_info.erl
@@ -1,18 +1,10 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at https://www.mozilla.org/MPL/
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2018-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
+
-module(rabbit_stomp_connection_info).
%% Note: this is necessary to prevent code:get_object_code from
diff --git a/deps/rabbitmq_stomp/src/rabbit_stomp_frame.erl b/deps/rabbitmq_stomp/src/rabbit_stomp_frame.erl
index 6b91dc3748..841b2d2c3b 100644
--- a/deps/rabbitmq_stomp/src/rabbit_stomp_frame.erl
+++ b/deps/rabbitmq_stomp/src/rabbit_stomp_frame.erl
@@ -1,21 +1,9 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at https://www.mozilla.org/MPL/
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
-%%
-
-%% stomp_frame implements the STOMP framing protocol "version 1.0", as
-%% per https://stomp.codehaus.org/Protocol
-module(rabbit_stomp_frame).
@@ -27,6 +15,7 @@
boolean_header/2, boolean_header/3,
integer_header/2, integer_header/3,
binary_header/2, binary_header/3]).
+-export([stream_offset_header/2]).
-export([serialize/1, serialize/2]).
initial_state() -> none.
@@ -222,6 +211,22 @@ binary_header(F, K) ->
binary_header(F, K, D) -> default_value(binary_header(F, K), D).
+stream_offset_header(F, D) ->
+ case binary_header(F, ?HEADER_X_STREAM_OFFSET, D) of
+ <<"first">> ->
+ {longstr, <<"first">>};
+ <<"last">> ->
+ {longstr, <<"last">>};
+ <<"next">> ->
+ {longstr, <<"next">>};
+ <<"offset=", OffsetValue/binary>> ->
+ {long, binary_to_integer(OffsetValue)};
+ <<"timestamp=", TimestampValue/binary>> ->
+ {timestamp, binary_to_integer(TimestampValue)};
+ _ ->
+ D
+ end.
+
serialize(Frame) ->
serialize(Frame, true).
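
The stream_offset_header/2 function added above translates the STOMP x-stream-offset header value into the field types RabbitMQ streams understand: "first", "last" and "next" become long strings, "offset=N" becomes a long integer, "timestamp=T" becomes a timestamp, and anything unrecognised falls back to the supplied default. A short Python restatement of that mapping, illustrative only and not part of the patch:

def stream_offset_header(value, default=None):
    # Tuple tags mirror the AMQP field types used in the Erlang code.
    if value in ("first", "last", "next"):
        return ("longstr", value)
    if value.startswith("offset="):
        return ("long", int(value[len("offset="):]))
    if value.startswith("timestamp="):
        return ("timestamp", int(value[len("timestamp="):]))
    return default

assert stream_offset_header("offset=5000") == ("long", 5000)
assert stream_offset_header("foo") is None
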
diff --git a/deps/rabbitmq_stomp/src/rabbit_stomp_internal_event_handler.erl b/deps/rabbitmq_stomp/src/rabbit_stomp_internal_event_handler.erl
index 47331312ce..e225bdc12c 100644
--- a/deps/rabbitmq_stomp/src/rabbit_stomp_internal_event_handler.erl
+++ b/deps/rabbitmq_stomp/src/rabbit_stomp_internal_event_handler.erl
@@ -1,17 +1,8 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at https://www.mozilla.org/MPL/
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_stomp_internal_event_handler).
diff --git a/deps/rabbitmq_stomp/src/rabbit_stomp_processor.erl b/deps/rabbitmq_stomp/src/rabbit_stomp_processor.erl
index 570a7a146a..4cbdaf10ba 100644
--- a/deps/rabbitmq_stomp/src/rabbit_stomp_processor.erl
+++ b/deps/rabbitmq_stomp/src/rabbit_stomp_processor.erl
@@ -1,21 +1,14 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at https://www.mozilla.org/MPL/
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_stomp_processor).
+-compile({no_auto_import, [error/3]}).
+
-export([initial_state/2, process_frame/2, flush_and_die/1]).
-export([flush_pending_receipts/3,
handle_exit/3,
@@ -618,25 +611,24 @@ do_login(Username, Passwd, VirtualHost, Heartbeat, AdapterInfo, Version,
connection = Connection,
version = Version});
{error, {auth_failure, _}} ->
- rabbit_log:warning("STOMP login failed for user ~p~n",
- [binary_to_list(Username)]),
+ rabbit_log:warning("STOMP login failed for user '~s': authentication failed", [Username]),
error("Bad CONNECT", "Access refused for user '" ++
binary_to_list(Username) ++ "'~n", [], State);
{error, not_allowed} ->
- rabbit_log:warning("STOMP login failed - not_allowed "
- "(vhost access not allowed)~n"),
+ rabbit_log:warning("STOMP login failed for user '~s': "
+ "virtual host access not allowed", [Username]),
error("Bad CONNECT", "Virtual host '" ++
binary_to_list(VirtualHost) ++
"' access denied", State);
{error, access_refused} ->
- rabbit_log:warning("STOMP login failed - access_refused "
- "(vhost access not allowed)~n"),
+ rabbit_log:warning("STOMP login failed for user '~s': "
+ "virtual host access not allowed", [Username]),
error("Bad CONNECT", "Virtual host '" ++
binary_to_list(VirtualHost) ++
"' access denied", State);
{error, not_loopback} ->
- rabbit_log:warning("STOMP login failed - access_refused "
- "(user must access over loopback)~n"),
+ rabbit_log:warning("STOMP login failed for user '~s': "
+ "this user's access is restricted to localhost", [Username]),
error("Bad CONNECT", "non-loopback access denied", State)
end.
@@ -684,6 +676,13 @@ do_subscribe(Destination, DestHdr, Frame,
{stop, normal, close_connection(State)};
error ->
ExchangeAndKey = parse_routing(Destination, DfltTopicEx),
+ StreamOffset = rabbit_stomp_frame:stream_offset_header(Frame, undefined),
+ Arguments = case StreamOffset of
+ undefined ->
+ [];
+ {Type, Value} ->
+ [{<<"x-stream-offset">>, Type, Value}]
+ end,
try
amqp_channel:subscribe(Channel,
#'basic.consume'{
@@ -692,7 +691,7 @@ do_subscribe(Destination, DestHdr, Frame,
no_local = false,
no_ack = (AckMode == auto),
exclusive = false,
- arguments = []},
+ arguments = Arguments},
self()),
ok = rabbit_routing_util:ensure_binding(
Queue, ExchangeAndKey, Channel)
@@ -828,7 +827,7 @@ send_delivery(Delivery = #'basic.deliver'{consumer_tag = ConsumerTag},
NewState.
notify_received(undefined) ->
- %% no notification for quorum queues
+ %% no notification for quorum queues and streams
ok;
notify_received(DeliveryCtx) ->
%% notification for flow control
@@ -1133,12 +1132,12 @@ ok(Command, Headers, BodyFragments, State) ->
body_iolist = BodyFragments}, State}.
amqp_death(access_refused = ErrorName, Explanation, State) ->
- ErrorDesc = rabbit_misc:format("~s~n", [Explanation]),
+ ErrorDesc = rabbit_misc:format("~s", [Explanation]),
log_error(ErrorName, ErrorDesc, none),
{stop, normal, close_connection(send_error(atom_to_list(ErrorName), ErrorDesc, State))};
amqp_death(ReplyCode, Explanation, State) ->
ErrorName = amqp_connection:error_atom(ReplyCode),
- ErrorDesc = rabbit_misc:format("~s~n", [Explanation]),
+ ErrorDesc = rabbit_misc:format("~s", [Explanation]),
log_error(ErrorName, ErrorDesc, none),
{stop, normal, close_connection(send_error(atom_to_list(ErrorName), ErrorDesc, State))}.
@@ -1160,7 +1159,7 @@ log_error(Message, Detail, ServerPrivateDetail) ->
rabbit_log:error("STOMP error frame sent:~n"
"Message: ~p~n"
"Detail: ~p~n"
- "Server private detail: ~p~n",
+ "Server private detail: ~p",
[Message, Detail, ServerPrivateDetail]).
%%----------------------------------------------------------------------------
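
The do_subscribe change above forwards a parsed stream offset to the broker as an x-stream-offset argument on basic.consume, so a STOMP subscription can start reading a stream from a chosen position. A hedged pika sketch of an equivalent AMQP 0-9-1 subscription; the queue name, host and callback are placeholders, not values taken from this patch:

import pika  # any AMQP 0-9-1 client that supports consumer arguments works

# Consume from a stream starting at offset 5000. Streams require manual acks
# and a prefetch limit, hence auto_ack=False and basic_qos below.
conn = pika.BlockingConnection(pika.ConnectionParameters("localhost"))
ch = conn.channel()
ch.basic_qos(prefetch_count=100)
ch.basic_consume(
    queue="stomp-subscription-queue",  # placeholder queue name
    on_message_callback=lambda c, m, p, body: c.basic_ack(m.delivery_tag),
    auto_ack=False,
    arguments={"x-stream-offset": 5000},  # same argument the STOMP plugin now sets
)
ch.start_consuming()
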
diff --git a/deps/rabbitmq_stomp/src/rabbit_stomp_reader.erl b/deps/rabbitmq_stomp/src/rabbit_stomp_reader.erl
index 8f081d618f..7787e7a372 100644
--- a/deps/rabbitmq_stomp/src/rabbit_stomp_reader.erl
+++ b/deps/rabbitmq_stomp/src/rabbit_stomp_reader.erl
@@ -1,17 +1,8 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at https://www.mozilla.org/MPL/
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_stomp_reader).
@@ -70,7 +61,7 @@ init([SupHelperPid, Ref, Configuration]) ->
ProcState = rabbit_stomp_processor:initial_state(Configuration,
ProcInitArgs),
- rabbit_log_connection:info("accepting STOMP connection ~p (~s)~n",
+ rabbit_log_connection:info("accepting STOMP connection ~p (~s)",
[self(), ConnStr]),
ParseState = rabbit_stomp_frame:initial_state(),
@@ -298,7 +289,7 @@ code_change(_OldVsn, State, _Extra) ->
log_reason({network_error, {ssl_upgrade_error, closed}, ConnStr}, _State) ->
- rabbit_log_connection:error("STOMP detected TLS upgrade error on ~s: connection closed~n",
+ rabbit_log_connection:error("STOMP detected TLS upgrade error on ~s: connection closed",
[ConnStr]);
@@ -319,46 +310,46 @@ log_reason({network_error,
{tls_alert, Alert}}, ConnStr}, _State) ->
log_tls_alert(Alert, ConnStr);
log_reason({network_error, {ssl_upgrade_error, Reason}, ConnStr}, _State) ->
- rabbit_log_connection:error("STOMP detected TLS upgrade error on ~s: ~p~n",
+ rabbit_log_connection:error("STOMP detected TLS upgrade error on ~s: ~p",
[ConnStr, Reason]);
log_reason({network_error, Reason, ConnStr}, _State) ->
- rabbit_log_connection:error("STOMP detected network error on ~s: ~p~n",
+ rabbit_log_connection:error("STOMP detected network error on ~s: ~p",
[ConnStr, Reason]);
log_reason({network_error, Reason}, _State) ->
- rabbit_log_connection:error("STOMP detected network error: ~p~n", [Reason]);
+ rabbit_log_connection:error("STOMP detected network error: ~p", [Reason]);
log_reason({shutdown, client_heartbeat_timeout},
#reader_state{ processor_state = ProcState }) ->
AdapterName = rabbit_stomp_processor:adapter_name(ProcState),
rabbit_log_connection:warning("STOMP detected missed client heartbeat(s) "
- "on connection ~s, closing it~n", [AdapterName]);
+ "on connection ~s, closing it", [AdapterName]);
log_reason({shutdown, {server_initiated_close, Reason}},
#reader_state{conn_name = ConnName}) ->
- rabbit_log_connection:info("closing STOMP connection ~p (~s), reason: ~s~n",
+ rabbit_log_connection:info("closing STOMP connection ~p (~s), reason: ~s",
[self(), ConnName, Reason]);
log_reason(normal, #reader_state{conn_name = ConnName}) ->
- rabbit_log_connection:info("closing STOMP connection ~p (~s)~n", [self(), ConnName]);
+ rabbit_log_connection:info("closing STOMP connection ~p (~s)", [self(), ConnName]);
log_reason(shutdown, undefined) ->
- rabbit_log_connection:error("closing STOMP connection that never completed connection handshake (negotiation)~n", []);
+ rabbit_log_connection:error("closing STOMP connection that never completed connection handshake (negotiation)");
log_reason(Reason, #reader_state{processor_state = ProcState}) ->
AdapterName = rabbit_stomp_processor:adapter_name(ProcState),
rabbit_log_connection:warning("STOMP connection ~s terminated"
- " with reason ~p, closing it~n", [AdapterName, Reason]).
+ " with reason ~p, closing it", [AdapterName, Reason]).
log_tls_alert(handshake_failure, ConnStr) ->
- rabbit_log_connection:error("STOMP detected TLS upgrade error on ~s: handshake failure~n",
+ rabbit_log_connection:error("STOMP detected TLS upgrade error on ~s: handshake failure",
[ConnStr]);
log_tls_alert(unknown_ca, ConnStr) ->
- rabbit_log_connection:error("STOMP detected TLS certificate verification error on ~s: alert 'unknown CA'~n",
+ rabbit_log_connection:error("STOMP detected TLS certificate verification error on ~s: alert 'unknown CA'",
[ConnStr]);
log_tls_alert(Alert, ConnStr) ->
- rabbit_log_connection:error("STOMP detected TLS upgrade error on ~s: alert ~s~n",
+ rabbit_log_connection:error("STOMP detected TLS upgrade error on ~s: alert ~s",
[ConnStr, Alert]).
diff --git a/deps/rabbitmq_stomp/src/rabbit_stomp_sup.erl b/deps/rabbitmq_stomp/src/rabbit_stomp_sup.erl
index ee74569af9..0036dbf151 100644
--- a/deps/rabbitmq_stomp/src/rabbit_stomp_sup.erl
+++ b/deps/rabbitmq_stomp/src/rabbit_stomp_sup.erl
@@ -1,17 +1,8 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at https://www.mozilla.org/MPL/
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_stomp_sup).
@@ -28,6 +19,7 @@ start_link(Listeners, Configuration) ->
init([{Listeners, SslListeners0}, Configuration]) ->
NumTcpAcceptors = application:get_env(rabbitmq_stomp, num_tcp_acceptors, 10),
+ ConcurrentConnsSups = application:get_env(rabbitmq_stomp, num_conns_sups, 1),
{ok, SocketOpts} = application:get_env(rabbitmq_stomp, tcp_listen_options),
{SslOpts, NumSslAcceptors, SslListeners}
= case SslListeners0 of
@@ -46,9 +38,11 @@ init([{Listeners, SslListeners0}, Configuration]) ->
},
{ok, {Flags,
listener_specs(fun tcp_listener_spec/1,
- [SocketOpts, Configuration, NumTcpAcceptors], Listeners) ++
+ [SocketOpts, Configuration, NumTcpAcceptors, ConcurrentConnsSups],
+ Listeners) ++
listener_specs(fun ssl_listener_spec/1,
- [SocketOpts, SslOpts, Configuration, NumSslAcceptors], SslListeners)}}.
+ [SocketOpts, SslOpts, Configuration, NumSslAcceptors, ConcurrentConnsSups],
+ SslListeners)}}.
stop_listeners() ->
rabbit_networking:stop_ranch_listener_of_protocol(?TCP_PROTOCOL),
@@ -64,17 +58,17 @@ listener_specs(Fun, Args, Listeners) ->
Listener <- Listeners,
Address <- rabbit_networking:tcp_listener_addresses(Listener)].
-tcp_listener_spec([Address, SocketOpts, Configuration, NumAcceptors]) ->
+tcp_listener_spec([Address, SocketOpts, Configuration, NumAcceptors, ConcurrentConnsSups]) ->
rabbit_networking:tcp_listener_spec(
rabbit_stomp_listener_sup, Address, SocketOpts,
transport(?TCP_PROTOCOL), rabbit_stomp_client_sup, Configuration,
- stomp, NumAcceptors, "STOMP TCP listener").
+ stomp, NumAcceptors, ConcurrentConnsSups, "STOMP TCP listener").
-ssl_listener_spec([Address, SocketOpts, SslOpts, Configuration, NumAcceptors]) ->
+ssl_listener_spec([Address, SocketOpts, SslOpts, Configuration, NumAcceptors, ConcurrentConnsSups]) ->
rabbit_networking:tcp_listener_spec(
rabbit_stomp_listener_sup, Address, SocketOpts ++ SslOpts,
transport(?TLS_PROTOCOL), rabbit_stomp_client_sup, Configuration,
- 'stomp/ssl', NumAcceptors, "STOMP TLS listener").
+ 'stomp/ssl', NumAcceptors, ConcurrentConnsSups, "STOMP TLS listener").
transport(Protocol) ->
case Protocol of
diff --git a/deps/rabbitmq_stomp/src/rabbit_stomp_util.erl b/deps/rabbitmq_stomp/src/rabbit_stomp_util.erl
index 6df1affbb7..bdccbc7934 100644
--- a/deps/rabbitmq_stomp/src/rabbit_stomp_util.erl
+++ b/deps/rabbitmq_stomp/src/rabbit_stomp_util.erl
@@ -1,17 +1,8 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at https://www.mozilla.org/MPL/
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_stomp_util).
@@ -115,6 +106,8 @@ adhoc_convert_headers(Headers, Existing) ->
[{binary_to_list(K), binary_to_list(V)} | Acc];
({K, signedint, V}, Acc) ->
[{binary_to_list(K), integer_to_list(V)} | Acc];
+ ({K, long, V}, Acc) ->
+ [{binary_to_list(K), integer_to_list(V)} | Acc];
(_, Acc) ->
Acc
end, Existing, Headers).
diff --git a/deps/rabbitmq_stomp/test/command_SUITE.erl b/deps/rabbitmq_stomp/test/command_SUITE.erl
index 8fe9fa0d0f..f4f8a5b9d0 100644
--- a/deps/rabbitmq_stomp/test/command_SUITE.erl
+++ b/deps/rabbitmq_stomp/test/command_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(command_SUITE).
diff --git a/deps/rabbitmq_stomp/test/config_schema_SUITE.erl b/deps/rabbitmq_stomp/test/config_schema_SUITE.erl
index 8d340810f7..06d5be22bc 100644
--- a/deps/rabbitmq_stomp/test/config_schema_SUITE.erl
+++ b/deps/rabbitmq_stomp/test/config_schema_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(config_schema_SUITE).
diff --git a/deps/rabbitmq_stomp/test/connections_SUITE.erl b/deps/rabbitmq_stomp/test/connections_SUITE.erl
index 4f9b027bb9..cc57ec9283 100644
--- a/deps/rabbitmq_stomp/test/connections_SUITE.erl
+++ b/deps/rabbitmq_stomp/test/connections_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(connections_SUITE).
diff --git a/deps/rabbitmq_stomp/test/frame_SUITE.erl b/deps/rabbitmq_stomp/test/frame_SUITE.erl
index da191ac12a..93f21c22ee 100644
--- a/deps/rabbitmq_stomp/test/frame_SUITE.erl
+++ b/deps/rabbitmq_stomp/test/frame_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(frame_SUITE).
@@ -38,7 +38,8 @@ all() ->
header_value_with_cr,
header_value_with_colon,
headers_escaping_roundtrip,
- headers_escaping_roundtrip_without_trailing_lf
+ headers_escaping_roundtrip_without_trailing_lf,
+ stream_offset_header
].
parse_simple_frame(_) ->
@@ -162,6 +163,24 @@ header_value_with_colon(_) ->
headers = [{"header", "val:ue"}],
body_iolist = []}).
+stream_offset_header(_) ->
+ TestCases = [
+ {{"x-stream-offset", "first"}, {longstr, <<"first">>}},
+ {{"x-stream-offset", "last"}, {longstr, <<"last">>}},
+ {{"x-stream-offset", "next"}, {longstr, <<"next">>}},
+ {{"x-stream-offset", "offset=5000"}, {long, 5000}},
+ {{"x-stream-offset", "timestamp=1000"}, {timestamp, 1000}},
+ {{"x-stream-offset", "foo"}, undefined},
+ {{"some-header", "some value"}, undefined}
+ ],
+
+ lists:foreach(fun({Header, Expected}) ->
+ ?assertEqual(
+ Expected,
+ rabbit_stomp_frame:stream_offset_header(#stomp_frame{headers = [Header]}, undefined)
+ )
+ end, TestCases).
+
test_frame_serialization(Expected, TrailingLF) ->
{ok, Frame, _} = parse(Expected),
{ok, Val} = rabbit_stomp_frame:header(Frame, "head\r:\ner"),
diff --git a/deps/rabbitmq_stomp/test/proxy_protocol_SUITE.erl b/deps/rabbitmq_stomp/test/proxy_protocol_SUITE.erl
index 46c1c6c743..efb48c8dc9 100644
--- a/deps/rabbitmq_stomp/test/proxy_protocol_SUITE.erl
+++ b/deps/rabbitmq_stomp/test/proxy_protocol_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(proxy_protocol_SUITE).
@@ -68,7 +68,7 @@ proxy_protocol(Config) ->
{ok, _Packet} = gen_tcp:recv(Socket, 0, ?TIMEOUT),
ConnectionName = rabbit_ct_broker_helpers:rpc(Config, 0,
?MODULE, connection_name, []),
- match = re:run(ConnectionName, <<"^192.168.1.1:80 ">>, [{capture, none}]),
+ match = re:run(ConnectionName, <<"^192.168.1.1:80 -> 192.168.1.2:81$">>, [{capture, none}]),
gen_tcp:close(Socket),
ok.
@@ -83,7 +83,7 @@ proxy_protocol_tls(Config) ->
{ok, _Packet} = ssl:recv(SslSocket, 0, ?TIMEOUT),
ConnectionName = rabbit_ct_broker_helpers:rpc(Config, 0,
?MODULE, connection_name, []),
- match = re:run(ConnectionName, <<"^192.168.1.1:80 ">>, [{capture, none}]),
+ match = re:run(ConnectionName, <<"^192.168.1.1:80 -> 192.168.1.2:81$">>, [{capture, none}]),
gen_tcp:close(Socket),
ok.
@@ -101,4 +101,4 @@ stomp_connect_frame() ->
"login:guest\n",
"passcode:guest\n",
"\n",
- 0>>.
\ No newline at end of file
+ 0>>.
diff --git a/deps/rabbitmq_stomp/test/python_SUITE.erl b/deps/rabbitmq_stomp/test/python_SUITE.erl
index 9613b25032..84ceefe1ec 100644
--- a/deps/rabbitmq_stomp/test/python_SUITE.erl
+++ b/deps/rabbitmq_stomp/test/python_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(python_SUITE).
@@ -11,16 +11,31 @@
all() ->
[
- common,
- ssl,
- connect_options
+ %% This must use a dedicated node as they mess with plugin configuration in incompatible ways
+ {group, tls},
+ {group, implicit_connect},
+ {group, main}
].
-init_per_testcase(TestCase, Config) ->
- Suffix = rabbit_ct_helpers:testcase_absname(Config, TestCase, "-"),
+groups() ->
+ [
+ {main, [], [
+ main
+ ]},
+ {implicit_connect, [], [
+ implicit_connect
+ ]},
+ {tls, [], [
+ tls_connections
+ ]}
+ ].
+
+init_per_group(_, Config) ->
Config1 = rabbit_ct_helpers:set_config(Config,
- [{rmq_certspwd, "bunnychow"},
- {rmq_nodename_suffix, Suffix}]),
+ [
+ {rmq_nodename_suffix, ?MODULE},
+ {rmq_certspwd, "bunnychow"}
+ ]),
rabbit_ct_helpers:log_environment(),
Config2 = rabbit_ct_helpers:run_setup_steps(
Config1,
@@ -32,19 +47,26 @@ init_per_testcase(TestCase, Config) ->
rabbit_ct_helpers:make(Config2, StomppyDir, []),
Config2.
-end_per_testcase(_, Config) ->
+end_per_group(_, Config) ->
rabbit_ct_helpers:run_teardown_steps(Config,
rabbit_ct_broker_helpers:teardown_steps()).
+init_per_testcase(Test, Config) ->
+ rabbit_ct_helpers:testcase_started(Config, Test).
+
+end_per_testcase(Test, Config) ->
+ rabbit_ct_helpers:testcase_finished(Config, Test).
+
+
+main(Config) ->
+ run(Config, filename:join("src", "main_runner.py")).
-common(Config) ->
- run(Config, filename:join("src", "test.py")).
+implicit_connect(Config) ->
+ run(Config, filename:join("src", "implicit_connect_runner.py")).
-connect_options(Config) ->
- run(Config, filename:join("src", "test_connect_options.py")).
+tls_connections(Config) ->
+ run(Config, filename:join("src", "tls_runner.py")).
-ssl(Config) ->
- run(Config, filename:join("src", "test_ssl.py")).
run(Config, Test) ->
DataDir = ?config(data_dir, Config),
@@ -53,12 +75,13 @@ run(Config, Test) ->
StompPortTls = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_stomp_tls),
AmqpPort = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_amqp),
NodeName = rabbit_ct_broker_helpers:get_node_config(Config, 0, nodename),
- PythonPath = os:getenv("PYTHONPATH"),
- os:putenv("PYTHONPATH", filename:join([DataDir, "deps", "pika","pika"])
- ++":"++
- filename:join([DataDir, "deps", "stomppy", "stomppy"])
- ++ ":" ++
- PythonPath),
+ PikaPath = filename:join([DataDir, "deps", "pika","pika"]),
+ StomppyPath = filename:join([DataDir, "deps", "stomppy", "stomppy"]),
+ PythonPath = case os:getenv("PYTHONPATH") of
+ false -> PikaPath ++ ":" ++ StomppyPath;
+ P -> PikaPath ++ ":" ++ StomppyPath ++ ":" ++ P
+ end,
+ os:putenv("PYTHONPATH", PythonPath),
os:putenv("AMQP_PORT", integer_to_list(AmqpPort)),
os:putenv("STOMP_PORT", integer_to_list(StompPort)),
os:putenv("STOMP_PORT_TLS", integer_to_list(StompPortTls)),
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/deps/stomppy/Makefile b/deps/rabbitmq_stomp/test/python_SUITE_data/deps/stomppy/Makefile
index 40f5bd1db7..f1fb64c3bd 100644
--- a/deps/rabbitmq_stomp/test/python_SUITE_data/deps/stomppy/Makefile
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/deps/stomppy/Makefile
@@ -1,5 +1,5 @@
UPSTREAM_GIT=https://github.com/jasonrbriggs/stomp.py.git
-REVISION=v4.0.16
+REVISION=v6.1.0
LIB_DIR=stomppy
CHECKOUT_DIR=stomppy-git
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/ack.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/ack.py
index 9103bc76ea..b8b60dcbf2 100644
--- a/deps/rabbitmq_stomp/test/python_SUITE_data/src/ack.py
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/ack.py
@@ -24,7 +24,7 @@ class TestAck(base.BaseTest):
self.conn.send(destination, "test1")
self.conn.send(destination, "test2")
self.assertTrue(self.listener.wait(4), "initial message not received")
- self.assertEquals(2, len(self.listener.messages))
+ self.assertEqual(2, len(self.listener.messages))
# disconnect with no ack
self.conn.disconnect()
@@ -39,7 +39,7 @@ class TestAck(base.BaseTest):
ack='client',
headers={'prefetch-count': '10'})
self.assertTrue(listener2.wait(), "message not received again")
- self.assertEquals(2, len(listener2.messages))
+ self.assertEqual(2, len(listener2.messages))
# now ack only the last message - expecting cumulative behaviour
mid = listener2.messages[1]['headers'][self.ack_id_source_header]
@@ -69,7 +69,7 @@ class TestAck(base.BaseTest):
self.conn.send(destination, "test1")
self.conn.send(destination, "test2")
self.assertTrue(self.listener.wait(4), "Both initial messages not received")
- self.assertEquals(2, len(self.listener.messages))
+ self.assertEqual(2, len(self.listener.messages))
# disconnect without acks
self.conn.disconnect()
@@ -84,7 +84,7 @@ class TestAck(base.BaseTest):
ack='client-individual',
headers={'prefetch-count': '10'})
self.assertTrue(listener2.wait(2.5), "Did not receive 2 messages")
- self.assertEquals(2, len(listener2.messages), "Not exactly 2 messages received")
+ self.assertEqual(2, len(listener2.messages), "Not exactly 2 messages received")
# now ack only the 'test2' message - expecting individual behaviour
nummsgs = len(listener2.messages)
@@ -92,7 +92,7 @@ class TestAck(base.BaseTest):
for ind in range(nummsgs):
if listener2.messages[ind]['message']=="test2":
mid = listener2.messages[ind]['headers'][self.ack_id_source_header]
- self.assertEquals(1, ind, 'Expecting test2 to be second message')
+ self.assertEqual(1, ind, 'Expecting test2 to be second message')
break
self.assertTrue(mid, "Did not find test2 message id.")
self.ack_message(conn2, mid, None)
@@ -108,8 +108,8 @@ class TestAck(base.BaseTest):
self.subscribe_dest(conn3, destination, None)
self.assertFalse(listener3.wait(2.5),
"Expected to see only one message. ACK not working?")
- self.assertEquals(1, len(listener3.messages), "Expecting exactly one message")
- self.assertEquals("test1", listener3.messages[0]['message'], "Unexpected message remains")
+ self.assertEqual(1, len(listener3.messages), "Expecting exactly one message")
+ self.assertEqual("test1", listener3.messages[0]['message'], "Unexpected message remains")
finally:
conn3.disconnect()
@@ -121,7 +121,7 @@ class TestAck(base.BaseTest):
self.subscribe_dest(self.conn, destination, None, ack='client')
self.conn.send(destination, "test")
self.assertTrue(self.listener.wait(3), "initial message not received")
- self.assertEquals(1, len(self.listener.messages))
+ self.assertEqual(1, len(self.listener.messages))
# disconnect with no ack
self.conn.disconnect()
@@ -135,7 +135,7 @@ class TestAck(base.BaseTest):
conn2.begin(transaction=tx)
self.subscribe_dest(conn2, destination, None, ack='client')
self.assertTrue(listener2.wait(), "message not received again")
- self.assertEquals(1, len(listener2.messages))
+ self.assertEqual(1, len(listener2.messages))
# now ack
mid = listener2.messages[0]['headers'][self.ack_id_source_header]
@@ -171,7 +171,7 @@ class TestAck(base.BaseTest):
self.assertFalse(self.listener.wait(3),
"Should not have been able to see 6 messages")
- self.assertEquals(5, len(self.listener.messages))
+ self.assertEqual(5, len(self.listener.messages))
def test_nack(self):
destination = "/queue/nack-test"
@@ -236,7 +236,7 @@ class TestAck11(TestAck):
return conn
def test_version(self):
- self.assertEquals('1.1', self.conn.version)
+ self.assertEqual('1.1', self.conn.version)
class TestAck12(TestAck):
@@ -249,4 +249,13 @@ class TestAck12(TestAck):
return conn
def test_version(self):
- self.assertEquals('1.2', self.conn.version)
+ self.assertEqual('1.2', self.conn.version)
+
+
+
+if __name__ == '__main__':
+ import test_runner
+ modules = [
+ __name__
+ ]
+ test_runner.run_unittests(modules)
\ No newline at end of file
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/amqp_headers.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/amqp_headers.py
index 2c5ee45a8e..9b61b723a2 100644
--- a/deps/rabbitmq_stomp/test/python_SUITE_data/src/amqp_headers.py
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/amqp_headers.py
@@ -31,12 +31,20 @@ class TestAmqpHeaders(base.BaseTest):
# check if we receive the message from the STOMP subscription
self.assertTrue(self.listener.wait(2), "initial message not received")
- self.assertEquals(1, len(self.listener.messages))
+ self.assertEqual(1, len(self.listener.messages))
msg = self.listener.messages[0]
- self.assertEquals('Hello World!', msg['message'])
- self.assertEquals('value1', msg['headers']['x-custom-hdr-1'])
- self.assertEquals('value2', msg['headers']['x-custom-hdr-2'])
- self.assertEquals('value3', msg['headers']['custom-hdr-3'])
+ self.assertEqual('Hello World!', msg['message'])
+ self.assertEqual('value1', msg['headers']['x-custom-hdr-1'])
+ self.assertEqual('value2', msg['headers']['x-custom-hdr-2'])
+ self.assertEqual('value3', msg['headers']['custom-hdr-3'])
self.conn.disconnect()
amqp_conn.close()
+
+
+if __name__ == '__main__':
+ import test_runner
+ modules = [
+ __name__
+ ]
+ test_runner.run_unittests(modules)
\ No newline at end of file
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/base.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/base.py
index a8f7ef59b9..6ea54de275 100644
--- a/deps/rabbitmq_stomp/test/python_SUITE_data/src/base.py
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/base.py
@@ -1,8 +1,8 @@
-## This Source Code Form is subject to the terms of the Mozilla Public
-## License, v. 2.0. If a copy of the MPL was not distributed with this
-## file, You can obtain one at https://mozilla.org/MPL/2.0/.
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at https://mozilla.org/MPL/2.0/.
##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+# Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
##
import unittest
@@ -10,112 +10,128 @@ import stomp
import sys
import threading
import os
+import time
+import random
+from stomp.listener import ConnectionListener
-class BaseTest(unittest.TestCase):
- def create_connection_obj(self, version='1.0', vhost='/', heartbeats=(0, 0)):
- if version == '1.0':
- conn = stomp.StompConnection10(host_and_ports=[('localhost', int(os.environ["STOMP_PORT"]))])
- self.ack_id_source_header = 'message-id'
- self.ack_id_header = 'message-id'
- elif version == '1.1':
- conn = stomp.StompConnection11(host_and_ports=[('localhost', int(os.environ["STOMP_PORT"]))],
- vhost=vhost,
- heartbeats=heartbeats)
- self.ack_id_source_header = 'message-id'
- self.ack_id_header = 'message-id'
- elif version == '1.2':
- conn = stomp.StompConnection12(host_and_ports=[('localhost', int(os.environ["STOMP_PORT"]))],
- vhost=vhost,
- heartbeats=heartbeats)
- self.ack_id_source_header = 'ack'
- self.ack_id_header = 'id'
- else:
- conn = stomp.StompConnection12(host_and_ports=[('localhost', int(os.environ["STOMP_PORT"]))],
- vhost=vhost,
- heartbeats=heartbeats)
- conn.version = version
- return conn
-
- def create_connection(self, user='guest', passcode='guest', wait=True, **kwargs):
- conn = self.create_connection_obj(**kwargs)
- conn.start()
- conn.connect(user, passcode, wait=wait)
- return conn
-
- def subscribe_dest(self, conn, destination, sub_id, **kwargs):
- if type(conn) is stomp.StompConnection10:
- # 'id' is optional in STOMP 1.0.
- if sub_id != None:
- kwargs['id'] = sub_id
- conn.subscribe(destination, **kwargs)
- else:
- # 'id' is required in STOMP 1.1+.
- if sub_id == None:
- sub_id = 'ctag'
- conn.subscribe(destination, sub_id, **kwargs)
-
- def unsubscribe_dest(self, conn, destination, sub_id, **kwargs):
- if type(conn) is stomp.StompConnection10:
- # 'id' is optional in STOMP 1.0.
- if sub_id != None:
- conn.unsubscribe(id=sub_id, **kwargs)
- else:
- conn.unsubscribe(destination=destination, **kwargs)
- else:
- # 'id' is required in STOMP 1.1+.
- if sub_id == None:
- sub_id = 'ctag'
- conn.unsubscribe(sub_id, **kwargs)
-
- def ack_message(self, conn, msg_id, sub_id, **kwargs):
- if type(conn) is stomp.StompConnection10:
- conn.ack(msg_id, **kwargs)
- elif type(conn) is stomp.StompConnection11:
- if sub_id == None:
- sub_id = 'ctag'
- conn.ack(msg_id, sub_id, **kwargs)
- elif type(conn) is stomp.StompConnection12:
- conn.ack(msg_id, **kwargs)
-
- def nack_message(self, conn, msg_id, sub_id, **kwargs):
- if type(conn) is stomp.StompConnection10:
- # Normally unsupported by STOMP 1.0.
- conn.send_frame("NACK", {"message-id": msg_id})
- elif type(conn) is stomp.StompConnection11:
- if sub_id == None:
- sub_id = 'ctag'
- conn.nack(msg_id, sub_id, **kwargs)
- elif type(conn) is stomp.StompConnection12:
- conn.nack(msg_id, **kwargs)
-
- def create_subscriber_connection(self, dest):
- conn = self.create_connection()
- listener = WaitableListener()
- conn.set_listener('', listener)
- self.subscribe_dest(conn, dest, None, receipt="sub.receipt")
- listener.wait()
- self.assertEquals(1, len(listener.receipts))
- listener.reset()
- return conn, listener
-
- def setUp(self):
- # Note: useful for debugging
- # import stomp.listener
+class BaseTest(unittest.TestCase):
+ def await_condition(self, condition, timeout=5):
+ cond = threading.Condition()
+ try:
+ cond.acquire()
+ cond.wait_for(lambda: condition(), timeout)
+ finally:
+ cond.release()
+
+ def create_connection_obj(self, version='1.0', vhost='/', heartbeats=(0, 0)):
+ if version == '1.0':
+ conn = stomp.StompConnection10(
+ host_and_ports=[('localhost', int(os.environ["STOMP_PORT"]))])
+ self.ack_id_source_header = 'message-id'
+ self.ack_id_header = 'message-id'
+ elif version == '1.1':
+ conn = stomp.StompConnection11(host_and_ports=[('localhost', int(os.environ["STOMP_PORT"]))],
+ vhost=vhost,
+ heartbeats=heartbeats)
+ self.ack_id_source_header = 'message-id'
+ self.ack_id_header = 'message-id'
+ elif version == '1.2':
+ conn = stomp.StompConnection12(host_and_ports=[('localhost', int(os.environ["STOMP_PORT"]))],
+ vhost=vhost,
+ heartbeats=heartbeats)
+ self.ack_id_source_header = 'ack'
+ self.ack_id_header = 'id'
+ else:
+ conn = stomp.StompConnection12(host_and_ports=[('localhost', int(os.environ["STOMP_PORT"]))],
+ vhost=vhost,
+ heartbeats=heartbeats)
+ conn.version = version
+ return conn
+
+ def create_connection(self, user='guest', passcode='guest', wait=True, **kwargs):
+ conn = self.create_connection_obj(**kwargs)
+ conn.connect(user, passcode, wait=wait)
+ return conn
+
+ def subscribe_dest(self, conn, destination, sub_id, **kwargs):
+ if type(conn) is stomp.StompConnection10:
+ # 'id' is optional in STOMP 1.0.
+ if sub_id != None:
+ kwargs['id'] = sub_id
+ conn.subscribe(destination, **kwargs)
+ else:
+ # 'id' is required in STOMP 1.1+.
+ if sub_id == None:
+ sub_id = 'ctag'
+ conn.subscribe(destination, sub_id, **kwargs)
+
+ def unsubscribe_dest(self, conn, destination, sub_id, **kwargs):
+ if type(conn) is stomp.StompConnection10:
+ # 'id' is optional in STOMP 1.0.
+ if sub_id != None:
+ conn.unsubscribe(id=sub_id, **kwargs)
+ else:
+ conn.unsubscribe(destination=destination, **kwargs)
+ else:
+ # 'id' is required in STOMP 1.1+.
+ if sub_id == None:
+ sub_id = 'stomp-sub-id {}'.format(random.randint(0, 1000))
+ conn.unsubscribe(sub_id, **kwargs)
+
+ def ack_message(self, conn, msg_id, sub_id, **kwargs):
+ if type(conn) is stomp.StompConnection10:
+ conn.ack(msg_id, **kwargs)
+ elif type(conn) is stomp.StompConnection11:
+ if sub_id == None:
+ sub_id = 'stomp-sub-id {}'.format(random.randint(0, 1000))
+ conn.ack(msg_id, sub_id, **kwargs)
+ elif type(conn) is stomp.StompConnection12:
+ conn.ack(msg_id, **kwargs)
+
+ def nack_message(self, conn, msg_id, sub_id, **kwargs):
+ if type(conn) is stomp.StompConnection10:
+ # Normally unsupported by STOMP 1.0.
+ conn.send_frame("NACK", {"message-id": msg_id})
+ elif type(conn) is stomp.StompConnection11:
+ if sub_id == None:
+ sub_id = 'stomp-sub-id {}'.format(random.randint(0, 1000))
+ conn.nack(msg_id, sub_id, **kwargs)
+ elif type(conn) is stomp.StompConnection12:
+ conn.nack(msg_id, **kwargs)
+
+ def create_subscriber_connection(self, dest):
+ conn = self.create_connection()
+ listener = WaitableListener()
+ conn.set_listener('', listener)
+ self.subscribe_dest(conn, dest, None, receipt="sub.receipt")
+ listener.wait()
+ self.assertEqual(1, len(listener.receipts))
+ listener.reset()
+ return conn, listener
+
+ def setUp(self):
+ # Note: useful for debugging
+ # import stomp.listener
self.conn = self.create_connection()
self.listener = WaitableListener()
self.conn.set_listener('waitable', self.listener)
# Note: useful for debugging
# self.printing_listener = stomp.listener.PrintingListener()
# self.conn.set_listener('printing', self.printing_listener)
+ self._started_at = time.time()
- def tearDown(self):
+ def tearDown(self):
if self.conn.is_connected():
- self.conn.disconnect()
- self.conn.stop()
-
- def simple_test_send_rec(self, dest, headers={}):
+ try:
+ self.conn.disconnect()
+ except:
+ pass
+ elapsed = time.time() - self._started_at
+ print('{} ({}s)'.format(self.id(), round(elapsed, 2)))
+
+ def simple_test_send_rec(self, dest, headers={}):
self.listener.reset()
self.subscribe_dest(self.conn, dest, None)
@@ -129,11 +145,11 @@ class BaseTest(unittest.TestCase):
# check header content
msg = self.listener.messages[0]
- self.assertEquals("foo", msg['message'])
- self.assertEquals(dest, msg['headers']['destination'])
+ self.assertEqual("foo", msg['message'])
+ self.assertEqual(dest, msg['headers']['destination'])
return msg['headers']
- def assertListener(self, errMsg, numMsgs=0, numErrs=0, numRcts=0, timeout=10):
+ def assertListener(self, errMsg, numMsgs=0, numErrs=0, numRcts=0, timeout=10):
if numMsgs + numErrs + numRcts > 0:
self._assertTrue(self.listener.wait(timeout), errMsg + " (#awaiting)")
else:
@@ -142,28 +158,30 @@ class BaseTest(unittest.TestCase):
self._assertEquals(numErrs, len(self.listener.errors), errMsg + " (#errors)")
self._assertEquals(numRcts, len(self.listener.receipts), errMsg + " (#receipts)")
- def _assertTrue(self, bool, msg):
- if not bool:
- self.listener.print_state(msg, True)
- self.assertTrue(bool, msg)
+ def _assertTrue(self, bool, msg):
+ if not bool:
+ self.listener.print_state(msg, True)
+ self.assertTrue(bool, msg)
- def _assertFalse(self, bool, msg):
- if bool:
- self.listener.print_state(msg, True)
- self.assertFalse(bool, msg)
+ def _assertFalse(self, bool, msg):
+ if bool:
+ self.listener.print_state(msg, True)
+ self.assertFalse(bool, msg)
- def _assertEquals(self, expected, actual, msg):
- if expected != actual:
- self.listener.print_state(msg, True)
- self.assertEquals(expected, actual, msg)
+ def _assertEquals(self, expected, actual, msg):
+ if expected != actual:
+ self.listener.print_state(msg, True)
+ self.assertEqual(expected, actual, msg)
- def assertListenerAfter(self, verb, errMsg="", numMsgs=0, numErrs=0, numRcts=0, timeout=5):
+ def assertListenerAfter(self, verb, errMsg="", numMsgs=0, numErrs=0, numRcts=0, timeout=5):
num = numMsgs + numErrs + numRcts
- self.listener.reset(num if num>0 else 1)
+ self.listener.reset(num if num > 0 else 1)
verb()
- self.assertListener(errMsg=errMsg, numMsgs=numMsgs, numErrs=numErrs, numRcts=numRcts, timeout=timeout)
+ self.assertListener(errMsg=errMsg, numMsgs=numMsgs,
+ numErrs=numErrs, numRcts=numRcts, timeout=timeout)
-class WaitableListener(object):
+
+class WaitableListener(ConnectionListener):
def __init__(self):
self.debug = False
@@ -175,29 +193,24 @@ class WaitableListener(object):
self.latch = Latch(1)
self.msg_no = 0
- def _next_msg_no(self):
- self.msg_no += 1
- return self.msg_no
+ ##
+ ## API
+ ##
- def _append(self, array, msg, hdrs):
- mno = self._next_msg_no()
- array.append({'message' : msg, 'headers' : hdrs, 'msg_no' : mno})
- self.latch.countdown()
-
- def on_receipt(self, headers, message):
+ def on_receipt(self, frame):
if self.debug:
- print('(on_receipt) message: {}, headers: {}'.format(message, headers))
- self._append(self.receipts, message, headers)
+ print('(on_receipt) frame: {}, headers: {}'.format(frame.body, frame.headers))
+ self._append(self.receipts, frame.body, frame.headers)
- def on_error(self, headers, message):
+ def on_error(self, frame):
if self.debug:
- print('(on_error) message: {}, headers: {}'.format(message, headers))
- self._append(self.errors, message, headers)
+ print('(on_error) frame: {}, headers: {}'.format(frame.body, frame.headers))
+ self._append(self.errors, frame.body, frame.headers)
- def on_message(self, headers, message):
+ def on_message(self, frame):
if self.debug:
- print('(on_message) message: {}, headers: {}'.format(message, headers))
- self._append(self.messages, message, headers)
+ print('(on_message) message: {}, headers: {}'.format(frame.body, frame.headers))
+ self._append(self.messages, frame.body, frame.headers)
def reset(self, count=1):
if self.debug:
@@ -210,9 +223,17 @@ class WaitableListener(object):
if self.debug:
self.print_state('(reset listener--new state)')
- def wait(self, timeout=10):
+ def wait(self, timeout=4):
return self.latch.wait(timeout)
+ def wait_for(self, condition, timeout=5):
+ return self.latch.wait_for(condition, timeout)
+
+ def wait_for_complete_countdown(self, timeout=5):
+ return self.latch.wait_for_complete_countdown(timeout)
+
+ ## Implementation
+
def print_state(self, hdr="", full=False):
print(hdr)
print('#messages: {}'.format(len(self.messages)))
@@ -220,40 +241,63 @@ class WaitableListener(object):
print('#receipts: {}'.format(len(self.receipts)))
print('Remaining count: {}'.format(self.latch.get_count()))
if full:
- if len(self.messages) != 0: print('Messages: {}'.format(self.messages))
+ if len(self.messages) != 0:
+ print('Messages: {}'.format(self.messages))
if len(self.errors) != 0: print('Errors: {}'.format(self.errors))
- if len(self.receipts) != 0: print('Messages: {}'.format(self.receipts))
+ if len(self.receipts) != 0:
+ print('Receipts: {}'.format(self.receipts))
+
+ def _next_msg_no(self):
+ self.msg_no += 1
+ return self.msg_no
+
+ def _append(self, array, msg, hdrs):
+ mno = self._next_msg_no()
+ array.append({'message': msg, 'headers': hdrs, 'msg_no': mno})
+ self.latch.countdown()
+
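+ # Countdown latch built on threading.Condition: wait() and
+ # wait_for_complete_countdown() block until countdown() has been invoked
+ # `count` times (or the timeout expires).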
class Latch(object):
- def __init__(self, count=1):
- self.cond = threading.Condition()
- self.cond.acquire()
- self.count = count
- self.cond.release()
-
- def countdown(self):
- self.cond.acquire()
- if self.count > 0:
- self.count -= 1
- if self.count == 0:
- self.cond.notify_all()
- self.cond.release()
-
- def wait(self, timeout=None):
- try:
- self.cond.acquire()
- if self.count == 0:
- return True
- else:
- self.cond.wait(timeout)
- return self.count == 0
- finally:
- self.cond.release()
-
- def get_count(self):
- try:
- self.cond.acquire()
- return self.count
- finally:
- self.cond.release()
+ def __init__(self, count=1):
+ self.cond = threading.Condition()
+ self.cond.acquire()
+ self.count = count
+ self.cond.release()
+
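+ # Decrement the remaining count; wake all waiters once it reaches zero.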
+ def countdown(self):
+ self.cond.acquire()
+ if self.count > 0:
+ self.count -= 1
+ if self.count == 0:
+ self.cond.notify_all()
+ self.cond.release()
+
+ def wait(self, timeout=None):
+ try:
+ self.cond.acquire()
+ if self.count == 0:
+ return True
+ else:
+ self.cond.wait(timeout)
+ return self.count == 0
+ finally:
+ self.cond.release()
+
+ def wait_for_complete_countdown(self, timeout=None):
+ try:
+ self.cond.acquire()
+ if self.count == 0:
+ return True
+ else:
+ reached_zero = self.cond.wait_for(lambda: self.count == 0, timeout)
+ return reached_zero
+ finally:
+ self.cond.release()
+
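+ # Illustrative sketch: WaitableListener.wait_for/2 above delegates to a
+ # latch-level wait_for; a minimal implementation could look like this.
+ def wait_for(self, condition, timeout=None):
+ try:
+ self.cond.acquire()
+ return self.cond.wait_for(condition, timeout)
+ finally:
+ self.cond.release()
+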
+ def get_count(self):
+ try:
+ self.cond.acquire()
+ return self.count
+ finally:
+ self.cond.release()
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/connect_disconnect.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/connect_disconnect.py
new file mode 100644
index 0000000000..cb25cec906
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/connect_disconnect.py
@@ -0,0 +1,135 @@
+## This Source Code Form is subject to the terms of the Mozilla Public
+## License, v. 2.0. If a copy of the MPL was not distributed with this
+## file, You can obtain one at https://mozilla.org/MPL/2.0/.
+##
+## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+##
+
+import unittest
+import stomp
+import base
+import time
+import os
+import threading
+
+import test_util
+
+class TestConnectDisconnect(base.BaseTest):
+ def test_connect_version_1_0(self):
+ ''' Test CONNECT with version 1.0'''
+ self.conn.disconnect()
+ new_conn = self.create_connection(version="1.0")
+ try:
+ self.assertTrue(new_conn.is_connected())
+ finally:
+ new_conn.disconnect()
+
+ def test_connect_version_1_1(self):
+ ''' Test CONNECT with version 1.1'''
+ self.conn.disconnect()
+ new_conn = self.create_connection(version="1.1")
+ try:
+ self.assertTrue(new_conn.is_connected())
+ finally:
+ new_conn.disconnect()
+
+ def test_connect_version_1_2(self):
+ ''' Test CONNECT with version 1.2'''
+ self.conn.disconnect()
+ new_conn = self.create_connection(version="1.2")
+ try:
+ self.assertTrue(new_conn.is_connected())
+ finally:
+ new_conn.disconnect()
+
+ def test_default_user(self):
+ ''' Default user connection '''
+ self.conn.disconnect()
+ test_util.enable_default_user()
+ listener = base.WaitableListener()
+ new_conn = stomp.Connection(host_and_ports=[('localhost', int(os.environ["STOMP_PORT"]))])
+ new_conn.set_listener('', listener)
+ new_conn.connect()
+ try:
+ self.assertFalse(listener.wait(3)) # no error back
+ self.assertTrue(new_conn.is_connected())
+ finally:
+ new_conn.disconnect()
+ test_util.disable_default_user()
+
+
+ def test_unsupported_version(self):
+ ''' Test unsupported version on CONNECT command'''
+ self.bad_connect("Supported versions are 1.0,1.1,1.2\n", version='100.1')
+
+ def test_bad_username(self):
+ ''' Test bad username'''
+ self.bad_connect("Access refused for user 'gust'\n", user='gust')
+
+ def test_bad_password(self):
+ ''' Test bad password'''
+ self.bad_connect("Access refused for user 'guest'\n", passcode='gust')
+
+ def test_bad_vhost(self):
+ ''' Test bad virtual host'''
+ self.bad_connect("Virtual host '//' access denied", version='1.1', vhost='//')
+
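+ # Helper: open a fresh connection with the given credentials/options and
+ # assert that the broker replies with the expected ERROR frame message.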
+ def bad_connect(self, expected, user='guest', passcode='guest', **kwargs):
+ self.conn.disconnect()
+ new_conn = self.create_connection_obj(**kwargs)
+ listener = base.WaitableListener()
+ new_conn.set_listener('', listener)
+ try:
+ new_conn.connect(user, passcode)
+ self.assertTrue(listener.wait())
+ self.assertEqual(expected, listener.errors[0]['message'])
+ finally:
+ if new_conn.is_connected():
+ new_conn.disconnect()
+
+ def test_bad_header_on_send(self):
+ ''' Test disallowed header on SEND '''
+ self.listener.reset(1)
+ self.conn.send_frame("SEND", {"destination":"a", "message-id":"1"})
+ self.assertTrue(self.listener.wait())
+ self.assertEqual(1, len(self.listener.errors))
+ errorReceived = self.listener.errors[0]
+ self.assertEqual("Invalid header", errorReceived['headers']['message'])
+ self.assertEqual("'message-id' is not allowed on 'SEND'.\n", errorReceived['message'])
+
+ def test_send_recv_header(self):
+ ''' Test sending a custom header and receiving it back '''
+ dest = '/queue/custom-header'
+ hdrs = {'x-custom-header-1': 'value1',
+ 'x-custom-header-2': 'value2',
+ 'custom-header-3': 'value3'}
+ self.listener.reset(1)
+ recv_hdrs = self.simple_test_send_rec(dest, headers=hdrs)
+ self.assertEqual('value1', recv_hdrs['x-custom-header-1'])
+ self.assertEqual('value2', recv_hdrs['x-custom-header-2'])
+ self.assertEqual('value3', recv_hdrs['custom-header-3'])
+
+ def test_disconnect(self):
+ ''' Test DISCONNECT command'''
+ self.conn.disconnect()
+ # Note: with modern-ish stomp.py versions, connection does not transition
+ # to the disconnected state immediately, and asserting on it in this test
+ # without a receipt makes no sense
+
+ def test_disconnect_with_receipt(self):
+ ''' Test the DISCONNECT command with receipts '''
+ time.sleep(3)
+ self.listener.reset(1)
+ self.conn.send_frame("DISCONNECT", {"receipt": "test"})
+ self.assertTrue(self.listener.wait())
+ self.assertEqual(1, len(self.listener.receipts))
+ receiptReceived = self.listener.receipts[0]['headers']['receipt-id']
+ self.assertEqual("test", receiptReceived,
+ "Wrong receipt received: '" + receiptReceived + "'")
+
+if __name__ == '__main__':
+ import test_runner
+ modules = [
+ __name__
+ ]
+ test_runner.run_unittests(modules) \ No newline at end of file
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/destinations.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/destinations.py
index 76e5402686..1b55792f65 100644
--- a/deps/rabbitmq_stomp/test/python_SUITE_data/src/destinations.py
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/destinations.py
@@ -36,10 +36,8 @@ class TestExchange(base.BaseTest):
ack="auto")
self.assertListener("Expecting an error", numErrs=1)
err = self.listener.errors[0]
- self.assertEquals("not_found", err['headers']['message'])
- self.assertEquals(
- "NOT_FOUND - no exchange 'does.not.exist' in vhost '/'\n",
- err['message'])
+ self.assertEqual("not_found", err['headers']['message'])
+ self.assertRegex(err['message'], r'^NOT_FOUND')
time.sleep(1)
self.assertFalse(self.conn.is_connected())
@@ -60,6 +58,19 @@ class TestQueue(base.BaseTest):
destination = '/queue/test'
self.simple_test_send_rec(destination)
+ def test_send_recv_header(self):
+ ''' Test sending a custom header and receiving it back '''
+ dest = '/queue/custom-header'
+ hdrs = {'x-custom-header-1': 'value1',
+ 'x-custom-header-2': 'value2',
+ 'custom-header-3': 'value3'}
+ self.listener.reset(1)
+ recv_hdrs = self.simple_test_send_rec(dest, headers=hdrs)
+ self.assertEqual('value1', recv_hdrs['x-custom-header-1'])
+ self.assertEqual('value2', recv_hdrs['x-custom-header-2'])
+ self.assertEqual('value3', recv_hdrs['custom-header-3'])
+
+
def test_send_receive_in_other_conn(self):
''' Test send in one connection, receive in another '''
destination = '/queue/test2'
@@ -74,7 +85,7 @@ class TestQueue(base.BaseTest):
conn2.set_listener('', listener2)
self.subscribe_dest(conn2, destination, None, ack="auto")
- self.assertTrue(listener2.wait(10), "no receive")
+ self.assertTrue(listener2.wait_for_complete_countdown(), "no receive")
finally:
conn2.disconnect()
@@ -84,7 +95,7 @@ class TestQueue(base.BaseTest):
# send
self.conn.send(destination, "hello thar", receipt="foo")
- self.listener.wait(3)
+ self.listener.wait_for_complete_countdown(3)
self.conn.disconnect()
# now receive
@@ -94,7 +105,7 @@ class TestQueue(base.BaseTest):
conn2.set_listener('', listener2)
self.subscribe_dest(conn2, destination, None, ack="auto")
- self.assertTrue(listener2.wait(10), "no receive")
+ self.assertTrue(listener2.wait_for_complete_countdown(), "no receive")
finally:
conn2.disconnect()
@@ -114,10 +125,10 @@ class TestQueue(base.BaseTest):
## expect both consumers to get a message?
self.assertTrue(listener1.wait(2))
- self.assertEquals(1, len(listener1.messages),
+ self.assertEqual(1, len(listener1.messages),
"unexpected message count")
self.assertTrue(listener2.wait(2))
- self.assertEquals(1, len(listener2.messages),
+ self.assertEqual(1, len(listener2.messages),
"unexpected message count")
finally:
conn1.disconnect()
@@ -155,7 +166,7 @@ class TestQueue(base.BaseTest):
self.assertListener("Missing messages/receipts", numMsgs=3, numRcts=2, timeout=3)
- self.assertEquals(set(['a','b']), self.__gather_receipts())
+ self.assertEqual(set(['a','b']), self.__gather_receipts())
def test_interleaved_receipt_no_receipt_tx(self):
''' Test i-leaved receipt/no receipt, no-r bracketed by r+xactions '''
@@ -179,7 +190,7 @@ class TestQueue(base.BaseTest):
expected = set(['a', 'b'])
missing = expected.difference(self.__gather_receipts())
- self.assertEquals(set(), missing, "Missing receipts: " + str(missing))
+ self.assertEqual(set(), missing, "Missing receipts: " + str(missing))
def test_interleaved_receipt_no_receipt_inverse(self):
''' Test i-leaved receipt/no receipt, r bracketed by no-rs '''
@@ -195,7 +206,7 @@ class TestQueue(base.BaseTest):
self.assertListener("Missing messages/receipt", numMsgs=3, numRcts=1, timeout=3)
- self.assertEquals(set(['a']), self.__gather_receipts())
+ self.assertEqual(set(['a']), self.__gather_receipts())
def __test_send_receipt(self, destination, before, after, headers = {}):
count = 50
@@ -211,12 +222,12 @@ class TestQueue(base.BaseTest):
receipt=receipt, headers=headers)
after()
- self.assertTrue(self.listener.wait(5))
+ self.assertTrue(self.listener.wait_for_complete_countdown())
missing_receipts = expected_receipts.difference(
self.__gather_receipts())
- self.assertEquals(set(), missing_receipts,
+ self.assertEqual(set(), missing_receipts,
"missing receipts: " + str(missing_receipts))
def __gather_receipts(self):
@@ -250,11 +261,11 @@ class TestTopic(base.BaseTest):
self.conn.send(destination, "test2")
## expect both consumers to get both messages
- self.assertTrue(listener1.wait(5))
- self.assertEquals(2, len(listener1.messages),
+ self.assertTrue(listener1.wait_for_complete_countdown())
+ self.assertEqual(2, len(listener1.messages),
"unexpected message count")
- self.assertTrue(listener2.wait(5))
- self.assertEquals(2, len(listener2.messages),
+ self.assertTrue(listener2.wait_for_complete_countdown())
+ self.assertEqual(2, len(listener2.messages),
"unexpected message count")
finally:
conn1.disconnect()
@@ -278,13 +289,13 @@ class TestTopic(base.BaseTest):
self.conn.send(destination, message)
self.assertTrue(listener1.wait(10))
- self.assertEquals(2, len(listener1.messages),
+ self.assertEqual(2, len(listener1.messages),
"unexpected message count")
self.assertTrue(len(listener2.messages[0]['message']) == s,
"unexpected message size")
self.assertTrue(listener2.wait(10))
- self.assertEquals(2, len(listener2.messages),
+ self.assertEqual(2, len(listener2.messages),
"unexpected message count")
finally:
conn1.disconnect()
@@ -292,71 +303,7 @@ class TestTopic(base.BaseTest):
class TestReplyQueue(base.BaseTest):
- def test_reply_queue(self):
- ''' Test with two separate clients. Client 1 sends
- message to a known destination with a defined reply
- queue. Client 2 receives on known destination and replies
- on the reply destination. Client 1 gets the reply message'''
-
- known = '/queue/known'
- reply = '/temp-queue/0'
-
- ## Client 1 uses pre-supplied connection and listener
- ## Set up client 2
- conn2, listener2 = self.create_subscriber_connection(known)
-
- try:
- self.conn.send(known, "test",
- headers = {"reply-to": reply})
-
- self.assertTrue(listener2.wait(5))
- self.assertEquals(1, len(listener2.messages))
-
- reply_to = listener2.messages[0]['headers']['reply-to']
- self.assertTrue(reply_to.startswith('/reply-queue/'))
-
- conn2.send(reply_to, "reply")
- self.assertTrue(self.listener.wait(5))
- self.assertEquals("reply", self.listener.messages[0]['message'])
- finally:
- conn2.disconnect()
-
- def test_reuse_reply_queue(self):
- ''' Test re-use of reply-to queue '''
-
- known2 = '/queue/known2'
- known3 = '/queue/known3'
- reply = '/temp-queue/foo'
-
- def respond(cntn, listna):
- self.assertTrue(listna.wait(5))
- self.assertEquals(1, len(listna.messages))
- reply_to = listna.messages[0]['headers']['reply-to']
- self.assertTrue(reply_to.startswith('/reply-queue/'))
- cntn.send(reply_to, "reply")
-
- ## Client 1 uses pre-supplied connection and listener
- ## Set up clients 2 and 3
- conn2, listener2 = self.create_subscriber_connection(known2)
- conn3, listener3 = self.create_subscriber_connection(known3)
- try:
- self.listener.reset(2)
- self.conn.send(known2, "test2",
- headers = {"reply-to": reply})
- self.conn.send(known3, "test3",
- headers = {"reply-to": reply})
- respond(conn2, listener2)
- respond(conn3, listener3)
-
- self.assertTrue(self.listener.wait(5))
- self.assertEquals(2, len(self.listener.messages))
- self.assertEquals("reply", self.listener.messages[0]['message'])
- self.assertEquals("reply", self.listener.messages[1]['message'])
- finally:
- conn2.disconnect()
- conn3.disconnect()
-
- def test_perm_reply_queue(self):
+ def test_durable_known_reply_queue(self):
'''As test_reply_queue, but with a non-temp reply queue'''
known = '/queue/known'
@@ -371,15 +318,15 @@ class TestReplyQueue(base.BaseTest):
conn1.send(known, "test",
headers = {"reply-to": reply})
- self.assertTrue(listener2.wait(5))
- self.assertEquals(1, len(listener2.messages))
+ self.assertTrue(listener2.wait_for_complete_countdown())
+ self.assertEqual(1, len(listener2.messages))
reply_to = listener2.messages[0]['headers']['reply-to']
self.assertTrue(reply_to == reply)
conn2.send(reply_to, "reply")
- self.assertTrue(listener1.wait(5))
- self.assertEquals("reply", listener1.messages[0]['message'])
+ self.assertTrue(listener1.wait_for_complete_countdown())
+ self.assertEqual("reply", listener1.messages[0]['message'])
finally:
conn1.disconnect()
conn2.disconnect()
@@ -403,20 +350,20 @@ class TestDurableSubscription(base.BaseTest):
if not listener:
listener = self.listener
- self.assertTrue(listener.wait(5))
- self.assertEquals(1, len(self.listener.receipts))
+ self.assertTrue(listener.wait_for_complete_countdown())
+ self.assertEqual(1, len(self.listener.receipts))
if pos is not None:
- self.assertEquals(pos, self.listener.receipts[0]['msg_no'])
+ self.assertEqual(pos, self.listener.receipts[0]['msg_no'])
def __assert_message(self, msg, listener=None, pos=None):
if not listener:
listener = self.listener
- self.assertTrue(listener.wait(5))
- self.assertEquals(1, len(listener.messages))
- self.assertEquals(msg, listener.messages[0]['message'])
+ self.assertTrue(listener.wait_for_complete_countdown())
+ self.assertEqual(1, len(listener.messages))
+ self.assertEqual(msg, listener.messages[0]['message'])
if pos is not None:
- self.assertEquals(pos, self.listener.messages[0]['msg_no'])
+ self.assertEqual(pos, self.listener.messages[0]['msg_no'])
def do_test_durable_subscription(self, durability_header):
destination = '/topic/durable'
@@ -453,8 +400,8 @@ class TestDurableSubscription(base.BaseTest):
# resubscribe and expect no message
self.__subscribe(destination)
self.assertTrue(self.listener.wait(3))
- self.assertEquals(0, len(self.listener.messages))
- self.assertEquals(1, len(self.listener.receipts))
+ self.assertEqual(0, len(self.listener.messages))
+ self.assertEqual(1, len(self.listener.receipts))
def test_durable_subscription(self):
self.do_test_durable_subscription('durable')
@@ -477,12 +424,13 @@ class TestDurableSubscription(base.BaseTest):
self.listener.reset(100)
+ n = 100
# send 100 messages
- for x in range(0, 100):
+ for x in range(0, n):
self.conn.send(destination, "msg" + str(x))
- self.assertTrue(self.listener.wait(5))
- self.assertEquals(100, len(self.listener.messages))
+ self.assertTrue(self.listener.wait_for_complete_countdown())
+ self.assertEqual(n, len(self.listener.messages))
finally:
conn2.disconnect()
@@ -502,20 +450,20 @@ class TestDurableSubscription(base.BaseTest):
self.unsubscribe_dest(self.conn, destination, TestDurableSubscription.ID)
self.unsubscribe_dest(conn2, destination, "other.id")
- self.listener.reset(101)
- listener2.reset(101) ## 100 messages and 1 receipt
+ self.listener.reset(11)
+ listener2.reset(11) ## 10 messages and 1 receipt
# send 10 messages
- for x in range(0, 100):
+ for x in range(0, 10):
self.conn.send(destination, "msg" + str(x))
self.__subscribe(destination)
self.__subscribe(destination, conn2, "other.id")
for l in [self.listener, listener2]:
- self.assertTrue(l.wait(20))
- self.assertTrue(len(l.messages) >= 90)
- self.assertTrue(len(l.messages) <= 100)
+ self.assertTrue(l.wait_for_complete_countdown())
+ self.assertTrue(len(l.messages) >= 9)
+ self.assertTrue(len(l.messages) <= 10)
finally:
conn2.disconnect()
@@ -526,11 +474,19 @@ class TestDurableSubscription(base.BaseTest):
self.conn.send_frame('SUBSCRIBE',
{'destination': destination, 'ack': 'auto', header: 'true'})
self.listener.wait(3)
- self.assertEquals(1, len(self.listener.errors))
- self.assertEquals("Missing Header", self.listener.errors[0]['headers']['message'])
+ self.assertEqual(1, len(self.listener.errors))
+ self.assertEqual("Missing Header", self.listener.errors[0]['headers']['message'])
def test_durable_subscribe_no_id(self):
self.do_test_durable_subscribe_no_id_and_header('durable')
def test_durable_subscribe_no_id_and_legacy_header(self):
self.do_test_durable_subscribe_no_id_and_header('persistent')
+
+
+if __name__ == '__main__':
+ import test_runner
+ modules = [
+ __name__
+ ]
+ test_runner.run_unittests(modules) \ No newline at end of file
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/errors.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/errors.py
index 884ada50e8..20a5eaa0a9 100644
--- a/deps/rabbitmq_stomp/test/python_SUITE_data/src/errors.py
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/errors.py
@@ -20,10 +20,10 @@ class TestErrorsAndCloseConnection(base.BaseTest):
self.assertTrue(self.listener.wait())
- self.assertEquals(1, len(self.listener.errors))
+ self.assertEqual(1, len(self.listener.errors))
errorReceived = self.listener.errors[0]
- self.assertEquals("Duplicated subscription identifier", errorReceived['headers']['message'])
- self.assertEquals("A subscription identified by 'T_1' already exists.", errorReceived['message'])
+ self.assertEqual("Duplicated subscription identifier", errorReceived['headers']['message'])
+ self.assertEqual("A subscription identified by 'T_1' already exists.", errorReceived['message'])
time.sleep(2)
self.assertFalse(self.conn.is_connected())
@@ -66,10 +66,10 @@ class TestErrors(base.BaseTest):
self.conn.send("/something/interesting", 'test_unknown_destination')
self.assertTrue(self.listener.wait())
- self.assertEquals(1, len(self.listener.errors))
+ self.assertEqual(1, len(self.listener.errors))
err = self.listener.errors[0]
- self.assertEquals("Unknown destination", err['headers']['message'])
+ self.assertEqual("Unknown destination", err['headers']['message'])
def test_send_missing_destination(self):
self.__test_missing_destination("SEND")
@@ -82,20 +82,28 @@ class TestErrors(base.BaseTest):
self.conn.send_frame(command)
self.assertTrue(self.listener.wait())
- self.assertEquals(1, len(self.listener.errors))
+ self.assertEqual(1, len(self.listener.errors))
err = self.listener.errors[0]
- self.assertEquals("Missing destination", err['headers']['message'])
+ self.assertEqual("Missing destination", err['headers']['message'])
def __test_invalid_destination(self, dtype, content):
self.listener.reset()
self.conn.send("/" + dtype + content, '__test_invalid_destination:' + dtype + content)
self.assertTrue(self.listener.wait())
- self.assertEquals(1, len(self.listener.errors))
+ self.assertEqual(1, len(self.listener.errors))
err = self.listener.errors[0]
- self.assertEquals("Invalid destination", err['headers']['message'])
- self.assertEquals("'" + content + "' is not a valid " +
+ self.assertEqual("Invalid destination", err['headers']['message'])
+ self.assertEqual("'" + content + "' is not a valid " +
dtype + " destination\n",
err['message'])
+
+
+if __name__ == '__main__':
+ import test_runner
+ modules = [
+ __name__
+ ]
+ test_runner.run_unittests(modules) \ No newline at end of file
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/connect_options.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/implicit_connect.py
index f71c4acf70..4ea49d5d8c 100644
--- a/deps/rabbitmq_stomp/test/python_SUITE_data/src/connect_options.py
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/implicit_connect.py
@@ -8,10 +8,16 @@
import unittest
import stomp
import base
-import test_util
+import time
import os
+import threading
+
+import test_util
-class TestConnectOptions(base.BaseTest):
+class TestImplicitConnect(base.BaseTest):
+ """
+ Relies on implicit connect being enabled on the node
+ """
def test_implicit_connect(self):
''' Implicit connect with receipt on first command '''
@@ -21,31 +27,24 @@ class TestConnectOptions(base.BaseTest):
new_conn = stomp.Connection(host_and_ports=[('localhost', int(os.environ["STOMP_PORT"]))])
new_conn.set_listener('', listener)
- new_conn.start() # not going to issue connect
+ new_conn.transport.start()
+
self.subscribe_dest(new_conn, "/topic/implicit", 'sub_implicit',
receipt='implicit')
try:
self.assertTrue(listener.wait(5))
- self.assertEquals(1, len(listener.receipts),
+ self.assertEqual(1, len(listener.receipts),
'Missing receipt. Likely not connected')
- self.assertEquals('implicit', listener.receipts[0]['headers']['receipt-id'])
+ self.assertEqual('implicit', listener.receipts[0]['headers']['receipt-id'])
finally:
new_conn.disconnect()
test_util.disable_implicit_connect()
- def test_default_user(self):
- ''' Default user connection '''
- self.conn.disconnect()
- test_util.enable_default_user()
- listener = base.WaitableListener()
- new_conn = stomp.Connection(host_and_ports=[('localhost', int(os.environ["STOMP_PORT"]))])
- new_conn.set_listener('', listener)
- new_conn.start()
- new_conn.connect()
- try:
- self.assertFalse(listener.wait(3)) # no error back
- self.assertTrue(new_conn.is_connected())
- finally:
- new_conn.disconnect()
- test_util.disable_default_user()
+
+if __name__ == '__main__':
+ import test_runner
+ modules = [
+ __name__
+ ]
+ test_runner.run_unittests(modules) \ No newline at end of file
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/implicit_connect_runner.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/implicit_connect_runner.py
new file mode 100755
index 0000000000..7acfda26b6
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/implicit_connect_runner.py
@@ -0,0 +1,9 @@
+#!/usr/bin/env python3
+
+import test_runner
+
+if __name__ == '__main__':
+ modules = [
+ 'implicit_connect'
+ ]
+ test_runner.run_unittests(modules)
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/lifecycle.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/lifecycle.py
deleted file mode 100644
index d7b558e7b5..0000000000
--- a/deps/rabbitmq_stomp/test/python_SUITE_data/src/lifecycle.py
+++ /dev/null
@@ -1,187 +0,0 @@
-## This Source Code Form is subject to the terms of the Mozilla Public
-## License, v. 2.0. If a copy of the MPL was not distributed with this
-## file, You can obtain one at https://mozilla.org/MPL/2.0/.
-##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
-##
-
-import unittest
-import stomp
-import base
-import time
-
-class TestLifecycle(base.BaseTest):
-
- def test_unsubscribe_exchange_destination(self):
- ''' Test UNSUBSCRIBE command with exchange'''
- d = "/exchange/amq.fanout"
- self.unsub_test(d, self.sub_and_send(d))
-
- def test_unsubscribe_exchange_destination_with_receipt(self):
- ''' Test receipted UNSUBSCRIBE command with exchange'''
- d = "/exchange/amq.fanout"
- self.unsub_test(d, self.sub_and_send(d, receipt="unsub.rct"), numRcts=1)
-
- def test_unsubscribe_queue_destination(self):
- ''' Test UNSUBSCRIBE command with queue'''
- d = "/queue/unsub01"
- self.unsub_test(d, self.sub_and_send(d))
-
- def test_unsubscribe_queue_destination_with_receipt(self):
- ''' Test receipted UNSUBSCRIBE command with queue'''
- d = "/queue/unsub02"
- self.unsub_test(d, self.sub_and_send(d, receipt="unsub.rct"), numRcts=1)
-
- def test_unsubscribe_exchange_id(self):
- ''' Test UNSUBSCRIBE command with exchange by id'''
- d = "/exchange/amq.fanout"
- self.unsub_test(d, self.sub_and_send(d, subid="exchid"))
-
- def test_unsubscribe_exchange_id_with_receipt(self):
- ''' Test receipted UNSUBSCRIBE command with exchange by id'''
- d = "/exchange/amq.fanout"
- self.unsub_test(d, self.sub_and_send(d, subid="exchid", receipt="unsub.rct"), numRcts=1)
-
- def test_unsubscribe_queue_id(self):
- ''' Test UNSUBSCRIBE command with queue by id'''
- d = "/queue/unsub03"
- self.unsub_test(d, self.sub_and_send(d, subid="queid"))
-
- def test_unsubscribe_queue_id_with_receipt(self):
- ''' Test receipted UNSUBSCRIBE command with queue by id'''
- d = "/queue/unsub04"
- self.unsub_test(d, self.sub_and_send(d, subid="queid", receipt="unsub.rct"), numRcts=1)
-
- def test_connect_version_1_0(self):
- ''' Test CONNECT with version 1.0'''
- self.conn.disconnect()
- new_conn = self.create_connection(version="1.0")
- try:
- self.assertTrue(new_conn.is_connected())
- finally:
- new_conn.disconnect()
- self.assertFalse(new_conn.is_connected())
-
- def test_connect_version_1_1(self):
- ''' Test CONNECT with version 1.1'''
- self.conn.disconnect()
- new_conn = self.create_connection(version="1.1")
- try:
- self.assertTrue(new_conn.is_connected())
- finally:
- new_conn.disconnect()
- self.assertFalse(new_conn.is_connected())
-
- def test_connect_version_1_2(self):
- ''' Test CONNECT with version 1.2'''
- self.conn.disconnect()
- new_conn = self.create_connection(version="1.2")
- try:
- self.assertTrue(new_conn.is_connected())
- finally:
- new_conn.disconnect()
- self.assertFalse(new_conn.is_connected())
-
- def test_heartbeat_disconnects_client(self):
- ''' Test heart-beat disconnection'''
- self.conn.disconnect()
- new_conn = self.create_connection(version='1.1', heartbeats=(1500, 0))
- try:
- self.assertTrue(new_conn.is_connected())
- time.sleep(1)
- self.assertTrue(new_conn.is_connected())
- time.sleep(3)
- self.assertFalse(new_conn.is_connected())
- finally:
- if new_conn.is_connected():
- new_conn.disconnect()
-
- def test_unsupported_version(self):
- ''' Test unsupported version on CONNECT command'''
- self.bad_connect("Supported versions are 1.0,1.1,1.2\n", version='100.1')
-
- def test_bad_username(self):
- ''' Test bad username'''
- self.bad_connect("Access refused for user 'gust'\n", user='gust')
-
- def test_bad_password(self):
- ''' Test bad password'''
- self.bad_connect("Access refused for user 'guest'\n", passcode='gust')
-
- def test_bad_vhost(self):
- ''' Test bad virtual host'''
- self.bad_connect("Virtual host '//' access denied", version='1.1', vhost='//')
-
- def bad_connect(self, expected, user='guest', passcode='guest', **kwargs):
- self.conn.disconnect()
- new_conn = self.create_connection_obj(**kwargs)
- listener = base.WaitableListener()
- new_conn.set_listener('', listener)
- try:
- new_conn.start()
- new_conn.connect(user, passcode)
- self.assertTrue(listener.wait())
- self.assertEquals(expected, listener.errors[0]['message'])
- finally:
- if new_conn.is_connected():
- new_conn.disconnect()
-
- def test_bad_header_on_send(self):
- ''' Test disallowed header on SEND '''
- self.listener.reset(1)
- self.conn.send_frame("SEND", {"destination":"a", "message-id":"1"})
- self.assertTrue(self.listener.wait())
- self.assertEquals(1, len(self.listener.errors))
- errorReceived = self.listener.errors[0]
- self.assertEquals("Invalid header", errorReceived['headers']['message'])
- self.assertEquals("'message-id' is not allowed on 'SEND'.\n", errorReceived['message'])
-
- def test_send_recv_header(self):
- ''' Test sending a custom header and receiving it back '''
- dest = '/queue/custom-header'
- hdrs = {'x-custom-header-1': 'value1',
- 'x-custom-header-2': 'value2',
- 'custom-header-3': 'value3'}
- self.listener.reset(1)
- recv_hdrs = self.simple_test_send_rec(dest, headers=hdrs)
- self.assertEquals('value1', recv_hdrs['x-custom-header-1'])
- self.assertEquals('value2', recv_hdrs['x-custom-header-2'])
- self.assertEquals('value3', recv_hdrs['custom-header-3'])
-
- def test_disconnect(self):
- ''' Test DISCONNECT command'''
- self.conn.disconnect()
- self.assertFalse(self.conn.is_connected())
-
- def test_disconnect_with_receipt(self):
- ''' Test the DISCONNECT command with receipts '''
- time.sleep(3)
- self.listener.reset(1)
- self.conn.send_frame("DISCONNECT", {"receipt": "test"})
- self.assertTrue(self.listener.wait())
- self.assertEquals(1, len(self.listener.receipts))
- receiptReceived = self.listener.receipts[0]['headers']['receipt-id']
- self.assertEquals("test", receiptReceived
- , "Wrong receipt received: '" + receiptReceived + "'")
-
- def unsub_test(self, dest, verbs, numRcts=0):
- def afterfun():
- self.conn.send(dest, "after-test")
- subverb, unsubverb = verbs
- self.assertListenerAfter(subverb, numMsgs=1,
- errMsg="FAILED to subscribe and send")
- self.assertListenerAfter(unsubverb, numRcts=numRcts,
- errMsg="Incorrect responses from UNSUBSCRIBE")
- self.assertListenerAfter(afterfun,
- errMsg="Still receiving messages")
-
- def sub_and_send(self, dest, subid=None, receipt=None):
- def subfun():
- self.subscribe_dest(self.conn, dest, subid)
- self.conn.send(dest, "test")
- def unsubfun():
- headers = {}
- if receipt != None:
- headers['receipt'] = receipt
- self.unsubscribe_dest(self.conn, dest, subid, **headers)
- return subfun, unsubfun
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/test.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/main_runner.py
index 01967465a2..b0db6a1f79 100755
--- a/deps/rabbitmq_stomp/test/python_SUITE_data/src/test.py
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/main_runner.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
import test_runner
@@ -6,7 +6,7 @@ if __name__ == '__main__':
modules = [
'parsing',
'errors',
- 'lifecycle',
+ 'connect_disconnect',
'ack',
'amqp_headers',
'queue_properties',
@@ -16,6 +16,8 @@ if __name__ == '__main__':
'destinations',
'redelivered',
'topic_permissions',
- 'x_queue_type_quorum'
+ 'unsubscribe',
+ 'x_queue_type_quorum',
+ 'x_queue_type_stream'
]
test_runner.run_unittests(modules)
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/parsing.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/parsing.py
index 40f908c5d9..8678887144 100644
--- a/deps/rabbitmq_stomp/test/python_SUITE_data/src/parsing.py
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/parsing.py
@@ -329,3 +329,11 @@ class TestParsing(unittest.TestCase):
self.assertEqual(bodybuf, bodyresp,
" body ('%s')\nincorrectly returned as ('%s')"
% (bodyresp, bodybuf))
+
+
+if __name__ == '__main__':
+ import test_runner
+ modules = [
+ __name__
+ ]
+ test_runner.run_unittests(modules) \ No newline at end of file
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/queue_properties.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/queue_properties.py
index 3761c92360..209edb586d 100644
--- a/deps/rabbitmq_stomp/test/python_SUITE_data/src/queue_properties.py
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/queue_properties.py
@@ -85,3 +85,11 @@ class TestQueueProperties(base.BaseTest):
self.conn.disconnect()
connection.close()
+
+
+if __name__ == '__main__':
+ import test_runner
+ modules = [
+ __name__
+ ]
+ test_runner.run_unittests(modules) \ No newline at end of file
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/redelivered.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/redelivered.py
index 3dfdd72cc9..b526843bb4 100644
--- a/deps/rabbitmq_stomp/test/python_SUITE_data/src/redelivered.py
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/redelivered.py
@@ -20,8 +20,8 @@ class TestRedelivered(base.BaseTest):
self.conn.send(destination, "test1")
message_receive_timeout = 30
self.assertTrue(self.listener.wait(message_receive_timeout), "Test message not received within {0} seconds".format(message_receive_timeout))
- self.assertEquals(1, len(self.listener.messages))
- self.assertEquals('false', self.listener.messages[0]['headers']['redelivered'])
+ self.assertEqual(1, len(self.listener.messages))
+ self.assertEqual('false', self.listener.messages[0]['headers']['redelivered'])
# disconnect with no ack
self.conn.disconnect()
@@ -34,7 +34,14 @@ class TestRedelivered(base.BaseTest):
conn2.set_listener('', listener2)
self.subscribe_dest(conn2, destination, None, ack='client')
self.assertTrue(listener2.wait(), "message not received again")
- self.assertEquals(1, len(listener2.messages))
- self.assertEquals('true', listener2.messages[0]['headers']['redelivered'])
+ self.assertEqual(1, len(listener2.messages))
+ self.assertEqual('true', listener2.messages[0]['headers']['redelivered'])
finally:
conn2.disconnect()
+
+if __name__ == '__main__':
+ import test_runner
+ modules = [
+ __name__
+ ]
+ test_runner.run_unittests(modules) \ No newline at end of file
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/reliability.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/reliability.py
index 6fbcb3d492..8fe156e6da 100644
--- a/deps/rabbitmq_stomp/test/python_SUITE_data/src/reliability.py
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/reliability.py
@@ -32,10 +32,18 @@ class TestReliability(base.BaseTest):
pub_conn.disconnect()
if listener.wait(30):
- self.assertEquals(count, len(listener.messages))
+ self.assertEqual(count, len(listener.messages))
else:
listener.print_state("Final state of listener:")
self.fail("Did not receive %s messages in time" % count)
finally:
if pub_conn.is_connected():
pub_conn.disconnect()
+
+
+if __name__ == '__main__':
+ import test_runner
+ modules = [
+ __name__
+ ]
+ test_runner.run_unittests(modules) \ No newline at end of file
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/test_connect_options.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/test_connect_options.py
deleted file mode 100755
index 10efa4fbb4..0000000000
--- a/deps/rabbitmq_stomp/test/python_SUITE_data/src/test_connect_options.py
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/usr/bin/env python
-
-## This Source Code Form is subject to the terms of the Mozilla Public
-## License, v. 2.0. If a copy of the MPL was not distributed with this
-## file, You can obtain one at https://mozilla.org/MPL/2.0/.
-##
-## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
-##
-
-import test_runner
-
-if __name__ == '__main__':
- modules = ['connect_options']
- test_runner.run_unittests(modules)
-
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/test_runner.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/test_runner.py
index 9aa5855b02..e0d22b0245 100644
--- a/deps/rabbitmq_stomp/test/python_SUITE_data/src/test_runner.py
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/test_runner.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
## This Source Code Form is subject to the terms of the Mozilla Public
## License, v. 2.0. If a copy of the MPL was not distributed with this
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/test_util.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/test_util.py
index 911100c54f..ab56757d5a 100644
--- a/deps/rabbitmq_stomp/test/python_SUITE_data/src/test_util.py
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/test_util.py
@@ -35,13 +35,13 @@ def disable_default_user():
def switch_config(implicit_connect='', default_user=''):
cmd = ''
cmd += 'ok = io:format("~n===== Ranch listeners (before stop) =====~n~n~p~n", [ranch:info()]),'
- cmd += 'ok = application:stop(rabbitmq_stomp),'
+ cmd += '_ = application:stop(rabbitmq_stomp),'
cmd += 'io:format("~n===== Ranch listeners (after stop) =====~n~n~p~n", [ranch:info()]),'
if implicit_connect:
cmd += 'ok = application:set_env(rabbitmq_stomp,implicit_connect,{}),'.format(implicit_connect)
if default_user:
cmd += 'ok = application:set_env(rabbitmq_stomp,default_user,{}),'.format(default_user)
- cmd += 'ok = application:start(rabbitmq_stomp),'
+ cmd += '_ = application:start(rabbitmq_stomp),'
cmd += 'io:format("~n===== Ranch listeners (after start) =====~n~n~p~n", [ranch:info()]).'
rabbitmqctl(['eval', cmd])
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/ssl_lifecycle.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/tls_connect_disconnect.py
index 570ad9f5a3..faeaed2ffe 100644
--- a/deps/rabbitmq_stomp/test/python_SUITE_data/src/ssl_lifecycle.py
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/tls_connect_disconnect.py
@@ -20,7 +20,7 @@ ssl_key_file = os.path.join(os.getenv('SSL_CERTS_PATH'), 'client', 'key.pem')
ssl_cert_file = os.path.join(os.getenv('SSL_CERTS_PATH'), 'client', 'cert.pem')
ssl_ca_certs = os.path.join(os.getenv('SSL_CERTS_PATH'), 'testca', 'cacert.pem')
-class TestSslClient(unittest.TestCase):
+class TestTLSConnection(unittest.TestCase):
def __ssl_connect(self):
conn = stomp.Connection(host_and_ports = [ ('localhost', int(os.environ["STOMP_PORT_TLS"])) ],
@@ -28,7 +28,6 @@ class TestSslClient(unittest.TestCase):
ssl_cert_file = ssl_cert_file,
ssl_ca_certs = ssl_ca_certs)
print("FILE: {}".format(ssl_cert_file))
- conn.start()
conn.connect("guest", "guest")
return conn
@@ -37,7 +36,6 @@ class TestSslClient(unittest.TestCase):
use_ssl = True, ssl_key_file = ssl_key_file,
ssl_cert_file = ssl_cert_file,
ssl_ca_certs = ssl_ca_certs)
- conn.start()
conn.connect()
return conn
@@ -68,7 +66,7 @@ class TestSslClient(unittest.TestCase):
self.assertTrue(listener.wait(1))
- self.assertEquals("sub",
+ self.assertEqual("sub",
listener.receipts[0]['headers']['receipt-id'])
listener.reset(1)
@@ -76,6 +74,13 @@ class TestSslClient(unittest.TestCase):
self.assertTrue(listener.wait())
- self.assertEquals("Hello SSL!", listener.messages[0]['message'])
+ self.assertEqual("Hello SSL!", listener.messages[0]['message'])
finally:
conn.disconnect()
+
+if __name__ == '__main__':
+ import test_runner
+ modules = [
+ __name__
+ ]
+ test_runner.run_unittests(modules) \ No newline at end of file
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/test_ssl.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/tls_runner.py
index 95d2d2baa7..5f95a0b700 100755
--- a/deps/rabbitmq_stomp/test/python_SUITE_data/src/test_ssl.py
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/tls_runner.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
## This Source Code Form is subject to the terms of the Mozilla Public
## License, v. 2.0. If a copy of the MPL was not distributed with this
@@ -11,7 +11,7 @@ import test_runner
import test_util
if __name__ == '__main__':
- modules = ['ssl_lifecycle']
+ modules = ['tls_connect_disconnect']
test_util.ensure_ssl_auth_user()
test_runner.run_unittests(modules)
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/topic_permissions.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/topic_permissions.py
index 6272f6d8b5..f6513e318f 100644
--- a/deps/rabbitmq_stomp/test/python_SUITE_data/src/topic_permissions.py
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/topic_permissions.py
@@ -50,3 +50,11 @@ class TestTopicPermissions(base.BaseTest):
# assert errors
self.assertGreater(len(self.listener.errors), 0)
self.assertIn("ACCESS_REFUSED", self.listener.errors[0]['message'])
+
+
+if __name__ == '__main__':
+ import test_runner
+ modules = [
+ __name__
+ ]
+ test_runner.run_unittests(modules) \ No newline at end of file
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/transactions.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/transactions.py
index 379806bfb8..905d334d4b 100644
--- a/deps/rabbitmq_stomp/test/python_SUITE_data/src/transactions.py
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/transactions.py
@@ -25,16 +25,16 @@ class TestTransactions(base.BaseTest):
## should see the second message
self.assertTrue(self.listener.wait(3))
- self.assertEquals(1, len(self.listener.messages))
- self.assertEquals("again!", self.listener.messages[0]['message'])
+ self.assertEqual(1, len(self.listener.messages))
+ self.assertEqual("again!", self.listener.messages[0]['message'])
## now look for the first message
self.listener.reset()
self.conn.commit(transaction=tx)
self.assertTrue(self.listener.wait(3))
- self.assertEquals(1, len(self.listener.messages),
+ self.assertEqual(1, len(self.listener.messages),
"Missing committed message")
- self.assertEquals("hello!", self.listener.messages[0]['message'])
+ self.assertEqual("hello!", self.listener.messages[0]['message'])
def test_tx_abort(self):
''' Test TX with an ABORT and ensure messages are discarded '''
@@ -49,13 +49,19 @@ class TestTransactions(base.BaseTest):
## should see the second message
self.assertTrue(self.listener.wait(3))
- self.assertEquals(1, len(self.listener.messages))
- self.assertEquals("again!", self.listener.messages[0]['message'])
+ self.assertEqual(1, len(self.listener.messages))
+ self.assertEqual("again!", self.listener.messages[0]['message'])
## now look for the first message to be discarded
self.listener.reset()
self.conn.abort(transaction=tx)
self.assertFalse(self.listener.wait(3))
- self.assertEquals(0, len(self.listener.messages),
+ self.assertEqual(0, len(self.listener.messages),
"Unexpected committed message")
+if __name__ == '__main__':
+ import test_runner
+ modules = [
+ __name__
+ ]
+ test_runner.run_unittests(modules) \ No newline at end of file
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/unsubscribe.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/unsubscribe.py
new file mode 100644
index 0000000000..8552715f2b
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/unsubscribe.py
@@ -0,0 +1,88 @@
+## This Source Code Form is subject to the terms of the Mozilla Public
+## License, v. 2.0. If a copy of the MPL was not distributed with this
+## file, You can obtain one at https://mozilla.org/MPL/2.0/.
+##
+## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+##
+
+import unittest
+import stomp
+import base
+import time
+import threading
+
+class TestLifecycle(base.BaseTest):
+
+ def test_unsubscribe_exchange_destination(self):
+ ''' Test UNSUBSCRIBE command with exchange'''
+ d = "/exchange/amq.fanout"
+ self.unsub_test(d, self.sub_and_send(d))
+
+ def test_unsubscribe_exchange_destination_with_receipt(self):
+ ''' Test receipted UNSUBSCRIBE command with exchange'''
+ d = "/exchange/amq.fanout"
+ self.unsub_test(d, self.sub_and_send(d, receipt="unsub.rct"), numRcts=1)
+
+ def test_unsubscribe_queue_destination(self):
+ ''' Test UNSUBSCRIBE command with queue'''
+ d = "/queue/test_unsubscribe_queue_destination"
+ self.unsub_test(d, self.sub_and_send(d))
+
+ def test_unsubscribe_queue_destination_with_receipt(self):
+ ''' Test receipted UNSUBSCRIBE command with queue'''
+ d = "/queue/test_unsubscribe_queue_destination_with_receipt"
+ self.unsub_test(d, self.sub_and_send(d, receipt="unsub.rct"), numRcts=1)
+
+ def test_unsubscribe_exchange_id(self):
+ ''' Test UNSUBSCRIBE command with exchange by id'''
+ d = "/exchange/amq.fanout"
+ self.unsub_test(d, self.sub_and_send(d, subid="exchid"))
+
+ def test_unsubscribe_exchange_id_with_receipt(self):
+ ''' Test receipted UNSUBSCRIBE command with exchange by id'''
+ d = "/exchange/amq.fanout"
+ self.unsub_test(d, self.sub_and_send(d, subid="exchid", receipt="unsub.rct"), numRcts=1)
+
+ def test_unsubscribe_queue_id(self):
+ ''' Test UNSUBSCRIBE command with queue by id'''
+ d = "/queue/test_unsubscribe_queue_id"
+ self.unsub_test(d, self.sub_and_send(d, subid="queid"))
+
+ def test_unsubscribe_queue_id_with_receipt(self):
+ ''' Test receipted UNSUBSCRIBE command with queue by id'''
+ d = "/queue/test_unsubscribe_queue_id_with_receipt"
+ self.unsub_test(d, self.sub_and_send(d, subid="queid", receipt="unsub.rct"), numRcts=1)
+
+ ##
+ ## Helpers
+ ##
+
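+ # unsub_test: run the (subscribe-and-send, unsubscribe) pair returned by
+ # sub_and_send, then publish once more and assert that no further messages
+ # arrive (only the expected number of receipts).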
+ def unsub_test(self, dest, verbs, numRcts=0):
+ def afterfun():
+ self.conn.send(dest, "after-test")
+ subverb, unsubverb = verbs
+ self.assertListenerAfter(subverb, numMsgs=1,
+ errMsg="FAILED to subscribe and send")
+ self.assertListenerAfter(unsubverb, numRcts=numRcts,
+ errMsg="Incorrect responses from UNSUBSCRIBE")
+ self.assertListenerAfter(afterfun,
+ errMsg="Still receiving messages")
+
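+ # sub_and_send: returns a pair of thunks; the first subscribes and publishes
+ # a test message, the second unsubscribes (optionally requesting a receipt).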
+ def sub_and_send(self, dest, subid=None, receipt=None):
+ def subfun():
+ self.subscribe_dest(self.conn, dest, subid)
+ time.sleep(1)
+ self.conn.send(dest, "test")
+ def unsubfun():
+ headers = {}
+ if receipt != None:
+ headers['receipt'] = receipt
+ self.unsubscribe_dest(self.conn, dest, subid, **headers)
+ return subfun, unsubfun
+
+if __name__ == '__main__':
+ import test_runner
+ modules = [
+ __name__
+ ]
+ test_runner.run_unittests(modules) \ No newline at end of file
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/x_queue_name.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/x_queue_name.py
index f2c90486eb..ef69d6bbee 100644
--- a/deps/rabbitmq_stomp/test/python_SUITE_data/src/x_queue_name.py
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/x_queue_name.py
@@ -37,7 +37,7 @@ class TestUserGeneratedQueueName(base.BaseTest):
# check if we receive the message from the STOMP subscription
self.assertTrue(self.listener.wait(2), "initial message not received")
- self.assertEquals(1, len(self.listener.messages))
+ self.assertEqual(1, len(self.listener.messages))
self.conn.disconnect()
connection.close()
@@ -65,7 +65,15 @@ class TestUserGeneratedQueueName(base.BaseTest):
# check if we receive the message from the STOMP subscription
self.assertTrue(self.listener.wait(2), "initial message not received")
- self.assertEquals(1, len(self.listener.messages))
+ self.assertEqual(1, len(self.listener.messages))
self.conn.disconnect()
connection.close()
+
+
+if __name__ == '__main__':
+ import test_runner
+ modules = [
+ __name__
+ ]
+ test_runner.run_unittests(modules) \ No newline at end of file
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/x_queue_type_quorum.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/x_queue_type_quorum.py
index 1018abd0d4..40e380a691 100644
--- a/deps/rabbitmq_stomp/test/python_SUITE_data/src/x_queue_type_quorum.py
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/x_queue_type_quorum.py
@@ -55,8 +55,15 @@ class TestUserGeneratedQueueName(base.BaseTest):
if quorum_queue_supported:
# check if we receive the message from the STOMP subscription
- self.assertTrue(self.listener.wait(5), "initial message not received")
- self.assertEquals(1, len(self.listener.messages))
+ self.assertTrue(self.listener.wait_for_complete_countdown(), "initial message not received")
+ self.assertEqual(1, len(self.listener.messages))
self.conn.disconnect()
connection.close()
+
+if __name__ == '__main__':
+ import test_runner
+ modules = [
+ __name__
+ ]
+ test_runner.run_unittests(modules) \ No newline at end of file
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/x_queue_type_stream.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/x_queue_type_stream.py
new file mode 100644
index 0000000000..d4a8a6291f
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/x_queue_type_stream.py
@@ -0,0 +1,72 @@
+## This Source Code Form is subject to the terms of the Mozilla Public
+## License, v. 2.0. If a copy of the MPL was not distributed with this
+## file, You can obtain one at https://mozilla.org/MPL/2.0/.
+##
+## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+##
+
+import pika
+import base
+import time
+import os
+import re
+
+
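+# Declares a stream queue via a STOMP SUBSCRIBE that sets 'x-queue-type': 'stream',
+# publishes to it over AMQP 0-9-1 with pika, and expects the message to arrive on
+# the STOMP subscription; assertions are skipped if the broker reports the stream
+# feature flag as disabled.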
+class TestUserGeneratedQueueName(base.BaseTest):
+
+ def test_stream_queue(self):
+ queueName = 'my-stream-queue'
+
+ # subscribe
+ self.subscribe_dest(
+ self.conn,
+ '/topic/stream-queue-test',
+ None,
+ headers={
+ 'x-queue-name': queueName,
+ 'x-queue-type': 'stream',
+ 'durable': True,
+ 'auto-delete': False,
+ 'id': 1234,
+ 'prefetch-count': 10
+ },
+ ack="client"
+ )
+
+ # give the stream queue some time to start
+ time.sleep(5)
+
+ connection = pika.BlockingConnection(
+ pika.ConnectionParameters(host='127.0.0.1', port=int(os.environ["AMQP_PORT"])))
+ channel = connection.channel()
+
+ # publish a message to the named queue
+ channel.basic_publish(
+ exchange='',
+ routing_key=queueName,
+ body='Hello World!')
+
+ # could we declare a stream queue?
+ stream_queue_supported = True
+ if len(self.listener.errors) > 0:
+ pattern = re.compile(r"feature flag is disabled", re.MULTILINE)
+ for error in self.listener.errors:
+ if pattern.search(error['message']) is not None:
+ stream_queue_supported = False
+ break
+
+ if stream_queue_supported:
+ # check if we receive the message from the STOMP subscription
+ self.assertTrue(self.listener.wait(5), "initial message not received")
+ self.assertEqual(1, len(self.listener.messages))
+ self.conn.disconnect()
+
+ connection.close()
+
+
+if __name__ == '__main__':
+ import test_runner
+ modules = [
+ __name__
+ ]
+ test_runner.run_unittests(modules) \ No newline at end of file
diff --git a/deps/rabbitmq_stomp/test/src/rabbit_stomp_client.erl b/deps/rabbitmq_stomp/test/src/rabbit_stomp_client.erl
index 739512e3b3..38975303bc 100644
--- a/deps/rabbitmq_stomp/test/src/rabbit_stomp_client.erl
+++ b/deps/rabbitmq_stomp/test/src/rabbit_stomp_client.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
%% The stupidest client imaginable, just for testing.
diff --git a/deps/rabbitmq_stomp/test/src/rabbit_stomp_publish_test.erl b/deps/rabbitmq_stomp/test/src/rabbit_stomp_publish_test.erl
index 6b5b9298fa..3980a22089 100644
--- a/deps/rabbitmq_stomp/test/src/rabbit_stomp_publish_test.erl
+++ b/deps/rabbitmq_stomp/test/src/rabbit_stomp_publish_test.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_stomp_publish_test).
diff --git a/deps/rabbitmq_stomp/test/amqqueue_SUITE.erl b/deps/rabbitmq_stomp/test/system_SUITE.erl
index 0474fd67d6..e7e2e80a92 100644
--- a/deps/rabbitmq_stomp/test/amqqueue_SUITE.erl
+++ b/deps/rabbitmq_stomp/test/system_SUITE.erl
@@ -2,10 +2,10 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
--module(amqqueue_SUITE).
+-module(system_SUITE).
-compile(export_all).
diff --git a/deps/rabbitmq_stomp/test/topic_SUITE.erl b/deps/rabbitmq_stomp/test/topic_SUITE.erl
index 4a6421a326..40dce2ad2c 100644
--- a/deps/rabbitmq_stomp/test/topic_SUITE.erl
+++ b/deps/rabbitmq_stomp/test/topic_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(topic_SUITE).
diff --git a/deps/rabbitmq_stomp/test/util_SUITE.erl b/deps/rabbitmq_stomp/test/util_SUITE.erl
index 89d9d9e37e..e365289aa6 100644
--- a/deps/rabbitmq_stomp/test/util_SUITE.erl
+++ b/deps/rabbitmq_stomp/test/util_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(util_SUITE).
diff --git a/deps/rabbitmq_stream/.gitignore b/deps/rabbitmq_stream/.gitignore
index 14abe16374..77fa2c1962 100644
--- a/deps/rabbitmq_stream/.gitignore
+++ b/deps/rabbitmq_stream/.gitignore
@@ -24,8 +24,10 @@ erl_crash.dump
/cover/
/deps/
/ebin/
+/escript/
/logs/
/plugins/
+/sbin/
/xrefr
elvis
callgrind*
diff --git a/deps/rabbitmq_stream/BUILD.bazel b/deps/rabbitmq_stream/BUILD.bazel
new file mode 100644
index 0000000000..8601169b5c
--- /dev/null
+++ b/deps/rabbitmq_stream/BUILD.bazel
@@ -0,0 +1,105 @@
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze")
+load(
+ "//:rabbitmq.bzl",
+ "RABBITMQ_DIALYZER_OPTS",
+ "assert_suites",
+ "broker_for_integration_suites",
+ "rabbitmq_integration_suite",
+ "rabbitmq_lib",
+)
+
+APP_NAME = "rabbitmq_stream"
+
+APP_DESCRIPTION = "RabbitMQ Stream"
+
+APP_MODULE = "rabbit_stream"
+
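+# Default application environment for the plugin; mirrors PROJECT_ENV in
+# deps/rabbitmq_stream/Makefile (stream listener on 5552, no TLS listeners by default).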
+APP_ENV = """[
+ {tcp_listeners, [5552]},
+ {num_tcp_acceptors, 10},
+ {tcp_listen_options, [{backlog, 128},
+ {nodelay, true}]},
+ {ssl_listeners, []},
+ {num_ssl_acceptors, 10},
+ {ssl_listen_options, []},
+ {initial_credits, 50000},
+ {credits_required_for_unblocking, 12500},
+ {frame_max, 1048576},
+ {heartbeat, 60},
+ {advertised_host, undefined},
+ {advertised_port, undefined}
+]"""
+
+BUILD_DEPS = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+ "//deps/rabbitmq_cli:rabbitmqctl",
+ "@ranch//:bazel_erlang_lib",
+]
+
+DEPS = [
+ "//deps/rabbitmq_stream_common:bazel_erlang_lib",
+ "//deps/rabbit:bazel_erlang_lib",
+]
+
+rabbitmq_lib(
+ app_description = APP_DESCRIPTION,
+ app_env = APP_ENV,
+ app_module = APP_MODULE,
+ app_name = APP_NAME,
+ build_deps = BUILD_DEPS,
+ deps = DEPS,
+)
+
+xref(tags = ["xref"])
+
+dialyze(
+ dialyzer_opts = RABBITMQ_DIALYZER_OPTS,
+ plt = "//:base_plt",
+ tags = ["dialyze"],
+)
+
+broker_for_integration_suites()
+
+PACKAGE = "deps/rabbitmq_stream"
+
+suites = [
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "commands_SUITE",
+ additional_beam = [
+ ":rabbit_stream_SUITE_beam_files",
+ ],
+ data = glob(["test/rabbit_stream_SUITE_data/**/*"]),
+ flaky = True,
+ deps = [
+ "//deps/rabbitmq_stream_common:bazel_erlang_lib",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "config_schema_SUITE",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "rabbit_stream_utils_SUITE",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "rabbit_stream_manager_SUITE",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "rabbit_stream_SUITE",
+ shard_count = 3,
+ deps = [
+ "//deps/rabbit:bazel_erlang_lib",
+ "//deps/rabbitmq_stream_common:bazel_erlang_lib",
+ ],
+ ),
+]
+
+assert_suites(
+ suites,
+ glob(["test/**/*_SUITE.erl"]),
+)
diff --git a/deps/rabbitmq_stream/CONTRIBUTING.md b/deps/rabbitmq_stream/CONTRIBUTING.md
index 45bbcbe62e..339d097deb 100644
--- a/deps/rabbitmq_stream/CONTRIBUTING.md
+++ b/deps/rabbitmq_stream/CONTRIBUTING.md
@@ -13,7 +13,7 @@ The process is fairly standard:
* Create a branch with a descriptive name in the relevant repositories
* Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
 * Submit pull requests with an explanation of what has been changed and **why**
- * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Submit a filled out and signed [Contributor Agreement](https://cla.pivotal.io/) if needed (see below)
* Be patient. We will get to your pull request eventually
If what you are going to work on is a substantial change, please first ask the core team
@@ -28,7 +28,7 @@ See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
## Contributor Agreement
If you want to contribute a non-trivial change, please submit a signed copy of our
-[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+[Contributor Agreement](https://cla.pivotal.io/) around the time
you submit your pull request. This will make it much easier (in some cases, possible)
for the RabbitMQ team at Pivotal to merge your contribution.
diff --git a/deps/rabbitmq_stream/Makefile b/deps/rabbitmq_stream/Makefile
index 88d7717281..ac7c2e7472 100644
--- a/deps/rabbitmq_stream/Makefile
+++ b/deps/rabbitmq_stream/Makefile
@@ -4,10 +4,13 @@ PROJECT_MOD = rabbit_stream
define PROJECT_ENV
[
- {tcp_listeners, [5555]},
+ {tcp_listeners, [5552]},
{num_tcp_acceptors, 10},
{tcp_listen_options, [{backlog, 128},
{nodelay, true}]},
+ {ssl_listeners, []},
+ {num_ssl_acceptors, 10},
+ {ssl_listen_options, []},
{initial_credits, 50000},
{credits_required_for_unblocking, 12500},
{frame_max, 1048576},
@@ -18,8 +21,8 @@ define PROJECT_ENV
endef
-DEPS = rabbit
-TEST_DEPS = rabbitmq_ct_helpers rabbitmq_ct_client_helpers
+DEPS = rabbit rabbitmq_stream_common
+TEST_DEPS = rabbitmq_ct_helpers rabbitmq_ct_client_helpers amqp_client
DEP_EARLY_PLUGINS = rabbit_common/mk/rabbitmq-early-plugin.mk
DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
@@ -30,5 +33,5 @@ DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
ERLANG_MK_COMMIT = rabbitmq-tmp
-include rabbitmq-components.mk
-include erlang.mk
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
diff --git a/deps/rabbitmq_stream/README.adoc b/deps/rabbitmq_stream/README.adoc
index 56e942671b..5cc6cf8a5f 100644
--- a/deps/rabbitmq_stream/README.adoc
+++ b/deps/rabbitmq_stream/README.adoc
@@ -15,34 +15,11 @@ is currently the best way to experiment with the plugin.
The plugin uses a link:docs/PROTOCOL.adoc[custom binary protocol].
-== Project Maturity
-
-The project is in early stages of development and is considered experimental.
-It is not ready for production use.
== Support
* For questions: https://groups.google.com/forum/#!forum/rabbitmq-users[RabbitMQ Users]
-* For bugs and feature requests: https://github.com/rabbitmq/rabbitmq-stream/issues[GitHub Issues]
-
-The project is currently under development, there is no guarantee yet that it will be maintained and supported
-in the future (read: you are welcome to experiment with it and give feedback, but please do not base
-your whole business on it).
-
-== Build Instructions
-
-----
-git clone git@github.com:rabbitmq/rabbitmq-public-umbrella.git
-cd rabbitmq-public-umbrella
-make co
-make up BRANCH="stream-queue" -j 32
-cd deps
-git clone git@github.com:rabbitmq/rabbitmq-stream.git rabbitmq_stream
-cd rabbitmq_stream
-make run-broker
-----
-
-Then follow the instructions to https://github.com/rabbitmq/rabbitmq-stream-java-client[build the client and the performance tool].
+* For bugs and feature requests: https://github.com/rabbitmq/rabbitmq-server/issues[GitHub Issues]
== Licensing
@@ -50,4 +27,4 @@ Released under the link:LICENSE-MPL-RabbitMQ[MPL 2.0].
== Copyright
-(c) 2020 VMware, Inc. or its affiliates. \ No newline at end of file
+(c) 2020-2021 VMware, Inc. or its affiliates.
diff --git a/deps/rabbitmq_stream/docs/PROTOCOL.adoc b/deps/rabbitmq_stream/docs/PROTOCOL.adoc
index 14e149664c..ce59d11653 100644
--- a/deps/rabbitmq_stream/docs/PROTOCOL.adoc
+++ b/deps/rabbitmq_stream/docs/PROTOCOL.adoc
@@ -1,9 +1,9 @@
-= RabbitMQ Stream Protocol Reference
+= RabbitMQ Streams Protocol Reference
-This is the reference of the RabbitMQ stream protocol. Note the protocol
+This is the reference for the RabbitMQ Streams protocol. Note that the protocol
is still under development and is subject to change.
-The https://github.com/rabbitmq/rabbitmq-stream-java-client[RabbitMQ stream Java client]
+The https://github.com/rabbitmq/rabbitmq-stream-java-client[RabbitMQ Stream Java client]
is currently the reference implementation.
== Types
@@ -23,28 +23,67 @@ arrays - int32 for the length followed by the repetition of the structure, notat
```
Frame => Size (Request | Response | Command)
- Size => int32 (size without the 4 bytes of the size element)
+ Size => uint32 (size without the 4 bytes of the size element)
Request => Key Version (CorrelationId) Content
- Key => int16
- Version => int16
- CorrelationId => int32
+ Key => uint16
+ Version => uint16
+ CorrelationId => uint32
Command => bytes // see command details below
Response => Key Version CorrelationId ResponseCode
- Key => int16
- Version => int16
- CorrelationId => int32
- ResponseCode => int16
+ Key => uint16
+ Version => uint16
+ CorrelationId => uint32
+ ResponseCode => uint16
Command => Key Version Content
- Key => int16
- Version => int16
+ Key => uint16
+ Version => uint16
Content => bytes // see command details below
```
Most commands are request/reply, but some commands (e.g. `Deliver`) are one-direction only and thus
-doest not contain a correlation ID.
+do not contain a correlation ID.
+
+Some responses may carry more information than just the response code; this is specified in the command definition.
+
+Keys are uint16, but the actual value is encoded in the lower 15 bits; the most significant bit
+distinguishes a request (0) from a response (1). Example for `subscribe`
+(key is 6):
+
+```
+0x0006 => subscribe request
+0x8006 => subscribe response
+```
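
As an illustration of the envelope and of the key flag described above, a receiver could decode a frame with Erlang bit syntax roughly as follows (a sketch, not the broker's actual implementation):

```
%% Sketch: decoding the generic frame envelope described above.
%% Size excludes its own 4 bytes; the key's most significant bit
%% distinguishes requests (0) from responses (1), so the response
%% key for a request key K is K bor 16#8000.
decode_frame(<<Size:32, Frame:Size/binary, Remainder/binary>>) ->
    <<KeyWithFlag:16, Version:16, Rest/binary>> = Frame,
    Kind = case KeyWithFlag band 16#8000 of
               0 -> request;
               _ -> response
           end,
    {Kind, KeyWithFlag band 16#7FFF, Version, Rest, Remainder}.
```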
+
+== Response Codes
+
+.Stream Protocol Response Codes
+|===
+|Response|Code
+
+|OK|0x01
+|Stream does not exist|0x02
+|Subscription ID already exists|0x03
+|Subscription ID does not exist|0x04
+|Stream already exists|0x05
+|Stream not available|0x06
+|SASL mechanism not supported|0x07
+|Authentication failure|0x08
+|SASL error|0x09
+|SASL challenge|0x0a
+|SASL authentication failure loopback|0x0b
+|Virtual host access failure|0x0c
+|Unknown frame|0x0d
+|Frame too large|0x0e
+|Internal error|0x0f
+|Access refused|0x10
+|Precondition failed|0x11
+|Publisher does not exist|0x12
+|No offset|0x13
+
+|===
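
For illustration, a client could translate these codes into symbolic values along the following lines (a partial sketch; the atom names are chosen here for readability and are not mandated by the protocol):

```
%% Sketch: mapping a few response codes from the table above.
response_code(16#01) -> ok;
response_code(16#02) -> stream_does_not_exist;
response_code(16#05) -> stream_already_exists;
response_code(16#06) -> stream_not_available;
response_code(16#12) -> publisher_does_not_exist;
response_code(16#13) -> no_offset.
```
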
== Commands
@@ -52,118 +91,160 @@ doest not contain a correlation ID.
|===
|Command |From |Key | Expects response?
+|<<declarepublisher>>
+|Client
+|0x0001
+|Yes
+
|<<publish>>
|Client
-|0
+|0x0002
|No
|<<publishconfirm>>
|Server
-|1
+|0x0003
+|No
+
+|<<publisherror>>
+|Server
+|0x0004
|No
+|<<querypublishersequence>>
+|Client
+|0x0005
+|Yes
+
+|<<deletepublisher>>
+|Client
+|0x0006
+|Yes
+
|<<subscribe>>
|Client
-|2
+|0x0007
|Yes
|<<deliver>>
|Server
-|3
+|0x0008
|No
|<<credit>>
|Client
-|4
+|0x0009
+|No
+
+|<<storeoffset>>
+|Client
+|0x000a
|No
+|<<queryoffset>>
+|Client
+|0x000b
+|Yes
+
|<<unsubscribe>>
|Client
-|5
+|0x000c
|Yes
-|<<publisherror>>
-|Server
-|6
-|No
+|<<create>>
+|Client
+|0x000d
+|Yes
+
+|<<delete>>
+|Client
+|0x000e
+|Yes
+
+|<<metadata>>
+|Client
+|0x000f
+|Yes
|<<metadataupdate>>
|Server
-|7
+|0x0010
|No
-|<<metadata>>
+|<<peerproperties>>
|Client
-|8
-|No
+|0x0011
+|Yes
|<<saslhandshake>>
|Client
-|9
+|0x0012
|Yes
|<<saslauthenticate>>
|Client
-|10
+|0x0013
|Yes
|<<tune>>
|Server
-|11
+|0x0014
|Yes
|<<open>>
-|Server
-|12
+|Client
+|0x0015
|Yes
|<<close>>
|Client & Server
-|13
+|0x0016
|Yes
|<<heartbeat>>
|Client & Server
-|14
+|0x0017
|No
-|<<peerproperties>>
+|<<route>> (experimental)
|Client
-|15
+|0x0018
|Yes
-|<<commitoffset>>
-|Client
-|16
-|No
-
-|<<queryoffset>>
+|<<partitions>> (experimental)
|Client
-|17
+|0x0019
|Yes
+|===
-|<<create>>
-|Client
-|998
-|Yes
+=== DeclarePublisher
-|<<delete>>
-|Client
-|999
-|Yes
-|===
+```
+DeclarePublisherRequest => Key Version CorrelationId PublisherId [PublisherReference] Stream
+ Key => uint16 // 0x0001
+ Version => uint16
+ CorrelationId => uint32
+ PublisherId => uint8
+ PublisherReference => string // max 256 characters
+ Stream => string
+
+DeclarePublisherResponse => Key Version CorrelationId ResponseCode
+ Key => uint16 // 0x8001
+ Version => uint16
+ CorrelationId => uint32
+ ResponseCode => uint16
+```
=== Publish
```
-Publish => Key Version Stream PublishedMessages
- Key => int16 // 0
- Version => int16
- Stream => string // the name of the stream
+Publish => Key Version PublisherId PublishedMessages
+ Key => uint16 // 0x0002
+ Version => uint16
PublisherId => uint8
PublishedMessages => [PublishedMessage]
PublishedMessage => PublishingId Message
- PublishingId => int64
+ PublishingId => uint64
Message => bytes
```
@@ -171,138 +252,261 @@ Publish => Key Version Stream PublishedMessages
```
PublishConfirm => Key Version PublishingIds
- Key => int16 // 1
- Version => int16
+ Key => uint16 // 0x0003
+ Version => uint16
+ PublisherId => uint8
+ PublishingIds => [uint64] // to correlate with the messages sent
+```
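
As a sketch, assuming the Size/Key/Version envelope has already been stripped and relying on the array convention from the Types section (an int32 count followed by the elements), the body could be decoded like this:

```
%% Sketch: decoding a PublishConfirm body (Key 0x0003) once the
%% envelope has been removed.
decode_publish_confirm(<<PublisherId:8, Count:32, Ids:Count/binary-unit:64>>) ->
    {PublisherId, [PublishingId || <<PublishingId:64>> <= Ids]}.
```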
+
+=== PublishError
+
+```
+PublishError => Key Version PublisherId [PublishingError]
+ Key => uint16 // 0x0004
+ Version => uint16
+ PublisherId => uint8
+ PublishingError => PublishingId Code
+ PublishingId => uint64
+ Code => uint16 // code to identify the problem
+```
+
+=== QueryPublisherSequence
+
+```
+QueryPublisherRequest => Key Version CorrelationId PublisherReference Stream
+ Key => uint16 // 0x0005
+ Version => uint16
+ CorrelationId => uint32
+ PublisherReference => string // max 256 characters
+ Stream => string
+
+QueryPublisherResponse => Key Version CorrelationId ResponseCode Sequence
+ Key => uint16 // 0x8005
+ Version => uint16
+ CorrelationId => uint32
+ ResponseCode => uint16
+ Sequence => uint64
+```
+
+=== DeletePublisher
+
+```
+DeletePublisherRequest => Key Version CorrelationId PublisherId
+ Key => uint16 // 0x0006
+ Version => uint16
+ CorrelationId => uint32
PublisherId => uint8
- PublishingIds => [int64] // to correlate with the messages sent
+
+DeletePublisherResponse => Key Version CorrelationId ResponseCode
+ Key => uint16 // 0x8006
+ Version => uint16
+ CorrelationId => uint32
+ ResponseCode => uint16
```
=== Subscribe
```
-Subscribe => Key Version CorrelationId SubscriptionId Stream OffsetSpecification Credit
- Key => int16 // 2
- Version => int16
- CorrelationId => int32 // correlation id to correlate the response
+Subscribe => Key Version CorrelationId SubscriptionId Stream OffsetSpecification Credit Properties
+ Key => uint16 // 0x0007
+ Version => uint16
+ CorrelationId => uint32 // correlation id to correlate the response
SubscriptionId => uint8 // client-supplied id to identify the subscription
Stream => string // the name of the stream
OffsetSpecification => OffsetType Offset
- OffsetType => int16 // 0 (first), 1 (last), 2 (next), 3 (offset), 4 (timestamp)
+ OffsetType => uint16 // 1 (first), 2 (last), 3 (next), 4 (offset), 5 (timestamp)
Offset => uint64 (for offset) | int64 (for timestamp)
- Credit => int16
+ Credit => uint16
+ Properties => [Property]
+ Property => Key Value
+ Key => string
+ Value => string
```
+NB: Timestamp is https://www.erlang.org/doc/apps/erts/time_correction.html#Erlang_System_Time[Erlang system time],
+in milliseconds since the epoch.
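
A sketch of encoding the offset specification values above; the assumption here is that `first`, `last` and `next` carry no `Offset` value on the wire, while `offset` and `timestamp` do:

```
%% Sketch: OffsetSpecification encoding (types from the definition above:
%% 1 first, 2 last, 3 next, 4 offset, 5 timestamp).
%% Assumption: only the offset and timestamp types carry an Offset value.
offset_spec(first)           -> <<1:16>>;
offset_spec(last)            -> <<2:16>>;
offset_spec(next)            -> <<3:16>>;
offset_spec({offset, Off})   -> <<4:16, Off:64>>;
offset_spec({timestamp, Ts}) -> <<5:16, Ts:64/signed>>.
```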
+
=== Deliver
```
Deliver => Key Version SubscriptionId OsirisChunk
- Key => int16 // 3
- Version => int32
+ Key => uint16 // 0x0008
+ Version => uint16
SubscriptionId => uint8
OsirisChunk => MagicVersion NumEntries NumRecords Epoch ChunkFirstOffset ChunkCrc DataLength Messages
MagicVersion => int8
+ ChunkType => int8 // 0: user, 1: tracking delta, 2: tracking snapshot
NumEntries => uint16
NumRecords => uint32
+ Timestamp => int64 // erlang system time in milliseconds, since epoch
Epoch => uint64
ChunkFirstOffset => uint64
ChunkCrc => int32
DataLength => uint32
- Messages => [Message] // no int32 for the size for this array
+ TrailerLength => uint32
+  Reserved => uint32 // unused 4 bytes
+  Messages => [Message] // no int32 for the size of this array; the size is given by the NumEntries field above
Message => EntryTypeAndSize
Data => bytes
```
-NB: See the https://github.com/rabbitmq/osiris/blob/348db0528986d6025b823bcf1ae0570aa63f5e25/src/osiris_log.erl#L49-L81[Osiris project]
+NB: See the https://github.com/rabbitmq/osiris/blob/f32df7563a036b1687c0208a3cb5f9e8f5cee937/src/osiris_log.erl#L101[Osiris project]
for details on the structure of messages.
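
As a rough illustration (the Osiris source linked above is authoritative), matching the fixed-size chunk header fields in the order listed here could look like this:

```
%% Sketch: matching the chunk header fields in the order listed above.
%% The exact layout belongs to Osiris; treat this as illustrative only.
parse_chunk_header(<<MagicVersion:8/signed, ChunkType:8/signed,
                     NumEntries:16, NumRecords:32, Timestamp:64/signed,
                     Epoch:64, ChunkFirstOffset:64, ChunkCrc:32/signed,
                     DataLength:32, TrailerLength:32, _Reserved:32,
                     Rest/binary>>) ->
    {MagicVersion, ChunkType, NumEntries, NumRecords, Timestamp, Epoch,
     ChunkFirstOffset, ChunkCrc, DataLength, TrailerLength, Rest}.
```
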
=== Credit
```
Credit => Key Version SubscriptionId Credit
- Key => int16 // 4
- Version => int16
- SubscriptionId => int8
- Credit => int16 // the number of chunks that can be sent
+ Key => uint16 // 0x0009
+ Version => uint16
+ SubscriptionId => uint8
+ Credit => uint16 // the number of chunks that can be sent
CreditResponse => Key Version ResponseCode SubscriptionId
- Key => int16 // 4
- Version => int16
- ResponseCode => int16
- SubscriptionId => int8
+ Key => uint16 // 0x8009
+ Version => uint16
+ ResponseCode => uint16
+ SubscriptionId => uint8
```
NB: the server sends a response only in case of a problem, e.g. crediting an unknown subscription.
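
Because all of its fields are fixed-size, `Credit` makes a compact end-to-end example; a sketch of building the whole frame, envelope included (the protocol Version value of 1 is an assumption, it is not stated in this document):

```
%% Sketch: a complete Credit frame (Key 0x0009); Size excludes itself.
%% The Version value (1) is an assumption.
credit_frame(SubscriptionId, Credit) ->
    Body = <<16#0009:16, 1:16, SubscriptionId:8, Credit:16>>,
    <<(byte_size(Body)):32, Body/binary>>.
```
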
+=== StoreOffset
+
+```
+StoreOffset => Key Version Reference Stream Offset
+ Key => uint16 // 0x000a
+ Version => uint16
+ Reference => string // max 256 characters
+ Stream => string // the name of the stream
+ Offset => uint64
+```
+
+=== QueryOffset
+
+```
+QueryOffsetRequest => Key Version CorrelationId Reference Stream
+ Key => uint16 // 0x000b
+ Version => uint16
+ CorrelationId => uint32
+ Reference => string // max 256 characters
+ Stream => string
+
+QueryOffsetResponse => Key Version CorrelationId ResponseCode Offset
+ Key => uint16 // 0x800b
+ Version => uint16
+ CorrelationId => uint32
+ ResponseCode => uint16
+ Offset => uint64
+```
+
=== Unsubscribe
```
Unsubscribe => Key Version CorrelationId SubscriptionId
- Key => int16 // 5
- Version => int16
- CorrelationId => int32
- SubscriptionId => int8
+ Key => uint16 // 0x000c
+ Version => uint16
+ CorrelationId => uint32
+ SubscriptionId => uint8
```
-=== PublishError
+=== Create
```
-PublishError => Key Version [PublishingError]
- Key => int16 // 6
- Version => int16
- PublisherId => int8
- PublishingError => PublishingId Code
- PublishingId => int64
- Code => int16 // code to identify the problem
+Create => Key Version CorrelationId Stream Arguments
+ Key => uint16 // 0x000d
+ Version => uint16
+ CorrelationId => uint32
+ Stream => string
+ Arguments => [Argument]
+ Argument => Key Value
+ Key => string
+ Value => string
```
-=== MetadataUpdate
+=== Delete
```
-MetadataUpdate => Key Version MetadataInfo
- Key => int16 // 7
- Version => int16
- MetadataInfo => Code Stream
- Code => int16 // code to identify the information
- Stream => string // the stream implied
+Delete => Key Version CorrelationId Stream
+ Key => uint16 // 0x000e
+ Version => uint16
+ CorrelationId => uint32
+ Stream => string
```
=== Metadata
```
MetadataQuery => Key Version CorrelationId [Stream]
- Key => int16 // 8
- Version => int16
- CorrelationId => int32
+ Key => uint16 // 0x000f
+ Version => uint16
+ CorrelationId => uint32
Stream => string
MetadataResponse => Key Version CorrelationId [Broker] [StreamMetadata]
- Key => int16 // 8
- Version => int16
- CorrelationId => int32
+ Key => uint16 // 0x800f
+ Version => uint16
+ CorrelationId => uint32
Broker => Reference Host Port
- Reference => int16
+ Reference => uint16
Host => string
- Port => int32
+ Port => uint32
StreamMetadata => StreamName LeaderReference ReplicasReferences
StreamName => string
- ResponseCode => int16
- LeaderReference => int16
- ReplicasReferences => [int16]
+ ResponseCode => uint16
+ LeaderReference => uint16
+ ReplicasReferences => [uint16]
+```
+
+=== MetadataUpdate
+
+```
+MetadataUpdate => Key Version MetadataInfo
+ Key => uint16 // 0x0010
+ Version => uint16
+ MetadataInfo => Code Stream
+ Code => uint16 // code to identify the information
+ Stream => string // the stream implied
+```
+
+=== PeerProperties
+
+```
+PeerPropertiesRequest => Key Version CorrelationId PeerProperties
+ Key => uint16 // 0x0011
+ Version => uint16
+ CorrelationId => uint32
+ PeerProperties => [PeerProperty]
+ PeerProperty => Key Value
+ Key => string
+ Value => string
+
+PeerPropertiesResponse => Key Version CorrelationId ResponseCode PeerProperties
+ Key => uint16 // 0x8011
+ Version => uint16
+ CorrelationId => uint32
+ ResponseCode => uint16
+ PeerProperties => [PeerProperty]
+ PeerProperty => Key Value
+ Key => string
+ Value => string
```
=== SaslHandshake
```
SaslHandshakeRequest => Key Version CorrelationId Mechanism
- Key => int16 // 9
- Version => int16
- CorrelationId => int32
-
-SaslHandshakeResponse => Key Version CorrelationId ResponseCode [Mechanism]
- Key => int16 // 9
- Version => int16
- CorrelationId => int32
- ResponseCode => int16
+ Key => uint16 // 0x0012
+ Version => uint16
+ CorrelationId => uint32
+ Mechanism => string
+
+SaslHandshakeResponse => Key Version CorrelationId ResponseCode [Mechanisms]
+ Key => uint16 // 0x8012
+ Version => uint16
+ CorrelationId => uint32
+ ResponseCode => uint16
+ Mechanisms => [Mechanism]
Mechanism => string
```
@@ -310,17 +514,17 @@ SaslHandshakeResponse => Key Version CorrelationId ResponseCode [Mechanism]
```
SaslAuthenticateRequest => Key Version CorrelationId Mechanism SaslOpaqueData
- Key => int16 // 10
- Version => int16
- CorrelationId => int32
+ Key => uint16 // 0x0013
+ Version => uint16
+ CorrelationId => uint32
Mechanism => string
SaslOpaqueData => bytes
SaslAuthenticateResponse => Key Version CorrelationId ResponseCode SaslOpaqueData
- Key => int16 // 10
- Version => int16
- CorrelationId => int32
- ResponseCode => int16
+ Key => uint16 // 0x8013
+ Version => uint16
+ CorrelationId => uint32
+ ResponseCode => uint16
SaslOpaqueData => bytes
```
@@ -328,10 +532,10 @@ SaslAuthenticateResponse => Key Version CorrelationId ResponseCode SaslOpaqueDat
```
TuneRequest => Key Version FrameMax Heartbeat
- Key => int16 // 11, to identify the command
- Version => int16
- FrameMax => int32 // in bytes, 0 means no limit
- Heartbeat => int32 // in seconds, 0 means no heartbeat
+ Key => uint16 // 0x0014
+ Version => uint16
+ FrameMax => uint32 // in bytes, 0 means no limit
+ Heartbeat => uint32 // in seconds, 0 means no heartbeat
TuneResponse => TuneRequest
```
@@ -340,117 +544,81 @@ TuneResponse => TuneRequest
```
OpenRequest => Key Version CorrelationId VirtualHost
- Key => int16 // 12
- Version => int16
- CorrelationId => int32
+ Key => uint16 // 0x0015
+ Version => uint16
+ CorrelationId => uint32
VirtualHost => string
-OpenResponse => Key Version CorrelationId ResponseCode
- Key => int16 // 12
- Version => int16
- CorrelationId => int32
- ResponseCode => int16
+OpenResponse => Key Version CorrelationId ResponseCode ConnectionProperties
+ Key => uint16 // 0x8015
+ Version => uint16
+ CorrelationId => uint32
+ ResponseCode => uint16
+ ConnectionProperties => [ConnectionProperty]
+ ConnectionProperty => Key Value
+ Key => string
+ Value => string
```
=== Close
```
CloseRequest => Key Version CorrelationId ClosingCode ClosingReason
- Key => int16 // 13
- Version => int16
- CorrelationId => int32
- ClosingCode => int16
+ Key => uint16 // 0x0016
+ Version => uint16
+ CorrelationId => uint32
+ ClosingCode => uint16
ClosingReason => string
CloseResponse => Key Version CorrelationId ResponseCode
- Key => int16 // 13
- Version => int16
- CorrelationId => int32
- ResponseCode => int16
+ Key => uint16 // 0x8016
+ Version => uint16
+ CorrelationId => uint32
+ ResponseCode => uint16
```
=== Heartbeat
```
Heartbeat => Key Version
- Key => int16 // 14
- Version => int16
-```
-
-=== PeerProperties
-
+ Key => uint16 // 0x0017
+ Version => uint16
```
-PeerPropertiesRequest => Key Version PeerProperties
- Key => int16 // 15
- Version => int16
- CorrelationId => int32
- PeerProperties => [PeerProperty]
- PeerProperty => Key Value
- Key => string
- Value => string
-SaslAuthenticateResponse => Key Version CorrelationId ResponseCode PeerProperties
- Key => int16 // 15
- Version => int16
- CorrelationId => int32
- ResponseCode => int16
- PeerProperties => [PeerProperty]
- PeerProperty => Key Value
- Key => string
- Value => string
-```
+=== Route
-=== CommitOffset
+_Experimental_
```
-CommitOffset => Key Version Reference Stream Offset
- Key => int16 // 16
- Version => int16
- CorrelationId => int32 // not used yet
- Reference => string // max 256 characters
- SubscriptionId => uint8
- Offset => int64
-```
+RouteQuery => Key Version CorrelationId RoutingKey SuperStream
+ Key => uint16 // 0x0018
+ Version => uint16
+ CorrelationId => uint32
+ RoutingKey => string
+ SuperStream => string
-=== QueryOffset
-
-```
-QueryOffsetRequest => Key Version CorrelationId Reference Stream
- Key => int16 // 17
- Version => int16
- CorrelationId => int32
- Reference => string // max 256 characters
+RouteResponse => Key Version CorrelationId [Stream]
+ Key => uint16 // 0x8018
+ Version => uint16
+ CorrelationId => uint32
Stream => string
-
-QueryOffsetResponse => Key Version CorrelationId Reference Stream
- Key => int16 // 17
- Version => int16
- CorrelationId => int32
- ResponseCode => int16
- Offset => int64
```
-=== Create
-
-```
-Create => Key Version CorrelationId Stream Arguments
- Key => int16 // 998
- Version => int16
- CorrelationId => int32
- Stream => string
- Arguments => [Argument]
- Argument => Key Value
- Key => string
- Value => string
-```
+=== Partitions
-=== Delete
+_Experimental_
```
-Delete => Key Version CorrelationId Stream
- Key => int16 // 999
- Version => int16
- CorrelationId => int32
+PartitionsQuery => Key Version CorrelationId SuperStream
+ Key => uint16 // 0x0019
+ Version => uint16
+ CorrelationId => uint32
+ SuperStream => string
+
+PartitionsResponse => Key Version CorrelationId [Stream]
+ Key => uint16 // 0x8019
+ Version => uint16
+ CorrelationId => uint32
Stream => string
```
@@ -496,4 +664,9 @@ with the client authentication response.
The client answers with a `Tune` frame with the settings it agrees on, possibly adjusted
from the server's suggestions.
* Open: the client sends an `Open` frame to pick a virtual host to connect to. The server
-answers whether it accepts the access or not. \ No newline at end of file
+answers whether it accepts the access or not.
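
A compressed sketch of the tail of that sequence, built from the `Tune` (0x0014) and `Open` (0x0015) definitions above; the Version value of 1 and the pre-encoded virtual host string are assumptions, and `send_frame/2` is a local helper defined for the sketch:

```
%% Sketch only: answer the server's Tune, then open a virtual host.
%% VHostBin is assumed to be the virtual host already encoded as a
%% protocol string; the Version value (1) is an assumption as well.
finish_handshake(Socket, FrameMax, Heartbeat, VHostBin, CorrelationId) ->
    %% Tune answer: echo the (possibly adjusted) FrameMax and Heartbeat.
    ok = send_frame(Socket, <<16#0014:16, 1:16, FrameMax:32, Heartbeat:32>>),
    %% Open: pick a virtual host; the server answers with a response code
    %% and connection properties.
    ok = send_frame(Socket, <<16#0015:16, 1:16, CorrelationId:32,
                              VHostBin/binary>>),
    ok.

%% Prepend the Size field (excluding its own 4 bytes) and write the frame.
send_frame(Socket, Body) ->
    gen_tcp:send(Socket, <<(byte_size(Body)):32, Body/binary>>).
```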
+
+== Resources
+
+- https://docs.google.com/presentation/d/1Hlv4qaWm2PRU04dVPmShP9wU7TEQEttXdsbV8P54Uvw/edit#slide=id.gdbeadf9676_0_37[RabbitMQ Streams client]: a general guideline for writing a streams client
+- https://docs.google.com/presentation/d/1BFwf01LcicZ-SyxE1CycZv2gUQMPFGdtFkVuXhgkoTE/edit#slide=id.p1[RabbitMQ Streams Internals]: how streams work internally
diff --git a/deps/rabbitmq_stream/erlang.mk b/deps/rabbitmq_stream/erlang.mk
deleted file mode 100644
index 83988d3025..0000000000
--- a/deps/rabbitmq_stream/erlang.mk
+++ /dev/null
@@ -1,7712 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-18-g7edc30a
-ERLANG_MK_WITHOUT =
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
-
-# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
-
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
-
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of statc BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another a key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstact time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simplify writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating Github-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = erlang library for JSON Web Token
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple non intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is a pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's SpiderMonkey JavaScript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's REST interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = redis erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = TOML language erlang parser
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = http://fixprotocol.org/ implementation.
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - an Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = The guards and for Erlang parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
-pkg_gun_homepage = http://ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang irc client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library for efficiently decoding and encoding JSON, written in C.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = erlang driver for rethinkdb
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = library to handle mimetypes
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang OAuth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transform for using new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between record and proplist
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = automatic Erlang node discovery via redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = pipelined erlang redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang Rest Client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy as nif for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an ID generator for message services.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elastic Search
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX style broken HTML parser in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = transit format for erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU based collation Erlang module
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtension for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = a simple erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler svn repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired by rebar xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based yaml loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = ZAB protocol implemented in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
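-
-# A quick usage sketch for the search target above:
-#
-#   make search          # print every package in the index
-#   make search q=pool   # only packages whose name or description matches "pool"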
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
-
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
-
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# They both use the core_dep_plugin macro.
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
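-
-# A minimal sketch of how DEP_EARLY_PLUGINS is consumed above ("my_dep" and
-# the file names are hypothetical):
-#
-#   DEP_EARLY_PLUGINS = my_dep
-#     includes $(DEPS_DIR)/my_dep/early-plugins.mk
-#
-#   DEP_EARLY_PLUGINS = my_dep/mk/early.mk
-#     includes $(DEPS_DIR)/my_dep/mk/early.mk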
-
-# Query functions.
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
-
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
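-
-# As an illustration of the query functions above, a dependency declared as
-# follows (name, URL and tag are just an example):
-#
-#   DEPS = cowboy
-#   dep_cowboy = git https://github.com/ninenines/cowboy 2.9.0
-#
-# resolves to fetch method "git", repository
-# "https://github.com/ninenines/cowboy" and version/commit "2.9.0".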
-
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly we don't want to include it here;
-# otherwise it would be treated both as an app and as the top-level project.
-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
-
-export NO_AUTOPATCH
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
-
-# Optimization: don't recompile deps unless truly necessary.
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create ebin directory for all apps to make sure Erlang recognizes them
-# as proper OTP applications when using -include_lib. This is a temporary
-# fix; a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
-
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies after they have been compiled
-# once. Developers working on the top-level project and on some of its
-# dependencies at the same time may want to change this behavior.
-# There are two options (see the usage sketch further below):
-#     1. Set `FULL=1` so that all dependencies are visited and
-#        recursively recompiled if necessary.
-#     2. Set `FORCE_REBUILD` to the specific list of dependencies that
-#        should be recompiled (instead of the whole set).
-
-FORCE_REBUILD ?=
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
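-
-# Usage sketch for the two options described above (dependency names are
-# purely illustrative):
-#
-#   make FULL=1                        # visit and recompile all dependencies
-#   make FORCE_REBUILD="cowlib ranch"  # recompile only the listed dependencies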
-
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
-
-# Deps related targets.
-
-# @todo Rename GNUmakefile and makefile to Makefile first, if they exist.
-# While the Makefile could also be named GNUmakefile or makefile,
-# in practice only Makefile has been needed so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that loads the parent Erlang.mk
-# (via ERLANG_MK_FILENAME) when one is given. Do it for all 3 possible
-# Makefile file names.
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
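-
-# As an example, the substitution above turns
-#
-#   include erlang.mk
-#
-# into
-#
-#   include $(if $(ERLANG_MK_FILENAME),$(ERLANG_MK_FILENAME),erlang.mk)
-#
-# so the dependency reuses the parent project's Erlang.mk when available.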
-
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lerl_interface -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-# Hex only has a package version. No need to look in the Erlang.mk packages.
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
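-
-# A hex dependency is declared with a version and, optionally, a package
-# name that differs from the application name. A sketch (the versions and
-# the "my_app"/"actual_pkg" names are only illustrative):
-#
-#   DEPS = cowlib
-#   dep_cowlib = hex 2.12.1
-#
-#   DEPS = my_app
-#   dep_my_app = hex 1.0.0 actual_pkg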
-
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility with older Erlang.mk configurations.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
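-
-# The deprecated format warned about above puts the repository first and the
-# commit second, with no fetch method (URL and tag are only an example):
-#
-#   dep_cowboy = https://github.com/ninenines/cowboy 1.0.0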
-
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
-
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- ok = file:write_file("$(1)", unicode:characters_to_binary([
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ])),
- halt()
-endef
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
- echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
-
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir:
- $(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(call core_find,$(TEST_DIR)/,*.erl)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want to fail due to
-# warnings when used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
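-
-# For example (a sketch, not an exhaustive mapping), with
-#
-#   ERLC_OPTS = -Werror +debug_info +warn_export_vars
-#
-# the generated rebar.config contains:
-#
-#   {erl_opts, [debug_info,warn_export_vars]}.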
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
- " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
- " list-templates List available templates"
-
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
-
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
-
-list-templates:
- $(verbose) @echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
-
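A few illustrative invocations of the bootstrap and template targets above; the application and module names are made up.

# make bootstrap bootstrap-rel               # OTP application + relx skeleton
# make new-app in=webui                      # apps/webui with its own Makefile
# make new t=gen_server n=poller in=webui    # renders tpl_gen_server
# make list-templates                        # prints the tpl_* names defined above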
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code. The "gcc" MSYS2 package also doesn't.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lerl_interface -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(?F);
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(?F);
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
-
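An illustrative NIF bootstrap using the templates above; the module name is hypothetical.

# make new-nif n=hello_nif   # renders bs_c_nif into c_src/ and bs_erl_nif into src/
# make                       # compiles c_src/*.c and links priv/$(PROJECT).so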
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
- "The CI_OTP variable must be defined with the Erlang versions" \
- "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
-
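An illustrative CI configuration for the targets above; the OTP version strings are placeholders.

CI_OTP = OTP-22.3.4 OTP-23.3.1
# make ci-prepare   # builds each listed OTP under $(KERL_INSTALL_DIR)
# make ci           # runs "make ci-setup tests" once per version, then ci-extra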
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ct apps-ct distclean-ct
-
-# Configuration.
-
-CT_OPTS ?=
-
-ifneq ($(wildcard $(TEST_DIR)),)
-ifndef CT_SUITES
-CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
-endif
-endif
-CT_SUITES ?=
-CT_LOGS_DIR ?= $(CURDIR)/logs
-
-# Core targets.
-
-tests:: ct
-
-ifndef KEEP_LOGS
-distclean:: distclean-ct
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Common_test targets:" \
- " ct Run all the common_test suites for this project" \
- "" \
- "All your common_test suites have their associated targets." \
-		"A suite named http_SUITE can be run using the ct-http target."
-
-# Plugin-specific targets.
-
-CT_RUN = ct_run \
- -no_auto_compile \
- -noinput \
- -pa $(CURDIR)/ebin $(TEST_DIR) \
- -dir $(TEST_DIR) \
- -logdir $(CT_LOGS_DIR)
-
-ifeq ($(CT_SUITES),)
-ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
-else
-# We do not run tests if we are in an apps/* with no test directory.
-ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
-ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
-endif
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-define ct_app_target
-apps-ct-$1: test-build
- $$(MAKE) -C $1 ct IS_APP=1
-endef
-
-$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
-
-apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
-endif
-
-ifdef t
-ifeq (,$(findstring :,$t))
-CT_EXTRA = -group $t
-else
-t_words = $(subst :, ,$t)
-CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
-endif
-else
-ifdef c
-CT_EXTRA = -case $c
-else
-CT_EXTRA =
-endif
-endif
-
-define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
-endef
-
-$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
-
-distclean-ct:
- $(gen_verbose) rm -rf $(CT_LOGS_DIR)
-
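Illustrative ways to drive the generated common_test targets; the suite, group and case names are hypothetical.

# make ct                        # every *_SUITE.erl found in $(TEST_DIR)
# make ct-http                   # http_SUITE only, via ct_suite_target
# make ct-http t=admin           # adds -group admin
# make ct-http t=admin:login     # adds -group admin -case login
# make ct CT_OPTS="-repeat 3"    # extra arguments passed through to ct_run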
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: plt distclean-plt dialyze
-
-# Configuration.
-
-DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
-export DIALYZER_PLT
-
-PLT_APPS ?=
-DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-DIALYZER_PLT_OPTS ?=
-
-# Core targets.
-
-check:: dialyze
-
-distclean:: distclean-plt
-
-help::
- $(verbose) printf "%s\n" "" \
- "Dialyzer targets:" \
- " plt Build a PLT file for this project" \
- " dialyze Analyze the project using Dialyzer"
-
-# Plugin-specific targets.
-
-define filter_opts.erl
- Opts = init:get_plain_arguments(),
- {Filtered, _} = lists:foldl(fun
- (O, {Os, true}) -> {[O|Os], false};
- (O = "-D", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-I", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-pa", {Os, _}) -> {[O|Os], true};
- (_, Acc) -> Acc
- end, {[], false}, Opts),
- io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
- halt().
-endef
-
-# DIALYZER_PLT is a variable understood directly by Dialyzer.
-#
-# We append the path to erts at the end of the PLT. This works
-# because the PLT file is in the external term format and the
-# function binary_to_term/1 ignores any trailing data.
-$(DIALYZER_PLT): deps app
- $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
- while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
- $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
- erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
- $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
-
-plt: $(DIALYZER_PLT)
-
-distclean-plt:
- $(gen_verbose) rm -f $(DIALYZER_PLT)
-
-ifneq ($(wildcard $(DIALYZER_PLT)),)
-dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
- $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
- grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
- rm $(DIALYZER_PLT); \
- $(MAKE) plt; \
- fi
-else
-dialyze: $(DIALYZER_PLT)
-endif
- $(verbose) dialyzer --no_native `$(ERL) \
- -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
- -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
-
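A sketch of typical settings for this Dialyzer plugin; the application names are placeholders.

PLT_APPS = crypto public_key ssl
DIALYZER_OPTS = -Werror_handling -Wunmatched_returns
# make plt       # builds .$(PROJECT).plt from erts/kernel/stdlib plus the above
# make dialyze   # rebuilds the PLT first if the recorded erts path has changed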
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
- " escript Build an executable escript archive" \
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
- $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
-
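An illustrative escript build; the script name is hypothetical and 7z must be on the PATH since ESCRIPT_ZIP uses it by default.

ESCRIPT_NAME = mytool
# make escript   # zips ebin/ (and the deps' ebin/ dirs), prepends the shebang
#                # and emulator arguments, and produces an executable ./mytool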
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: eunit apps-eunit
-
-# Configuration
-
-EUNIT_OPTS ?=
-EUNIT_ERL_OPTS ?=
-
-# Core targets.
-
-tests:: eunit
-
-help::
- $(verbose) printf "%s\n" "" \
- "EUnit targets:" \
- " eunit Run all the EUnit tests for this project"
-
-# Plugin-specific targets.
-
-define eunit.erl
- $(call cover.erl)
- CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
- ok -> ok;
- error -> halt(2)
- end,
- CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
- halt()
-endef
-
-EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
-else
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
-endif
-else
-EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
-EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
-
-EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
- $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
-
-eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
-ifneq ($(wildcard src/ $(TEST_DIR)),)
- $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-apps-eunit: test-build
- $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
- [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
- exit $$eunit_retcode
-endif
-endif
-
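Illustrative EUnit invocations against the targets above; module and test names are made up.

# make eunit                        # all modules from ebin/ and $(TEST_DIR)
# make eunit t=my_mod               # eunit:test(['my_mod'], EUNIT_OPTS)
# make eunit t=my_mod:empty_test    # runs fun my_mod:empty_test/0 only
# make eunit COVER=1                # additionally exports eunit.coverdata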
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper)
-.PHONY: proper
-
-# Targets.
-
-tests:: proper
-
-define proper_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- Module = fun(M) ->
- [true] =:= lists:usort([
- case atom_to_list(F) of
- "prop_" ++ _ ->
- io:format("Testing ~p:~p/0~n", [M, F]),
- proper:quickcheck(M:F(), nocolors);
- _ ->
- true
- end
- || {F, 0} <- M:module_info(exports)])
- end,
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]);
- module -> Module($(2));
- function -> proper:quickcheck($(2), nocolors)
- end,
- CoverExport("$(COVER_DATA_DIR)/proper.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-proper: test-build cover-data-dir
- $(verbose) $(call erlang,$(call proper_check.erl,module,$(t)))
-else
-proper: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)()))
-endif
-else
-proper: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Verbosity.
-
-proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
-proto_verbose = $(proto_verbose_$(V))
-
-# Core targets.
-
-ifneq ($(wildcard src/),)
-ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
-PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
-ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
-
-ifeq ($(PROTO_FILES),)
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
- $(verbose) :
-else
-# Rebuild proto files when the Makefile changes.
-# We exclude $(PROJECT).d to avoid a circular dependency.
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(PROTO_FILES); \
- fi
- $(verbose) touch $@
-
-$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
-endif
-
-ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
-define compile_proto.erl
- [begin
- protobuffs_compile:generate_source(F, [
- {output_include_dir, "./include"},
- {output_src_dir, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-else
-define compile_proto.erl
- [begin
- gpb_compile:file(F, [
- {include_as_lib, true},
- {module_name_suffix, "_pb"},
- {o_hrl, "./include"},
- {o_erl, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-endif
-
-ifneq ($(PROTO_FILES),)
-$(PROJECT).d:: $(PROTO_FILES)
- $(verbose) mkdir -p ebin/ include/
- $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
-endif
-endif
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
-
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
- " shell Run an erlang shell with SHELL_OPTS or reasonable default"
-
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
-
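A sketch of a typical shell configuration; the dependency and node name are hypothetical.

SHELL_DEPS = tddreloader
SHELL_OPTS = -sname dev -s tddreloader start
# make shell   # builds SHELL_DEPS, then runs: erl -pa $(SHELL_PATHS) $(SHELL_OPTS)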
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
-		"ReST sources and the 'conf.py' file are expected in the directory pointed to by" \
- "SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists formats to build (only" \
- "'html' format is generated by default); target directory can be specified by" \
- 'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
- "Additional Sphinx options can be set in SPHINX_OPTS."
-
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
-
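An illustrative Sphinx setup; the output directories are placeholders, following the sphinx_$(format)_output convention described in the help text above.

SPHINX_FORMATS = html man
sphinx_html_output = doc/output/html
sphinx_man_output = doc/output/man
# make sphinx   # one sphinx-build run per format listed in SPHINX_FORMATS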
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
-.PHONY: triq
-
-# Targets.
-
-tests:: triq
-
-define triq_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
- module -> triq:check($(2));
- function -> triq:check($(2))
- end,
- CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-triq: test-build cover-data-dir
- $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
-else
-triq: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
-endif
-else
-triq: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
- ' xref Run Xrefr using $$XREF_CONFIG as config file if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-COVER_REPORT_DIR ?= cover
-COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
-
-ifdef COVER
-COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
-COVER_DEPS ?=
-endif
-
-# Code coverage for Common Test.
-
-ifdef COVER
-ifdef CT_RUN
-ifneq ($(wildcard $(TEST_DIR)),)
-test-build:: $(TEST_DIR)/ct.cover.spec
-
-$(TEST_DIR)/ct.cover.spec: cover-data-dir
- $(gen_verbose) printf "%s\n" \
- "{incl_app, '$(PROJECT)', details}." \
- "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
- $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
- $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
-
-CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
-endif
-endif
-endif
-
-# Code coverage for other tools.
-
-ifdef COVER
-define cover.erl
- CoverSetup = fun() ->
- Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
- $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
- $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
- end
- end || Dir <- Dirs]
- end,
- CoverExport = fun(Filename) -> cover:export(Filename) end,
-endef
-else
-define cover.erl
- CoverSetup = fun() -> ok end,
- CoverExport = fun(_) -> ok end,
-endef
-endif
-
-# Core targets
-
-ifdef COVER
-ifneq ($(COVER_REPORT_DIR),)
-tests::
- $(verbose) $(MAKE) --no-print-directory cover-report
-endif
-
-cover-data-dir: | $(COVER_DATA_DIR)
-
-$(COVER_DATA_DIR):
- $(verbose) mkdir -p $(COVER_DATA_DIR)
-else
-cover-data-dir:
-endif
-
-clean:: coverdata-clean
-
-ifneq ($(COVER_REPORT_DIR),)
-distclean:: cover-report-clean
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Cover targets:" \
-		"  cover-report  Generate an HTML coverage report from previously collected" \
- " cover data." \
- " all.coverdata Merge all coverdata files into all.coverdata." \
- "" \
- "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
-		"target tests additionally generates an HTML coverage report from the combined" \
- "coverdata files from each of these testing tools. HTML reports can be disabled" \
- "by setting COVER_REPORT_DIR to empty."
-
-# Plugin specific targets
-
-COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
-
-.PHONY: coverdata-clean
-coverdata-clean:
- $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
-
-# Merge all coverdata files into one.
-define cover_export.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
-endef
-
-all.coverdata: $(COVERDATA) cover-data-dir
- $(gen_verbose) $(call erlang,$(cover_export.erl))
-
-# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
-# empty if you want the coverdata files but not the HTML report.
-ifneq ($(COVER_REPORT_DIR),)
-
-.PHONY: cover-report-clean cover-report
-
-cover-report-clean:
- $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
-ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
- $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
-endif
-
-ifeq ($(COVERDATA),)
-cover-report:
-else
-
-# Modules which include eunit.hrl always contain one line without coverage
-# because eunit defines test/0 which is never called. We compensate for this.
-EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
- grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
- | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
-
-define cover_report.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- Ms = cover:imported_modules(),
- [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
- ++ ".COVER.html", [html]) || M <- Ms],
- Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
- EunitHrlMods = [$(EUNIT_HRL_MODS)],
- Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
- true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
- TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
- TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
- Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
- TotalPerc = Perc(TotalY, TotalN),
- {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
- io:format(F, "<!DOCTYPE html><html>~n"
- "<head><meta charset=\"UTF-8\">~n"
- "<title>Coverage report</title></head>~n"
- "<body>~n", []),
- io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
- io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
- [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
- "<td>~p%</td></tr>~n",
- [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
- How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
- Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
- io:format(F, "</table>~n"
- "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
- "</body></html>", [How, Date]),
- halt().
-endef
-
-cover-report:
- $(verbose) mkdir -p $(COVER_REPORT_DIR)
- $(gen_verbose) $(call erlang,$(cover_report.erl))
-
-endif
-endif # ifneq ($(COVER_REPORT_DIR),)
-
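Illustrative coverage runs; the paths shown are the defaults defined above.

# make tests COVER=1                      # ct/eunit write *.coverdata under cover/
#                                         # and the HTML report is built afterwards
# make all.coverdata COVER=1              # merge them into cover/all.coverdata
# COVER_REPORT_DIR= make tests COVER=1    # keep coverdata, skip the HTML report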
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
-
-endif
-endif
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
-
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
-
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are also included no
-# matter the type of requested dependencies.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
-
-# Allow using fetch-deps and $(DEP_TYPES) to fetch multiple types of
-# dependencies with a single target.
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
-
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
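Illustrative invocations of the recursive dependency targets defined above.

# make fetch-deps                         # clone every dependency without building
# make list-deps                          # print the recursive checkout list
# make query-deps QUERY="name version"    # one line per dependency, as configured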
diff --git a/deps/rabbitmq_stream/include/rabbit_stream.hrl b/deps/rabbitmq_stream/include/rabbit_stream.hrl
deleted file mode 100644
index 0593893d93..0000000000
--- a/deps/rabbitmq_stream/include/rabbit_stream.hrl
+++ /dev/null
@@ -1,70 +0,0 @@
--define(COMMAND_PUBLISH, 0).
--define(COMMAND_PUBLISH_CONFIRM, 1).
--define(COMMAND_SUBSCRIBE, 2).
--define(COMMAND_DELIVER, 3).
--define(COMMAND_CREDIT, 4).
--define(COMMAND_UNSUBSCRIBE, 5).
--define(COMMAND_PUBLISH_ERROR, 6).
--define(COMMAND_METADATA_UPDATE, 7).
--define(COMMAND_METADATA, 8).
--define(COMMAND_SASL_HANDSHAKE, 9).
--define(COMMAND_SASL_AUTHENTICATE, 10).
--define(COMMAND_TUNE, 11).
--define(COMMAND_OPEN, 12).
--define(COMMAND_CLOSE, 13).
--define(COMMAND_HEARTBEAT, 14).
--define(COMMAND_PEER_PROPERTIES, 15).
--define(COMMAND_COMMIT_OFFSET, 16).
--define(COMMAND_QUERY_OFFSET, 17).
--define(COMMAND_CREATE_STREAM, 998).
--define(COMMAND_DELETE_STREAM, 999).
-
--define(VERSION_0, 0).
-
--define(RESPONSE_CODE_OK, 0).
--define(RESPONSE_CODE_STREAM_DOES_NOT_EXIST, 1).
--define(RESPONSE_CODE_SUBSCRIPTION_ID_ALREADY_EXISTS, 2).
--define(RESPONSE_CODE_SUBSCRIPTION_ID_DOES_NOT_EXIST, 3).
--define(RESPONSE_CODE_STREAM_ALREADY_EXISTS, 4).
--define(RESPONSE_CODE_STREAM_NOT_AVAILABLE, 5).
--define(RESPONSE_SASL_MECHANISM_NOT_SUPPORTED, 6).
--define(RESPONSE_AUTHENTICATION_FAILURE, 7).
--define(RESPONSE_SASL_ERROR, 8).
--define(RESPONSE_SASL_CHALLENGE, 9).
--define(RESPONSE_SASL_AUTHENTICATION_FAILURE_LOOPBACK, 10).
--define(RESPONSE_VHOST_ACCESS_FAILURE, 11).
--define(RESPONSE_CODE_UNKNOWN_FRAME, 12).
--define(RESPONSE_CODE_FRAME_TOO_LARGE, 13).
--define(RESPONSE_CODE_INTERNAL_ERROR, 14).
--define(RESPONSE_CODE_ACCESS_REFUSED, 15).
--define(RESPONSE_CODE_PRECONDITION_FAILED, 16).
-
--define(OFFSET_TYPE_FIRST, 0).
--define(OFFSET_TYPE_LAST, 1).
--define(OFFSET_TYPE_NEXT, 2).
--define(OFFSET_TYPE_OFFSET, 3).
--define(OFFSET_TYPE_TIMESTAMP, 4).
-
--define(DEFAULT_INITIAL_CREDITS, 50000).
--define(DEFAULT_CREDITS_REQUIRED_FOR_UNBLOCKING, 12500).
--define(DEFAULT_FRAME_MAX, 1048576). %% 1 MiB
--define(DEFAULT_HEARTBEAT, 60). %% 60 seconds
-
--define(INFO_ITEMS,
- [conn_name,
- port,
- peer_port,
- host,
- peer_host,
- user,
- vhost,
- subscriptions,
- connection_state,
- auth_mechanism,
- heartbeat,
- frame_max,
- client_properties,
- connected_at
- ]).
-
--define(STREAM_GUIDE_URL, <<"https://rabbitmq.com/stream.html">>).
\ No newline at end of file
diff --git a/deps/rabbitmq_stream/include/rabbit_stream_metrics.hrl b/deps/rabbitmq_stream/include/rabbit_stream_metrics.hrl
new file mode 100644
index 0000000000..066962c4de
--- /dev/null
+++ b/deps/rabbitmq_stream/include/rabbit_stream_metrics.hrl
@@ -0,0 +1,94 @@
+-include_lib("rabbit/include/rabbit_global_counters.hrl").
+
+-define(TABLE_CONSUMER, rabbit_stream_consumer_created).
+-define(TABLE_PUBLISHER, rabbit_stream_publisher_created).
+
+-define(STREAM_DOES_NOT_EXIST, ?NUM_PROTOCOL_COUNTERS + 1).
+-define(SUBSCRIPTION_ID_ALREADY_EXISTS, ?NUM_PROTOCOL_COUNTERS + 2).
+-define(SUBSCRIPTION_ID_DOES_NOT_EXIST, ?NUM_PROTOCOL_COUNTERS + 3).
+-define(STREAM_ALREADY_EXISTS, ?NUM_PROTOCOL_COUNTERS + 4).
+-define(STREAM_NOT_AVAILABLE, ?NUM_PROTOCOL_COUNTERS + 5).
+-define(SASL_MECHANISM_NOT_SUPPORTED, ?NUM_PROTOCOL_COUNTERS + 6).
+-define(AUTHENTICATION_FAILURE, ?NUM_PROTOCOL_COUNTERS + 7).
+-define(SASL_ERROR, ?NUM_PROTOCOL_COUNTERS + 8).
+-define(SASL_CHALLENGE, ?NUM_PROTOCOL_COUNTERS + 9).
+-define(SASL_AUTHENTICATION_FAILURE_LOOPBACK, ?NUM_PROTOCOL_COUNTERS + 10).
+-define(VHOST_ACCESS_FAILURE, ?NUM_PROTOCOL_COUNTERS + 11).
+-define(UNKNOWN_FRAME, ?NUM_PROTOCOL_COUNTERS + 12).
+-define(FRAME_TOO_LARGE, ?NUM_PROTOCOL_COUNTERS + 13).
+-define(INTERNAL_ERROR, ?NUM_PROTOCOL_COUNTERS + 14).
+-define(ACCESS_REFUSED, ?NUM_PROTOCOL_COUNTERS + 15).
+-define(PRECONDITION_FAILED, ?NUM_PROTOCOL_COUNTERS + 16).
+-define(PUBLISHER_DOES_NOT_EXIST, ?NUM_PROTOCOL_COUNTERS + 17).
+
+-define(PROTOCOL_COUNTERS,
+ [
+ {
+ stream_error_stream_does_not_exist_total, ?STREAM_DOES_NOT_EXIST, counter,
+ "Total number of commands rejected with stream does not exist error"
+ },
+ {
+ stream_error_subscription_id_already_exists_total, ?SUBSCRIPTION_ID_ALREADY_EXISTS, counter,
+ "Total number of commands failed with subscription id already exists"
+ },
+ {
+ stream_error_subscription_id_does_not_exist_total, ?SUBSCRIPTION_ID_DOES_NOT_EXIST, counter,
+ "Total number of commands failed with subscription id does not exist"
+ },
+ {
+ stream_error_stream_already_exists_total, ?STREAM_ALREADY_EXISTS, counter,
+ "Total number of commands failed with stream already exists"
+ },
+ {
+ stream_error_stream_not_available_total, ?STREAM_NOT_AVAILABLE, counter,
+ "Total number of commands failed with stream not available"
+ },
+ {
+ stream_error_sasl_mechanism_not_supported_total, ?SASL_MECHANISM_NOT_SUPPORTED, counter,
+ "Total number of commands failed with sasl mechanism not supported"
+ },
+ {
+ stream_error_authentication_failure_total, ?AUTHENTICATION_FAILURE, counter,
+ "Total number of commands failed with authentication failure"
+ },
+ {
+ stream_error_sasl_error_total, ?SASL_ERROR, counter,
+ "Total number of commands failed with sasl error"
+ },
+ {
+ stream_error_sasl_challenge_total, ?SASL_CHALLENGE, counter,
+ "Total number of commands failed with sasl challenge"
+ },
+ {
+ stream_error_sasl_authentication_failure_loopback_total, ?SASL_AUTHENTICATION_FAILURE_LOOPBACK, counter,
+ "Total number of commands failed with sasl authentication failure loopback"
+ },
+ {
+ stream_error_vhost_access_failure_total, ?VHOST_ACCESS_FAILURE, counter,
+ "Total number of commands failed with vhost access failure"
+ },
+ {
+ stream_error_unknown_frame_total, ?UNKNOWN_FRAME, counter,
+ "Total number of commands failed with unknown frame"
+ },
+ {
+ stream_error_frame_too_large_total, ?FRAME_TOO_LARGE, counter,
+ "Total number of commands failed with frame too large"
+ },
+ {
+ stream_error_internal_error_total, ?INTERNAL_ERROR, counter,
+ "Total number of commands failed with internal error"
+ },
+ {
+ stream_error_access_refused_total, ?ACCESS_REFUSED, counter,
+ "Total number of commands failed with access refused"
+ },
+ {
+ stream_error_precondition_failed_total, ?PRECONDITION_FAILED, counter,
+ "Total number of commands failed with precondition failed"
+ },
+ {
+ stream_error_publisher_does_not_exist_total, ?PUBLISHER_DOES_NOT_EXIST, counter,
+ "Total number of commands failed with publisher does not exist"
+ }
+ ]).
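The header above extends the broker's global protocol counters with stream-specific error counters: each ?PROTOCOL_COUNTERS entry is a {Name, Index, Type, Help} tuple whose index is offset by ?NUM_PROTOCOL_COUNTERS, so the stream error slots sit after the generic protocol counters and appear to line up with the stream protocol response codes (1 = stream does not exist, ..., 17 = publisher does not exist). A minimal sketch, assuming only what this header defines; the module and function names below are made up for illustration and are not part of this patch:

-module(stream_counter_index_sketch).

-include("rabbit_stream_metrics.hrl").

-export([error_counter_index/1, counter_help/1]).

%% Maps a non-OK stream response code (a positive integer) to the counter
%% slot declared above, following the offset convention of this header.
error_counter_index(ResponseCode)
  when is_integer(ResponseCode), ResponseCode >= 1, ResponseCode =< 17 ->
    ?NUM_PROTOCOL_COUNTERS + ResponseCode.

%% Looks up the help text declared for a given slot in ?PROTOCOL_COUNTERS.
counter_help(Index) ->
    case lists:keyfind(Index, 2, ?PROTOCOL_COUNTERS) of
        {_Name, Index, counter, Help} -> Help;
        false -> undefined
    end.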
diff --git a/deps/rabbitmq_stream/priv/schema/rabbitmq_stream.schema b/deps/rabbitmq_stream/priv/schema/rabbitmq_stream.schema
index 0dc66d5382..0a4fc277ac 100644
--- a/deps/rabbitmq_stream/priv/schema/rabbitmq_stream.schema
+++ b/deps/rabbitmq_stream/priv/schema/rabbitmq_stream.schema
@@ -16,8 +16,8 @@
% [%% Network Configuration - the format is generally the same as for the broker
%% Listen only on localhost (ipv4 & ipv6) on a specific port.
-%% {tcp_listeners, [{"127.0.0.1", 5555},
-%% {"::1", 5555}]},
+%% {tcp_listeners, [{"127.0.0.1", 5552},
+%% {"::1", 5552}]},
{mapping, "stream.listeners.tcp", "rabbitmq_stream.tcp_listeners",[
{datatype, {enum, [none]}}
@@ -128,6 +128,46 @@ end}.
{datatype, integer}
]}.
+
+%%
+%% TLS
+%%
+
+{mapping, "stream.listeners.ssl", "rabbitmq_stream.ssl_listeners",[
+ {datatype, {enum, [none]}}
+]}.
+
+{mapping, "stream.listeners.ssl.$name", "rabbitmq_stream.ssl_listeners",[
+ {datatype, [integer, ip]}
+]}.
+
+{translation, "rabbitmq_stream.ssl_listeners",
+fun(Conf) ->
+ case cuttlefish:conf_get("stream.listeners.ssl", Conf, undefined) of
+ none -> [];
+ _ ->
+ Settings = cuttlefish_variable:filter_by_prefix("stream.listeners.ssl", Conf),
+ [ V || {_, V} <- Settings ]
+ end
+end}.
+
+%% Number of Erlang processes that will accept connections for the SSL listeners.
+%%
+%% {num_ssl_acceptors, 10},
+
+{mapping, "stream.num_acceptors.ssl", "rabbitmq_stream.num_ssl_acceptors", [
+ {datatype, integer}
+]}.
+
+%% Additional TLS options
+
+%% Extract a name from the client's certificate when using TLS.
+%%
+%% Defaults to true.
+
+{mapping, "stream.ssl_cert_login", "rabbitmq_stream.ssl_cert_login",
+ [{datatype, {enum, [true, false]}}]}.
+
{mapping, "stream.initial_credits", "rabbitmq_stream.initial_credits", [
{datatype, integer}
]}.
@@ -153,6 +193,19 @@ fun(Conf) ->
list_to_binary(cuttlefish:conf_get("stream.advertised_host", Conf))
end}.
+{mapping, "stream.advertised_tls_host", "rabbitmq_stream.advertised_tls_host", [
+ {datatype, string}
+]}.
+
+{translation, "rabbitmq_stream.advertised_tls_host",
+fun(Conf) ->
+ list_to_binary(cuttlefish:conf_get("stream.advertised_tls_host", Conf))
+end}.
+
{mapping, "stream.advertised_port", "rabbitmq_stream.advertised_port", [
{datatype, integer}
-]}. \ No newline at end of file
+]}.
+
+{mapping, "stream.advertised_tls_port", "rabbitmq_stream.advertised_tls_port", [
+ {datatype, integer}
+]}.
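The schema additions above introduce TLS listeners for the stream plugin and let a node advertise a separate TLS host and port. A rough sketch of the application environment these cuttlefish translations produce; the configuration values below are hypothetical examples, not defaults shipped by this patch:

-module(stream_tls_env_sketch).

-export([apply_example_env/0]).

%% Approximate equivalent of a rabbitmq.conf fragment such as:
%%   stream.listeners.ssl.1     = 5551
%%   stream.num_acceptors.ssl   = 10
%%   stream.advertised_tls_host = stream.example.com
%%   stream.advertised_tls_port = 5551
apply_example_env() ->
    %% "stream.listeners.ssl.$name" entries are collected into a list.
    application:set_env(rabbitmq_stream, ssl_listeners, [5551]),
    application:set_env(rabbitmq_stream, num_ssl_acceptors, 10),
    %% The advertised TLS host is coerced to a binary by the translation.
    application:set_env(rabbitmq_stream, advertised_tls_host,
                        <<"stream.example.com">>),
    application:set_env(rabbitmq_stream, advertised_tls_port, 5551),
    ok.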
diff --git a/deps/rabbitmq_stream/rabbitmq-components.mk b/deps/rabbitmq_stream/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/rabbitmq_stream/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Define default goal to `all` because this file defines some targets
-# before the inclusion of erlang.mk leading to the wrong target becoming
-# the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
-
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to checkout branches which match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch or fallback to `stable` or `master` whichever was the
-# base of the topic branch.
-
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
-
-# Third-party dependencies version pinning.
-#
-# We do that in this file, which is copied in all projects, to ensure
-# all projects use the same versions. It avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# Erlang.mk does not rebuild dependencies once they have been
-# compiled, except for those listed in the `$(FORCE_REBUILD)`
-# variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this eases
-# the work on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project
-# repository URL, if it's a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name are replaced by the
-# target's properties:
-# eg. rabbitmq-common is replaced by rabbitmq-codegen
-# eg. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fall back to the
-# RabbitMQ upstream, which is on GitHub.
-
-# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following pattern:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
-
-# Macro to replace both the project's name (eg. "rabbit_common") and
-# repository name (eg. "rabbitmq-common") by the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespaces in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level's one.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is a coincidence that we found a
-# `rabbitmq-components.mk` in upper directories.
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
diff --git a/deps/rabbitmq_stream/rebar.config b/deps/rabbitmq_stream/rebar.config
new file mode 100644
index 0000000000..17a6ccdc63
--- /dev/null
+++ b/deps/rabbitmq_stream/rebar.config
@@ -0,0 +1,12 @@
+{plugins, [rebar3_format]}.
+
+{format, [
+ {files, ["src/*.erl", "test/*.erl"]},
+ {formatter, default_formatter},
+ {options, #{
+ paper => 80,
+ ribbon => 70,
+ inline_attributes => {when_under, 1},
+ inline_items => {when_under, 4}
+ }}
+]}.
\ No newline at end of file
diff --git a/deps/rabbitmq_stream/src/Elixir.RabbitMQ.CLI.Ctl.Commands.AddSuperStreamCommand.erl b/deps/rabbitmq_stream/src/Elixir.RabbitMQ.CLI.Ctl.Commands.AddSuperStreamCommand.erl
new file mode 100644
index 0000000000..147c446dfb
--- /dev/null
+++ b/deps/rabbitmq_stream/src/Elixir.RabbitMQ.CLI.Ctl.Commands.AddSuperStreamCommand.erl
@@ -0,0 +1,300 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 2.0 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at https://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2021 VMware, Inc. or its affiliates. All rights reserved.
+
+-module('Elixir.RabbitMQ.CLI.Ctl.Commands.AddSuperStreamCommand').
+
+-include_lib("rabbitmq_stream_common/include/rabbit_stream.hrl").
+
+-behaviour('Elixir.RabbitMQ.CLI.CommandBehaviour').
+
+-ignore_xref([{'Elixir.RabbitMQ.CLI.DefaultOutput', output, 1},
+ {'Elixir.RabbitMQ.CLI.Core.Helpers', cli_acting_user, 0},
+ {'Elixir.RabbitMQ.CLI.Core.ExitCodes', exit_software, 0}]).
+
+-export([scopes/0,
+ usage/0,
+ usage_additional/0,
+ usage_doc_guides/0,
+ switches/0,
+ banner/2,
+ validate/2,
+ merge_defaults/2,
+ run/2,
+ output/2,
+ description/0,
+ help_section/0]).
+
+scopes() ->
+ [streams].
+
+description() ->
+ <<"Add a super stream (experimental feature)">>.
+
+switches() ->
+ [{partitions, integer},
+ {routing_keys, string},
+ {max_length_bytes, string},
+ {max_age, string},
+ {stream_max_segment_size_bytes, string},
+ {leader_locator, string},
+ {initial_cluster_size, integer}].
+
+help_section() ->
+ {plugin, stream}.
+
+validate([], _Opts) ->
+ {validation_failure, not_enough_args};
+validate([_Name], #{partitions := _, routing_keys := _}) ->
+ {validation_failure,
+ "Specify --partitions or routing-keys, not both."};
+validate([_Name], #{partitions := Partitions}) when Partitions < 1 ->
+ {validation_failure, "The partition number must be greater than 0"};
+validate([_Name], Opts) ->
+ validate_stream_arguments(Opts);
+validate(_, _Opts) ->
+ {validation_failure, too_many_args}.
+
+validate_stream_arguments(#{max_length_bytes := Value} = Opts) ->
+ case parse_information_unit(Value) of
+ error ->
+ {validation_failure,
+ "Invalid value for --max-length-bytes, valid example "
+ "values: 100gb, 50mb"};
+ _ ->
+ validate_stream_arguments(maps:remove(max_length_bytes, Opts))
+ end;
+validate_stream_arguments(#{max_age := Value} = Opts) ->
+ case rabbit_date_time:parse_duration(Value) of
+ {ok, _} ->
+ validate_stream_arguments(maps:remove(max_age, Opts));
+ error ->
+ {validation_failure,
+ "Invalid value for --max-age, the value must a "
+ "ISO 8601 duration, e.g. e.g. PT10M30S for 10 "
+ "minutes 30 seconds, P5DT8H for 5 days 8 hours."}
+ end;
+validate_stream_arguments(#{stream_max_segment_size_bytes := Value} =
+ Opts) ->
+ case parse_information_unit(Value) of
+ error ->
+ {validation_failure,
+ "Invalid value for --stream-max-segment-size-bytes, "
+ "valid example values: 100gb, 50mb"};
+ _ ->
+ validate_stream_arguments(maps:remove(stream_max_segment_size_bytes,
+ Opts))
+ end;
+validate_stream_arguments(#{leader_locator := <<"client-local">>} =
+ Opts) ->
+ validate_stream_arguments(maps:remove(leader_locator, Opts));
+validate_stream_arguments(#{leader_locator := <<"random">>} = Opts) ->
+ validate_stream_arguments(maps:remove(leader_locator, Opts));
+validate_stream_arguments(#{leader_locator := <<"least-leaders">>} =
+ Opts) ->
+ validate_stream_arguments(maps:remove(leader_locator, Opts));
+validate_stream_arguments(#{leader_locator := _}) ->
+ {validation_failure,
+ "Invalid value for --leader-locator, valid values "
+ "are client-local, random, least-leaders."};
+validate_stream_arguments(#{initial_cluster_size := Value} = Opts) ->
+ try
+ case rabbit_data_coercion:to_integer(Value) of
+ S when S > 0 ->
+ validate_stream_arguments(maps:remove(initial_cluster_size,
+ Opts));
+ _ ->
+ {validation_failure,
+ "Invalid value for --initial-cluster-size, the "
+ "value must be positive."}
+ end
+ catch
+ error:_ ->
+ {validation_failure,
+ "Invalid value for --initial-cluster-size, the "
+ "value must be a positive integer."}
+ end;
+validate_stream_arguments(_) ->
+ ok.
+
+merge_defaults(_Args, #{routing_keys := _V} = Opts) ->
+ {_Args, maps:merge(#{vhost => <<"/">>}, Opts)};
+merge_defaults(_Args, Opts) ->
+ {_Args, maps:merge(#{partitions => 3, vhost => <<"/">>}, Opts)}.
+
+usage() ->
+ <<"add_super_stream <name> [--vhost <vhost>] [--partition"
+ "s <partitions>] [--routing-keys <routing-keys>]">>.
+
+usage_additional() ->
+ [["<name>", "The name of the super stream."],
+ ["--vhost <vhost>", "The virtual host the super stream is added to."],
+ ["--partitions <partitions>",
+ "The number of partitions, default is 3. Mutually "
+ "exclusive with --routing-keys."],
+ ["--routing-keys <routing-keys>",
+ "Comma-separated list of routing keys. Mutually "
+ "exclusive with --partitions."],
+ ["--max-length-bytes <max-length-bytes>",
+ "The maximum size of partition streams, example "
+ "values: 20gb, 500mb."],
+ ["--max-age <max-age>",
+ "The maximum age of partition stream segments, "
+ "using the ISO 8601 duration format, e.g. PT10M30S "
+ "for 10 minutes 30 seconds, P5DT8H for 5 days "
+ "8 hours."],
+ ["--stream-max-segment-size-bytes <stream-max-segment-si"
+ "ze-bytes>",
+ "The maximum size of partition stream segments, "
+ "example values: 500mb, 1gb."],
+ ["--leader-locator <leader-locator>",
+ "Leader locator strategy for partition streams, "
+ "possible values are client-local, least-leaders, "
+ "random."],
+ ["--initial-cluster-size <initial-cluster-size>",
+ "The initial cluster size of partition streams."]].
+
+usage_doc_guides() ->
+ [?STREAM_GUIDE_URL].
+
+run([SuperStream],
+ #{node := NodeName,
+ vhost := VHost,
+ timeout := Timeout,
+ partitions := Partitions} =
+ Opts) ->
+ Streams =
+ [list_to_binary(binary_to_list(SuperStream)
+ ++ "-"
+ ++ integer_to_list(K))
+ || K <- lists:seq(0, Partitions - 1)],
+ RoutingKeys =
+ [integer_to_binary(K) || K <- lists:seq(0, Partitions - 1)],
+ create_super_stream(NodeName,
+ Timeout,
+ VHost,
+ SuperStream,
+ Streams,
+ stream_arguments(Opts),
+ RoutingKeys);
+run([SuperStream],
+ #{node := NodeName,
+ vhost := VHost,
+ timeout := Timeout,
+ routing_keys := RoutingKeysStr} =
+ Opts) ->
+ RoutingKeys =
+ [rabbit_data_coercion:to_binary(
+ string:strip(K))
+ || K
+ <- string:tokens(
+ rabbit_data_coercion:to_list(RoutingKeysStr), ",")],
+ Streams =
+ [list_to_binary(binary_to_list(SuperStream)
+ ++ "-"
+ ++ binary_to_list(K))
+ || K <- RoutingKeys],
+ create_super_stream(NodeName,
+ Timeout,
+ VHost,
+ SuperStream,
+ Streams,
+ stream_arguments(Opts),
+ RoutingKeys).
+
+stream_arguments(Opts) ->
+ stream_arguments(#{}, Opts).
+
+stream_arguments(Acc, Arguments) when map_size(Arguments) =:= 0 ->
+ Acc;
+stream_arguments(Acc, #{max_length_bytes := Value} = Arguments) ->
+ stream_arguments(maps:put(<<"max-length-bytes">>,
+ parse_information_unit(Value), Acc),
+ maps:remove(max_length_bytes, Arguments));
+stream_arguments(Acc, #{max_age := Value} = Arguments) ->
+ {ok, Duration} = rabbit_date_time:parse_duration(Value),
+ DurationInSeconds = duration_to_seconds(Duration),
+ stream_arguments(maps:put(<<"max-age">>,
+ list_to_binary(integer_to_list(DurationInSeconds)
+ ++ "s"),
+ Acc),
+ maps:remove(max_age, Arguments));
+stream_arguments(Acc,
+ #{stream_max_segment_size_bytes := Value} = Arguments) ->
+ stream_arguments(maps:put(<<"stream-max-segment-size-bytes">>,
+ parse_information_unit(Value), Acc),
+ maps:remove(stream_max_segment_size_bytes, Arguments));
+stream_arguments(Acc, #{initial_cluster_size := Value} = Arguments) ->
+ stream_arguments(maps:put(<<"initial-cluster-size">>,
+ rabbit_data_coercion:to_binary(Value), Acc),
+ maps:remove(initial_cluster_size, Arguments));
+stream_arguments(Acc, #{leader_locator := Value} = Arguments) ->
+ stream_arguments(maps:put(<<"queue-leader-locator">>, Value, Acc),
+ maps:remove(leader_locator, Arguments));
+stream_arguments(ArgumentsAcc, _Arguments) ->
+ ArgumentsAcc.
+
+duration_to_seconds([{sign, _},
+ {years, Y},
+ {months, M},
+ {days, D},
+ {hours, H},
+ {minutes, Mn},
+ {seconds, S}]) ->
+ Y * 365 * 86400 + M * 30 * 86400 + D * 86400 + H * 3600 + Mn * 60 + S.
+
+create_super_stream(NodeName,
+ Timeout,
+ VHost,
+ SuperStream,
+ Streams,
+ Arguments,
+ RoutingKeys) ->
+ case rabbit_misc:rpc_call(NodeName,
+ rabbit_stream_manager,
+ create_super_stream,
+ [VHost,
+ SuperStream,
+ Streams,
+ Arguments,
+ RoutingKeys,
+ cli_acting_user()],
+ Timeout)
+ of
+ ok ->
+ {ok,
+ rabbit_misc:format("Super stream ~s has been created",
+ [SuperStream])};
+ Error ->
+ Error
+ end.
+
+banner(_, _) ->
+ <<"Adding a super stream (experimental feature)...">>.
+
+output({error, Msg}, _Opts) ->
+ {error, 'Elixir.RabbitMQ.CLI.Core.ExitCodes':exit_software(), Msg};
+output({ok, Msg}, _Opts) ->
+ {ok, Msg}.
+
+cli_acting_user() ->
+ 'Elixir.RabbitMQ.CLI.Core.Helpers':cli_acting_user().
+
+parse_information_unit(Value) ->
+ case rabbit_resource_monitor_misc:parse_information_unit(Value) of
+ {ok, R} ->
+ integer_to_binary(R);
+ {error, _} ->
+ error
+ end.
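The run/2 clauses above derive the partition streams either from a partition count (<name>-0, <name>-1, ...) or from the supplied routing keys (<name>-<key>), and duration_to_seconds/1 approximates ISO 8601 durations with 30-day months and 365-day years, so PT10M30S becomes 630 seconds and P5DT8H becomes 460800. A standalone sketch of the same naming rules; the module name and the example super stream are made up for illustration and are not part of this patch:

-module(super_stream_naming_sketch).

-export([partition_streams/2, routing_key_streams/2]).

%% add_super_stream invoices --partitions 3
%% -> [<<"invoices-0">>, <<"invoices-1">>, <<"invoices-2">>]
partition_streams(SuperStream, Partitions)
  when is_binary(SuperStream), is_integer(Partitions), Partitions >= 1 ->
    [<<SuperStream/binary, "-", (integer_to_binary(K))/binary>>
     || K <- lists:seq(0, Partitions - 1)].

%% add_super_stream invoices --routing-keys amer,emea
%% -> [<<"invoices-amer">>, <<"invoices-emea">>]
routing_key_streams(SuperStream, RoutingKeys) when is_binary(SuperStream) ->
    [<<SuperStream/binary, "-", Key/binary>> || Key <- RoutingKeys].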
diff --git a/deps/rabbitmq_stream/src/Elixir.RabbitMQ.CLI.Ctl.Commands.DeleteSuperStreamCommand.erl b/deps/rabbitmq_stream/src/Elixir.RabbitMQ.CLI.Ctl.Commands.DeleteSuperStreamCommand.erl
new file mode 100644
index 0000000000..f2915c4442
--- /dev/null
+++ b/deps/rabbitmq_stream/src/Elixir.RabbitMQ.CLI.Ctl.Commands.DeleteSuperStreamCommand.erl
@@ -0,0 +1,97 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 2.0 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at https://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2021 VMware, Inc. or its affiliates. All rights reserved.
+
+-module('Elixir.RabbitMQ.CLI.Ctl.Commands.DeleteSuperStreamCommand').
+
+-include_lib("rabbitmq_stream_common/include/rabbit_stream.hrl").
+
+-behaviour('Elixir.RabbitMQ.CLI.CommandBehaviour').
+
+-ignore_xref([{'Elixir.RabbitMQ.CLI.DefaultOutput', output, 1},
+ {'Elixir.RabbitMQ.CLI.Core.Helpers', cli_acting_user, 0},
+ {'Elixir.RabbitMQ.CLI.Core.ExitCodes', exit_software, 0}]).
+
+-export([scopes/0,
+ usage/0,
+ usage_additional/0,
+ usage_doc_guides/0,
+ banner/2,
+ validate/2,
+ merge_defaults/2,
+ run/2,
+ output/2,
+ description/0,
+ help_section/0]).
+
+scopes() ->
+ [streams].
+
+description() ->
+ <<"Delete a super stream (experimental feature)">>.
+
+help_section() ->
+ {plugin, stream}.
+
+validate([], _Opts) ->
+ {validation_failure, not_enough_args};
+validate([_Name], _Opts) ->
+ ok;
+validate(_, _Opts) ->
+ {validation_failure, too_many_args}.
+
+merge_defaults(_Args, Opts) ->
+ {_Args, maps:merge(#{vhost => <<"/">>}, Opts)}.
+
+usage() ->
+ <<"delete_super_stream <name> [--vhost <vhost>]">>.
+
+usage_additional() ->
+ [["<name>", "The name of the super stream to delete."],
+ ["--vhost <vhost>", "The virtual host of the super stream."]].
+
+usage_doc_guides() ->
+ [?STREAM_GUIDE_URL].
+
+run([SuperStream],
+ #{node := NodeName,
+ vhost := VHost,
+ timeout := Timeout}) ->
+ delete_super_stream(NodeName, Timeout, VHost, SuperStream).
+
+delete_super_stream(NodeName, Timeout, VHost, SuperStream) ->
+ case rabbit_misc:rpc_call(NodeName,
+ rabbit_stream_manager,
+ delete_super_stream,
+ [VHost, SuperStream, cli_acting_user()],
+ Timeout)
+ of
+ ok ->
+ {ok,
+ rabbit_misc:format("Super stream ~s has been deleted",
+ [SuperStream])};
+ Error ->
+ Error
+ end.
+
+banner(_, _) ->
+ <<"Deleting a super stream (experimental feature)...">>.
+
+output({error, Msg}, _Opts) ->
+ {error, 'Elixir.RabbitMQ.CLI.Core.ExitCodes':exit_software(), Msg};
+output({ok, Msg}, _Opts) ->
+ {ok, Msg}.
+
+cli_acting_user() ->
+ 'Elixir.RabbitMQ.CLI.Core.Helpers':cli_acting_user().
diff --git a/deps/rabbitmq_stream/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ListStreamConnectionsCommand.erl b/deps/rabbitmq_stream/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ListStreamConnectionsCommand.erl
index f185ab044e..f9e49310cf 100644
--- a/deps/rabbitmq_stream/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ListStreamConnectionsCommand.erl
+++ b/deps/rabbitmq_stream/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ListStreamConnectionsCommand.erl
@@ -11,14 +11,21 @@
%% The Original Code is RabbitMQ.
%%
%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
-module('Elixir.RabbitMQ.CLI.Ctl.Commands.ListStreamConnectionsCommand').
--include("rabbit_stream.hrl").
+-include_lib("rabbitmq_stream_common/include/rabbit_stream.hrl").
-behaviour('Elixir.RabbitMQ.CLI.CommandBehaviour').
+-ignore_xref([{'Elixir.RabbitMQ.CLI.DefaultOutput', output, 1},
+ {'Elixir.RabbitMQ.CLI.Core.Helpers', nodes_in_cluster, 1},
+ {'Elixir.RabbitMQ.CLI.Ctl.InfoKeys', prepare_info_keys, 1},
+ {'Elixir.RabbitMQ.CLI.Ctl.RpcStream', receive_list_items, 7},
+ {'Elixir.RabbitMQ.CLI.Ctl.InfoKeys', validate_info_keys, 2},
+ {'Elixir.Enum', join, 2}]).
+
-export([formatter/0,
scopes/0,
switches/0,
@@ -34,23 +41,32 @@
description/0,
help_section/0]).
-formatter() -> 'Elixir.RabbitMQ.CLI.Formatters.Table'.
+formatter() ->
+ 'Elixir.RabbitMQ.CLI.Formatters.PrettyTable'.
+
+scopes() ->
+ [ctl, diagnostics, streams].
-scopes() -> [ctl, diagnostics, streams].
+switches() ->
+ [{verbose, boolean}].
-switches() -> [{verbose, boolean}].
-aliases() -> [{'V', verbose}].
+aliases() ->
+ [{'V', verbose}].
-description() -> <<"Lists stream connections on the target node">>.
+description() ->
+ <<"Lists stream connections">>.
help_section() ->
{plugin, stream}.
validate(Args, _) ->
case 'Elixir.RabbitMQ.CLI.Ctl.InfoKeys':validate_info_keys(Args,
- ?INFO_ITEMS) of
- {ok, _} -> ok;
- Error -> Error
+ ?INFO_ITEMS)
+ of
+ {ok, _} ->
+ ok;
+ Error ->
+ Error
end.
merge_defaults([], Opts) ->
@@ -63,33 +79,37 @@ usage() ->
usage_additional() ->
Prefix = <<" must be one of ">>,
- InfoItems = 'Elixir.Enum':join(lists:usort(?INFO_ITEMS), <<", ">>),
- [
- {<<"<column>">>, <<Prefix/binary, InfoItems/binary>>}
- ].
+ InfoItems =
+ 'Elixir.Enum':join(
+ lists:usort(?INFO_ITEMS), <<", ">>),
+ [{<<"<column>">>, <<Prefix/binary, InfoItems/binary>>}].
usage_doc_guides() ->
[?STREAM_GUIDE_URL].
-run(Args, #{node := NodeName,
- timeout := Timeout,
- verbose := Verbose}) ->
- InfoKeys = case Verbose of
- true -> ?INFO_ITEMS;
- false -> 'Elixir.RabbitMQ.CLI.Ctl.InfoKeys':prepare_info_keys(Args)
- end,
+run(Args,
+ #{node := NodeName,
+ timeout := Timeout,
+ verbose := Verbose}) ->
+ InfoKeys =
+ case Verbose of
+ true ->
+ ?INFO_ITEMS;
+ false ->
+ 'Elixir.RabbitMQ.CLI.Ctl.InfoKeys':prepare_info_keys(Args)
+ end,
Nodes = 'Elixir.RabbitMQ.CLI.Core.Helpers':nodes_in_cluster(NodeName),
- 'Elixir.RabbitMQ.CLI.Ctl.RpcStream':receive_list_items(
- NodeName,
- rabbit_stream,
- emit_connection_info_all,
- [Nodes, InfoKeys],
- Timeout,
- InfoKeys,
- length(Nodes)).
+ 'Elixir.RabbitMQ.CLI.Ctl.RpcStream':receive_list_items(NodeName,
+ rabbit_stream,
+ emit_connection_info_all,
+ [Nodes, InfoKeys],
+ Timeout,
+ InfoKeys,
+ length(Nodes)).
-banner(_, _) -> <<"Listing stream connections ...">>.
+banner(_, _) ->
+ <<"Listing stream connections ...">>.
output(Result, _Opts) ->
'Elixir.RabbitMQ.CLI.DefaultOutput':output(Result).
diff --git a/deps/rabbitmq_stream/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ListStreamConsumersCommand.erl b/deps/rabbitmq_stream/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ListStreamConsumersCommand.erl
new file mode 100644
index 0000000000..17af370f8f
--- /dev/null
+++ b/deps/rabbitmq_stream/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ListStreamConsumersCommand.erl
@@ -0,0 +1,120 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 2.0 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at https://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2021 VMware, Inc. or its affiliates. All rights reserved.
+
+-module('Elixir.RabbitMQ.CLI.Ctl.Commands.ListStreamConsumersCommand').
+
+-include_lib("rabbitmq_stream_common/include/rabbit_stream.hrl").
+
+-behaviour('Elixir.RabbitMQ.CLI.CommandBehaviour').
+
+-ignore_xref([{'Elixir.RabbitMQ.CLI.DefaultOutput', output, 1},
+ {'Elixir.RabbitMQ.CLI.Core.Helpers', nodes_in_cluster, 1},
+ {'Elixir.RabbitMQ.CLI.Ctl.InfoKeys', prepare_info_keys, 1},
+ {'Elixir.RabbitMQ.CLI.Ctl.RpcStream', receive_list_items, 7},
+ {'Elixir.RabbitMQ.CLI.Ctl.InfoKeys', validate_info_keys, 2},
+ {'Elixir.Enum', join, 2}]).
+
+-export([formatter/0,
+ scopes/0,
+ switches/0,
+ aliases/0,
+ usage/0,
+ usage_additional/0,
+ usage_doc_guides/0,
+ banner/2,
+ validate/2,
+ merge_defaults/2,
+ run/2,
+ output/2,
+ description/0,
+ help_section/0]).
+
+formatter() ->
+ 'Elixir.RabbitMQ.CLI.Formatters.PrettyTable'.
+
+scopes() ->
+ [ctl, diagnostics, streams].
+
+switches() ->
+ [{verbose, boolean}].
+
+aliases() ->
+ [{'V', verbose}].
+
+description() ->
+ <<"Lists all stream consumers for a vhost">>.
+
+help_section() ->
+ {plugin, stream}.
+
+validate(Args, _) ->
+ case 'Elixir.RabbitMQ.CLI.Ctl.InfoKeys':validate_info_keys(Args,
+ ?CONSUMER_INFO_ITEMS)
+ of
+ {ok, _} ->
+ ok;
+ Error ->
+ Error
+ end.
+
+merge_defaults([], Opts) ->
+ merge_defaults([rabbit_data_coercion:to_binary(Item)
+ || Item <- ?CONSUMER_INFO_ITEMS],
+ Opts);
+merge_defaults(Args, Opts) ->
+ {Args, maps:merge(#{verbose => false, vhost => <<"/">>}, Opts)}.
+
+usage() ->
+ <<"list_stream_consumers [--vhost <vhost>] [<column> "
+ "...]">>.
+
+usage_additional() ->
+ Prefix = <<" must be one of ">>,
+ InfoItems =
+ 'Elixir.Enum':join(
+ lists:usort(?CONSUMER_INFO_ITEMS), <<", ">>),
+ [{<<"<column>">>, <<Prefix/binary, InfoItems/binary>>}].
+
+usage_doc_guides() ->
+ [?STREAM_GUIDE_URL].
+
+run(Args,
+ #{node := NodeName,
+ vhost := VHost,
+ timeout := Timeout,
+ verbose := Verbose}) ->
+ InfoKeys =
+ case Verbose of
+ true ->
+ ?CONSUMER_INFO_ITEMS;
+ false ->
+ 'Elixir.RabbitMQ.CLI.Ctl.InfoKeys':prepare_info_keys(Args)
+ end,
+ Nodes = 'Elixir.RabbitMQ.CLI.Core.Helpers':nodes_in_cluster(NodeName),
+
+ 'Elixir.RabbitMQ.CLI.Ctl.RpcStream':receive_list_items(NodeName,
+ rabbit_stream,
+ emit_consumer_info_all,
+ [Nodes, VHost,
+ InfoKeys],
+ Timeout,
+ InfoKeys,
+ length(Nodes)).
+
+banner(_, _) ->
+ <<"Listing stream consumers ...">>.
+
+output(Result, _Opts) ->
+ 'Elixir.RabbitMQ.CLI.DefaultOutput':output(Result).
diff --git a/deps/rabbitmq_stream/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ListStreamPublishersCommand.erl b/deps/rabbitmq_stream/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ListStreamPublishersCommand.erl
new file mode 100644
index 0000000000..72e59919e3
--- /dev/null
+++ b/deps/rabbitmq_stream/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ListStreamPublishersCommand.erl
@@ -0,0 +1,120 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 2.0 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at https://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2021 VMware, Inc. or its affiliates. All rights reserved.
+
+-module('Elixir.RabbitMQ.CLI.Ctl.Commands.ListStreamPublishersCommand').
+
+-include_lib("rabbitmq_stream_common/include/rabbit_stream.hrl").
+
+-behaviour('Elixir.RabbitMQ.CLI.CommandBehaviour').
+
+-ignore_xref([{'Elixir.RabbitMQ.CLI.DefaultOutput', output, 1},
+ {'Elixir.RabbitMQ.CLI.Core.Helpers', nodes_in_cluster, 1},
+ {'Elixir.RabbitMQ.CLI.Ctl.InfoKeys', prepare_info_keys, 1},
+ {'Elixir.RabbitMQ.CLI.Ctl.RpcStream', receive_list_items, 7},
+ {'Elixir.RabbitMQ.CLI.Ctl.InfoKeys', validate_info_keys, 2},
+ {'Elixir.Enum', join, 2}]).
+
+-export([formatter/0,
+ scopes/0,
+ switches/0,
+ aliases/0,
+ usage/0,
+ usage_additional/0,
+ usage_doc_guides/0,
+ banner/2,
+ validate/2,
+ merge_defaults/2,
+ run/2,
+ output/2,
+ description/0,
+ help_section/0]).
+
+formatter() ->
+ 'Elixir.RabbitMQ.CLI.Formatters.PrettyTable'.
+
+scopes() ->
+ [ctl, diagnostics, streams].
+
+switches() ->
+ [{verbose, boolean}].
+
+aliases() ->
+ [{'V', verbose}].
+
+description() ->
+ <<"Lists all stream publishers for a vhost">>.
+
+help_section() ->
+ {plugin, stream}.
+
+validate(Args, _) ->
+ case 'Elixir.RabbitMQ.CLI.Ctl.InfoKeys':validate_info_keys(Args,
+ ?PUBLISHER_INFO_ITEMS)
+ of
+ {ok, _} ->
+ ok;
+ Error ->
+ Error
+ end.
+
+merge_defaults([], Opts) ->
+ merge_defaults([rabbit_data_coercion:to_binary(Item)
+ || Item <- ?PUBLISHER_INFO_ITEMS],
+ Opts);
+merge_defaults(Args, Opts) ->
+ {Args, maps:merge(#{verbose => false, vhost => <<"/">>}, Opts)}.
+
+usage() ->
+ <<"list_stream_publishers [--vhost <vhost>] [<column> "
+ "...]">>.
+
+usage_additional() ->
+ Prefix = <<" must be one of ">>,
+ InfoItems =
+ 'Elixir.Enum':join(
+ lists:usort(?PUBLISHER_INFO_ITEMS), <<", ">>),
+ [{<<"<column>">>, <<Prefix/binary, InfoItems/binary>>}].
+
+usage_doc_guides() ->
+ [?STREAM_GUIDE_URL].
+
+run(Args,
+ #{node := NodeName,
+ vhost := VHost,
+ timeout := Timeout,
+ verbose := Verbose}) ->
+ InfoKeys =
+ case Verbose of
+ true ->
+ ?PUBLISHER_INFO_ITEMS;
+ false ->
+ 'Elixir.RabbitMQ.CLI.Ctl.InfoKeys':prepare_info_keys(Args)
+ end,
+ Nodes = 'Elixir.RabbitMQ.CLI.Core.Helpers':nodes_in_cluster(NodeName),
+
+ 'Elixir.RabbitMQ.CLI.Ctl.RpcStream':receive_list_items(NodeName,
+ rabbit_stream,
+ emit_publisher_info_all,
+ [Nodes, VHost,
+ InfoKeys],
+ Timeout,
+ InfoKeys,
+ length(Nodes)).
+
+banner(_, _) ->
+ <<"Listing stream publishers ...">>.
+
+output(Result, _Opts) ->
+ 'Elixir.RabbitMQ.CLI.DefaultOutput':output(Result).
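list_stream_consumers and list_stream_publishers follow the same column-selection convention: --verbose selects every known info item, no explicit columns fall back to the defaults injected by merge_defaults/2, and explicit columns are validated against the same item list. A compressed sketch of that convention; the module and function names are made up for illustration and are not part of this patch:

-module(stream_cli_columns_sketch).

-export([selected_columns/3]).

%% Compressed view of merge_defaults/2 plus the Verbose handling in run/2.
selected_columns(true = _Verbose, _Args, AllInfoItems) ->
    AllInfoItems;
selected_columns(false = _Verbose, [] = _Args, AllInfoItems) ->
    [rabbit_data_coercion:to_binary(Item) || Item <- AllInfoItems];
selected_columns(false = _Verbose, Args, _AllInfoItems) ->
    'Elixir.RabbitMQ.CLI.Ctl.InfoKeys':prepare_info_keys(Args).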
diff --git a/deps/rabbitmq_stream/src/rabbit_stream.erl b/deps/rabbitmq_stream/src/rabbit_stream.erl
index 8353d66d57..a8c9c824aa 100644
--- a/deps/rabbitmq_stream/src/rabbit_stream.erl
+++ b/deps/rabbitmq_stream/src/rabbit_stream.erl
@@ -11,25 +11,67 @@
%% The Original Code is RabbitMQ.
%%
%% The Initial Developer of the Original Code is Pivotal Software, Inc.
-%% Copyright (c) 2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_stream).
+
-behaviour(application).
--export([start/2, host/0, port/0, kill_connection/1]).
+-export([start/2,
+ host/0,
+ tls_host/0,
+ port/0,
+ tls_port/0,
+ kill_connection/1]).
-export([stop/1]).
-export([emit_connection_info_local/3,
- emit_connection_info_all/4,
- list/0]).
+ emit_connection_info_all/4,
+ emit_consumer_info_all/5,
+ emit_consumer_info_local/4,
+ emit_publisher_info_all/5,
+ emit_publisher_info_local/4,
+ list/1]).
-include_lib("rabbit_common/include/rabbit.hrl").
+-include_lib("rabbitmq_stream_common/include/rabbit_stream.hrl").
+
+-include("rabbit_stream_metrics.hrl").
start(_Type, _Args) ->
- rabbit_stream_sup:start_link().
+ FeatureFlagsEnabled = rabbit_ff_registry:list(enabled),
+ case maps:is_key(stream_queue, FeatureFlagsEnabled) of
+ true ->
+ rabbit_stream_metrics:init(),
+ rabbit_global_counters:init([{protocol, stream}],
+ ?PROTOCOL_COUNTERS),
+ rabbit_global_counters:init([{protocol, stream},
+ {queue_type, ?STREAM_QUEUE_TYPE}]),
+ rabbit_stream_sup:start_link();
+ false ->
+ rabbit_log:warning("Unable to start the stream plugin. The stream_queue "
+ "feature flag is disabled. "
+ ++ "Enable stream_queue feature flag then disable "
+ "and re-enable the rabbitmq_stream plugin. ",
+ "See https://www.rabbitmq.com/feature-flags.html "
+ "to learn more",
+ []),
+ {ok, self()}
+ end.
+
+tls_host() ->
+ case application:get_env(rabbitmq_stream, advertised_tls_host,
+ undefined)
+ of
+ undefined ->
+ host();
+ Host ->
+ rabbit_data_coercion:to_binary(Host)
+ end.
host() ->
- case application:get_env(rabbitmq_stream, advertised_host, undefined) of
+ case application:get_env(rabbitmq_stream, advertised_host, undefined)
+ of
undefined ->
hostname_from_node();
Host ->
@@ -37,17 +79,20 @@ host() ->
end.
hostname_from_node() ->
- case re:split(rabbit_data_coercion:to_binary(node()),
- "@",
- [{return, binary}, {parts, 2}]) of
+ case re:split(
+ rabbit_data_coercion:to_binary(node()), "@",
+ [{return, binary}, {parts, 2}])
+ of
[_, Hostname] ->
Hostname;
[_] ->
- rabbit_data_coercion:to_binary(inet:gethostname())
+ {ok, H} = inet:gethostname(),
+ rabbit_data_coercion:to_binary(H)
end.
port() ->
- case application:get_env(rabbitmq_stream, advertised_port, undefined) of
+ case application:get_env(rabbitmq_stream, advertised_port, undefined)
+ of
undefined ->
port_from_listener();
Port ->
@@ -56,11 +101,35 @@ port() ->
port_from_listener() ->
Listeners = rabbit_networking:node_listeners(node()),
- Port = lists:foldl(fun(#listener{port = Port, protocol = stream}, _Acc) ->
- Port;
- (_, Acc) ->
- Acc
- end, undefined, Listeners),
+ Port =
+ lists:foldl(fun (#listener{port = Port, protocol = stream}, _Acc) ->
+ Port;
+ (_, Acc) ->
+ Acc
+ end,
+ undefined, Listeners),
+ Port.
+
+tls_port() ->
+ case application:get_env(rabbitmq_stream, advertised_tls_port,
+ undefined)
+ of
+ undefined ->
+ tls_port_from_listener();
+ Port ->
+ Port
+ end.
+
+tls_port_from_listener() ->
+ Listeners = rabbit_networking:node_listeners(node()),
+ Port =
+ lists:foldl(fun (#listener{port = Port, protocol = 'stream/ssl'},
+ _Acc) ->
+ Port;
+ (_, Acc) ->
+ Acc
+ end,
+ undefined, Listeners),
Port.
stop(_State) ->
@@ -69,35 +138,88 @@ stop(_State) ->
kill_connection(ConnectionName) ->
ConnectionNameBin = rabbit_data_coercion:to_binary(ConnectionName),
lists:foreach(fun(ConnectionPid) ->
- ConnectionPid ! {infos, self()},
- receive
- {ConnectionPid, #{<<"connection_name">> := ConnectionNameBin}} ->
- exit(ConnectionPid, kill);
- {ConnectionPid, _ClientProperties} ->
- ok
- after 1000 ->
- ok
- end
- end, pg_local:get_members(rabbit_stream_connections)).
+ ConnectionPid ! {infos, self()},
+ receive
+ {ConnectionPid,
+ #{<<"connection_name">> := ConnectionNameBin}} ->
+ exit(ConnectionPid, kill);
+ {ConnectionPid, _ClientProperties} -> ok
+ after 1000 -> ok
+ end
+ end,
+ pg_local:get_members(rabbit_stream_connections)).
emit_connection_info_all(Nodes, Items, Ref, AggregatorPid) ->
- Pids = [spawn_link(Node, rabbit_stream, emit_connection_info_local,
- [Items, Ref, AggregatorPid])
- || Node <- Nodes],
+ Pids =
+ [spawn_link(Node,
+ rabbit_stream,
+ emit_connection_info_local,
+ [Items, Ref, AggregatorPid])
+ || Node <- Nodes],
rabbit_control_misc:await_emitters_termination(Pids),
ok.
emit_connection_info_local(Items, Ref, AggregatorPid) ->
- rabbit_control_misc:emitting_map_with_exit_handler(
- AggregatorPid, Ref, fun(Pid) ->
- rabbit_stream_reader:info(Pid, Items)
- end,
- list()).
+ rabbit_control_misc:emitting_map_with_exit_handler(AggregatorPid,
+ Ref,
+ fun(Pid) ->
+ rabbit_stream_reader:info(Pid,
+ Items)
+ end,
+ list(undefined)).
+
+emit_consumer_info_all(Nodes, VHost, Items, Ref, AggregatorPid) ->
+ Pids =
+ [spawn_link(Node,
+ rabbit_stream,
+ emit_consumer_info_local,
+ [VHost, Items, Ref, AggregatorPid])
+ || Node <- Nodes],
+ rabbit_control_misc:await_emitters_termination(Pids),
+ ok.
+
+emit_consumer_info_local(VHost, Items, Ref, AggregatorPid) ->
+ rabbit_control_misc:emitting_map_with_exit_handler(AggregatorPid,
+ Ref,
+ fun(Pid) ->
+ rabbit_stream_reader:consumers_info(Pid,
+ Items)
+ end,
+ list(VHost)).
+
+emit_publisher_info_all(Nodes, VHost, Items, Ref, AggregatorPid) ->
+ Pids =
+ [spawn_link(Node,
+ rabbit_stream,
+ emit_publisher_info_local,
+ [VHost, Items, Ref, AggregatorPid])
+ || Node <- Nodes],
+ rabbit_control_misc:await_emitters_termination(Pids),
+ ok.
+
+emit_publisher_info_local(VHost, Items, Ref, AggregatorPid) ->
+ rabbit_control_misc:emitting_map_with_exit_handler(AggregatorPid,
+ Ref,
+ fun(Pid) ->
+ rabbit_stream_reader:publishers_info(Pid,
+ Items)
+ end,
+ list(VHost)).
-list() ->
+list(VHost) ->
[Client
- || {_, ListSupPid, _, _} <- supervisor2:which_children(rabbit_stream_sup),
- {_, RanchSup, supervisor, _} <- supervisor2:which_children(ListSupPid),
- {ranch_conns_sup, ConnSup, _, _} <- supervisor:which_children(RanchSup),
- {_, CliSup, _, _} <- supervisor:which_children(ConnSup),
- {rabbit_stream_reader, Client, _, _} <- supervisor:which_children(CliSup)].
\ No newline at end of file
+ || {_, ListSup, _, _}
+ <- supervisor2:which_children(rabbit_stream_sup),
+ {_, RanchEmbeddedSup, supervisor, _}
+ <- supervisor2:which_children(ListSup),
+ {{ranch_listener_sup, _}, RanchListSup, _, _}
+ <- supervisor:which_children(RanchEmbeddedSup),
+ {ranch_conns_sup_sup, RanchConnsSup, supervisor, _}
+ <- supervisor2:which_children(RanchListSup),
+ {_, RanchConnSup, supervisor, _}
+ <- supervisor2:which_children(RanchConnsSup),
+ {_, StreamClientSup, supervisor, _}
+ <- supervisor2:which_children(RanchConnSup),
+ {rabbit_stream_reader, Client, _, _}
+ <- supervisor:which_children(StreamClientSup),
+ rabbit_stream_reader:in_vhost(Client, VHost)].
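host/0, port/0, tls_host/0 and tls_port/0 above all follow the same resolution order: an explicitly advertised value from the rabbitmq_stream application environment wins, otherwise the value is derived from the node name or from the registered stream (or stream-over-TLS) listener. A minimal sketch of that order; the module and function names are made up for illustration and are not part of this patch:

-module(stream_advertised_sketch).

-export([advertised_or_derived/2]).

%% Key is e.g. advertised_host, advertised_port, advertised_tls_host or
%% advertised_tls_port; DeriveFun computes the fallback (the hostname taken
%% from the node name, or the port of the matching listener).
advertised_or_derived(Key, DeriveFun) when is_atom(Key),
                                           is_function(DeriveFun, 0) ->
    case application:get_env(rabbitmq_stream, Key, undefined) of
        undefined -> DeriveFun();
        Value -> Value
    end.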
diff --git a/deps/rabbitmq_stream/src/rabbit_stream_connection_sup.erl b/deps/rabbitmq_stream/src/rabbit_stream_connection_sup.erl
index 3092a68517..a6f99b56d9 100644
--- a/deps/rabbitmq_stream/src/rabbit_stream_connection_sup.erl
+++ b/deps/rabbitmq_stream/src/rabbit_stream_connection_sup.erl
@@ -11,7 +11,7 @@
%% The Original Code is RabbitMQ.
%%
%% The Initial Developer of the Original Code is Pivotal Software, Inc.
-%% Copyright (c) 2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_stream_connection_sup).
@@ -21,23 +21,30 @@
-include_lib("rabbit_common/include/rabbit.hrl").
--export([start_link/4, start_keepalive_link/0]).
-
+-export([start_link/3,
+ start_keepalive_link/0]).
-export([init/1]).
-
-start_link(Ref, _Sock, Transport, Opts) ->
+start_link(Ref, Transport, Opts) ->
{ok, SupPid} = supervisor2:start_link(?MODULE, []),
- {ok, KeepaliveSup} = supervisor2:start_child(
- SupPid,
- {rabbit_stream_keepalive_sup,
- {rabbit_stream_connection_sup, start_keepalive_link, []},
- intrinsic, infinity, supervisor, [rabbit_keepalive_sup]}),
- {ok, ReaderPid} = supervisor2:start_child(
- SupPid,
- {rabbit_stream_reader,
- {rabbit_stream_reader, start_link, [KeepaliveSup, Transport, Ref, Opts]},
- intrinsic, ?WORKER_WAIT, worker, [rabbit_stream_reader]}),
+ {ok, KeepaliveSup} =
+ supervisor2:start_child(SupPid,
+ {rabbit_stream_keepalive_sup,
+ {rabbit_stream_connection_sup,
+ start_keepalive_link, []},
+ intrinsic,
+ infinity,
+ supervisor,
+ [rabbit_keepalive_sup]}),
+ {ok, ReaderPid} =
+ supervisor2:start_child(SupPid,
+ {rabbit_stream_reader,
+ {rabbit_stream_reader, start_link,
+ [KeepaliveSup, Transport, Ref, Opts]},
+ intrinsic,
+ ?WORKER_WAIT,
+ worker,
+ [rabbit_stream_reader]}),
{ok, SupPid, ReaderPid}.
start_keepalive_link() ->
diff --git a/deps/rabbitmq_stream/src/rabbit_stream_manager.erl b/deps/rabbitmq_stream/src/rabbit_stream_manager.erl
index e418dd1022..b8f4fdc923 100644
--- a/deps/rabbitmq_stream/src/rabbit_stream_manager.erl
+++ b/deps/rabbitmq_stream/src/rabbit_stream_manager.erl
@@ -11,21 +11,33 @@
%% The Original Code is RabbitMQ.
%%
%% The Initial Developer of the Original Code is Pivotal Software, Inc.
-%% Copyright (c) 2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_stream_manager).
+
-behaviour(gen_server).
-include_lib("rabbit_common/include/rabbit.hrl").
+-include_lib("rabbit/include/amqqueue.hrl").
%% API
--export([init/1, handle_call/3, handle_cast/2, handle_info/2]).
--export([start_link/1, create/4, delete/3, lookup_leader/2, lookup_local_member/2, topology/2]).
+-export([init/1,
+ handle_call/3,
+ handle_cast/2,
+ handle_info/2]).
+-export([start_link/1,
+ create/4,
+ delete/3,
+ create_super_stream/6,
+ delete_super_stream/3,
+ lookup_leader/2,
+ lookup_local_member/2,
+ topology/2,
+ route/3,
+ partitions/2]).
--record(state, {
- configuration
-}).
+-record(state, {configuration}).
start_link(Conf) ->
gen_server:start_link({local, ?MODULE}, ?MODULE, [Conf], []).
@@ -34,70 +46,133 @@ init([Conf]) ->
{ok, #state{configuration = Conf}}.
-spec create(binary(), binary(), #{binary() => binary()}, binary()) ->
- {ok, map()} | {error, reference_already_exists} | {error, internal_error}.
+ {ok, map()} |
+ {error, reference_already_exists} |
+ {error, internal_error} |
+ {error, validation_failed}.
create(VirtualHost, Reference, Arguments, Username) ->
- gen_server:call(?MODULE, {create, VirtualHost, Reference, Arguments, Username}).
+ gen_server:call(?MODULE,
+ {create, VirtualHost, Reference, Arguments, Username}).
-spec delete(binary(), binary(), binary()) ->
- {ok, deleted} | {error, reference_not_found}.
+ {ok, deleted} | {error, reference_not_found}.
delete(VirtualHost, Reference, Username) ->
gen_server:call(?MODULE, {delete, VirtualHost, Reference, Username}).
--spec lookup_leader(binary(), binary()) -> pid() | cluster_not_found.
+-spec create_super_stream(binary(),
+ binary(),
+ [binary()],
+ #{binary() => binary()},
+ [binary()],
+ binary()) ->
+ ok | {error, term()}.
+create_super_stream(VirtualHost,
+ Name,
+ Partitions,
+ Arguments,
+ RoutingKeys,
+ Username) ->
+ gen_server:call(?MODULE,
+ {create_super_stream,
+ VirtualHost,
+ Name,
+ Partitions,
+ Arguments,
+ RoutingKeys,
+ Username}).
+
+-spec delete_super_stream(binary(), binary(), binary()) ->
+ ok | {error, term()}.
+delete_super_stream(VirtualHost, Name, Username) ->
+ gen_server:call(?MODULE,
+ {delete_super_stream, VirtualHost, Name, Username}).
+
+-spec lookup_leader(binary(), binary()) ->
+ {ok, pid()} | {error, not_available} |
+ {error, not_found}.
lookup_leader(VirtualHost, Stream) ->
gen_server:call(?MODULE, {lookup_leader, VirtualHost, Stream}).
--spec lookup_local_member(binary(), binary()) -> {ok, pid()} | {error, not_found}.
+-spec lookup_local_member(binary(), binary()) ->
+ {ok, pid()} | {error, not_found} |
+ {error, not_available}.
lookup_local_member(VirtualHost, Stream) ->
gen_server:call(?MODULE, {lookup_local_member, VirtualHost, Stream}).
-spec topology(binary(), binary()) ->
- {ok, #{leader_node => pid(), replica_nodes => [pid()]}} | {error, stream_not_found}.
+ {ok,
+ #{leader_node => undefined | pid(),
+ replica_nodes => [pid()]}} |
+ {error, stream_not_found} | {error, stream_not_available}.
topology(VirtualHost, Stream) ->
gen_server:call(?MODULE, {topology, VirtualHost, Stream}).
+-spec route(binary(), binary(), binary()) ->
+ {ok, [binary()] | no_route} | {error, stream_not_found}.
+route(RoutingKey, VirtualHost, SuperStream) ->
+ gen_server:call(?MODULE,
+ {route, RoutingKey, VirtualHost, SuperStream}).
+
+-spec partitions(binary(), binary()) ->
+ {ok, [binary()]} | {error, stream_not_found}.
+partitions(VirtualHost, SuperStream) ->
+ gen_server:call(?MODULE, {partitions, VirtualHost, SuperStream}).
+
stream_queue_arguments(Arguments) ->
- stream_queue_arguments([{<<"x-queue-type">>, longstr, <<"stream">>}], Arguments).
+ stream_queue_arguments([{<<"x-queue-type">>, longstr, <<"stream">>}],
+ Arguments).
-stream_queue_arguments(ArgumentsAcc, Arguments) when map_size(Arguments) =:= 0 ->
+stream_queue_arguments(ArgumentsAcc, Arguments)
+ when map_size(Arguments) =:= 0 ->
ArgumentsAcc;
-stream_queue_arguments(ArgumentsAcc, #{<<"max-length-bytes">> := Value} = Arguments) ->
- stream_queue_arguments(
- [{<<"x-max-length-bytes">>, long, binary_to_integer(Value)}] ++ ArgumentsAcc,
- maps:remove(<<"max-length-bytes">>, Arguments)
- );
-stream_queue_arguments(ArgumentsAcc, #{<<"max-age">> := Value} = Arguments) ->
- stream_queue_arguments(
- [{<<"x-max-age">>, longstr, Value}] ++ ArgumentsAcc,
- maps:remove(<<"max-age">>, Arguments)
- );
-stream_queue_arguments(ArgumentsAcc, #{<<"max-segment-size">> := Value} = Arguments) ->
- stream_queue_arguments(
- [{<<"x-max-segment-size">>, long, binary_to_integer(Value)}] ++ ArgumentsAcc,
- maps:remove(<<"max-segment-size">>, Arguments)
- );
-stream_queue_arguments(ArgumentsAcc, #{<<"initial-cluster-size">> := Value} = Arguments) ->
- stream_queue_arguments(
- [{<<"x-initial-cluster-size">>, long, binary_to_integer(Value)}] ++ ArgumentsAcc,
- maps:remove(<<"initial-cluster-size">>, Arguments)
- );
-stream_queue_arguments(ArgumentsAcc, #{<<"queue-leader-locator">> := Value} = Arguments) ->
- stream_queue_arguments(
- [{<<"x-queue-leader-locator">>, longstr, Value}] ++ ArgumentsAcc,
- maps:remove(<<"queue-leader-locator">>, Arguments)
- );
+stream_queue_arguments(ArgumentsAcc,
+ #{<<"max-length-bytes">> := Value} = Arguments) ->
+ stream_queue_arguments([{<<"x-max-length-bytes">>, long,
+ binary_to_integer(Value)}]
+ ++ ArgumentsAcc,
+ maps:remove(<<"max-length-bytes">>, Arguments));
+stream_queue_arguments(ArgumentsAcc,
+ #{<<"max-age">> := Value} = Arguments) ->
+ stream_queue_arguments([{<<"x-max-age">>, longstr, Value}]
+ ++ ArgumentsAcc,
+ maps:remove(<<"max-age">>, Arguments));
+stream_queue_arguments(ArgumentsAcc,
+ #{<<"stream-max-segment-size-bytes">> := Value} =
+ Arguments) ->
+ stream_queue_arguments([{<<"x-stream-max-segment-size-bytes">>, long,
+ binary_to_integer(Value)}]
+ ++ ArgumentsAcc,
+ maps:remove(<<"stream-max-segment-size-bytes">>,
+ Arguments));
+stream_queue_arguments(ArgumentsAcc,
+ #{<<"initial-cluster-size">> := Value} = Arguments) ->
+ stream_queue_arguments([{<<"x-initial-cluster-size">>, long,
+ binary_to_integer(Value)}]
+ ++ ArgumentsAcc,
+ maps:remove(<<"initial-cluster-size">>, Arguments));
+stream_queue_arguments(ArgumentsAcc,
+ #{<<"queue-leader-locator">> := Value} = Arguments) ->
+ stream_queue_arguments([{<<"x-queue-leader-locator">>, longstr,
+ Value}]
+ ++ ArgumentsAcc,
+ maps:remove(<<"queue-leader-locator">>, Arguments));
stream_queue_arguments(ArgumentsAcc, _Arguments) ->
ArgumentsAcc.
validate_stream_queue_arguments([]) ->
ok;
-validate_stream_queue_arguments([{<<"x-initial-cluster-size">>, long, ClusterSize} | _]) when ClusterSize =< 0 ->
+validate_stream_queue_arguments([{<<"x-initial-cluster-size">>, long,
+ ClusterSize}
+ | _])
+ when ClusterSize =< 0 ->
error;
-validate_stream_queue_arguments([{<<"x-queue-leader-locator">>, longstr, Locator} | T]) ->
- case lists:member(Locator, [<<"client-local">>,
- <<"random">>,
- <<"least-leaders">>]) of
- true ->
+validate_stream_queue_arguments([{<<"x-queue-leader-locator">>,
+ longstr, Locator}
+ | T]) ->
+ case lists:member(Locator,
+ [<<"client-local">>, <<"random">>, <<"least-leaders">>])
+ of
+ true ->
validate_stream_queue_arguments(T);
false ->
error
@@ -105,90 +180,163 @@ validate_stream_queue_arguments([{<<"x-queue-leader-locator">>, longstr, Locator
validate_stream_queue_arguments([_ | T]) ->
validate_stream_queue_arguments(T).
-
-handle_call({create, VirtualHost, Reference, Arguments, Username}, _From, State) ->
- Name = #resource{virtual_host = VirtualHost, kind = queue, name = Reference},
- StreamQueueArguments = stream_queue_arguments(Arguments),
- case validate_stream_queue_arguments(StreamQueueArguments) of
+handle_call({create, VirtualHost, Reference, Arguments, Username},
+ _From, State) ->
+ {reply, create_stream(VirtualHost, Reference, Arguments, Username),
+ State};
+handle_call({delete, VirtualHost, Reference, Username}, _From,
+ State) ->
+ {reply, delete_stream(VirtualHost, Reference, Username), State};
+handle_call({create_super_stream,
+ VirtualHost,
+ Name,
+ Partitions,
+ Arguments,
+ RoutingKeys,
+ Username},
+ _From, State) ->
+ case validate_super_stream_creation(VirtualHost, Name, Partitions) of
+ {error, Reason} ->
+ {reply, {error, Reason}, State};
ok ->
- Q0 = amqqueue:new(
- Name,
- none, true, false, none, StreamQueueArguments,
- VirtualHost, #{user => Username}, rabbit_stream_queue
- ),
- try
- case rabbit_stream_queue:declare(Q0, node()) of
- {new, Q} ->
- {reply, {ok, amqqueue:get_type_state(Q)}, State};
- {existing, _} ->
- {reply, {error, reference_already_exists}, State};
- {error, Err} ->
- rabbit_log:warning("Error while creating ~p stream, ~p~n", [Reference, Err]),
- {reply, {error, internal_error}, State}
- end
- catch
- exit:Error ->
- rabbit_log:info("Error while creating ~p stream, ~p~n", [Reference, Error]),
- {reply, {error, internal_error}, State}
- end;
- error ->
- {reply, {error, validation_failed}, State}
+ case declare_super_stream_exchange(VirtualHost, Name, Username) of
+ ok ->
+ RollbackOperations =
+ [fun() ->
+ delete_super_stream_exchange(VirtualHost, Name,
+ Username)
+ end],
+ QueueCreationsResult =
+ lists:foldl(fun (Partition, {ok, RollbackOps}) ->
+ case create_stream(VirtualHost,
+ Partition,
+ Arguments,
+ Username)
+ of
+ {ok, _} ->
+ {ok,
+ [fun() ->
+ delete_stream(VirtualHost,
+ Partition,
+ Username)
+ end]
+ ++ RollbackOps};
+ {error, Reason} ->
+ {{error, Reason},
+ RollbackOps}
+ end;
+ (_,
+ {{error, _Reason}, _RollbackOps} =
+ Acc) ->
+ Acc
+ end,
+ {ok, RollbackOperations}, Partitions),
+ case QueueCreationsResult of
+ {ok, RollbackOps} ->
+ BindingsResult =
+ add_super_stream_bindings(VirtualHost,
+ Name,
+ Partitions,
+ RoutingKeys,
+ Username),
+ case BindingsResult of
+ ok ->
+ {reply, ok, State};
+ Error ->
+ [Fun() || Fun <- RollbackOps],
+ {reply, Error, State}
+ end;
+ {{error, Reason}, RollbackOps} ->
+ [Fun() || Fun <- RollbackOps],
+ {reply, {error, Reason}, State}
+ end;
+ {error, Msg} ->
+ {reply, {error, Msg}, State}
+ end
end;
-handle_call({delete, VirtualHost, Reference, Username}, _From, State) ->
- Name = #resource{virtual_host = VirtualHost, kind = queue, name = Reference},
- rabbit_log:debug("Trying to delete stream ~p~n", [Reference]),
- case rabbit_amqqueue:lookup(Name) of
- {ok, Q} ->
- rabbit_log:debug("Found queue record ~p, checking if it is a stream~n", [Reference]),
- case is_stream_queue(Q) of
- true ->
- rabbit_log:debug("Queue record ~p is a stream, trying to delete it~n", [Reference]),
- {ok, _} = rabbit_stream_queue:delete(Q, false, false, Username),
- rabbit_log:debug("Stream ~p deleted~n", [Reference]),
- {reply, {ok, deleted}, State};
- _ ->
- rabbit_log:debug("Queue record ~p is NOT a stream, returning error~n", [Reference]),
- {reply, {error, reference_not_found}, State}
- end;
- {error, not_found} ->
- rabbit_log:debug("Stream ~p not found, cannot delete it~n", [Reference]),
- {reply, {error, reference_not_found}, State}
+handle_call({delete_super_stream, VirtualHost, SuperStream, Username},
+ _From, State) ->
+ case super_stream_partitions(VirtualHost, SuperStream) of
+ {ok, Partitions} ->
+ case delete_super_stream_exchange(VirtualHost, SuperStream,
+ Username)
+ of
+ ok ->
+ ok;
+ {error, Error} ->
+ rabbit_log:warning("Error while deleting super stream exchange ~p, ~p",
+ [SuperStream, Error]),
+ ok
+ end,
+ [begin
+ case delete_stream(VirtualHost, Stream, Username) of
+ {ok, deleted} ->
+ ok;
+ {error, Err} ->
+ rabbit_log:warning("Error while delete partition ~p of super stream "
+ "~p, ~p",
+ [Stream, SuperStream, Err]),
+ ok
+ end
+ end
+ || Stream <- Partitions],
+ {reply, ok, State};
+ {error, Error} ->
+ {reply, {error, Error}, State}
end;
handle_call({lookup_leader, VirtualHost, Stream}, _From, State) ->
- Name = #resource{virtual_host = VirtualHost, kind = queue, name = Stream},
+ Name =
+ #resource{virtual_host = VirtualHost,
+ kind = queue,
+ name = Stream},
Res = case rabbit_amqqueue:lookup(Name) of
{ok, Q} ->
case is_stream_queue(Q) of
true ->
- #{leader_pid := LeaderPid} = amqqueue:get_type_state(Q),
- LeaderPid;
+ LeaderPid = amqqueue:get_pid(Q),
+ case process_alive(LeaderPid) of
+ true ->
+ {ok, LeaderPid};
+ false ->
+ case leader_from_members(Q) of
+ {ok, Pid} ->
+ {ok, Pid};
+ _ ->
+ {error, not_available}
+ end
+ end;
_ ->
- cluster_not_found
+ {error, not_found}
end;
- _ ->
- cluster_not_found
+ {error, not_found} ->
+ case rabbit_amqqueue:not_found_or_absent_dirty(Name) of
+ not_found ->
+ {error, not_found};
+ _ ->
+ {error, not_available}
+ end
end,
{reply, Res, State};
-handle_call({lookup_local_member, VirtualHost, Stream}, _From, State) ->
- Name = #resource{virtual_host = VirtualHost, kind = queue, name = Stream},
+handle_call({lookup_local_member, VirtualHost, Stream}, _From,
+ State) ->
+ Name =
+ #resource{virtual_host = VirtualHost,
+ kind = queue,
+ name = Stream},
Res = case rabbit_amqqueue:lookup(Name) of
{ok, Q} ->
case is_stream_queue(Q) of
true ->
- #{leader_pid := LeaderPid, replica_pids := ReplicaPids} = amqqueue:get_type_state(Q),
- LocalMember = lists:foldl(fun(Pid, Acc) ->
- case node(Pid) =:= node() of
- true ->
- Pid;
- false ->
- Acc
- end
- end, undefined, [LeaderPid] ++ ReplicaPids),
- case LocalMember of
- undefined ->
+ #{name := StreamName} = amqqueue:get_type_state(Q),
+ % FIXME check if pid is alive in case of stale information
+ case rabbit_stream_coordinator:local_pid(StreamName)
+ of
+ {ok, Pid} when is_pid(Pid) ->
+ {ok, Pid};
+ {error, timeout} ->
{error, not_available};
- Pid ->
- {ok, Pid}
+ _ ->
+ {error, not_available}
end;
_ ->
{error, not_found}
@@ -199,36 +347,49 @@ handle_call({lookup_local_member, VirtualHost, Stream}, _From, State) ->
{error, not_found};
_ ->
{error, not_available}
- end;
- _ ->
- {error, not_found}
+ end
end,
{reply, Res, State};
handle_call({topology, VirtualHost, Stream}, _From, State) ->
- Name = #resource{virtual_host = VirtualHost, kind = queue, name = Stream},
+ Name =
+ #resource{virtual_host = VirtualHost,
+ kind = queue,
+ name = Stream},
Res = case rabbit_amqqueue:lookup(Name) of
{ok, Q} ->
case is_stream_queue(Q) of
true ->
QState = amqqueue:get_type_state(Q),
- ProcessAliveFun = fun(Pid) ->
- rpc:call(node(Pid), erlang, is_process_alive, [Pid], 10000)
- end,
- LeaderNode = case ProcessAliveFun(maps:get(leader_pid, QState)) of
- true ->
- maps:get(leader_node, QState);
- _ ->
- undefined
- end,
- ReplicaNodes = lists:foldl(fun(Pid, Acc) ->
- case ProcessAliveFun(Pid) of
- true ->
- Acc ++ [node(Pid)];
+ #{name := StreamName} = QState,
+ StreamMembers =
+ case rabbit_stream_coordinator:members(StreamName)
+ of
+ {ok, Members} ->
+ maps:fold(fun (_Node, {undefined, _Role},
+ Acc) ->
+ Acc;
+ (LeaderNode, {_Pid, writer},
+ Acc) ->
+ Acc#{leader_node =>
+ LeaderNode};
+ (ReplicaNode,
+ {_Pid, replica}, Acc) ->
+ #{replica_nodes :=
+ ReplicaNodes} =
+ Acc,
+ Acc#{replica_nodes =>
+ ReplicaNodes
+ ++ [ReplicaNode]};
+ (_Node, _, Acc) ->
+ Acc
+ end,
+ #{leader_node => undefined,
+ replica_nodes => []},
+ Members);
_ ->
- Acc
- end
- end, [], maps:get(replica_pids, QState)),
- {ok, #{leader_node => LeaderNode, replica_nodes => ReplicaNodes}};
+ {error, stream_not_found}
+ end,
+ {ok, StreamMembers};
_ ->
{error, stream_not_found}
end;
@@ -238,11 +399,36 @@ handle_call({topology, VirtualHost, Stream}, _From, State) ->
{error, stream_not_found};
_ ->
{error, stream_not_available}
- end;
- _ ->
+ end
+ end,
+ {reply, Res, State};
+handle_call({route, RoutingKey, VirtualHost, SuperStream}, _From,
+ State) ->
+ ExchangeName = rabbit_misc:r(VirtualHost, exchange, SuperStream),
+ Res = try
+ Exchange = rabbit_exchange:lookup_or_die(ExchangeName),
+ Delivery =
+ #delivery{message =
+ #basic_message{routing_keys = [RoutingKey]}},
+ case rabbit_exchange:route(Exchange, Delivery) of
+ [] ->
+ {ok, no_route};
+ Routes ->
+ {ok,
+ [Stream
+ || #resource{name = Stream} = R <- Routes,
+ is_resource_stream_queue(R)]}
+ end
+ catch
+ exit:Error ->
+ rabbit_log:error("Error while looking up exchange ~p, ~p",
+ [ExchangeName, Error]),
{error, stream_not_found}
end,
{reply, Res, State};
+handle_call({partitions, VirtualHost, SuperStream}, _From, State) ->
+ Res = super_stream_partitions(VirtualHost, SuperStream),
+ {reply, Res, State};
handle_call(which_children, _From, State) ->
{reply, [], State}.
@@ -250,13 +436,382 @@ handle_cast(_, State) ->
{noreply, State}.
handle_info(Info, State) ->
- rabbit_log:info("Received info ~p~n", [Info]),
+ rabbit_log:info("Received info ~p", [Info]),
{noreply, State}.
+create_stream(VirtualHost, Reference, Arguments, Username) ->
+ Name =
+ #resource{virtual_host = VirtualHost,
+ kind = queue,
+ name = Reference},
+ StreamQueueArguments = stream_queue_arguments(Arguments),
+ case validate_stream_queue_arguments(StreamQueueArguments) of
+ ok ->
+ Q0 = amqqueue:new(Name,
+ none,
+ true,
+ false,
+ none,
+ StreamQueueArguments,
+ VirtualHost,
+ #{user => Username},
+ rabbit_stream_queue),
+ try
+ QueueLookup =
+ rabbit_amqqueue:with(Name,
+ fun(Q) ->
+ ok =
+ rabbit_amqqueue:assert_equivalence(Q,
+ true,
+ false,
+ StreamQueueArguments,
+ none)
+ end),
+
+ case QueueLookup of
+ ok ->
+ {error, reference_already_exists};
+ {error, not_found} ->
+ try
+ case rabbit_queue_type:declare(Q0, node()) of
+ {new, Q} ->
+ {ok, amqqueue:get_type_state(Q)};
+ {existing, _} ->
+ {error, reference_already_exists};
+ {error, Err} ->
+ rabbit_log:warning("Error while creating ~p stream, ~p",
+ [Reference, Err]),
+ {error, internal_error}
+ end
+ catch
+ exit:Error ->
+ rabbit_log:error("Error while creating ~p stream, ~p",
+ [Reference, Error]),
+ {error, internal_error}
+ end;
+ {error, {absent, _, Reason}} ->
+ rabbit_log:error("Error while creating ~p stream, ~p",
+ [Reference, Reason]),
+ {error, internal_error}
+ end
+ catch
+ exit:ExitError ->
+ % likely to be a problem of inequivalent args on an existing stream
+ rabbit_log:error("Error while creating ~p stream: ~p",
+ [Reference, ExitError]),
+ {error, validation_failed}
+ end;
+ error ->
+ {error, validation_failed}
+ end.
+
+delete_stream(VirtualHost, Reference, Username) ->
+ Name =
+ #resource{virtual_host = VirtualHost,
+ kind = queue,
+ name = Reference},
+ rabbit_log:debug("Trying to delete stream ~p", [Reference]),
+ case rabbit_amqqueue:lookup(Name) of
+ {ok, Q} ->
+ rabbit_log:debug("Found queue record ~p, checking if it is a stream",
+ [Reference]),
+ case is_stream_queue(Q) of
+ true ->
+ rabbit_log:debug("Queue record ~p is a stream, trying to delete it",
+ [Reference]),
+ {ok, _} =
+ rabbit_stream_queue:delete(Q, false, false, Username),
+ rabbit_log:debug("Stream ~p deleted", [Reference]),
+ {ok, deleted};
+ _ ->
+ rabbit_log:debug("Queue record ~p is NOT a stream, returning error",
+ [Reference]),
+ {error, reference_not_found}
+ end;
+ {error, not_found} ->
+ rabbit_log:debug("Stream ~p not found, cannot delete it",
+ [Reference]),
+ {error, reference_not_found}
+ end.
+
+super_stream_partitions(VirtualHost, SuperStream) ->
+ ExchangeName = rabbit_misc:r(VirtualHost, exchange, SuperStream),
+ try
+ rabbit_exchange:lookup_or_die(ExchangeName),
+ UnorderedBindings =
+ [Binding
+ || Binding = #binding{destination = D}
+ <- rabbit_binding:list_for_source(ExchangeName),
+ is_resource_stream_queue(D)],
+ OrderedBindings =
+ rabbit_stream_utils:sort_partitions(UnorderedBindings),
+ {ok,
+ lists:foldl(fun (#binding{destination =
+ #resource{kind = queue, name = Q}},
+ Acc) ->
+ Acc ++ [Q];
+ (_Binding, Acc) ->
+ Acc
+ end,
+ [], OrderedBindings)}
+ catch
+ exit:Error ->
+ rabbit_log:error("Error while looking up exchange ~p, ~p",
+ [ExchangeName, Error]),
+ {error, stream_not_found}
+ end.
+
+validate_super_stream_creation(VirtualHost, Name, Partitions) ->
+ case exchange_exists(VirtualHost, Name) of
+ {error, validation_failed} ->
+ {error,
+ {validation_failed,
+ rabbit_misc:format("~s is not a correct name for a super stream",
+ [Name])}};
+ {ok, true} ->
+ {error,
+ {reference_already_exists,
+ rabbit_misc:format("there is already an exchange named ~s",
+ [Name])}};
+ {ok, false} ->
+ case check_already_existing_queue(VirtualHost, Partitions) of
+ {error, Reason} ->
+ {error, Reason};
+ ok ->
+ ok
+ end
+ end.
+
+exchange_exists(VirtualHost, Name) ->
+ case rabbit_stream_utils:enforce_correct_name(Name) of
+ {ok, CorrectName} ->
+ ExchangeName = rabbit_misc:r(VirtualHost, exchange, CorrectName),
+ case rabbit_exchange:lookup(ExchangeName) of
+ {ok, _} ->
+ {ok, true};
+ {error, not_found} ->
+ {ok, false}
+ end;
+ error ->
+ {error, validation_failed}
+ end.
+
+queue_exists(VirtualHost, Name) ->
+ case rabbit_stream_utils:enforce_correct_name(Name) of
+ {ok, CorrectName} ->
+ QueueName = rabbit_misc:r(VirtualHost, queue, CorrectName),
+ case rabbit_amqqueue:lookup(QueueName) of
+ {ok, _} ->
+ {ok, true};
+ {error, not_found} ->
+ {ok, false}
+ end;
+ error ->
+ {error, validation_failed}
+ end.
+
+check_already_existing_queue(VirtualHost, Queues) ->
+ check_already_existing_queue0(VirtualHost, Queues, undefined).
+
+check_already_existing_queue0(_VirtualHost, [], undefined) ->
+ ok;
+check_already_existing_queue0(VirtualHost, [Q | T], _Error) ->
+ case queue_exists(VirtualHost, Q) of
+ {ok, false} ->
+ check_already_existing_queue0(VirtualHost, T, undefined);
+ {ok, true} ->
+ {error,
+ {reference_already_exists,
+ rabbit_misc:format("there is already a queue named ~s", [Q])}};
+ {error, validation_failed} ->
+ {error,
+ {validation_failed,
+ rabbit_misc:format("~s is not a correct name for a queue", [Q])}}
+ end.
+
+declare_super_stream_exchange(VirtualHost, Name, Username) ->
+ case rabbit_stream_utils:enforce_correct_name(Name) of
+ {ok, CorrectName} ->
+ Args =
+ rabbit_misc:set_table_value([],
+ <<"x-super-stream">>,
+ bool,
+ true),
+ CheckedType = rabbit_exchange:check_type(<<"direct">>),
+ ExchangeName = rabbit_misc:r(VirtualHost, exchange, CorrectName),
+ X = case rabbit_exchange:lookup(ExchangeName) of
+ {ok, FoundX} ->
+ FoundX;
+ {error, not_found} ->
+ rabbit_exchange:declare(ExchangeName,
+ CheckedType,
+ true,
+ false,
+ false,
+ Args,
+ Username)
+ end,
+ try
+ ok =
+ rabbit_exchange:assert_equivalence(X,
+ CheckedType,
+ true,
+ false,
+ false,
+ Args)
+ catch
+ exit:ExitError ->
+ % likely to be a problem of inequivalent args on an existing stream
+ rabbit_log:error("Error while creating ~p super stream exchange: ~p",
+ [Name, ExitError]),
+ {error, validation_failed}
+ end;
+ error ->
+ {error, validation_failed}
+ end.
+
+add_super_stream_bindings(VirtualHost,
+ Name,
+ Partitions,
+ RoutingKeys,
+ Username) ->
+ PartitionsRoutingKeys = lists:zip(Partitions, RoutingKeys),
+ BindingsResult =
+ lists:foldl(fun ({Partition, RoutingKey}, {ok, Order}) ->
+ case add_super_stream_binding(VirtualHost,
+ Name,
+ Partition,
+ RoutingKey,
+ Order,
+ Username)
+ of
+ ok ->
+ {ok, Order + 1};
+ {error, Reason} ->
+ {{error, Reason}, 0}
+ end;
+ (_, {{error, _Reason}, _Order} = Acc) ->
+ Acc
+ end,
+ {ok, 0}, PartitionsRoutingKeys),
+ case BindingsResult of
+ {ok, _} ->
+ ok;
+ {{error, Reason}, _} ->
+ {error, Reason}
+ end.
+
+add_super_stream_binding(VirtualHost,
+ SuperStream,
+ Partition,
+ RoutingKey,
+ Order,
+ Username) ->
+ {ok, ExchangeNameBin} =
+ rabbit_stream_utils:enforce_correct_name(SuperStream),
+ {ok, QueueNameBin} =
+ rabbit_stream_utils:enforce_correct_name(Partition),
+ ExchangeName = rabbit_misc:r(VirtualHost, exchange, ExchangeNameBin),
+ QueueName = rabbit_misc:r(VirtualHost, queue, QueueNameBin),
+ Pid = self(),
+ Arguments =
+ rabbit_misc:set_table_value([],
+ <<"x-stream-partition-order">>,
+ long,
+ Order),
+ case rabbit_binding:add(#binding{source = ExchangeName,
+ destination = QueueName,
+ key = RoutingKey,
+ args = Arguments},
+ fun (_X, Q) when ?is_amqqueue(Q) ->
+ try
+ rabbit_amqqueue:check_exclusive_access(Q,
+ Pid)
+ catch
+ exit:Reason ->
+ {error, Reason}
+ end;
+ (_X, #exchange{}) ->
+ ok
+ end,
+ Username)
+ of
+ {error, {resources_missing, [{not_found, Name} | _]}} ->
+ {error,
+ {stream_not_found,
+ rabbit_misc:format("stream ~s does not exists", [Name])}};
+ {error, {resources_missing, [{absent, Q, _Reason} | _]}} ->
+ {error,
+ {stream_not_found,
+ rabbit_misc:format("stream ~s does not exists (absent)", [Q])}};
+ {error, binding_not_found} ->
+ {error,
+ {not_found,
+ rabbit_misc:format("no binding ~s between ~s and ~s",
+ [RoutingKey, rabbit_misc:rs(ExchangeName),
+ rabbit_misc:rs(QueueName)])}};
+ {error, {binding_invalid, Fmt, Args}} ->
+ {error, {binding_invalid, rabbit_misc:format(Fmt, Args)}};
+ {error, #amqp_error{} = Error} ->
+ {error, {internal_error, rabbit_misc:format("~p", [Error])}};
+ ok ->
+ ok
+ end.
+
+delete_super_stream_exchange(VirtualHost, Name, Username) ->
+ case rabbit_stream_utils:enforce_correct_name(Name) of
+ {ok, CorrectName} ->
+ ExchangeName = rabbit_misc:r(VirtualHost, exchange, CorrectName),
+ case rabbit_exchange:delete(ExchangeName, false, Username) of
+ {error, not_found} ->
+ ok;
+ ok ->
+ ok
+ end;
+ error ->
+ {error, validation_failed}
+ end.
+
+leader_from_members(Q) ->
+ QState = amqqueue:get_type_state(Q),
+ #{name := StreamName} = QState,
+ case rabbit_stream_coordinator:members(StreamName) of
+ {ok, Members} ->
+ maps:fold(fun (_LeaderNode, {Pid, writer}, _Acc) ->
+ {ok, Pid};
+ (_Node, _, Acc) ->
+ Acc
+ end,
+ {error, not_found}, Members);
+ _ ->
+ {error, not_found}
+ end.
+
+process_alive(Pid) ->
+ CurrentNode = node(),
+ case node(Pid) of
+ nonode@nohost ->
+ false;
+ CurrentNode ->
+ is_process_alive(Pid);
+ OtherNode ->
+ rpc:call(OtherNode, erlang, is_process_alive, [Pid], 10000)
+ end.
+
is_stream_queue(Q) ->
case amqqueue:get_type(Q) of
rabbit_stream_queue ->
true;
_ ->
false
- end. \ No newline at end of file
+ end.
+
+is_resource_stream_queue(#resource{kind = queue} = Resource) ->
+ case rabbit_amqqueue:lookup(Resource) of
+ {ok, Q} ->
+ is_stream_queue(Q);
+ _ ->
+ false
+ end;
+is_resource_stream_queue(_) ->
+ false.
diff --git a/deps/rabbitmq_stream/src/rabbit_stream_metrics.erl b/deps/rabbitmq_stream/src/rabbit_stream_metrics.erl
new file mode 100644
index 0000000000..09616146e8
--- /dev/null
+++ b/deps/rabbitmq_stream/src/rabbit_stream_metrics.erl
@@ -0,0 +1,138 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 2.0 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at https://www.mozilla.org/en-US/MPL/2.0/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is Pivotal Software, Inc.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(rabbit_stream_metrics).
+
+-include("rabbit_stream_metrics.hrl").
+
+%% API
+-export([init/0]).
+-export([consumer_created/8,
+ consumer_updated/8,
+ consumer_cancelled/3]).
+-export([publisher_created/4,
+ publisher_updated/7,
+ publisher_deleted/3]).
+
+-define(CTAG_PREFIX, <<"stream.subid-">>).
+
+init() ->
+ rabbit_core_metrics:create_table({?TABLE_CONSUMER, set}),
+ rabbit_core_metrics:create_table({?TABLE_PUBLISHER, set}),
+ ok.
+
+consumer_created(Connection,
+ StreamResource,
+ SubscriptionId,
+ Credits,
+ MessageCount,
+ Offset,
+ OffsetLag,
+ Properties) ->
+ Values =
+ [{credits, Credits},
+ {consumed, MessageCount},
+ {offset, Offset},
+ {offset_lag, OffsetLag},
+ {properties, Properties}],
+ ets:insert(?TABLE_CONSUMER,
+ {{StreamResource, Connection, SubscriptionId}, Values}),
+ rabbit_global_counters:consumer_created(stream),
+ rabbit_core_metrics:consumer_created(Connection,
+ consumer_tag(SubscriptionId),
+ false,
+ false,
+ StreamResource,
+ 0,
+ true,
+ up,
+ rabbit_misc:to_amqp_table(Properties)),
+ ok.
+
+consumer_tag(SubscriptionId) ->
+ SubIdBinary = rabbit_data_coercion:to_binary(SubscriptionId),
+ <<?CTAG_PREFIX/binary, SubIdBinary/binary>>.
+
+consumer_updated(Connection,
+ StreamResource,
+ SubscriptionId,
+ Credits,
+ MessageCount,
+ Offset,
+ OffsetLag,
+ Properties) ->
+ Values =
+ [{credits, Credits},
+ {consumed, MessageCount},
+ {offset, Offset},
+ {offset_lag, OffsetLag},
+ {properties, Properties}],
+ ets:insert(?TABLE_CONSUMER,
+ {{StreamResource, Connection, SubscriptionId}, Values}),
+ ok.
+
+consumer_cancelled(Connection, StreamResource, SubscriptionId) ->
+ ets:delete(?TABLE_CONSUMER,
+ {StreamResource, Connection, SubscriptionId}),
+ rabbit_global_counters:consumer_deleted(stream),
+ rabbit_core_metrics:consumer_deleted(Connection,
+ consumer_tag(SubscriptionId),
+ StreamResource),
+ rabbit_event:notify(consumer_deleted,
+ [{consumer_tag, consumer_tag(SubscriptionId)},
+ {channel, self()}, {queue, StreamResource}]),
+ ok.
+
+publisher_created(Connection,
+ StreamResource,
+ PublisherId,
+ Reference) ->
+ Values =
+ [{reference, format_publisher_reference(Reference)},
+ {published, 0},
+ {confirmed, 0},
+ {errored, 0}],
+ rabbit_global_counters:publisher_created(stream),
+ ets:insert(?TABLE_PUBLISHER,
+ {{StreamResource, Connection, PublisherId}, Values}),
+ ok.
+
+publisher_updated(Connection,
+ StreamResource,
+ PublisherId,
+ Reference,
+ Published,
+ Confirmed,
+ Errored) ->
+ Values =
+ [{reference, format_publisher_reference(Reference)},
+ {published, Published},
+ {confirmed, Confirmed},
+ {errored, Errored}],
+ ets:insert(?TABLE_PUBLISHER,
+ {{StreamResource, Connection, PublisherId}, Values}),
+ ok.
+
+publisher_deleted(Connection, StreamResource, PublisherId) ->
+ ets:delete(?TABLE_PUBLISHER,
+ {StreamResource, Connection, PublisherId}),
+ rabbit_global_counters:publisher_deleted(stream),
+ ok.
+
+format_publisher_reference(undefined) ->
+ <<"">>;
+format_publisher_reference(Ref) when is_binary(Ref) ->
+ Ref.
diff --git a/deps/rabbitmq_stream/src/rabbit_stream_metrics_gc.erl b/deps/rabbitmq_stream/src/rabbit_stream_metrics_gc.erl
new file mode 100644
index 0000000000..bba4efb430
--- /dev/null
+++ b/deps/rabbitmq_stream/src/rabbit_stream_metrics_gc.erl
@@ -0,0 +1,82 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 2.0 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at https://www.mozilla.org/en-US/MPL/2.0/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is Pivotal Software, Inc.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(rabbit_stream_metrics_gc).
+
+-behaviour(gen_server).
+
+-include_lib("rabbitmq_stream/include/rabbit_stream_metrics.hrl").
+
+-record(state, {timer, interval}).
+
+-export([start_link/0]).
+-export([init/1,
+ handle_call/3,
+ handle_cast/2,
+ handle_info/2,
+ terminate/2,
+ code_change/3]).
+
+-spec start_link() -> rabbit_types:ok_pid_or_error().
+start_link() ->
+ gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
+
+init(_) ->
+ Interval =
+ rabbit_misc:get_env(rabbit, core_metrics_gc_interval, 120000),
+ {ok, start_timer(#state{interval = Interval})}.
+
+handle_call(which_children, _From, State) ->
+ {reply, [], State}.
+
+handle_cast(_Request, State) ->
+ {noreply, State}.
+
+handle_info(start_gc, State) ->
+ GbSet =
+ gb_sets:from_list(
+ rabbit_amqqueue:list_names()),
+ gc_process_and_entity(?TABLE_CONSUMER, GbSet),
+ gc_process_and_entity(?TABLE_PUBLISHER, GbSet),
+ {noreply, start_timer(State)}.
+
+terminate(_Reason, #state{timer = TRef}) ->
+ erlang:cancel_timer(TRef),
+ ok.
+
+code_change(_OldVsn, State, _Extra) ->
+ {ok, State}.
+
+start_timer(#state{interval = Interval} = St) ->
+ TRef = erlang:send_after(Interval, self(), start_gc),
+ St#state{timer = TRef}.
+
+gc_process_and_entity(Table, GbSet) ->
+ ets:foldl(fun({{Id, Pid, _} = Key, _}, none) ->
+ gc_process_and_entity(Id, Pid, Table, Key, GbSet)
+ end,
+ none, Table).
+
+gc_process_and_entity(Id, Pid, Table, Key, GbSet) ->
+ case rabbit_misc:is_process_alive(Pid)
+ andalso gb_sets:is_member(Id, GbSet)
+ of
+ true ->
+ none;
+ false ->
+ ets:delete(Table, Key),
+ none
+ end.
diff --git a/deps/rabbitmq_stream/src/rabbit_stream_reader.erl b/deps/rabbitmq_stream/src/rabbit_stream_reader.erl
index d3b4820256..9c29090f4a 100644
--- a/deps/rabbitmq_stream/src/rabbit_stream_reader.erl
+++ b/deps/rabbitmq_stream/src/rabbit_stream_reader.erl
@@ -1,6 +1,4 @@
%% The contents of this file are subject to the Mozilla Public License
-%% Version 2.0 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
%% at https://www.mozilla.org/en-US/MPL/2.0/
%%
%% Software distributed under the License is distributed on an "AS IS"
@@ -11,99 +9,198 @@
%% The Original Code is RabbitMQ.
%%
%% The Initial Developer of the Original Code is Pivotal Software, Inc.
-%% Copyright (c) 2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_stream_reader).
+-behaviour(gen_statem).
+
-include_lib("rabbit_common/include/rabbit.hrl").
--include("rabbit_stream.hrl").
-
--record(consumer, {
- socket :: rabbit_net:socket(), %% ranch_transport:socket(),
- member_pid :: pid(),
- offset :: osiris:offset(),
- subscription_id :: integer(),
- segment :: osiris_log:state(),
- credit :: integer(),
- stream :: binary()
-}).
-
--record(stream_connection_state, {
- data :: 'none' | binary(),
- blocked :: boolean(),
- consumers :: #{integer() => #consumer{}}
-}).
-
--record(stream_connection, {
- name :: string(),
- %% server host
- host,
- %% client host
- peer_host,
- %% server port
- port,
- %% client port
- peer_port,
- auth_mechanism,
- connected_at :: integer(),
- helper_sup :: pid(),
- socket :: rabbit_net:socket(),
- stream_leaders :: #{binary() => pid()},
- stream_subscriptions :: #{binary() => [integer()]},
- credits :: atomics:atomics_ref(),
- authentication_state :: atom(),
- user :: 'undefined' | #user{},
- virtual_host :: 'undefined' | binary(),
- connection_step :: atom(), % tcp_connected, peer_properties_exchanged, authenticating, authenticated, tuning, tuned, opened, failure, closing, closing_done
- frame_max :: integer(),
- heartbeat :: integer(),
- heartbeater :: any(),
- client_properties = #{} :: #{binary() => binary()},
- monitors = #{} :: #{reference() => binary()},
- stats_timer :: reference(),
- send_file_oct :: atomics:atomics_ref()
-}).
-
--record(configuration, {
- initial_credits :: integer(),
- credits_required_for_unblocking :: integer(),
- frame_max :: integer(),
- heartbeat :: integer()
-}).
-
--define(RESPONSE_FRAME_SIZE, 10). % 2 (key) + 2 (version) + 4 (correlation ID) + 2 (response code)
+-include_lib("rabbitmq_stream_common/include/rabbit_stream.hrl").
+
+-include("rabbit_stream_metrics.hrl").
+
+-type stream() :: binary().
+-type publisher_id() :: byte().
+-type publisher_reference() :: binary().
+-type subscription_id() :: byte().
+
+-record(publisher,
+ {publisher_id :: publisher_id(),
+ stream :: stream(),
+ reference :: undefined | publisher_reference(),
+ leader :: pid(),
+ message_counters :: atomics:atomics_ref()}).
+-record(consumer_configuration,
+ {socket :: rabbit_net:socket(), %% ranch_transport:socket(),
+ member_pid :: pid(),
+ subscription_id :: subscription_id(),
+ stream :: stream(),
+ offset :: osiris:offset(),
+ counters :: atomics:atomics_ref(),
+ properties :: map()}).
+-record(consumer,
+ {configuration :: #consumer_configuration{}, credit :: integer(),
+ log :: osiris_log:state()}).
+-record(stream_connection_state,
+ {data :: rabbit_stream_core:state(), blocked :: boolean(),
+ consumers :: #{subscription_id() => #consumer{}}}).
+-record(stream_connection,
+ {name :: binary(),
+ %% server host
+ host,
+ %% client host
+ peer_host,
+ %% server port
+ port,
+ %% client port
+ peer_port,
+ auth_mechanism,
+ connected_at :: integer(),
+ helper_sup :: pid(),
+ socket :: rabbit_net:socket(),
+ publishers ::
+ #{publisher_id() =>
+ #publisher{}}, %% FIXME replace with a list (0-255 lookup faster?)
+ publisher_to_ids ::
+ #{{stream(), publisher_reference()} => publisher_id()},
+ stream_leaders :: #{stream() => pid()},
+ stream_subscriptions :: #{stream() => [subscription_id()]},
+ credits :: atomics:atomics_ref(),
+ authentication_state :: atom(),
+ user :: undefined | #user{},
+ virtual_host :: undefined | binary(),
+ connection_step ::
+ atom(), % tcp_connected, peer_properties_exchanged, authenticating, authenticated, tuning, tuned, opened, failure, closing, closing_done
+ frame_max :: integer(),
+ heartbeat :: undefined | integer(),
+ heartbeater :: any(),
+ client_properties = #{} :: #{binary() => binary()},
+ monitors = #{} :: #{reference() => stream()},
+ stats_timer :: undefined | reference(),
+ resource_alarm :: boolean(),
+ send_file_oct ::
+ atomics:atomics_ref(), % number of bytes sent with send_file (for metrics)
+ transport :: tcp | ssl,
+ proxy_socket :: undefined | ranch_proxy:proxy_socket()}).
+-record(configuration,
+ {initial_credits :: integer(),
+ credits_required_for_unblocking :: integer(),
+ frame_max :: integer(),
+ heartbeat :: integer(),
+ connection_negotiation_step_timeout :: integer()}).
+-record(statem_data,
+ {transport :: module(),
+ connection :: #stream_connection{},
+ connection_state :: #stream_connection_state{},
+ config :: #configuration{}}).
+
-define(CREATION_EVENT_KEYS,
- [pid, name, port, peer_port, host,
- peer_host, ssl, peer_cert_subject, peer_cert_issuer,
- peer_cert_validity, auth_mechanism, ssl_protocol,
- ssl_key_exchange, ssl_cipher, ssl_hash, protocol, user, vhost,
- timeout, frame_max, channel_max, client_properties, connected_at,
- node, user_who_performed_action]).
+ [pid,
+ name,
+ port,
+ peer_port,
+ host,
+ peer_host,
+ ssl,
+ peer_cert_subject,
+ peer_cert_issuer,
+ peer_cert_validity,
+ auth_mechanism,
+ ssl_protocol,
+ ssl_key_exchange,
+ ssl_cipher,
+ ssl_hash,
+ protocol,
+ user,
+ vhost,
+ protocol,
+ timeout,
+ frame_max,
+ channel_max,
+ client_properties,
+ connected_at,
+ node,
+ user_who_performed_action]).
-define(SIMPLE_METRICS, [pid, recv_oct, send_oct, reductions]).
--define(OTHER_METRICS, [recv_cnt, send_cnt, send_pend, state, channels, garbage_collection,
- timeout]).
+-define(OTHER_METRICS,
+ [recv_cnt,
+ send_cnt,
+ send_pend,
+ state,
+ channels,
+ garbage_collection,
+ timeout]).
-define(AUTH_NOTIFICATION_INFO_KEYS,
- [host, name, peer_host, peer_port, protocol, auth_mechanism,
- ssl, ssl_protocol, ssl_cipher, peer_cert_issuer, peer_cert_subject,
- peer_cert_validity]).
-
-%% API
--export([start_link/4, init/1, info/2]).
+ [host,
+ name,
+ peer_host,
+ peer_port,
+ protocol,
+ auth_mechanism,
+ ssl,
+ ssl_protocol,
+ ssl_cipher,
+ peer_cert_issuer,
+ peer_cert_subject,
+ peer_cert_validity]).
+
+%% client API
+-export([start_link/4,
+ info/2,
+ consumers_info/2,
+ publishers_info/2,
+ in_vhost/2]).
+-export([resource_alarm/3]).
+%% gen_statem callbacks
+-export([callback_mode/0,
+ terminate/3,
+ init/1,
+ tcp_connected/3,
+ peer_properties_exchanged/3,
+ authenticating/3,
+ tuning/3,
+ tuned/3,
+ open/3,
+ close_sent/3]).
+
+ %% not called by gen_statem since gen_statem:enter_loop/4 is used
+
+ %% states
+
+callback_mode() ->
+ [state_functions, state_enter].
+
+terminate(Reason, State,
+ #statem_data{transport = Transport,
+ connection = #stream_connection{socket = Socket},
+ connection_state = ConnectionState} =
+ StatemData) ->
+ close(Transport, Socket, ConnectionState),
+ rabbit_networking:unregister_non_amqp_connection(self()),
+ notify_connection_closed(StatemData),
+ rabbit_log:debug("~s terminating in state '~s' with reason '~W'",
+ [?MODULE, State, Reason, 10]).
start_link(KeepaliveSup, Transport, Ref, Opts) ->
- Pid = proc_lib:spawn_link(?MODULE, init,
- [[KeepaliveSup, Transport, Ref, Opts]]),
-
- {ok, Pid}.
-
-init([KeepaliveSup, Transport, Ref, #{initial_credits := InitialCredits,
- credits_required_for_unblocking := CreditsRequiredBeforeUnblocking,
- frame_max := FrameMax,
- heartbeat := Heartbeat}]) ->
+ {ok,
+ proc_lib:spawn_link(?MODULE, init,
+ [[KeepaliveSup, Transport, Ref, Opts]])}.
+
+init([KeepaliveSup,
+ Transport,
+ Ref,
+ #{initial_credits := InitialCredits,
+ credits_required_for_unblocking := CreditsRequiredBeforeUnblocking,
+ frame_max := FrameMax,
+ heartbeat := Heartbeat,
+ transport := ConnTransport}]) ->
process_flag(trap_exit, true),
- {ok, Sock} = rabbit_networking:handshake(Ref,
- application:get_env(rabbitmq_stream, proxy_protocol, false)),
+ {ok, Sock} =
+ rabbit_networking:handshake(Ref,
+ application:get_env(rabbitmq_stream,
+ proxy_protocol, false)),
RealSocket = rabbit_net:unwrap_socket(Sock),
case rabbit_net:connection_string(Sock, inbound) of
{ok, ConnStr} ->
@@ -112,50 +209,370 @@ init([KeepaliveSup, Transport, Ref, #{initial_credits := InitialCredits,
atomics:put(SendFileOct, 1, 0),
init_credit(Credits, InitialCredits),
{PeerHost, PeerPort, Host, Port} =
- socket_op(Sock, fun (S) -> rabbit_net:socket_ends(S, inbound) end),
- Connection = #stream_connection{
- name = rabbit_data_coercion:to_binary(ConnStr),
- host = Host,
- peer_host = PeerHost,
- port = Port,
- peer_port = PeerPort,
- connected_at = os:system_time(milli_seconds),
- auth_mechanism = none,
- helper_sup = KeepaliveSup,
- socket = RealSocket,
- stream_leaders = #{},
- stream_subscriptions = #{},
- credits = Credits,
- authentication_state = none,
- connection_step = tcp_connected,
- frame_max = FrameMax,
- send_file_oct = SendFileOct},
- State = #stream_connection_state{
- consumers = #{}, blocked = false, data = none
- },
+ socket_op(Sock,
+ fun(S) -> rabbit_net:socket_ends(S, inbound) end),
+ Connection =
+ #stream_connection{name =
+ rabbit_data_coercion:to_binary(ConnStr),
+ host = Host,
+ peer_host = PeerHost,
+ port = Port,
+ peer_port = PeerPort,
+ connected_at = os:system_time(milli_seconds),
+ auth_mechanism = none,
+ helper_sup = KeepaliveSup,
+ socket = RealSocket,
+ publishers = #{},
+ publisher_to_ids = #{},
+ stream_leaders = #{},
+ stream_subscriptions = #{},
+ credits = Credits,
+ authentication_state = none,
+ connection_step = tcp_connected,
+ frame_max = FrameMax,
+ resource_alarm = false,
+ send_file_oct = SendFileOct,
+ transport = ConnTransport,
+ proxy_socket =
+ rabbit_net:maybe_get_proxy_socket(Sock)},
+ State =
+ #stream_connection_state{consumers = #{},
+ blocked = false,
+ data =
+ rabbit_stream_core:init(undefined)},
Transport:setopts(RealSocket, [{active, once}]),
-
- listen_loop_pre_auth(Transport, Connection, State, #configuration{
- initial_credits = InitialCredits,
- credits_required_for_unblocking = CreditsRequiredBeforeUnblocking,
- frame_max = FrameMax,
- heartbeat = Heartbeat
- });
+ rabbit_alarm:register(self(), {?MODULE, resource_alarm, []}),
+ ConnectionNegotiationStepTimeout =
+ application:get_env(rabbitmq_stream,
+ connection_negotiation_step_timeout,
+ 10_000),
+ % The gen_statem start_link call does not return until the init callback returns.
+ % This is problematic because we cannot call ranch:handshake/2
+ % from the init callback, as that would cause a deadlock.
+ % Therefore, we use the gen_statem:enter_loop/4 function.
+ % See https://ninenines.eu/docs/en/ranch/2.0/guide/protocols/
+ gen_statem:enter_loop(?MODULE,
+ [],
+ tcp_connected,
+ #statem_data{transport = Transport,
+ connection = Connection,
+ connection_state = State,
+ config =
+ #configuration{initial_credits
+ =
+ InitialCredits,
+ credits_required_for_unblocking
+ =
+ CreditsRequiredBeforeUnblocking,
+ frame_max =
+ FrameMax,
+ heartbeat =
+ Heartbeat,
+ connection_negotiation_step_timeout
+ =
+ ConnectionNegotiationStepTimeout}});
{Error, Reason} ->
rabbit_net:fast_close(RealSocket),
- rabbit_log:warning("Closing connection because of ~p ~p~n", [Error, Reason])
+ rabbit_log_connection:warning("Closing connection because of ~p ~p",
+ [Error, Reason])
end.
+tcp_connected(enter, _OldState,
+ #statem_data{config =
+ #configuration{connection_negotiation_step_timeout
+ = StateTimeout}}) ->
+ {keep_state_and_data, {state_timeout, StateTimeout, close}};
+tcp_connected(state_timeout, close,
+ #statem_data{transport = Transport,
+ connection = #stream_connection{socket = Socket}}) ->
+ state_timeout(?FUNCTION_NAME, Transport, Socket);
+tcp_connected(info, Msg, StateData) ->
+ handle_info(Msg, StateData,
+ fun(NextConnectionStep,
+ #statem_data{transport = Transport,
+ connection = #stream_connection{socket = S}} =
+ StatemData,
+ NewConnection,
+ NewConnectionState) ->
+ if NextConnectionStep =:= peer_properties_exchanged ->
+ {next_state, peer_properties_exchanged,
+ StatemData#statem_data{connection = NewConnection,
+ connection_state =
+ NewConnectionState}};
+ true ->
+ invalid_transition(Transport,
+ S,
+ ?FUNCTION_NAME,
+ NextConnectionStep)
+ end
+ end).
+
+peer_properties_exchanged(enter, _OldState,
+ #statem_data{config =
+ #configuration{connection_negotiation_step_timeout
+ =
+ StateTimeout}}) ->
+ {keep_state_and_data, {state_timeout, StateTimeout, close}};
+peer_properties_exchanged(state_timeout, close,
+ #statem_data{transport = Transport,
+ connection =
+ #stream_connection{socket =
+ Socket}}) ->
+ state_timeout(?FUNCTION_NAME, Transport, Socket);
+peer_properties_exchanged(info, Msg, StateData) ->
+ handle_info(Msg, StateData,
+ fun(NextConnectionStep,
+ #statem_data{transport = Transport,
+ connection = #stream_connection{socket = S}} =
+ StatemData,
+ NewConnection,
+ NewConnectionState) ->
+ if NextConnectionStep =:= authenticating ->
+ {next_state, authenticating,
+ StatemData#statem_data{connection = NewConnection,
+ connection_state =
+ NewConnectionState}};
+ true ->
+ invalid_transition(Transport,
+ S,
+ ?FUNCTION_NAME,
+ NextConnectionStep)
+ end
+ end).
+
+authenticating(enter, _OldState,
+ #statem_data{config =
+ #configuration{connection_negotiation_step_timeout
+ = StateTimeout}}) ->
+ {keep_state_and_data, {state_timeout, StateTimeout, close}};
+authenticating(state_timeout, close,
+ #statem_data{transport = Transport,
+ connection =
+ #stream_connection{socket = Socket}}) ->
+ state_timeout(?FUNCTION_NAME, Transport, Socket);
+authenticating(info, Msg, StateData) ->
+ handle_info(Msg, StateData,
+ fun(NextConnectionStep,
+ #statem_data{transport = Transport,
+ connection = #stream_connection{socket = S},
+ config =
+ #configuration{frame_max = FrameMax,
+ heartbeat = Heartbeat}} =
+ StatemData,
+ NewConnection,
+ NewConnectionState) ->
+ if NextConnectionStep =:= authenticated ->
+ Frame =
+ rabbit_stream_core:frame({tune, FrameMax,
+ Heartbeat}),
+ send(Transport, S, Frame),
+ {next_state, tuning,
+ StatemData#statem_data{connection =
+ NewConnection#stream_connection{connection_step
+ =
+ tuning},
+ connection_state =
+ NewConnectionState}};
+ true ->
+ invalid_transition(Transport,
+ S,
+ ?FUNCTION_NAME,
+ NextConnectionStep)
+ end
+ end).
+
+tuning(enter, _OldState,
+ #statem_data{config =
+ #configuration{connection_negotiation_step_timeout =
+ StateTimeout}}) ->
+ {keep_state_and_data, {state_timeout, StateTimeout, close}};
+tuning(state_timeout, close,
+ #statem_data{transport = Transport,
+ connection = #stream_connection{socket = Socket}}) ->
+ state_timeout(?FUNCTION_NAME, Transport, Socket);
+tuning(info, Msg, StateData) ->
+ handle_info(Msg, StateData,
+ fun(NextConnectionStep,
+ #statem_data{transport = Transport,
+ connection = #stream_connection{socket = S},
+ config = Configuration} =
+ StatemData,
+ NewConnection,
+ NewConnectionState) ->
+ case NextConnectionStep of
+ tuned ->
+ {next_state, tuned,
+ StatemData#statem_data{connection = NewConnection,
+ connection_state =
+ NewConnectionState}};
+ opened ->
+ transition_to_opened(Transport,
+ Configuration,
+ NewConnection,
+ NewConnectionState);
+ _ ->
+ invalid_transition(Transport,
+ S,
+ ?FUNCTION_NAME,
+ NextConnectionStep)
+ end
+ end).
+
+tuned(enter, _OldState,
+ #statem_data{config =
+ #configuration{connection_negotiation_step_timeout =
+ StateTimeout}}) ->
+ {keep_state_and_data, {state_timeout, StateTimeout, close}};
+tuned(state_timeout, close,
+ #statem_data{transport = Transport,
+ connection = #stream_connection{socket = Socket}}) ->
+ state_timeout(?FUNCTION_NAME, Transport, Socket);
+tuned(info, Msg, StateData) ->
+ handle_info(Msg, StateData,
+ fun(NextConnectionStep,
+ #statem_data{transport = Transport,
+ connection = #stream_connection{socket = S},
+ config = Configuration},
+ NewConnection,
+ NewConnectionState) ->
+ if NextConnectionStep =:= opened ->
+ transition_to_opened(Transport,
+ Configuration,
+ NewConnection,
+ NewConnectionState);
+ true ->
+ invalid_transition(Transport,
+ S,
+ ?FUNCTION_NAME,
+ NextConnectionStep)
+ end
+ end).
+
+state_timeout(State, Transport, Socket) ->
+ rabbit_log_connection:warning("Closing connection because of timeout in state "
+ "'~s' likely due to lack of client action.",
+ [State]),
+ close_immediately(Transport, Socket),
+ stop.
+
+handle_info(Msg,
+ #statem_data{transport = Transport,
+ connection =
+ #stream_connection{socket = S,
+ connection_step =
+ PreviousConnectionStep} =
+ Connection,
+ connection_state = State} =
+ StatemData,
+ Transition) ->
+ {OK, Closed, Error, _Passive} = Transport:messages(),
+ case Msg of
+ {OK, S, Data} ->
+ {Connection1, State1} =
+ handle_inbound_data_pre_auth(Transport,
+ Connection,
+ State,
+ Data),
+ Transport:setopts(S, [{active, once}]),
+ #stream_connection{connection_step = NewConnectionStep} =
+ Connection1,
+ rabbit_log_connection:debug("Transitioned from ~s to ~s",
+ [PreviousConnectionStep,
+ NewConnectionStep]),
+ Transition(NewConnectionStep, StatemData, Connection1, State1);
+ {Closed, S} ->
+ rabbit_log_connection:warning("Stream protocol connection socket ~w closed",
+ [S]),
+ stop;
+ {Error, S, Reason} ->
+ rabbit_log_connection:warning("Socket error ~p [~w]", [Reason, S]),
+ stop;
+ {resource_alarm, IsThereAlarm} ->
+ {keep_state,
+ StatemData#statem_data{connection =
+ Connection#stream_connection{resource_alarm
+ =
+ IsThereAlarm},
+ connection_state =
+ State#stream_connection_state{blocked =
+ true}}};
+ Unknown ->
+ rabbit_log:warning("Received unknown message ~p", [Unknown]),
+ close_immediately(Transport, S),
+ stop
+ end.
+
+transition_to_opened(Transport,
+ Configuration,
+ NewConnection,
+ NewConnectionState) ->
+ % TODO remove registration to rabbit_stream_connections
+ % just meant to be able to close the connection remotely
+ % should be possible once the connections are available in ctl list_connections
+ pg_local:join(rabbit_stream_connections, self()),
+ Connection1 =
+ rabbit_event:init_stats_timer(NewConnection,
+ #stream_connection.stats_timer),
+ Connection2 = ensure_stats_timer(Connection1),
+ Infos =
+ augment_infos_with_user_provided_connection_name(infos(?CREATION_EVENT_KEYS,
+ Connection2,
+ NewConnectionState),
+ Connection2),
+ rabbit_core_metrics:connection_created(self(), Infos),
+ rabbit_event:notify(connection_created, Infos),
+ rabbit_networking:register_non_amqp_connection(self()),
+ {next_state, open,
+ #statem_data{transport = Transport,
+ connection = Connection2,
+ connection_state = NewConnectionState,
+ config = Configuration}}.
+
+invalid_transition(Transport, Socket, From, To) ->
+ rabbit_log_connection:warning("Closing socket ~w. Invalid transition from ~s "
+ "to ~s.",
+ [Socket, From, To]),
+ close_immediately(Transport, Socket),
+ stop.
+
+resource_alarm(ConnectionPid, disk,
+ {_WasAlarmSetForNode,
+ IsThereAnyAlarmsForSameResourceInTheCluster, _Node}) ->
+ ConnectionPid
+ ! {resource_alarm, IsThereAnyAlarmsForSameResourceInTheCluster},
+ ok;
+resource_alarm(_ConnectionPid, _Resource, _Alert) ->
+ ok.
+
socket_op(Sock, Fun) ->
RealSocket = rabbit_net:unwrap_socket(Sock),
case Fun(Sock) of
- {ok, Res} -> Res;
+ {ok, Res} ->
+ Res;
{error, Reason} ->
- rabbit_log:warning("Error during socket operation ~p~n", [Reason]),
+ rabbit_log_connection:warning("Error during socket operation ~p",
+ [Reason]),
rabbit_net:fast_close(RealSocket),
exit(normal)
end.
+should_unblock(#stream_connection{publishers = Publishers}, _)
+ when map_size(Publishers) == 0 ->
+ %% always unblock a connection without publishers
+ true;
+should_unblock(#stream_connection{credits = Credits,
+ resource_alarm = ResourceAlarm},
+ #configuration{credits_required_for_unblocking =
+ CreditsRequiredForUnblocking}) ->
+ case {ResourceAlarm,
+ has_enough_credits_to_unblock(Credits, CreditsRequiredForUnblocking)}
+ of
+ {true, _} ->
+ false;
+ {false, EnoughCreditsToUnblock} ->
+ EnoughCreditsToUnblock
+ end.
+
init_credit(CreditReference, Credits) ->
atomics:put(CreditReference, 1, Credits).
@@ -168,56 +585,52 @@ add_credits(CreditReference, Credits) ->
has_credits(CreditReference) ->
atomics:get(CreditReference, 1) > 0.
-has_enough_credits_to_unblock(CreditReference, CreditsRequiredForUnblocking) ->
+has_enough_credits_to_unblock(CreditReference,
+ CreditsRequiredForUnblocking) ->
atomics:get(CreditReference, 1) > CreditsRequiredForUnblocking.
-listen_loop_pre_auth(Transport, #stream_connection{socket = S} = Connection, State,
- #configuration{frame_max = FrameMax, heartbeat = Heartbeat} = Configuration) ->
- {OK, Closed, Error} = Transport:messages(),
- %% FIXME introduce timeout to complete the connection opening (after block should be enough)
- receive
- {OK, S, Data} ->
- #stream_connection{connection_step = ConnectionStep0} = Connection,
- {Connection1, State1} = handle_inbound_data_pre_auth(Transport, Connection, State, Data),
- Transport:setopts(S, [{active, once}]),
- #stream_connection{connection_step = ConnectionStep} = Connection1,
- rabbit_log:info("Transitioned from ~p to ~p~n", [ConnectionStep0, ConnectionStep]),
- case ConnectionStep of
- authenticated ->
- TuneFrame = <<?COMMAND_TUNE:16, ?VERSION_0:16, FrameMax:32, Heartbeat:32>>,
- frame(Transport, Connection1, TuneFrame),
- listen_loop_pre_auth(Transport, Connection1#stream_connection{connection_step = tuning}, State1, Configuration);
- opened ->
- % TODO remove registration to rabbit_stream_connections
- % just meant to be able to close the connection remotely
- % should be possible once the connections are available in ctl list_connections
- pg_local:join(rabbit_stream_connections, self()),
- Connection2 = rabbit_event:init_stats_timer(Connection1, #stream_connection.stats_timer),
- Connection3 = ensure_stats_timer(Connection2),
- Infos = augment_infos_with_user_provided_connection_name(
- infos(?CREATION_EVENT_KEYS, Connection3, State1),
- Connection3
- ),
- rabbit_core_metrics:connection_created(self(), Infos),
- rabbit_event:notify(connection_created, Infos),
- rabbit_networking:register_non_amqp_connection(self()),
- listen_loop_post_auth(Transport, Connection3, State1, Configuration);
- failure ->
- close(Transport, S);
- _ ->
- listen_loop_pre_auth(Transport, Connection1, State1, Configuration)
- end;
- {Closed, S} ->
- rabbit_log:info("Socket ~w closed [~w]~n", [S, self()]),
- ok;
- {Error, S, Reason} ->
- rabbit_log:info("Socket error ~p [~w]~n", [Reason, S, self()]);
- M ->
- rabbit_log:warning("Unknown message ~p~n", [M]),
- close(Transport, S)
- end.
+increase_messages_consumed(Counters, Count) ->
+ rabbit_global_counters:messages_delivered(stream, ?STREAM_QUEUE_TYPE,
+ Count),
+ atomics:add(Counters, 1, Count).
+
+set_consumer_offset(Counters, Offset) ->
+ atomics:put(Counters, 2, Offset).
+
+increase_messages_received(Counters, Count) ->
+ rabbit_global_counters:messages_received(stream, Count),
+ rabbit_global_counters:messages_received_confirm(stream, Count),
+ atomics:add(Counters, 1, Count).
-augment_infos_with_user_provided_connection_name(Infos, #stream_connection{client_properties = ClientProperties}) ->
+increase_messages_confirmed(Counters, Count) ->
+ rabbit_global_counters:messages_confirmed(stream, Count),
+ atomics:add(Counters, 2, Count).
+
+increase_messages_errored(Counters, Count) ->
+ atomics:add(Counters, 3, Count).
+
+messages_consumed(Counters) ->
+ atomics:get(Counters, 1).
+
+consumer_offset(Counters) ->
+ atomics:get(Counters, 2).
+
+messages_published(Counters) ->
+ atomics:get(Counters, 1).
+
+messages_confirmed(Counters) ->
+ atomics:get(Counters, 2).
+
+messages_errored(Counters) ->
+ atomics:get(Counters, 3).
+
+stream_stored_offset(Log) ->
+ osiris_log:committed_offset(Log).
+
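These helpers all read fixed slots of small `atomics` arrays: the publisher counters created later in this patch have three slots (1 = received/published, 2 = confirmed, 3 = errored) and the consumer counters have two (1 = consumed, 2 = consumer offset). An illustrative check of that layout, using only the accessors defined above (`counters_layout_demo/0` is made up):

    %% Illustrative sketch of the slot layout behind the accessors above.
    counters_layout_demo() ->
        PubCnt = atomics:new(3, [{signed, false}]),  %% cf. #publisher.message_counters
        atomics:add(PubCnt, 1, 10),                  %% as increase_messages_received/2
        atomics:add(PubCnt, 2, 8),                   %% as increase_messages_confirmed/2
        atomics:add(PubCnt, 3, 2),                   %% as increase_messages_errored/2
        {10, 8, 2} = {messages_published(PubCnt),
                      messages_confirmed(PubCnt),
                      messages_errored(PubCnt)},
        ConsCnt = atomics:new(2, [{signed, false}]), %% cf. #consumer_configuration.counters
        atomics:add(ConsCnt, 1, 5),                  %% as increase_messages_consumed/2
        atomics:put(ConsCnt, 2, 42),                 %% as set_consumer_offset/2
        {5, 42} = {messages_consumed(ConsCnt), consumer_offset(ConsCnt)}.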
+augment_infos_with_user_provided_connection_name(Infos,
+        #stream_connection{client_properties = ClientProperties}) ->
case ClientProperties of
#{<<"connection_name">> := UserProvidedConnectionName} ->
[{user_provided_name, UserProvidedConnectionName} | Infos];
@@ -225,405 +638,697 @@ augment_infos_with_user_provided_connection_name(Infos, #stream_connection{clien
Infos
end.
-close(Transport, S) ->
+close(Transport, S,
+ #stream_connection_state{consumers = Consumers}) ->
+ [osiris_log:close(Log)
+ || #consumer{log = Log} <- maps:values(Consumers)],
Transport:shutdown(S, write),
Transport:close(S).
-listen_loop_post_auth(Transport, #stream_connection{socket = S,
- stream_subscriptions = StreamSubscriptions, credits = Credits,
- heartbeater = Heartbeater, monitors = Monitors, client_properties = ClientProperties,
- send_file_oct = SendFileOct} = Connection0,
- #stream_connection_state{consumers = Consumers, blocked = Blocked} = State,
- #configuration{credits_required_for_unblocking = CreditsRequiredForUnblocking} = Configuration) ->
- Connection = ensure_stats_timer(Connection0),
- {OK, Closed, Error} = Transport:messages(),
- receive
- {OK, S, Data} ->
- {Connection1, State1} = handle_inbound_data_post_auth(Transport, Connection, State, Data),
- #stream_connection{connection_step = Step} = Connection1,
- case Step of
- closing ->
- close(Transport, S),
- rabbit_networking:unregister_non_amqp_connection(self()),
- notify_connection_closed(Connection1, State1);
- close_sent ->
- rabbit_log:debug("Transitioned to close_sent ~n"),
- Transport:setopts(S, [{active, once}]),
- listen_loop_post_close(Transport, Connection1, State1, Configuration);
- _ ->
- State2 = case Blocked of
- true ->
- case has_enough_credits_to_unblock(Credits, CreditsRequiredForUnblocking) of
- true ->
- Transport:setopts(S, [{active, once}]),
- ok = rabbit_heartbeat:resume_monitor(Heartbeater),
- State1#stream_connection_state{blocked = false};
- false ->
- State1
- end;
- false ->
- case has_credits(Credits) of
- true ->
- Transport:setopts(S, [{active, once}]),
- State1;
- false ->
- ok = rabbit_heartbeat:pause_monitor(Heartbeater),
- State1#stream_connection_state{blocked = true}
- end
- end,
- listen_loop_post_auth(Transport, Connection1, State2, Configuration)
- end;
- {'DOWN', MonitorRef, process, _OsirisPid, _Reason} ->
- {Connection1, State1} = case Monitors of
- #{MonitorRef := Stream} ->
- Monitors1 = maps:remove(MonitorRef, Monitors),
- C = Connection#stream_connection{monitors = Monitors1},
- case clean_state_after_stream_deletion_or_failure(Stream, C, State) of
- {cleaned, NewConnection, NewState} ->
- StreamSize = byte_size(Stream),
- FrameSize = 2 + 2 + 2 + 2 + StreamSize,
- Transport:send(S, [<<FrameSize:32, ?COMMAND_METADATA_UPDATE:16, ?VERSION_0:16,
- ?RESPONSE_CODE_STREAM_NOT_AVAILABLE:16, StreamSize:16, Stream/binary>>]),
- {NewConnection, NewState};
- {not_cleaned, SameConnection, SameState} ->
- {SameConnection, SameState}
- end;
- _ ->
- {Connection, State}
- end,
- listen_loop_post_auth(Transport, Connection1, State1, Configuration);
- {'$gen_cast', {queue_event, _QueueResource, {osiris_written, _QueueResource, CorrelationList}}} ->
- {FirstPublisherId, _FirstPublishingId} = lists:nth(1, CorrelationList),
- {LastPublisherId, LastPublishingIds, LastCount} = lists:foldl(fun({PublisherId, PublishingId}, {CurrentPublisherId, PublishingIds, Count}) ->
- case PublisherId of
- CurrentPublisherId ->
- {CurrentPublisherId, [PublishingIds, <<PublishingId:64>>], Count + 1};
- OtherPublisherId ->
- FrameSize = 2 + 2 + 1 + 4 + Count * 8,
- %% FIXME enforce max frame size
- %% in practice, this should be necessary only for very large chunks and for very small frame size limits
- Transport:send(S, [<<FrameSize:32, ?COMMAND_PUBLISH_CONFIRM:16, ?VERSION_0:16>>,
- <<CurrentPublisherId:8>>,
- <<Count:32>>, PublishingIds]),
- {OtherPublisherId, <<PublishingId:64>>, 1}
- end
- end, {FirstPublisherId, <<>>, 0}, CorrelationList),
- FrameSize = 2 + 2 + 1 + 4 + LastCount * 8,
- Transport:send(S, [<<FrameSize:32, ?COMMAND_PUBLISH_CONFIRM:16, ?VERSION_0:16>>,
- <<LastPublisherId:8>>,
- <<LastCount:32>>, LastPublishingIds]),
- CorrelationIdCount = length(CorrelationList),
- add_credits(Credits, CorrelationIdCount),
- State1 = case Blocked of
- true ->
- case has_enough_credits_to_unblock(Credits, CreditsRequiredForUnblocking) of
- true ->
- Transport:setopts(S, [{active, once}]),
- ok = rabbit_heartbeat:resume_monitor(Heartbeater),
- State#stream_connection_state{blocked = false};
- false ->
- State
- end;
- false ->
- State
- end,
- listen_loop_post_auth(Transport, Connection, State1, Configuration);
- {'$gen_cast', {queue_event, #resource{name = StreamName}, {osiris_offset, _QueueResource, -1}}} ->
- rabbit_log:info("received osiris offset event for ~p with offset ~p~n", [StreamName, -1]),
- listen_loop_post_auth(Transport, Connection, State, Configuration);
- {'$gen_cast', {queue_event, #resource{name = StreamName}, {osiris_offset, _QueueResource, Offset}}} when Offset > -1 ->
- {Connection1, State1} = case maps:get(StreamName, StreamSubscriptions, undefined) of
- undefined ->
- rabbit_log:info("osiris offset event for ~p, but no subscription (leftover messages after unsubscribe?)", [StreamName]),
- {Connection, State};
- [] ->
- rabbit_log:info("osiris offset event for ~p, but no registered consumers!", [StreamName]),
- {Connection#stream_connection{stream_subscriptions = maps:remove(StreamName, StreamSubscriptions)}, State};
- CorrelationIds when is_list(CorrelationIds) ->
- Consumers1 = lists:foldl(fun(CorrelationId, ConsumersAcc) ->
- #{CorrelationId := Consumer} = ConsumersAcc,
- #consumer{credit = Credit} = Consumer,
- Consumer1 = case Credit of
- 0 ->
- Consumer;
- _ ->
- {{segment, Segment1}, {credit, Credit1}} = send_chunks(
- Transport,
- Consumer,
- SendFileOct
- ),
- Consumer#consumer{segment = Segment1, credit = Credit1}
- end,
- ConsumersAcc#{CorrelationId => Consumer1}
- end,
- Consumers,
- CorrelationIds),
- {Connection, State#stream_connection_state{consumers = Consumers1}}
- end,
- listen_loop_post_auth(Transport, Connection1, State1, Configuration);
- heartbeat_send ->
- Frame = <<?COMMAND_HEARTBEAT:16, ?VERSION_0:16>>,
- case catch frame(Transport, Connection, Frame) of
- ok ->
- listen_loop_post_auth(Transport, Connection, State, Configuration);
- Unexpected ->
- rabbit_log:info("Heartbeat send error ~p, closing connection~n", [Unexpected]),
- C1 = demonitor_all_streams(Connection),
- close(Transport, C1)
- end;
- heartbeat_timeout ->
- rabbit_log:info("Heartbeat timeout, closing connection~n"),
- C1 = demonitor_all_streams(Connection),
- close(Transport, C1);
- {infos, From} ->
- From ! {self(), ClientProperties},
- listen_loop_post_auth(Transport, Connection, State, Configuration);
- {'$gen_call', From, info} ->
- gen_server:reply(From, infos(?INFO_ITEMS, Connection, State)),
- listen_loop_post_auth(Transport, Connection, State, Configuration);
- {'$gen_call', From, {info, Items}} ->
- gen_server:reply(From, infos(Items, Connection, State)),
- listen_loop_post_auth(Transport, Connection, State, Configuration);
- emit_stats ->
- Connection1 = emit_stats(Connection, State),
- listen_loop_post_auth(Transport, Connection1, State, Configuration);
- {'$gen_cast', {force_event_refresh, Ref}} ->
- Infos = augment_infos_with_user_provided_connection_name(
- infos(?CREATION_EVENT_KEYS, Connection, State),
- Connection
- ),
- rabbit_event:notify(connection_created, Infos, Ref),
- Connection1 = rabbit_event:init_stats_timer(Connection, #stream_connection.stats_timer),
- listen_loop_post_auth(Transport, Connection1, State, Configuration);
- {'$gen_call', From, {shutdown, Explanation}} ->
- % likely closing call from the management plugin
- gen_server:reply(From, ok),
- rabbit_log:info("Forcing stream connection ~p closing: ~p~n", [self(), Explanation]),
- demonitor_all_streams(Connection),
- rabbit_networking:unregister_non_amqp_connection(self()),
- notify_connection_closed(Connection, State),
- close(Transport, S),
- ok;
- {Closed, S} ->
- demonitor_all_streams(Connection),
- rabbit_networking:unregister_non_amqp_connection(self()),
- notify_connection_closed(Connection, State),
- rabbit_log:info("Socket ~w closed [~w]~n", [S, self()]),
- ok;
- {Error, S, Reason} ->
- demonitor_all_streams(Connection),
- rabbit_networking:unregister_non_amqp_connection(self()),
- notify_connection_closed(Connection, State),
- rabbit_log:info("Socket error ~p [~w]~n", [Reason, S, self()]);
- M ->
- rabbit_log:warning("Unknown message ~p~n", [M]),
- %% FIXME send close
- listen_loop_post_auth(Transport, Connection, State, Configuration)
- end.
+% Do not read or write any further data from/to the socket.
+% Useful to close sockets for unauthenticated clients.
+close_immediately(Transport, S) ->
+ Transport:shutdown(S, read),
+ Transport:close(S).
-listen_loop_post_close(Transport, #stream_connection{socket = S} = Connection, State, Configuration) ->
- {OK, Closed, Error} = Transport:messages(),
- %% FIXME demonitor streams
- %% FIXME introduce timeout to complete the connection closing (after block should be enough)
- receive
- {OK, S, Data} ->
+open(enter, _OldState, _StateData) ->
+ keep_state_and_data;
+open(info, {resource_alarm, IsThereAlarm},
+ #statem_data{transport = Transport,
+ connection =
+ #stream_connection{socket = S,
+ name = ConnectionName,
+ credits = Credits,
+ heartbeater = Heartbeater} =
+ Connection,
+ connection_state =
+ #stream_connection_state{blocked = Blocked} = State,
+ config =
+ #configuration{credits_required_for_unblocking =
+ CreditsRequiredForUnblocking}} =
+ StatemData) ->
+ rabbit_log_connection:debug("Connection ~p received resource alarm. Alarm "
+ "on? ~p",
+ [ConnectionName, IsThereAlarm]),
+ EnoughCreditsToUnblock =
+ has_enough_credits_to_unblock(Credits, CreditsRequiredForUnblocking),
+ NewBlockedState =
+ case {IsThereAlarm, EnoughCreditsToUnblock} of
+ {true, _} ->
+ true;
+ {false, EnoughCredits} ->
+ not EnoughCredits
+ end,
+ rabbit_log_connection:debug("Connection ~p had blocked status set to ~p, new "
+ "blocked status is now ~p",
+ [ConnectionName, Blocked, NewBlockedState]),
+ case {Blocked, NewBlockedState} of
+ {true, false} ->
Transport:setopts(S, [{active, once}]),
- {Connection1, State1} = handle_inbound_data_post_close(Transport, Connection, State, Data),
- #stream_connection{connection_step = Step} = Connection1,
- case Step of
- closing_done ->
- rabbit_log:debug("Received close confirmation from client"),
- close(Transport, S),
- rabbit_networking:unregister_non_amqp_connection(self()),
- notify_connection_closed(Connection1, State1);
- _ ->
- Transport:setopts(S, [{active, once}]),
- listen_loop_post_close(Transport, Connection1, State1, Configuration)
- end;
- {Closed, S} ->
- rabbit_networking:unregister_non_amqp_connection(self()),
- notify_connection_closed(Connection, State),
- rabbit_log:info("Socket ~w closed [~w]~n", [S, self()]),
+ ok = rabbit_heartbeat:resume_monitor(Heartbeater),
+ rabbit_log_connection:debug("Unblocking connection ~p",
+ [ConnectionName]);
+ {false, true} ->
+ ok = rabbit_heartbeat:pause_monitor(Heartbeater),
+ rabbit_log_connection:debug("Blocking connection ~p after resource alarm",
+ [ConnectionName]);
+ _ ->
+ ok
+ end,
+ {keep_state,
+ StatemData#statem_data{connection =
+ Connection#stream_connection{resource_alarm =
+ IsThereAlarm},
+ connection_state =
+ State#stream_connection_state{blocked =
+ NewBlockedState}}};
+open(info, {OK, S, Data},
+ #statem_data{transport = Transport,
+ connection =
+ #stream_connection{socket = S,
+ credits = Credits,
+ heartbeater = Heartbeater} =
+ Connection,
+ connection_state =
+ #stream_connection_state{blocked = Blocked} = State,
+ config = Configuration} =
+ StatemData)
+ when OK =:= tcp; OK =:= ssl ->
+ {Connection1, State1} =
+ handle_inbound_data_post_auth(Transport, Connection, State, Data),
+ #stream_connection{connection_step = Step} = Connection1,
+ case Step of
+ closing ->
+ stop;
+ close_sent ->
+ rabbit_log_connection:debug("Transitioned to close_sent"),
+ Transport:setopts(S, [{active, once}]),
+ {next_state, close_sent,
+ StatemData#statem_data{connection = Connection1,
+ connection_state = State1}};
+ _ ->
+ State2 =
+ case Blocked of
+ true ->
+ case should_unblock(Connection, Configuration) of
+ true ->
+ Transport:setopts(S, [{active, once}]),
+ ok =
+ rabbit_heartbeat:resume_monitor(Heartbeater),
+ State1#stream_connection_state{blocked = false};
+ false ->
+ State1
+ end;
+ false ->
+ case has_credits(Credits) of
+ true ->
+ Transport:setopts(S, [{active, once}]),
+ State1;
+ false ->
+ ok =
+ rabbit_heartbeat:pause_monitor(Heartbeater),
+ State1#stream_connection_state{blocked = true}
+ end
+ end,
+ {keep_state,
+ StatemData#statem_data{connection = Connection1,
+ connection_state = State2}}
+ end;
+open(info, {Closed, Socket}, #statem_data{connection = Connection})
+ when Closed =:= tcp_closed; Closed =:= ssl_closed ->
+ demonitor_all_streams(Connection),
+ rabbit_log_connection:warning("Socket ~w closed [~w]",
+ [Socket, self()]),
+ stop;
+open(info, {Error, Socket, Reason},
+ #statem_data{connection = Connection})
+ when Error =:= tcp_error; Error =:= ssl_error ->
+ demonitor_all_streams(Connection),
+ rabbit_log_connection:error("Socket error ~p [~w] [~w]",
+ [Reason, Socket, self()]),
+ stop;
+open(info, {'DOWN', MonitorRef, process, _OsirisPid, _Reason},
+ #statem_data{transport = Transport,
+ connection =
+ #stream_connection{socket = S, monitors = Monitors} =
+ Connection,
+ connection_state = State} =
+ StatemData) ->
+ {Connection1, State1} =
+ case Monitors of
+ #{MonitorRef := Stream} ->
+ Monitors1 = maps:remove(MonitorRef, Monitors),
+ C = Connection#stream_connection{monitors = Monitors1},
+ case clean_state_after_stream_deletion_or_failure(Stream, C,
+ State)
+ of
+ {cleaned, NewConnection, NewState} ->
+ Command =
+ {metadata_update, Stream,
+ ?RESPONSE_CODE_STREAM_NOT_AVAILABLE},
+ Frame = rabbit_stream_core:frame(Command),
+ send(Transport, S, Frame),
+ rabbit_global_counters:increase_protocol_counter(stream,
+ ?STREAM_NOT_AVAILABLE,
+ 1),
+ {NewConnection, NewState};
+ {not_cleaned, SameConnection, SameState} ->
+ {SameConnection, SameState}
+ end;
+ _ ->
+ {Connection, State}
+ end,
+ {keep_state,
+ StatemData#statem_data{connection = Connection1,
+ connection_state = State1}};
+open(info, heartbeat_send,
+ #statem_data{transport = Transport,
+ connection = #stream_connection{socket = S} = Connection}) ->
+ Frame = rabbit_stream_core:frame(heartbeat),
+ case catch send(Transport, S, Frame) of
+ ok ->
+ keep_state_and_data;
+ Unexpected ->
+ rabbit_log_connection:info("Heartbeat send error ~p, closing connection",
+ [Unexpected]),
+ _C1 = demonitor_all_streams(Connection),
+ stop
+ end;
+open(info, heartbeat_timeout,
+ #statem_data{connection = #stream_connection{} = Connection}) ->
+ rabbit_log_connection:debug("Heartbeat timeout, closing connection"),
+ _C1 = demonitor_all_streams(Connection),
+ stop;
+open(info, {infos, From},
+ #statem_data{connection =
+ #stream_connection{client_properties =
+ ClientProperties}}) ->
+ From ! {self(), ClientProperties},
+ keep_state_and_data;
+open(info, emit_stats,
+ #statem_data{connection = Connection, connection_state = State} =
+ StatemData) ->
+ Connection1 = emit_stats(Connection, State),
+ {keep_state, StatemData#statem_data{connection = Connection1}};
+open(info, Unknown, _StatemData) ->
+ rabbit_log_connection:warning("Received unknown message ~p in state ~s",
+ [Unknown, ?FUNCTION_NAME]),
+ %% FIXME send close
+ keep_state_and_data;
+open({call, From}, info,
+ #statem_data{connection = Connection, connection_state = State}) ->
+ {keep_state_and_data,
+ {reply, From, infos(?INFO_ITEMS, Connection, State)}};
+open({call, From}, {info, Items},
+ #statem_data{connection = Connection, connection_state = State}) ->
+ {keep_state_and_data, {reply, From, infos(Items, Connection, State)}};
+open({call, From}, {consumers_info, Items},
+ #statem_data{connection_state = State}) ->
+ {keep_state_and_data, {reply, From, consumers_infos(Items, State)}};
+open({call, From}, {publishers_info, Items},
+ #statem_data{connection = Connection}) ->
+ {keep_state_and_data,
+ {reply, From, publishers_infos(Items, Connection)}};
+open({call, From}, {shutdown, Explanation},
+ #statem_data{connection = Connection}) ->
+ % likely closing call from the management plugin
+ rabbit_log_connection:info("Forcing stream connection ~p closing: ~p",
+ [self(), Explanation]),
+ demonitor_all_streams(Connection),
+ {stop_and_reply, normal, {reply, From, ok}};
+open(cast,
+ {queue_event, _, {osiris_written, _, undefined, CorrelationList}},
+ #statem_data{transport = Transport,
+ connection =
+ #stream_connection{socket = S,
+ credits = Credits,
+ heartbeater = Heartbeater,
+ publishers = Publishers} =
+ Connection,
+ connection_state =
+ #stream_connection_state{blocked = Blocked} = State,
+ config = Configuration} =
+ StatemData) ->
+ ByPublisher =
+ lists:foldr(fun({PublisherId, PublishingId}, Acc) ->
+ case maps:is_key(PublisherId, Publishers) of
+ true ->
+ case maps:get(PublisherId, Acc, undefined) of
+ undefined ->
+ Acc#{PublisherId => [PublishingId]};
+ Ids ->
+ Acc#{PublisherId => [PublishingId | Ids]}
+ end;
+ false -> Acc
+ end
+ end,
+ #{}, CorrelationList),
+ _ = maps:map(fun(PublisherId, PublishingIds) ->
+ Command = {publish_confirm, PublisherId, PublishingIds},
+ send(Transport, S, rabbit_stream_core:frame(Command)),
+ #{PublisherId := #publisher{message_counters = Cnt}} =
+ Publishers,
+ increase_messages_confirmed(Cnt, length(PublishingIds))
+ end,
+ ByPublisher),
+ CorrelationIdCount = length(CorrelationList),
+ add_credits(Credits, CorrelationIdCount),
+ State1 =
+ case Blocked of
+ true ->
+ case should_unblock(Connection, Configuration) of
+ true ->
+ Transport:setopts(S, [{active, once}]),
+ ok = rabbit_heartbeat:resume_monitor(Heartbeater),
+ State#stream_connection_state{blocked = false};
+ false ->
+ State
+ end;
+ false ->
+ State
+ end,
+ {keep_state, StatemData#statem_data{connection_state = State1}};
+open(cast,
+ {queue_event, _QueueResource,
+ {osiris_written,
+ #resource{name = Stream},
+ PublisherReference,
+ CorrelationList}},
+ #statem_data{transport = Transport,
+ connection =
+ #stream_connection{socket = S,
+ credits = Credits,
+ heartbeater = Heartbeater,
+ publishers = Publishers,
+ publisher_to_ids = PublisherRefToIds} =
+ Connection,
+ connection_state =
+ #stream_connection_state{blocked = Blocked} = State,
+ config = Configuration} =
+ StatemData) ->
+ PublishingIdCount = length(CorrelationList),
+ case maps:get({Stream, PublisherReference}, PublisherRefToIds,
+ undefined)
+ of
+ undefined ->
ok;
- {Error, S, Reason} ->
- rabbit_log:info("Socket error ~p [~w]~n", [Reason, S, self()]),
- close(Transport, S),
- rabbit_networking:unregister_non_amqp_connection(self()),
- notify_connection_closed(Connection, State);
- M ->
- rabbit_log:warning("Ignored message on closing ~p~n", [M])
- end.
-
-handle_inbound_data_pre_auth(Transport, Connection, State, Rest) ->
- handle_inbound_data(Transport, Connection, State, Rest, fun handle_frame_pre_auth/5).
-
-handle_inbound_data_post_auth(Transport, Connection, State, Rest) ->
- handle_inbound_data(Transport, Connection, State, Rest, fun handle_frame_post_auth/5).
-
-handle_inbound_data_post_close(Transport, Connection, State, Rest) ->
- handle_inbound_data(Transport, Connection, State, Rest, fun handle_frame_post_close/5).
-
-handle_inbound_data(_Transport, Connection, State, <<>>, _HandleFrameFun) ->
- {Connection, State};
-handle_inbound_data(Transport, #stream_connection{frame_max = FrameMax} = Connection,
- #stream_connection_state{data = none} = State, <<Size:32, _Frame:Size/binary, _Rest/bits>>, _HandleFrameFun)
- when FrameMax /= 0 andalso Size > FrameMax - 4 ->
- CloseReason = <<"frame too large">>,
- CloseReasonLength = byte_size(CloseReason),
- CloseFrame = <<?COMMAND_CLOSE:16, ?VERSION_0:16, 1:32, ?RESPONSE_CODE_FRAME_TOO_LARGE:16,
- CloseReasonLength:16, CloseReason:CloseReasonLength/binary>>,
- frame(Transport, Connection, CloseFrame),
- {Connection#stream_connection{connection_step = close_sent}, State};
-handle_inbound_data(Transport, Connection,
- #stream_connection_state{data = none} = State, <<Size:32, Frame:Size/binary, Rest/bits>>, HandleFrameFun) ->
- {Connection1, State1, Rest1} = HandleFrameFun(Transport, Connection, State, Frame, Rest),
- handle_inbound_data(Transport, Connection1, State1, Rest1, HandleFrameFun);
-handle_inbound_data(_Transport, Connection, #stream_connection_state{data = none} = State, Data, _HandleFrameFun) ->
- {Connection, State#stream_connection_state{data = Data}};
-handle_inbound_data(Transport, Connection, #stream_connection_state{data = Leftover} = State, Data, HandleFrameFun) ->
- State1 = State#stream_connection_state{data = none},
- %% FIXME avoid concatenation to avoid a new binary allocation
- %% see osiris_replica:parse_chunk/3
- handle_inbound_data(Transport, Connection, State1, <<Leftover/binary, Data/binary>>, HandleFrameFun).
-
-generate_publishing_error_details(Acc, _Code, <<>>) ->
- Acc;
-generate_publishing_error_details(Acc, Code, <<PublishingId:64, MessageSize:32, _Message:MessageSize/binary, Rest/binary>>) ->
- generate_publishing_error_details(
- <<Acc/binary, PublishingId:64, Code:16>>,
- Code,
- Rest).
-
-handle_frame_pre_auth(Transport, #stream_connection{socket = S} = Connection, State,
- <<?COMMAND_PEER_PROPERTIES:16, ?VERSION_0:16, CorrelationId:32,
- ClientPropertiesCount:32, ClientPropertiesFrame/binary>>, Rest) ->
-
- {ClientProperties, _} = rabbit_stream_utils:parse_map(ClientPropertiesFrame, ClientPropertiesCount),
+ PublisherId ->
+ Command = {publish_confirm, PublisherId, CorrelationList},
+ send(Transport, S, rabbit_stream_core:frame(Command)),
+ #{PublisherId := #publisher{message_counters = Counters}} =
+ Publishers,
+ increase_messages_confirmed(Counters, PublishingIdCount)
+ end,
+ add_credits(Credits, PublishingIdCount),
+ State1 =
+ case Blocked of
+ true ->
+ case should_unblock(Connection, Configuration) of
+ true ->
+ Transport:setopts(S, [{active, once}]),
+ ok = rabbit_heartbeat:resume_monitor(Heartbeater),
+ State#stream_connection_state{blocked = false};
+ false ->
+ State
+ end;
+ false ->
+ State
+ end,
+ {keep_state, StatemData#statem_data{connection_state = State1}};
+open(cast,
+ {queue_event, #resource{name = StreamName},
+ {osiris_offset, _QueueResource, -1}},
+ _StatemData) ->
+ rabbit_log:debug("Stream protocol connection received osiris offset "
+ "event for ~p with offset ~p",
+ [StreamName, -1]),
+ keep_state_and_data;
+open(cast,
+ {queue_event, #resource{name = StreamName},
+ {osiris_offset, _QueueResource, Offset}},
+ #statem_data{transport = Transport,
+ connection =
+ #stream_connection{stream_subscriptions =
+ StreamSubscriptions,
+ send_file_oct = SendFileOct} =
+ Connection,
+ connection_state =
+ #stream_connection_state{consumers = Consumers} = State} =
+ StatemData)
+ when Offset > -1 ->
+ {Connection1, State1} =
+ case maps:get(StreamName, StreamSubscriptions, undefined) of
+ undefined ->
+ rabbit_log:debug("Stream protocol connection: osiris offset event "
+ "for ~p, but no subscription (leftover messages "
+ "after unsubscribe?)",
+ [StreamName]),
+ {Connection, State};
+ [] ->
+ rabbit_log:debug("Stream protocol connection: osiris offset event "
+ "for ~p, but no registered consumers!",
+ [StreamName]),
+ {Connection#stream_connection{stream_subscriptions =
+ maps:remove(StreamName,
+ StreamSubscriptions)},
+ State};
+ CorrelationIds when is_list(CorrelationIds) ->
+ Consumers1 =
+ lists:foldl(fun(CorrelationId, ConsumersAcc) ->
+ #{CorrelationId := Consumer} = ConsumersAcc,
+ #consumer{credit = Credit} = Consumer,
+ Consumer1 =
+ case Credit of
+ 0 -> Consumer;
+ _ ->
+ case send_chunks(Transport,
+ Consumer,
+ SendFileOct)
+ of
+ {error, closed} ->
+ rabbit_log_connection:info("Stream protocol connection has been closed by "
+ "peer",
+ []),
+ throw({stop, normal});
+ {error, Reason} ->
+ rabbit_log_connection:info("Error while sending chunks: ~p",
+ [Reason]),
+ %% likely a connection problem
+ Consumer;
+ {{segment, Log1},
+ {credit, Credit1}} ->
+            Consumer#consumer{log = Log1, credit = Credit1}
+ end
+ end,
+ ConsumersAcc#{CorrelationId => Consumer1}
+ end,
+ Consumers, CorrelationIds),
+ {Connection,
+ State#stream_connection_state{consumers = Consumers1}}
+ end,
+ {keep_state,
+ StatemData#statem_data{connection = Connection1,
+ connection_state = State1}};
+open(cast, {force_event_refresh, Ref},
+ #statem_data{connection = Connection, connection_state = State} =
+ StatemData) ->
+ Infos =
+ augment_infos_with_user_provided_connection_name(infos(?CREATION_EVENT_KEYS,
+ Connection,
+ State),
+ Connection),
+ rabbit_event:notify(connection_created, Infos, Ref),
+ Connection1 =
+ rabbit_event:init_stats_timer(Connection,
+ #stream_connection.stats_timer),
+ Connection2 = ensure_stats_timer(Connection1),
+ {keep_state, StatemData#statem_data{connection = Connection2}}.
+
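The `open/3` clauses above are plain `gen_statem` state functions, so every branch ends in one of the standard returns: `keep_state_and_data`, `{keep_state, Data}`, `{next_state, close_sent, Data}`, `{stop_and_reply, ...}` or `stop`. A stripped-down module with the same shape, for orientation only (the state names are reused for illustration; everything else is made up):

    -module(statem_shape_demo).
    -behaviour(gen_statem).
    -export([init/1, callback_mode/0, open/3, close_sent/3]).

    callback_mode() ->
        [state_functions, state_enter].

    init(Data) ->
        {ok, open, Data}.

    open(enter, _OldState, _Data) ->
        keep_state_and_data;
    open(info, {tcp, _Sock, _Bytes}, Data) ->
        %% stay in 'open' while carrying updated data, as the reader does
        {keep_state, Data};
    open(info, tcp_closed, _Data) ->
        stop;
    open({call, From}, begin_close, Data) ->
        %% reply, then switch states, mirroring the close_sent transition
        gen_statem:reply(From, ok),
        {next_state, close_sent, Data}.

    close_sent(enter, _OldState, _Data) ->
        {keep_state_and_data, {state_timeout, 5000, close}};
    close_sent(state_timeout, close, _Data) ->
        stop;
    close_sent(info, _Msg, _Data) ->
        keep_state_and_data.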
+close_sent(enter, _OldState,
+ #statem_data{config =
+ #configuration{connection_negotiation_step_timeout =
+ StateTimeout}}) ->
+ {keep_state_and_data, {state_timeout, StateTimeout, close}};
+close_sent(state_timeout, close, #statem_data{}) ->
+ rabbit_log_connection:warning("Closing connection because of timeout in state "
+                                   "'~s', likely due to lack of client action.",
+ [?FUNCTION_NAME]),
+ stop;
+close_sent(info, {tcp, S, Data},
+ #statem_data{transport = Transport,
+ connection = Connection,
+ connection_state = State} =
+ StatemData)
+ when byte_size(Data) > 1 ->
+ {Connection1, State1} =
+ handle_inbound_data_post_close(Transport, Connection, State, Data),
+ #stream_connection{connection_step = Step} = Connection1,
+ rabbit_log_connection:debug("Stream reader has transitioned from ~s to ~s",
+ [?FUNCTION_NAME, Step]),
+ case Step of
+ closing_done ->
+ stop;
+ _ ->
+ Transport:setopts(S, [{active, once}]),
+ {keep_state,
+ StatemData#statem_data{connection = Connection1,
+ connection_state = State1}}
+ end;
+close_sent(info, {tcp_closed, S}, _StatemData) ->
+ rabbit_log_connection:debug("Stream protocol connection socket ~w closed [~w]",
+ [S, self()]),
+ stop;
+close_sent(info, {tcp_error, S, Reason}, #statem_data{}) ->
+ rabbit_log_connection:error("Stream protocol connection socket error: ~p [~w] "
+ "[~w]",
+ [Reason, S, self()]),
+ stop;
+close_sent(info, {resource_alarm, IsThereAlarm},
+ StatemData = #statem_data{connection = Connection}) ->
+ rabbit_log:warning("Stream protocol connection ignored a resource "
+ "alarm ~p in state ~s",
+ [IsThereAlarm, ?FUNCTION_NAME]),
+ {keep_state,
+ StatemData#statem_data{connection =
+ Connection#stream_connection{resource_alarm =
+ IsThereAlarm}}};
+close_sent(info, Msg, _StatemData) ->
+ rabbit_log_connection:warning("Ignored unknown message ~p in state ~s",
+ [Msg, ?FUNCTION_NAME]),
+ keep_state_and_data.
+
+handle_inbound_data_pre_auth(Transport, Connection, State, Data) ->
+ handle_inbound_data(Transport,
+ Connection,
+ State,
+ Data,
+ fun handle_frame_pre_auth/4).
+
+handle_inbound_data_post_auth(Transport, Connection, State, Data) ->
+ handle_inbound_data(Transport,
+ Connection,
+ State,
+ Data,
+ fun handle_frame_post_auth/4).
+
+handle_inbound_data_post_close(Transport, Connection, State, Data) ->
+ handle_inbound_data(Transport,
+ Connection,
+ State,
+ Data,
+ fun handle_frame_post_close/4).
+
+handle_inbound_data(Transport,
+ Connection,
+ #stream_connection_state{data = CoreState0} = State,
+ Data,
+ HandleFrameFun) ->
+ CoreState1 = rabbit_stream_core:incoming_data(Data, CoreState0),
+ {Commands, CoreState} = rabbit_stream_core:all_commands(CoreState1),
+ lists:foldl(fun(Command, {C, S}) ->
+ HandleFrameFun(Transport, C, S, Command)
+ end,
+ {Connection, State#stream_connection_state{data = CoreState}},
+ Commands).
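`handle_inbound_data/5` accumulates raw socket bytes in the `rabbit_stream_core` parser state, extracts every complete command, and folds the selected handler over them, so one TCP delivery can carry several frames. A rough stand-in for that parse-then-dispatch shape, using a hypothetical `parse_frames/1` instead of `rabbit_stream_core` (illustrative only):

    %% Hypothetical sketch of the buffer/parse/dispatch pattern above.
    dispatch_all(Handler, HandlerState, Buffer, Data) ->
        {Commands, Buffer1} = parse_frames(<<Buffer/binary, Data/binary>>),
        {lists:foldl(Handler, HandlerState, Commands), Buffer1}.

    %% Splits length-prefixed <<Size:32, Frame:Size/binary, ...>> frames,
    %% returning any incomplete tail so it can be prepended to the next packet.
    parse_frames(Bin) ->
        parse_frames(Bin, []).

    parse_frames(<<Size:32, Frame:Size/binary, Rest/binary>>, Acc) ->
        parse_frames(Rest, [Frame | Acc]);
    parse_frames(Partial, Acc) ->
        {lists:reverse(Acc), Partial}.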
+
+publishing_ids_from_messages(<<>>) ->
+ [];
+publishing_ids_from_messages(<<PublishingId:64,
+ 0:1,
+ MessageSize:31,
+ _Message:MessageSize/binary,
+ Rest/binary>>) ->
+ [PublishingId | publishing_ids_from_messages(Rest)];
+publishing_ids_from_messages(<<PublishingId:64,
+ 1:1,
+ _CompressionType:3,
+ _Unused:4,
+ _MessageCount:16,
+ _UncompressedSize:32,
+ BatchSize:32,
+ _Batch:BatchSize/binary,
+ Rest/binary>>) ->
+ [PublishingId | publishing_ids_from_messages(Rest)].
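`publishing_ids_from_messages/1` walks the messages section of a publish frame: a plain entry is `<<PublishingId:64, 0:1, Size:31, Body:Size/binary>>`, while a sub-entry batch sets the leading bit and carries compression, count and size headers before the batch payload. A quick check with made-up values (`publishing_ids_demo/0` is not part of the patch):

    %% Illustrative: one simple entry followed by one sub-entry batch.
    publishing_ids_demo() ->
        Simple = <<1:64, 0:1, 3:31, "abc">>,
        Batch = <<2:64, 1:1, 0:3, 0:4, 5:16, 100:32, 4:32, "wxyz">>,
        [1, 2] = publishing_ids_from_messages(<<Simple/binary, Batch/binary>>).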
+handle_frame_pre_auth(Transport,
+ #stream_connection{socket = S} = Connection,
+ State,
+ {request, CorrelationId,
+ {peer_properties, ClientProperties}}) ->
{ok, Product} = application:get_key(rabbit, description),
{ok, Version} = application:get_key(rabbit, vsn),
%% Get any configuration-specified server properties
- RawConfigServerProps = application:get_env(rabbit,
- server_properties, []),
-
- ConfigServerProperties = lists:foldl(fun({K, V}, Acc) ->
- maps:put(rabbit_data_coercion:to_binary(K), V, Acc)
- end, #{}, RawConfigServerProps),
-
- ServerProperties = maps:merge(ConfigServerProperties, #{
- <<"product">> => Product,
- <<"version">> => Version,
- <<"cluster_name">> => rabbit_nodes:cluster_name(),
- <<"platform">> => rabbit_misc:platform_and_version(),
- <<"copyright">> => ?COPYRIGHT_MESSAGE,
- <<"information">> => ?INFORMATION_MESSAGE
- }),
-
- ServerPropertiesCount = map_size(ServerProperties),
-
- ServerPropertiesFragment = maps:fold(fun(K, V, Acc) ->
- Key = rabbit_data_coercion:to_binary(K),
- Value = rabbit_data_coercion:to_binary(V),
- KeySize = byte_size(Key),
- ValueSize = byte_size(Value),
- <<Acc/binary, KeySize:16, Key:KeySize/binary, ValueSize:16, Value:ValueSize/binary>>
- end, <<>>, ServerProperties),
-
- Frame = <<?COMMAND_PEER_PROPERTIES:16, ?VERSION_0:16, CorrelationId:32, ?RESPONSE_CODE_OK:16,
- ServerPropertiesCount:32, ServerPropertiesFragment/binary>>,
- FrameSize = byte_size(Frame),
-
- Transport:send(S, [<<FrameSize:32>>, <<Frame/binary>>]),
- {Connection#stream_connection{client_properties = ClientProperties, authentication_state = peer_properties_exchanged}, State, Rest};
-handle_frame_pre_auth(Transport, #stream_connection{socket = S} = Connection, State,
- <<?COMMAND_SASL_HANDSHAKE:16, ?VERSION_0:16, CorrelationId:32>>, Rest) ->
-
+ RawConfigServerProps =
+ application:get_env(rabbit, server_properties, []),
+
+ ConfigServerProperties =
+ lists:foldl(fun({K, V}, Acc) ->
+ maps:put(
+ rabbit_data_coercion:to_binary(K),
+ rabbit_data_coercion:to_binary(V), Acc)
+ end,
+ #{}, RawConfigServerProps),
+
+ ServerProperties0 =
+ maps:merge(ConfigServerProperties,
+ #{<<"product">> => Product,
+ <<"version">> => Version,
+ <<"cluster_name">> => rabbit_nodes:cluster_name(),
+ <<"platform">> => rabbit_misc:platform_and_version(),
+ <<"copyright">> => ?COPYRIGHT_MESSAGE,
+ <<"information">> => ?INFORMATION_MESSAGE}),
+ ServerProperties =
+ maps:map(fun(_, V) -> rabbit_data_coercion:to_binary(V) end,
+ ServerProperties0),
+ Frame =
+ rabbit_stream_core:frame({response, CorrelationId,
+ {peer_properties, ?RESPONSE_CODE_OK,
+ ServerProperties}}),
+ send(Transport, S, Frame),
+ {Connection#stream_connection{client_properties = ClientProperties,
+ authentication_state =
+ peer_properties_exchanged,
+ connection_step = peer_properties_exchanged},
+ State};
+handle_frame_pre_auth(Transport,
+ #stream_connection{socket = S} = Connection,
+ State,
+ {request, CorrelationId, sasl_handshake}) ->
Mechanisms = rabbit_stream_utils:auth_mechanisms(S),
- MechanismsFragment = lists:foldl(fun(M, Acc) ->
- Size = byte_size(M),
- <<Acc/binary, Size:16, M:Size/binary>>
- end, <<>>, Mechanisms),
- MechanismsCount = length(Mechanisms),
- Frame = <<?COMMAND_SASL_HANDSHAKE:16, ?VERSION_0:16, CorrelationId:32, ?RESPONSE_CODE_OK:16,
- MechanismsCount:32, MechanismsFragment/binary>>,
- FrameSize = byte_size(Frame),
-
- Transport:send(S, [<<FrameSize:32>>, <<Frame/binary>>]),
- {Connection, State, Rest};
+ Frame =
+ rabbit_stream_core:frame({response, CorrelationId,
+ {sasl_handshake, ?RESPONSE_CODE_OK,
+ Mechanisms}}),
+ send(Transport, S, Frame),
+ {Connection#stream_connection{connection_step = authenticating},
+ State};
handle_frame_pre_auth(Transport,
- #stream_connection{socket = S,
- authentication_state = AuthState0,
- host = Host} = Connection0, State,
- <<?COMMAND_SASL_AUTHENTICATE:16, ?VERSION_0:16, CorrelationId:32,
- MechanismLength:16, Mechanism:MechanismLength/binary,
- SaslFragment/binary>>, Rest) ->
-
- SaslBin = case SaslFragment of
- <<-1:32/signed>> ->
- <<>>;
- <<SaslBinaryLength:32, SaslBinary:SaslBinaryLength/binary>> ->
- SaslBinary
- end,
-
- {Connection1, Rest1} = case rabbit_stream_utils:auth_mechanism_to_module(Mechanism, S) of
- {ok, AuthMechanism} ->
- AuthState = case AuthState0 of
- none ->
- AuthMechanism:init(S);
- AS ->
- AS
- end,
- RemoteAddress = list_to_binary(inet:ntoa(Host)),
- C1 = Connection0#stream_connection{auth_mechanism = {Mechanism, AuthMechanism}},
- {C2, FrameFragment} =
- case AuthMechanism:handle_response(SaslBin, AuthState) of
- {refused, Username, Msg, Args} ->
- rabbit_core_metrics:auth_attempt_failed(RemoteAddress, Username, stream),
- auth_fail(Username, Msg, Args, C1, State),
- rabbit_log:warning(Msg, Args),
- {C1#stream_connection{connection_step = failure}, <<?RESPONSE_AUTHENTICATION_FAILURE:16>>};
- {protocol_error, Msg, Args} ->
- rabbit_core_metrics:auth_attempt_failed(RemoteAddress, <<>>, stream),
- notify_auth_result(none, user_authentication_failure,
- [{error, rabbit_misc:format(Msg, Args)}],
- C1, State),
- rabbit_log:warning(Msg, Args),
- {C1#stream_connection{connection_step = failure}, <<?RESPONSE_SASL_ERROR:16>>};
- {challenge, Challenge, AuthState1} ->
- rabbit_core_metrics:auth_attempt_succeeded(RemoteAddress, <<>>, stream),
- ChallengeSize = byte_size(Challenge),
- {C1#stream_connection{authentication_state = AuthState1, connection_step = authenticating},
- <<?RESPONSE_SASL_CHALLENGE:16, ChallengeSize:32, Challenge/binary>>
- };
- {ok, User = #user{username = Username}} ->
- case rabbit_access_control:check_user_loopback(Username, S) of
- ok ->
- rabbit_core_metrics:auth_attempt_succeeded(RemoteAddress, Username, stream),
- notify_auth_result(Username, user_authentication_success,
- [], C1, State),
- {C1#stream_connection{authentication_state = done, user = User, connection_step = authenticated},
- <<?RESPONSE_CODE_OK:16>>
- };
- not_allowed ->
- rabbit_core_metrics:auth_attempt_failed(RemoteAddress, Username, stream),
- rabbit_log:warning("User '~s' can only connect via localhost~n", [Username]),
- {C1#stream_connection{connection_step = failure}, <<?RESPONSE_SASL_AUTHENTICATION_FAILURE_LOOPBACK:16>>}
- end
- end,
- Frame = <<?COMMAND_SASL_AUTHENTICATE:16, ?VERSION_0:16, CorrelationId:32, FrameFragment/binary>>,
- frame(Transport, C1, Frame),
- {C2, Rest};
- {error, _} ->
- Frame = <<?COMMAND_SASL_AUTHENTICATE:16, ?VERSION_0:16, CorrelationId:32, ?RESPONSE_SASL_MECHANISM_NOT_SUPPORTED:16>>,
- frame(Transport, Connection0, Frame),
- {Connection0#stream_connection{connection_step = failure}, Rest}
- end,
+ #stream_connection{socket = S,
+ authentication_state = AuthState0,
+ host = Host} =
+ Connection0,
+ State,
+ {request, CorrelationId,
+ {sasl_authenticate, Mechanism, SaslBin}}) ->
+ Connection1 =
+ case rabbit_stream_utils:auth_mechanism_to_module(Mechanism, S) of
+ {ok, AuthMechanism} ->
+ AuthState =
+ case AuthState0 of
+ none ->
+ AuthMechanism:init(S);
+ AS ->
+ AS
+ end,
+ RemoteAddress = list_to_binary(inet:ntoa(Host)),
+ C1 = Connection0#stream_connection{auth_mechanism =
+ {Mechanism,
+ AuthMechanism}},
+ {C2, CmdBody} =
+ case AuthMechanism:handle_response(SaslBin, AuthState) of
+ {refused, Username, Msg, Args} ->
+ rabbit_core_metrics:auth_attempt_failed(RemoteAddress,
+ Username,
+ stream),
+ auth_fail(Username, Msg, Args, C1, State),
+ rabbit_log_connection:warning(Msg, Args),
+ {C1#stream_connection{connection_step = failure},
+ {sasl_authenticate,
+ ?RESPONSE_AUTHENTICATION_FAILURE, <<>>}};
+ {protocol_error, Msg, Args} ->
+ rabbit_core_metrics:auth_attempt_failed(RemoteAddress,
+ <<>>,
+ stream),
+ notify_auth_result(none,
+ user_authentication_failure,
+ [{error,
+ rabbit_misc:format(Msg,
+ Args)}],
+ C1,
+ State),
+ rabbit_log_connection:warning(Msg, Args),
+ {C1#stream_connection{connection_step = failure},
+ {sasl_authenticate, ?RESPONSE_SASL_ERROR, <<>>}};
+ {challenge, Challenge, AuthState1} ->
+ rabbit_core_metrics:auth_attempt_succeeded(RemoteAddress,
+ <<>>,
+ stream),
+ {C1#stream_connection{authentication_state =
+ AuthState1,
+ connection_step =
+ authenticating},
+ {sasl_authenticate, ?RESPONSE_SASL_CHALLENGE,
+ Challenge}};
+ {ok, User = #user{username = Username}} ->
+ case
+ rabbit_access_control:check_user_loopback(Username,
+ S)
+ of
+ ok ->
+ rabbit_core_metrics:auth_attempt_succeeded(RemoteAddress,
+ Username,
+ stream),
+ notify_auth_result(Username,
+ user_authentication_success,
+ [],
+ C1,
+ State),
+ {C1#stream_connection{authentication_state =
+ done,
+ user = User,
+ connection_step =
+ authenticated},
+ {sasl_authenticate, ?RESPONSE_CODE_OK,
+ <<>>}};
+ not_allowed ->
+ rabbit_core_metrics:auth_attempt_failed(RemoteAddress,
+ Username,
+ stream),
+ rabbit_log_connection:warning("User '~s' can only connect via localhost",
+ [Username]),
+ {C1#stream_connection{connection_step =
+ failure},
+ {sasl_authenticate,
+ ?RESPONSE_SASL_AUTHENTICATION_FAILURE_LOOPBACK,
+ <<>>}}
+ end
+ end,
+ Frame =
+ rabbit_stream_core:frame({response, CorrelationId,
+ CmdBody}),
+ send(Transport, S, Frame),
+ C2;
+ {error, _} ->
+ CmdBody =
+ {sasl_authenticate, ?RESPONSE_SASL_MECHANISM_NOT_SUPPORTED,
+ <<>>},
+ Frame =
+ rabbit_stream_core:frame({response, CorrelationId,
+ CmdBody}),
+ send(Transport, S, Frame),
+ Connection0#stream_connection{connection_step = failure}
+ end,
- {Connection1, State, Rest1};
-handle_frame_pre_auth(_Transport, #stream_connection{helper_sup = SupPid, socket = Sock, name = ConnectionName} = Connection, State,
- <<?COMMAND_TUNE:16, ?VERSION_0:16, FrameMax:32, Heartbeat:32>>, Rest) ->
- rabbit_log:info("Tuning response ~p ~p ~n", [FrameMax, Heartbeat]),
+ {Connection1, State};
+handle_frame_pre_auth(Transport,
+ Connection,
+ State,
+ {response, _, {tune, _, _} = Tune}) ->
+ ?FUNCTION_NAME(Transport, Connection, State, Tune);
+handle_frame_pre_auth(_Transport,
+ #stream_connection{helper_sup = SupPid,
+ socket = Sock,
+ name = ConnectionName} =
+ Connection,
+ #stream_connection_state{blocked = Blocked} = State,
+ {tune, FrameMax, Heartbeat}) ->
+    rabbit_log_connection:debug("Tuning response ~p ~p",
+ [FrameMax, Heartbeat]),
Parent = self(),
%% sending a message to the main process so the heartbeat frame is sent from this main process
%% otherwise heartbeat frames can interleave with chunk delivery
@@ -631,481 +1336,1226 @@ handle_frame_pre_auth(_Transport, #stream_connection{helper_sup = SupPid, socket
%% we don't want a heartbeat frame to sneak in in-between)
SendFun =
fun() ->
- Parent ! heartbeat_send,
- ok
+ Parent ! heartbeat_send,
+ ok
end,
ReceiveFun = fun() -> Parent ! heartbeat_timeout end,
- Heartbeater = rabbit_heartbeat:start(
- SupPid, Sock, ConnectionName,
- Heartbeat, SendFun, Heartbeat, ReceiveFun),
-
- {Connection#stream_connection{connection_step = tuned, frame_max = FrameMax,
- heartbeat = Heartbeat, heartbeater = Heartbeater}, State, Rest};
-handle_frame_pre_auth(Transport, #stream_connection{user = User, socket = S} = Connection, State,
- <<?COMMAND_OPEN:16, ?VERSION_0:16, CorrelationId:32,
- VirtualHostLength:16, VirtualHost:VirtualHostLength/binary>>, Rest) ->
-
+ Heartbeater =
+ rabbit_heartbeat:start(SupPid,
+ Sock,
+ ConnectionName,
+ Heartbeat,
+ SendFun,
+ Heartbeat,
+ ReceiveFun),
+ case Blocked of
+ true ->
+ ok = rabbit_heartbeat:pause_monitor(Heartbeater);
+ _ ->
+ ok
+ end,
+ {Connection#stream_connection{connection_step = tuned,
+ frame_max = FrameMax,
+ heartbeat = Heartbeat,
+ heartbeater = Heartbeater},
+ State};
+handle_frame_pre_auth(Transport,
+ #stream_connection{user = User,
+ socket = S,
+ transport = TransportLayer} =
+ Connection,
+ State,
+ {request, CorrelationId, {open, VirtualHost}}) ->
%% FIXME enforce connection limit (see rabbit_reader:is_over_connection_limit/2)
+ rabbit_log:debug("Open frame received for ~s", [VirtualHost]),
+ Connection1 =
+ try
+ rabbit_access_control:check_vhost_access(User,
+ VirtualHost,
+ {socket, S},
+ #{}),
+ AdvertisedHost =
+ case TransportLayer of
+ tcp ->
+ rabbit_stream:host();
+ ssl ->
+ rabbit_stream:tls_host()
+ end,
+ AdvertisedPort =
+ case TransportLayer of
+ tcp ->
+ rabbit_data_coercion:to_binary(
+ rabbit_stream:port());
+ ssl ->
+ rabbit_data_coercion:to_binary(
+ rabbit_stream:tls_port())
+ end,
- {Connection1, Frame} = try
- rabbit_access_control:check_vhost_access(User, VirtualHost, {socket, S}, #{}),
- F = <<?COMMAND_OPEN:16, ?VERSION_0:16, CorrelationId:32, ?RESPONSE_CODE_OK:16>>,
- %% FIXME check if vhost is alive (see rabbit_reader:is_vhost_alive/2)
- {Connection#stream_connection{connection_step = opened, virtual_host = VirtualHost}, F}
- catch
- exit:_ ->
- Fr = <<?COMMAND_OPEN:16, ?VERSION_0:16, CorrelationId:32, ?RESPONSE_VHOST_ACCESS_FAILURE:16>>,
- {Connection#stream_connection{connection_step = failure}, Fr}
- end,
-
- frame(Transport, Connection1, Frame),
+ ConnectionProperties =
+ #{<<"advertised_host">> => AdvertisedHost,
+ <<"advertised_port">> => AdvertisedPort},
+
+ rabbit_log:debug("sending open response ok ~s", [VirtualHost]),
+ Frame =
+ rabbit_stream_core:frame({response, CorrelationId,
+ {open, ?RESPONSE_CODE_OK,
+ ConnectionProperties}}),
+
+ send(Transport, S, Frame),
+ %% FIXME check if vhost is alive (see rabbit_reader:is_vhost_alive/2)
+ Connection#stream_connection{connection_step = opened,
+ virtual_host = VirtualHost}
+ catch
+ exit:_ ->
+ F = rabbit_stream_core:frame({response, CorrelationId,
+ {open,
+ ?RESPONSE_VHOST_ACCESS_FAILURE,
+ #{}}}),
+ send(Transport, S, F),
+ Connection#stream_connection{connection_step = failure}
+ end,
- {Connection1, State, Rest};
-handle_frame_pre_auth(_Transport, Connection, State, <<?COMMAND_HEARTBEAT:16, ?VERSION_0:16>>, Rest) ->
- rabbit_log:info("Received heartbeat frame pre auth~n"),
- {Connection, State, Rest};
-handle_frame_pre_auth(_Transport, Connection, State, Frame, Rest) ->
- rabbit_log:warning("unknown frame ~p ~p, closing connection.~n", [Frame, Rest]),
- {Connection#stream_connection{connection_step = failure}, State, Rest}.
+ {Connection1, State};
+handle_frame_pre_auth(_Transport, Connection, State, heartbeat) ->
+ rabbit_log:debug("Received heartbeat frame pre auth"),
+ {Connection, State};
+handle_frame_pre_auth(_Transport, Connection, State, Command) ->
+    rabbit_log_connection:warning("Unknown command ~w, closing connection.",
+ [Command]),
+ {Connection#stream_connection{connection_step = failure}, State}.
auth_fail(Username, Msg, Args, Connection, ConnectionState) ->
- notify_auth_result(Username, user_authentication_failure,
- [{error, rabbit_misc:format(Msg, Args)}], Connection, ConnectionState).
-
-notify_auth_result(Username, AuthResult, ExtraProps, Connection, ConnectionState) ->
- EventProps = [{connection_type, network},
- {name, case Username of none -> ''; _ -> Username end}] ++
- [case Item of
- name -> {connection_name, i(name, Connection, ConnectionState)};
- _ -> {Item, i(Item, Connection, ConnectionState)}
- end || Item <- ?AUTH_NOTIFICATION_INFO_KEYS] ++
- ExtraProps,
- rabbit_event:notify(AuthResult, [P || {_, V} = P <- EventProps, V =/= '']).
-
-handle_frame_post_auth(Transport, #stream_connection{socket = S, credits = Credits,
- virtual_host = VirtualHost, user = User} = Connection, State,
- <<?COMMAND_PUBLISH:16, ?VERSION_0:16,
- StreamSize:16, Stream:StreamSize/binary,
- PublisherId:8/unsigned,
- MessageCount:32, Messages/binary>>, Rest) ->
- case rabbit_stream_utils:check_write_permitted(
- #resource{name = Stream, kind = queue, virtual_host = VirtualHost},
- User,
- #{}) of
+ notify_auth_result(Username,
+ user_authentication_failure,
+ [{error, rabbit_misc:format(Msg, Args)}],
+ Connection,
+ ConnectionState).
+
+notify_auth_result(Username,
+ AuthResult,
+ ExtraProps,
+ Connection,
+ ConnectionState) ->
+ EventProps =
+ [{connection_type, network},
+ {name,
+ case Username of
+ none ->
+ '';
+ _ ->
+ Username
+ end}]
+ ++ [case Item of
+ name ->
+ {connection_name, i(name, Connection, ConnectionState)};
+ _ ->
+ {Item, i(Item, Connection, ConnectionState)}
+ end
+ || Item <- ?AUTH_NOTIFICATION_INFO_KEYS]
+ ++ ExtraProps,
+ rabbit_event:notify(AuthResult,
+ [P || {_, V} = P <- EventProps, V =/= '']).
+
+handle_frame_post_auth(Transport,
+ #stream_connection{resource_alarm = true} = Connection0,
+ State,
+ {request, CorrelationId,
+ {declare_publisher,
+ PublisherId,
+ _WriterRef,
+ Stream}}) ->
+ rabbit_log_connection:info("Cannot create publisher ~p on stream ~p, connection "
+ "is blocked because of resource alarm",
+ [PublisherId, Stream]),
+ response(Transport,
+ Connection0,
+ declare_publisher,
+ CorrelationId,
+ ?RESPONSE_CODE_PRECONDITION_FAILED),
+ rabbit_global_counters:increase_protocol_counter(stream,
+ ?PRECONDITION_FAILED, 1),
+ {Connection0, State};
+handle_frame_post_auth(Transport,
+ #stream_connection{user = User,
+ publishers = Publishers0,
+ publisher_to_ids = RefIds0,
+ resource_alarm = false} =
+ Connection0,
+ State,
+ {request, CorrelationId,
+ {declare_publisher, PublisherId, WriterRef, Stream}}) ->
+ case rabbit_stream_utils:check_write_permitted(stream_r(Stream,
+ Connection0),
+ User, #{})
+ of
ok ->
- case lookup_leader(Stream, Connection) of
- cluster_not_found ->
- FrameSize = 2 + 2 + 1 + 4 + (8 + 2) * MessageCount,
- Details = generate_publishing_error_details(<<>>, ?RESPONSE_CODE_STREAM_DOES_NOT_EXIST, Messages),
- Transport:send(S, [<<FrameSize:32, ?COMMAND_PUBLISH_ERROR:16, ?VERSION_0:16,
- PublisherId:8,
- MessageCount:32, Details/binary>>]),
- {Connection, State, Rest};
- {ClusterLeader, Connection1} ->
- rabbit_stream_utils:write_messages(ClusterLeader, PublisherId, Messages),
- sub_credits(Credits, MessageCount),
- {Connection1, State, Rest}
+ case {maps:is_key(PublisherId, Publishers0),
+ maps:is_key({Stream, WriterRef}, RefIds0)}
+ of
+ {false, false} ->
+ case lookup_leader(Stream, Connection0) of
+ {error, not_found} ->
+ response(Transport,
+ Connection0,
+ declare_publisher,
+ CorrelationId,
+ ?RESPONSE_CODE_STREAM_DOES_NOT_EXIST),
+ rabbit_global_counters:increase_protocol_counter(stream,
+ ?STREAM_DOES_NOT_EXIST,
+ 1),
+ {Connection0, State};
+ {error, not_available} ->
+ response(Transport,
+ Connection0,
+ declare_publisher,
+ CorrelationId,
+ ?RESPONSE_CODE_STREAM_NOT_AVAILABLE),
+ rabbit_global_counters:increase_protocol_counter(stream,
+ ?STREAM_NOT_AVAILABLE,
+ 1),
+ {Connection0, State};
+ {ClusterLeader,
+ #stream_connection{publishers = Publishers0,
+ publisher_to_ids = RefIds0} =
+ Connection1} ->
+ {PublisherReference, RefIds1} =
+ case WriterRef of
+ <<"">> ->
+ {undefined, RefIds0};
+ _ ->
+ {WriterRef,
+ RefIds0#{{Stream, WriterRef} =>
+ PublisherId}}
+ end,
+ Publisher =
+ #publisher{publisher_id = PublisherId,
+ stream = Stream,
+ reference = PublisherReference,
+ leader = ClusterLeader,
+ message_counters =
+ atomics:new(3,
+ [{signed, false}])},
+ response(Transport,
+ Connection0,
+ declare_publisher,
+ CorrelationId,
+ ?RESPONSE_CODE_OK),
+ rabbit_stream_metrics:publisher_created(self(),
+ stream_r(Stream,
+ Connection1),
+ PublisherId,
+ PublisherReference),
+ {Connection1#stream_connection{publishers =
+                                       Publishers0#{PublisherId => Publisher},
+ publisher_to_ids =
+ RefIds1},
+ State}
+ end;
+ {_, _} ->
+ response(Transport,
+ Connection0,
+ declare_publisher,
+ CorrelationId,
+ ?RESPONSE_CODE_PRECONDITION_FAILED),
+ rabbit_global_counters:increase_protocol_counter(stream,
+ ?PRECONDITION_FAILED,
+ 1),
+ {Connection0, State}
end;
error ->
- FrameSize = 2 + 2 + 1 + 4 + (8 + 2) * MessageCount,
- Details = generate_publishing_error_details(<<>>, ?RESPONSE_CODE_ACCESS_REFUSED, Messages),
- Transport:send(S, [<<FrameSize:32, ?COMMAND_PUBLISH_ERROR:16, ?VERSION_0:16,
- PublisherId:8,
- MessageCount:32, Details/binary>>]),
- {Connection, State, Rest}
+ response(Transport,
+ Connection0,
+ declare_publisher,
+ CorrelationId,
+ ?RESPONSE_CODE_ACCESS_REFUSED),
+ rabbit_global_counters:increase_protocol_counter(stream,
+ ?ACCESS_REFUSED,
+ 1),
+ {Connection0, State}
end;
-handle_frame_post_auth(Transport, #stream_connection{socket = Socket,
- stream_subscriptions = StreamSubscriptions, virtual_host = VirtualHost, user = User,
- send_file_oct = SendFileOct} = Connection,
- #stream_connection_state{consumers = Consumers} = State,
- <<?COMMAND_SUBSCRIBE:16, ?VERSION_0:16, CorrelationId:32, SubscriptionId:8/unsigned, StreamSize:16, Stream:StreamSize/binary,
- OffsetType:16/signed, OffsetAndCredit/binary>>, Rest) ->
- case rabbit_stream_utils:check_read_permitted(
- #resource{name = Stream, kind = queue, virtual_host = VirtualHost},
- User,
- #{}) of
+handle_frame_post_auth(Transport,
+ #stream_connection{socket = S,
+ credits = Credits,
+ virtual_host = VirtualHost,
+ user = User,
+ publishers = Publishers} =
+ Connection,
+ State,
+ {publish, PublisherId, MessageCount, Messages}) ->
+ case Publishers of
+ #{PublisherId := Publisher} ->
+ #publisher{stream = Stream,
+ reference = Reference,
+ leader = Leader,
+ message_counters = Counters} =
+ Publisher,
+ increase_messages_received(Counters, MessageCount),
+ case rabbit_stream_utils:check_write_permitted(#resource{name =
+ Stream,
+ kind =
+ queue,
+                                                                      virtual_host = VirtualHost},
+ User, #{})
+ of
+ ok ->
+ rabbit_stream_utils:write_messages(Leader,
+ Reference,
+ PublisherId,
+ Messages),
+ sub_credits(Credits, MessageCount),
+ {Connection, State};
+ error ->
+ PublishingIds = publishing_ids_from_messages(Messages),
+ Command =
+ {publish_error,
+ PublisherId,
+ ?RESPONSE_CODE_ACCESS_REFUSED,
+ PublishingIds},
+ Frame = rabbit_stream_core:frame(Command),
+ send(Transport, S, Frame),
+ rabbit_global_counters:increase_protocol_counter(stream,
+ ?ACCESS_REFUSED,
+ 1),
+ increase_messages_errored(Counters, MessageCount),
+ {Connection, State}
+ end;
+ _ ->
+ PublishingIds = publishing_ids_from_messages(Messages),
+ Command =
+ {publish_error,
+ PublisherId,
+ ?RESPONSE_CODE_PUBLISHER_DOES_NOT_EXIST,
+ PublishingIds},
+ Frame = rabbit_stream_core:frame(Command),
+ send(Transport, S, Frame),
+ rabbit_global_counters:increase_protocol_counter(stream,
+ ?PUBLISHER_DOES_NOT_EXIST,
+ 1),
+ {Connection, State}
+ end;
+handle_frame_post_auth(Transport,
+ #stream_connection{socket = S,
+ virtual_host = VirtualHost,
+ user = User} =
+ Connection,
+ State,
+ {request, CorrelationId,
+ {query_publisher_sequence, Reference, Stream}}) ->
+ % FrameSize = ?RESPONSE_FRAME_SIZE + 8,
+ {ResponseCode, Sequence} =
+ case rabbit_stream_utils:check_read_permitted(#resource{name = Stream,
+ kind = queue,
+ virtual_host =
+ VirtualHost},
+ User, #{})
+ of
+ ok ->
+ case rabbit_stream_manager:lookup_local_member(VirtualHost,
+ Stream)
+ of
+ {error, not_found} ->
+ rabbit_global_counters:increase_protocol_counter(stream,
+ ?STREAM_DOES_NOT_EXIST,
+ 1),
+ {?RESPONSE_CODE_STREAM_DOES_NOT_EXIST, 0};
+ {ok, LocalMemberPid} ->
+ {?RESPONSE_CODE_OK,
+ case osiris:fetch_writer_seq(LocalMemberPid, Reference)
+ of
+ undefined ->
+ 0;
+ Offt ->
+ Offt
+ end}
+ end;
+ error ->
+ rabbit_global_counters:increase_protocol_counter(stream,
+ ?ACCESS_REFUSED,
+ 1),
+ {?RESPONSE_CODE_ACCESS_REFUSED, 0}
+ end,
+ Frame =
+ rabbit_stream_core:frame({response, CorrelationId,
+ {query_publisher_sequence, ResponseCode,
+ Sequence}}),
+ send(Transport, S, Frame),
+ {Connection, State};
+handle_frame_post_auth(Transport,
+ #stream_connection{publishers = Publishers,
+ publisher_to_ids = PubToIds} =
+ Connection0,
+ State,
+ {request, CorrelationId,
+ {delete_publisher, PublisherId}}) ->
+ case Publishers of
+ #{PublisherId := #publisher{stream = Stream, reference = Ref}} ->
+ Connection1 =
+ Connection0#stream_connection{publishers =
+ maps:remove(PublisherId,
+ Publishers),
+ publisher_to_ids =
+ maps:remove({Stream, Ref},
+ PubToIds)},
+ Connection2 =
+ maybe_clean_connection_from_stream(Stream, Connection1),
+ response(Transport,
+ Connection1,
+ delete_publisher,
+ CorrelationId,
+ ?RESPONSE_CODE_OK),
+ rabbit_stream_metrics:publisher_deleted(self(),
+ stream_r(Stream,
+ Connection2),
+ PublisherId),
+ {Connection2, State};
+ _ ->
+ response(Transport,
+ Connection0,
+ delete_publisher,
+ CorrelationId,
+ ?RESPONSE_CODE_PUBLISHER_DOES_NOT_EXIST),
+ rabbit_global_counters:increase_protocol_counter(stream,
+ ?PUBLISHER_DOES_NOT_EXIST,
+ 1),
+ {Connection0, State}
+ end;
+handle_frame_post_auth(Transport,
+ #stream_connection{socket = Socket,
+ stream_subscriptions =
+ StreamSubscriptions,
+ virtual_host = VirtualHost,
+ user = User,
+ send_file_oct = SendFileOct,
+ transport = ConnTransport} =
+ Connection,
+ #stream_connection_state{consumers = Consumers} = State,
+ {request, CorrelationId,
+ {subscribe,
+ SubscriptionId,
+ Stream,
+ OffsetSpec,
+ Credit,
+ Properties}}) ->
+ QueueResource =
+ #resource{name = Stream,
+ kind = queue,
+ virtual_host = VirtualHost},
+ %% FIXME check the max number of subs is not reached already
+ case rabbit_stream_utils:check_read_permitted(QueueResource, User,
+ #{})
+ of
ok ->
- case rabbit_stream_manager:lookup_local_member(VirtualHost, Stream) of
+ case rabbit_stream_manager:lookup_local_member(VirtualHost, Stream)
+ of
{error, not_available} ->
- response(Transport, Connection, ?COMMAND_SUBSCRIBE, CorrelationId, ?RESPONSE_CODE_STREAM_NOT_AVAILABLE),
- {Connection, State, Rest};
+ response(Transport,
+ Connection,
+ subscribe,
+ CorrelationId,
+ ?RESPONSE_CODE_STREAM_NOT_AVAILABLE),
+ rabbit_global_counters:increase_protocol_counter(stream,
+ ?STREAM_NOT_AVAILABLE,
+ 1),
+ {Connection, State};
{error, not_found} ->
- response(Transport, Connection, ?COMMAND_SUBSCRIBE, CorrelationId, ?RESPONSE_CODE_STREAM_DOES_NOT_EXIST),
- {Connection, State, Rest};
+ response(Transport,
+ Connection,
+ subscribe,
+ CorrelationId,
+ ?RESPONSE_CODE_STREAM_DOES_NOT_EXIST),
+ rabbit_global_counters:increase_protocol_counter(stream,
+ ?STREAM_DOES_NOT_EXIST,
+ 1),
+ {Connection, State};
{ok, LocalMemberPid} ->
- case subscription_exists(StreamSubscriptions, SubscriptionId) of
+ case subscription_exists(StreamSubscriptions,
+ SubscriptionId)
+ of
true ->
- response(Transport, Connection, ?COMMAND_SUBSCRIBE, CorrelationId, ?RESPONSE_CODE_SUBSCRIPTION_ID_ALREADY_EXISTS),
- {Connection, State, Rest};
+ response(Transport,
+ Connection,
+ subscribe,
+ CorrelationId,
+ ?RESPONSE_CODE_SUBSCRIPTION_ID_ALREADY_EXISTS),
+ rabbit_global_counters:increase_protocol_counter(stream,
+ ?SUBSCRIPTION_ID_ALREADY_EXISTS,
+ 1),
+ {Connection, State};
false ->
- {OffsetSpec, Credit} = case OffsetType of
- ?OFFSET_TYPE_FIRST ->
- <<Crdt:16>> = OffsetAndCredit,
- {first, Crdt};
- ?OFFSET_TYPE_LAST ->
- <<Crdt:16>> = OffsetAndCredit,
- {last, Crdt};
- ?OFFSET_TYPE_NEXT ->
- <<Crdt:16>> = OffsetAndCredit,
- {next, Crdt};
- ?OFFSET_TYPE_OFFSET ->
- <<Offset:64/unsigned, Crdt:16>> = OffsetAndCredit,
- {Offset, Crdt};
- ?OFFSET_TYPE_TIMESTAMP ->
- <<Timestamp:64/signed, Crdt:16>> = OffsetAndCredit,
- {{timestamp, Timestamp}, Crdt}
- end,
- {ok, Segment} = osiris:init_reader(LocalMemberPid, OffsetSpec),
- ConsumerState = #consumer{
- member_pid = LocalMemberPid, offset = OffsetSpec, subscription_id = SubscriptionId, socket = Socket,
- segment = Segment,
- credit = Credit,
- stream = Stream
- },
-
- Connection1 = maybe_monitor_stream(LocalMemberPid, Stream, Connection),
-
- response_ok(Transport, Connection, ?COMMAND_SUBSCRIBE, CorrelationId),
-
- {{segment, Segment1}, {credit, Credit1}} = send_chunks(
- Transport,
- ConsumerState,
- SendFileOct
- ),
- Consumers1 = Consumers#{SubscriptionId => ConsumerState#consumer{segment = Segment1, credit = Credit1}},
-
- StreamSubscriptions1 =
- case StreamSubscriptions of
- #{Stream := SubscriptionIds} ->
- StreamSubscriptions#{Stream => [SubscriptionId] ++ SubscriptionIds};
- _ ->
- StreamSubscriptions#{Stream => [SubscriptionId]}
- end,
- {Connection1#stream_connection{stream_subscriptions = StreamSubscriptions1}, State#stream_connection_state{consumers = Consumers1}, Rest}
+                            rabbit_log:debug("Creating subscription ~p to ~p, with offset specification ~p, properties ~0p",
+ [SubscriptionId,
+ Stream,
+ OffsetSpec,
+ Properties]),
+ CounterSpec =
+ {{?MODULE,
+ QueueResource,
+ SubscriptionId,
+ self()},
+ []},
+ Options =
+ #{transport => ConnTransport,
+ chunk_selector =>
+ get_chunk_selector(Properties)},
+ {ok, Log} =
+ osiris:init_reader(LocalMemberPid,
+ OffsetSpec,
+ CounterSpec,
+ Options),
+ rabbit_log:debug("Next offset for subscription ~p is ~p",
+ [SubscriptionId,
+ osiris_log:next_offset(Log)]),
+ ConsumerCounters =
+ atomics:new(2, [{signed, false}]),
+ ConsumerConfiguration =
+ #consumer_configuration{member_pid =
+ LocalMemberPid,
+ subscription_id =
+ SubscriptionId,
+ socket = Socket,
+ stream = Stream,
+ offset = OffsetSpec,
+ counters =
+ ConsumerCounters,
+ properties =
+ Properties},
+ ConsumerState =
+ #consumer{configuration = ConsumerConfiguration,
+ log = Log,
+ credit = Credit},
+
+ Connection1 =
+ maybe_monitor_stream(LocalMemberPid, Stream,
+ Connection),
+
+ response_ok(Transport,
+ Connection,
+ subscribe,
+ CorrelationId),
+
+ rabbit_log:debug("Distributing existing messages to subscription ~p",
+ [SubscriptionId]),
+
+ case send_chunks(Transport, ConsumerState,
+ SendFileOct)
+ of
+ {error, closed} ->
+ rabbit_log_connection:info("Stream protocol connection has been closed by "
+ "peer",
+ []),
+ throw({stop, normal});
+ {{segment, Log1}, {credit, Credit1}} ->
+ ConsumerState1 =
+ ConsumerState#consumer{log = Log1,
+ credit =
+ Credit1},
+ Consumers1 =
+ Consumers#{SubscriptionId =>
+ ConsumerState1},
+
+ StreamSubscriptions1 =
+ case StreamSubscriptions of
+ #{Stream := SubscriptionIds} ->
+ StreamSubscriptions#{Stream =>
+ [SubscriptionId]
+ ++ SubscriptionIds};
+ _ ->
+ StreamSubscriptions#{Stream =>
+ [SubscriptionId]}
+ end,
+
+ #consumer{configuration =
+ #consumer_configuration{counters
+ =
+ ConsumerCounters1}} =
+ ConsumerState1,
+
+ ConsumerOffset =
+ osiris_log:next_offset(Log1),
+ ConsumerOffsetLag =
+ consumer_i(offset_lag, ConsumerState1),
+
+ rabbit_log:debug("Subscription ~p is now at offset ~p with ~p message(s) "
+ "distributed after subscription",
+ [SubscriptionId,
+ ConsumerOffset,
+ messages_consumed(ConsumerCounters1)]),
+
+ rabbit_stream_metrics:consumer_created(self(),
+ stream_r(Stream,
+ Connection1),
+ SubscriptionId,
+ Credit1,
+ messages_consumed(ConsumerCounters1),
+ ConsumerOffset,
+ ConsumerOffsetLag,
+ Properties),
+ {Connection1#stream_connection{stream_subscriptions
+ =
+ StreamSubscriptions1},
+ State#stream_connection_state{consumers =
+ Consumers1}}
+ end
end
end;
error ->
- response(Transport, Connection, ?COMMAND_SUBSCRIBE, CorrelationId, ?RESPONSE_CODE_ACCESS_REFUSED),
- {Connection, State, Rest}
+ response(Transport,
+ Connection,
+ subscribe,
+ CorrelationId,
+ ?RESPONSE_CODE_ACCESS_REFUSED),
+ rabbit_global_counters:increase_protocol_counter(stream,
+ ?ACCESS_REFUSED,
+ 1),
+ {Connection, State}
end;
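%% Illustrative aside, not part of the patch: the per-consumer counters created
%% above with atomics:new(2, [{signed, false}]) are plain OTP atomics (1-based
%% indices). A minimal sketch of how such a counter pair can be updated and
%% read; record_chunk/3 is a hypothetical helper, and the convention that index
%% 1 holds messages consumed and index 2 the last dispatched offset is an
%% assumption made only for this example:
record_chunk(Counters, NumEntries, FirstOffsetInChunk) ->
    ok = atomics:add(Counters, 1, NumEntries),         %% bump messages consumed
    ok = atomics:put(Counters, 2, FirstOffsetInChunk), %% remember latest offset
    {atomics:get(Counters, 1), atomics:get(Counters, 2)}.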
-handle_frame_post_auth(Transport, #stream_connection{stream_subscriptions = StreamSubscriptions,
- stream_leaders = StreamLeaders} = Connection,
- #stream_connection_state{consumers = Consumers} = State,
- <<?COMMAND_UNSUBSCRIBE:16, ?VERSION_0:16, CorrelationId:32, SubscriptionId:8/unsigned>>, Rest) ->
- case subscription_exists(StreamSubscriptions, SubscriptionId) of
- false ->
- response(Transport, Connection, ?COMMAND_UNSUBSCRIBE, CorrelationId, ?RESPONSE_CODE_SUBSCRIPTION_ID_DOES_NOT_EXIST),
- {Connection, State, Rest};
- true ->
- #{SubscriptionId := Consumer} = Consumers,
- Stream = Consumer#consumer.stream,
- #{Stream := SubscriptionsForThisStream} = StreamSubscriptions,
- SubscriptionsForThisStream1 = lists:delete(SubscriptionId, SubscriptionsForThisStream),
- {Connection1, StreamSubscriptions1, StreamLeaders1} =
- case length(SubscriptionsForThisStream1) of
- 0 ->
- %% no more subscriptions for this stream
- %% we unregister even though it could affect publishing if the stream is published to
- %% from this connection and is deleted.
- %% to mitigate this, we remove the stream from the leaders cache
- %% this way the stream leader will be looked up in the next publish command
- %% and registered to.
- C = demonitor_stream(Stream, Connection),
- {C, maps:remove(Stream, StreamSubscriptions),
- maps:remove(Stream, StreamLeaders)
- };
- _ ->
- {Connection, StreamSubscriptions#{Stream => SubscriptionsForThisStream1}, StreamLeaders}
- end,
- Consumers1 = maps:remove(SubscriptionId, Consumers),
- response_ok(Transport, Connection, ?COMMAND_SUBSCRIBE, CorrelationId),
- {Connection1#stream_connection{
- stream_subscriptions = StreamSubscriptions1,
- stream_leaders = StreamLeaders1
- }, State#stream_connection_state{consumers = Consumers1}, Rest}
- end;
-handle_frame_post_auth(Transport, #stream_connection{socket = S, send_file_oct = SendFileOct} = Connection,
- #stream_connection_state{consumers = Consumers} = State,
- <<?COMMAND_CREDIT:16, ?VERSION_0:16, SubscriptionId:8/unsigned, Credit:16/signed>>, Rest) ->
-
+handle_frame_post_auth(Transport,
+ #stream_connection{socket = S,
+ send_file_oct = SendFileOct} =
+ Connection,
+ #stream_connection_state{consumers = Consumers} = State,
+ {credit, SubscriptionId, Credit}) ->
case Consumers of
#{SubscriptionId := Consumer} ->
#consumer{credit = AvailableCredit} = Consumer,
-
- {{segment, Segment1}, {credit, Credit1}} = send_chunks(
- Transport,
- Consumer,
- AvailableCredit + Credit,
- SendFileOct
- ),
-
- Consumer1 = Consumer#consumer{segment = Segment1, credit = Credit1},
- {Connection, State#stream_connection_state{consumers = Consumers#{SubscriptionId => Consumer1}}, Rest};
+ case send_chunks(Transport,
+ Consumer,
+ AvailableCredit + Credit,
+ SendFileOct)
+ of
+ {error, closed} ->
+ rabbit_log_connection:info("Stream protocol connection has been closed by "
+ "peer",
+ []),
+ throw({stop, normal});
+ {{segment, Log1}, {credit, Credit1}} ->
+ Consumer1 = Consumer#consumer{log = Log1, credit = Credit1},
+ {Connection,
+ State#stream_connection_state{consumers =
+ Consumers#{SubscriptionId
+ =>
+ Consumer1}}}
+ end;
_ ->
- rabbit_log:warning("Giving credit to unknown subscription: ~p~n", [SubscriptionId]),
- Frame = <<?COMMAND_CREDIT:16, ?VERSION_0:16, ?RESPONSE_CODE_SUBSCRIPTION_ID_DOES_NOT_EXIST:16, SubscriptionId:8>>,
- FrameSize = byte_size(Frame),
- Transport:send(S, [<<FrameSize:32>>, Frame]),
- {Connection, State, Rest}
+ rabbit_log:warning("Giving credit to unknown subscription: ~p",
+ [SubscriptionId]),
+
+ Code = ?RESPONSE_CODE_SUBSCRIPTION_ID_DOES_NOT_EXIST,
+ Frame =
+ rabbit_stream_core:frame({response, 1,
+ {credit, Code, SubscriptionId}}),
+ send(Transport, S, Frame),
+ rabbit_global_counters:increase_protocol_counter(stream,
+ ?SUBSCRIPTION_ID_DOES_NOT_EXIST,
+ 1),
+ {Connection, State}
end;
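%% Illustrative aside, not part of the patch: consumer credit is a plain integer
%% budget, topped up by `credit` frames and decremented as chunks are sent. A
%% minimal sketch of that accounting, using a map in place of the #consumer
%% record so the example is self-contained (top_up/2 and after_chunk_sent/1 are
%% hypothetical helpers):
top_up(#{credit := Available} = Consumer, Extra) ->
    Consumer#{credit => Available + Extra}.

after_chunk_sent(#{credit := C} = Consumer) when C > 0 ->
    Consumer#{credit => C - 1}.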
-handle_frame_post_auth(_Transport, #stream_connection{virtual_host = VirtualHost, user = User} = Connection,
- State,
- <<?COMMAND_COMMIT_OFFSET:16, ?VERSION_0:16, _CorrelationId:32,
- ReferenceSize:16, Reference:ReferenceSize/binary,
- StreamSize:16, Stream:StreamSize/binary, Offset:64>>, Rest) ->
-
- case rabbit_stream_utils:check_write_permitted(
- #resource{name = Stream, kind = queue, virtual_host = VirtualHost},
- User,
- #{}) of
+handle_frame_post_auth(_Transport,
+ #stream_connection{virtual_host = VirtualHost,
+ user = User} =
+ Connection,
+ State,
+ {store_offset, Reference, Stream, Offset}) ->
+ case rabbit_stream_utils:check_write_permitted(#resource{name =
+ Stream,
+ kind = queue,
+ virtual_host =
+ VirtualHost},
+ User, #{})
+ of
ok ->
case lookup_leader(Stream, Connection) of
- cluster_not_found ->
- rabbit_log:info("Could not find leader to commit offset on ~p~n", [Stream]),
- %% FIXME commit offset is fire-and-forget, so no response even if error, change this?
- {Connection, State, Rest};
+ {error, Error} ->
+ rabbit_log:warning("Could not find leader to store offset on ~p: ~p",
+ [Stream, Error]),
+            %% FIXME store offset is fire-and-forget, so there is no response even on error; change this?
+ {Connection, State};
{ClusterLeader, Connection1} ->
osiris:write_tracking(ClusterLeader, Reference, Offset),
- {Connection1, State, Rest}
+ {Connection1, State}
end;
error ->
- %% FIXME commit offset is fire-and-forget, so no response even if error, change this?
- rabbit_log:info("Not authorized to commit offset on ~p~n", [Stream]),
- {Connection, State, Rest}
+            %% FIXME store offset is fire-and-forget, so there is no response even on error; change this?
+ rabbit_log:warning("Not authorized to store offset on stream ~p",
+ [Stream]),
+ {Connection, State}
end;
-handle_frame_post_auth(Transport, #stream_connection{socket = S, virtual_host = VirtualHost, user = User} = Connection,
- State,
- <<?COMMAND_QUERY_OFFSET:16, ?VERSION_0:16, CorrelationId:32,
- ReferenceSize:16, Reference:ReferenceSize/binary,
- StreamSize:16, Stream:StreamSize/binary>>, Rest) ->
- FrameSize = ?RESPONSE_FRAME_SIZE + 8,
- {ResponseCode, Offset} = case rabbit_stream_utils:check_read_permitted(
- #resource{name = Stream, kind = queue, virtual_host = VirtualHost},
- User,
- #{}) of
- ok ->
- case rabbit_stream_manager:lookup_local_member(VirtualHost, Stream) of
- {error, not_found} ->
- {?RESPONSE_CODE_STREAM_DOES_NOT_EXIST, 0};
- {ok, LocalMemberPid} ->
- {?RESPONSE_CODE_OK, case osiris:read_tracking(LocalMemberPid, Reference) of
- undefined ->
- 0;
- Offt ->
- Offt
- end}
- end;
- error ->
- {?RESPONSE_CODE_ACCESS_REFUSED, 0}
- end,
- Transport:send(S, [<<FrameSize:32, ?COMMAND_QUERY_OFFSET:16, ?VERSION_0:16>>,
- <<CorrelationId:32>>, <<ResponseCode:16>>, <<Offset:64>>]),
- {Connection, State, Rest};
-handle_frame_post_auth(Transport, #stream_connection{virtual_host = VirtualHost, user = #user{username = Username} = User} = Connection,
- State,
- <<?COMMAND_CREATE_STREAM:16, ?VERSION_0:16, CorrelationId:32, StreamSize:16, Stream:StreamSize/binary,
- ArgumentsCount:32, ArgumentsBinary/binary>>, Rest) ->
- case rabbit_stream_utils:enforce_correct_stream_name(Stream) of
+handle_frame_post_auth(Transport,
+ #stream_connection{socket = S,
+ virtual_host = VirtualHost,
+ user = User} =
+ Connection0,
+ State,
+ {request, CorrelationId,
+ {query_offset, Reference, Stream}}) ->
+ {ResponseCode, Offset, Connection1} =
+ case rabbit_stream_utils:check_read_permitted(#resource{name = Stream,
+ kind = queue,
+ virtual_host =
+ VirtualHost},
+ User, #{})
+ of
+ ok ->
+ case lookup_leader(Stream, Connection0) of
+ {error, not_found} ->
+ rabbit_global_counters:increase_protocol_counter(stream,
+ ?STREAM_DOES_NOT_EXIST,
+ 1),
+ {?RESPONSE_CODE_STREAM_DOES_NOT_EXIST, 0, Connection0};
+ {error, not_available} ->
+ rabbit_global_counters:increase_protocol_counter(stream,
+ ?STREAM_NOT_AVAILABLE,
+ 1),
+ {?RESPONSE_CODE_STREAM_NOT_AVAILABLE, 0, Connection0};
+ {LeaderPid, C} ->
+ {RC, O} =
+ case osiris:read_tracking(LeaderPid, Reference) of
+ undefined ->
+ {?RESPONSE_CODE_NO_OFFSET, 0};
+ {offset, Offt} ->
+ {?RESPONSE_CODE_OK, Offt}
+ end,
+ {RC, O, C}
+ end;
+ error ->
+ rabbit_global_counters:increase_protocol_counter(stream,
+ ?ACCESS_REFUSED,
+ 1),
+ {?RESPONSE_CODE_ACCESS_REFUSED, 0, Connection0}
+ end,
+ Frame =
+ rabbit_stream_core:frame({response, CorrelationId,
+ {query_offset, ResponseCode, Offset}}),
+ send(Transport, S, Frame),
+ {Connection1, State};
+handle_frame_post_auth(Transport,
+ #stream_connection{stream_subscriptions =
+ StreamSubscriptions} =
+ Connection,
+ #stream_connection_state{} = State,
+ {request, CorrelationId,
+ {unsubscribe, SubscriptionId}}) ->
+ case subscription_exists(StreamSubscriptions, SubscriptionId) of
+ false ->
+ response(Transport,
+ Connection,
+ unsubscribe,
+ CorrelationId,
+ ?RESPONSE_CODE_SUBSCRIPTION_ID_DOES_NOT_EXIST),
+ rabbit_global_counters:increase_protocol_counter(stream,
+ ?SUBSCRIPTION_ID_DOES_NOT_EXIST,
+ 1),
+ {Connection, State};
+ true ->
+ {Connection1, State1} =
+ remove_subscription(SubscriptionId, Connection, State),
+ response_ok(Transport, Connection, unsubscribe, CorrelationId),
+ {Connection1, State1}
+ end;
+handle_frame_post_auth(Transport,
+ #stream_connection{virtual_host = VirtualHost,
+ user =
+ #user{username = Username} =
+ User} =
+ Connection,
+ State,
+ {request, CorrelationId,
+ {create_stream, Stream, Arguments}}) ->
+ case rabbit_stream_utils:enforce_correct_name(Stream) of
{ok, StreamName} ->
- {Arguments, _Rest} = rabbit_stream_utils:parse_map(ArgumentsBinary, ArgumentsCount),
- case rabbit_stream_utils:check_configure_permitted(
- #resource{name = StreamName, kind = queue, virtual_host = VirtualHost},
- User,
- #{}) of
+ case rabbit_stream_utils:check_configure_permitted(#resource{name =
+ StreamName,
+ kind =
+ queue,
+ virtual_host
+ =
+ VirtualHost},
+ User, #{})
+ of
ok ->
- case rabbit_stream_manager:create(VirtualHost, StreamName, Arguments, Username) of
- {ok, #{leader_pid := LeaderPid, replica_pids := ReturnedReplicas}} ->
- rabbit_log:info("Created cluster with leader ~p and replicas ~p~n", [LeaderPid, ReturnedReplicas]),
- response_ok(Transport, Connection, ?COMMAND_CREATE_STREAM, CorrelationId),
- {Connection, State, Rest};
+ case rabbit_stream_manager:create(VirtualHost,
+ StreamName,
+ Arguments,
+ Username)
+ of
+ {ok,
+ #{leader_node := LeaderPid,
+ replica_nodes := ReturnedReplicas}} ->
+ rabbit_log:debug("Created stream cluster with leader on ~p and "
+ "replicas on ~p",
+ [LeaderPid, ReturnedReplicas]),
+ response_ok(Transport,
+ Connection,
+ create_stream,
+ CorrelationId),
+ {Connection, State};
{error, validation_failed} ->
- response(Transport, Connection, ?COMMAND_CREATE_STREAM, CorrelationId, ?RESPONSE_CODE_PRECONDITION_FAILED),
- {Connection, State, Rest};
+ response(Transport,
+ Connection,
+ create_stream,
+ CorrelationId,
+ ?RESPONSE_CODE_PRECONDITION_FAILED),
+ rabbit_global_counters:increase_protocol_counter(stream,
+ ?PRECONDITION_FAILED,
+ 1),
+ {Connection, State};
{error, reference_already_exists} ->
- response(Transport, Connection, ?COMMAND_CREATE_STREAM, CorrelationId, ?RESPONSE_CODE_STREAM_ALREADY_EXISTS),
- {Connection, State, Rest};
+ response(Transport,
+ Connection,
+ create_stream,
+ CorrelationId,
+ ?RESPONSE_CODE_STREAM_ALREADY_EXISTS),
+ rabbit_global_counters:increase_protocol_counter(stream,
+ ?STREAM_ALREADY_EXISTS,
+ 1),
+ {Connection, State};
{error, _} ->
- response(Transport, Connection, ?COMMAND_CREATE_STREAM, CorrelationId, ?RESPONSE_CODE_INTERNAL_ERROR),
- {Connection, State, Rest}
+ response(Transport,
+ Connection,
+ create_stream,
+ CorrelationId,
+ ?RESPONSE_CODE_INTERNAL_ERROR),
+ rabbit_global_counters:increase_protocol_counter(stream,
+ ?INTERNAL_ERROR,
+ 1),
+ {Connection, State}
end;
error ->
- response(Transport, Connection, ?COMMAND_CREATE_STREAM, CorrelationId, ?RESPONSE_CODE_ACCESS_REFUSED),
- {Connection, State, Rest}
+ response(Transport,
+ Connection,
+ create_stream,
+ CorrelationId,
+ ?RESPONSE_CODE_ACCESS_REFUSED),
+ rabbit_global_counters:increase_protocol_counter(stream,
+ ?ACCESS_REFUSED,
+ 1),
+ {Connection, State}
end;
_ ->
- response(Transport, Connection, ?COMMAND_CREATE_STREAM, CorrelationId, ?RESPONSE_CODE_PRECONDITION_FAILED),
- {Connection, State, Rest}
+ response(Transport,
+ Connection,
+ create_stream,
+ CorrelationId,
+ ?RESPONSE_CODE_PRECONDITION_FAILED),
+ rabbit_global_counters:increase_protocol_counter(stream,
+ ?PRECONDITION_FAILED,
+ 1),
+ {Connection, State}
end;
-handle_frame_post_auth(Transport, #stream_connection{socket = S, virtual_host = VirtualHost,
- user = #user{username = Username} = User} = Connection, State,
- <<?COMMAND_DELETE_STREAM:16, ?VERSION_0:16, CorrelationId:32, StreamSize:16, Stream:StreamSize/binary>>, Rest) ->
- case rabbit_stream_utils:check_configure_permitted(
- #resource{name = Stream, kind = queue, virtual_host = VirtualHost},
- User,
- #{}) of
+handle_frame_post_auth(Transport,
+ #stream_connection{socket = S,
+ virtual_host = VirtualHost,
+ user =
+ #user{username = Username} =
+ User} =
+ Connection,
+ State,
+ {request, CorrelationId, {delete_stream, Stream}}) ->
+ case rabbit_stream_utils:check_configure_permitted(#resource{name =
+ Stream,
+ kind = queue,
+ virtual_host =
+ VirtualHost},
+ User, #{})
+ of
ok ->
case rabbit_stream_manager:delete(VirtualHost, Stream, Username) of
{ok, deleted} ->
- response_ok(Transport, Connection, ?COMMAND_DELETE_STREAM, CorrelationId),
- {Connection1, State1} = case clean_state_after_stream_deletion_or_failure(Stream, Connection, State) of
- {cleaned, NewConnection, NewState} ->
- StreamSize = byte_size(Stream),
- FrameSize = 2 + 2 + 2 + 2 + StreamSize,
- Transport:send(S, [<<FrameSize:32, ?COMMAND_METADATA_UPDATE:16, ?VERSION_0:16,
- ?RESPONSE_CODE_STREAM_NOT_AVAILABLE:16, StreamSize:16, Stream/binary>>]),
- {NewConnection, NewState};
- {not_cleaned, SameConnection, SameState} ->
- {SameConnection, SameState}
- end,
- {Connection1, State1, Rest};
+ response_ok(Transport,
+ Connection,
+ delete_stream,
+ CorrelationId),
+ {Connection1, State1} =
+ case
+ clean_state_after_stream_deletion_or_failure(Stream,
+ Connection,
+ State)
+ of
+ {cleaned, NewConnection, NewState} ->
+ Command =
+ {metadata_update, Stream,
+ ?RESPONSE_CODE_STREAM_NOT_AVAILABLE},
+ Frame = rabbit_stream_core:frame(Command),
+ send(Transport, S, Frame),
+ rabbit_global_counters:increase_protocol_counter(stream,
+ ?STREAM_NOT_AVAILABLE,
+ 1),
+ {NewConnection, NewState};
+ {not_cleaned, SameConnection, SameState} ->
+ {SameConnection, SameState}
+ end,
+ {Connection1, State1};
{error, reference_not_found} ->
- response(Transport, Connection, ?COMMAND_DELETE_STREAM, CorrelationId, ?RESPONSE_CODE_STREAM_DOES_NOT_EXIST),
- {Connection, State, Rest}
+ response(Transport,
+ Connection,
+ delete_stream,
+ CorrelationId,
+ ?RESPONSE_CODE_STREAM_DOES_NOT_EXIST),
+ rabbit_global_counters:increase_protocol_counter(stream,
+ ?STREAM_DOES_NOT_EXIST,
+ 1),
+ {Connection, State}
end;
error ->
- response(Transport, Connection, ?COMMAND_DELETE_STREAM, CorrelationId, ?RESPONSE_CODE_ACCESS_REFUSED),
- {Connection, State, Rest}
+ response(Transport,
+ Connection,
+ delete_stream,
+ CorrelationId,
+ ?RESPONSE_CODE_ACCESS_REFUSED),
+ rabbit_global_counters:increase_protocol_counter(stream,
+ ?ACCESS_REFUSED,
+ 1),
+ {Connection, State}
end;
-handle_frame_post_auth(Transport, #stream_connection{socket = S, virtual_host = VirtualHost} = Connection, State,
- <<?COMMAND_METADATA:16, ?VERSION_0:16, CorrelationId:32, StreamCount:32, BinaryStreams/binary>>, Rest) ->
- Streams = rabbit_stream_utils:extract_stream_list(BinaryStreams, []),
+handle_frame_post_auth(Transport,
+ #stream_connection{socket = S,
+ virtual_host = VirtualHost,
+ transport = TransportLayer} =
+ Connection,
+ State,
+ {request, CorrelationId, {metadata, Streams}}) ->
+ Topology =
+ lists:foldl(fun(Stream, Acc) ->
+ Acc#{Stream =>
+ rabbit_stream_manager:topology(VirtualHost,
+ Stream)}
+ end,
+ #{}, Streams),
%% get the nodes involved in the streams
- NodesMap = lists:foldl(fun(Stream, Acc) ->
- case rabbit_stream_manager:topology(VirtualHost, Stream) of
- {ok, #{leader_node := undefined, replica_nodes := ReplicaNodes}} ->
- lists:foldl(fun(ReplicaNode, NodesAcc) -> maps:put(ReplicaNode, ok, NodesAcc) end, Acc, ReplicaNodes);
- {ok, #{leader_node := LeaderNode, replica_nodes := ReplicaNodes}} ->
- Acc1 = maps:put(LeaderNode, ok, Acc),
- lists:foldl(fun(ReplicaNode, NodesAcc) -> maps:put(ReplicaNode, ok, NodesAcc) end, Acc1, ReplicaNodes);
+ NodesMap =
+ lists:foldl(fun(Stream, Acc) ->
+ case maps:get(Stream, Topology) of
+ {ok,
+ #{leader_node := undefined,
+ replica_nodes := ReplicaNodes}} ->
+ lists:foldl(fun(ReplicaNode, NodesAcc) ->
+ maps:put(ReplicaNode, ok,
+ NodesAcc)
+ end,
+ Acc, ReplicaNodes);
+ {ok,
+ #{leader_node := LeaderNode,
+ replica_nodes := ReplicaNodes}} ->
+ Acc1 = maps:put(LeaderNode, ok, Acc),
+ lists:foldl(fun(ReplicaNode, NodesAcc) ->
+ maps:put(ReplicaNode, ok,
+ NodesAcc)
+ end,
+ Acc1, ReplicaNodes);
+ {error, _} -> Acc
+ end
+ end,
+ #{}, Streams),
+
+ Nodes =
+ lists:sort(
+ maps:keys(NodesMap)),
+ NodeEndpoints =
+ lists:foldr(fun(Node, Acc) ->
+ PortFunction =
+ case TransportLayer of
+ tcp -> port;
+ ssl -> tls_port
+ end,
+ Host = rpc:call(Node, rabbit_stream, host, []),
+ Port = rpc:call(Node, rabbit_stream, PortFunction, []),
+ case {is_binary(Host), is_integer(Port)} of
+ {true, true} -> Acc#{Node => {Host, Port}};
+ _ ->
+ rabbit_log:warning("Error when retrieving broker metadata: ~p ~p",
+ [Host, Port]),
+ Acc
+ end
+ end,
+ #{}, Nodes),
+
+ Metadata =
+ lists:foldl(fun(Stream, Acc) ->
+ case maps:get(Stream, Topology) of
+ {error, Err} -> Acc#{Stream => Err};
+ {ok,
+ #{leader_node := LeaderNode,
+ replica_nodes := Replicas}} ->
+ LeaderInfo =
+ case NodeEndpoints of
+ #{LeaderNode := Info} -> Info;
+ _ -> undefined
+ end,
+ ReplicaInfos =
+ lists:foldr(fun(Replica, A) ->
+ case NodeEndpoints of
+ #{Replica := I} ->
+ [I | A];
+ _ -> A
+ end
+ end,
+ [], Replicas),
+ Acc#{Stream => {LeaderInfo, ReplicaInfos}}
+ end
+ end,
+ #{}, Streams),
+ Endpoints =
+ lists:usort(
+ maps:values(NodeEndpoints)),
+ Frame =
+ rabbit_stream_core:frame({response, CorrelationId,
+ {metadata, Endpoints, Metadata}}),
+ send(Transport, S, Frame),
+ {Connection, State};
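%% Illustrative aside, not part of the patch: rpc:call/4 returns either the
%% remote result or {badrpc, Reason}, so the is_binary/is_integer checks above
%% are what keep failed calls out of the endpoint map. A minimal sketch of the
%% same pattern (endpoint/1 is a hypothetical helper):
endpoint(Node) ->
    case {rpc:call(Node, rabbit_stream, host, []),
          rpc:call(Node, rabbit_stream, port, [])} of
        {Host, Port} when is_binary(Host), is_integer(Port) ->
            {ok, {Host, Port}};
        Other ->
            {error, Other}    %% covers {badrpc, _} and unexpected values
    end.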
+handle_frame_post_auth(Transport,
+ #stream_connection{socket = S,
+ virtual_host = VirtualHost} =
+ Connection,
+ State,
+ {request, CorrelationId,
+ {route, RoutingKey, SuperStream}}) ->
+ {ResponseCode, StreamBin} =
+ case rabbit_stream_manager:route(RoutingKey, VirtualHost, SuperStream)
+ of
+ {ok, no_route} ->
+ {?RESPONSE_CODE_OK, <<0:32>>};
+ {ok, Streams} ->
+ StreamCount = length(Streams),
+ Bin = lists:foldl(fun(Stream, Acc) ->
+ StreamSize = byte_size(Stream),
+ <<Acc/binary, StreamSize:16,
+ Stream:StreamSize/binary>>
+ end,
+ <<StreamCount:32>>, Streams),
+ {?RESPONSE_CODE_OK, Bin};
{error, _} ->
- Acc
- end
- end, #{}, Streams),
-
- Nodes = maps:keys(NodesMap),
- {NodesInfo, _} = lists:foldl(fun(Node, {Acc, Index}) ->
- Host = rpc:call(Node, rabbit_stream, host, []),
- Port = rpc:call(Node, rabbit_stream, port, []),
- case {is_binary(Host), is_integer(Port)} of
- {true, true} ->
- {Acc#{Node => {{index, Index}, {host, Host}, {port, Port}}}, Index + 1};
- _ ->
- rabbit_log:warning("Error when retrieving broker metadata: ~p ~p~n", [Host, Port]),
- {Acc, Index}
- end
- end, {#{}, 0}, Nodes),
-
- BrokersCount = map_size(NodesInfo),
- BrokersBin = maps:fold(fun(_K, {{index, Index}, {host, Host}, {port, Port}}, Acc) ->
- HostLength = byte_size(Host),
- <<Acc/binary, Index:16, HostLength:16, Host:HostLength/binary, Port:32>>
- end, <<BrokersCount:32>>, NodesInfo),
-
-
- MetadataBin = lists:foldl(fun(Stream, Acc) ->
- StreamLength = byte_size(Stream),
- case rabbit_stream_manager:topology(VirtualHost, Stream) of
- {error, stream_not_found} ->
- <<Acc/binary, StreamLength:16, Stream:StreamLength/binary, ?RESPONSE_CODE_STREAM_DOES_NOT_EXIST:16,
- -1:16, 0:32>>;
- {error, stream_not_available} ->
- <<Acc/binary, StreamLength:16, Stream:StreamLength/binary, ?RESPONSE_CODE_STREAM_NOT_AVAILABLE:16,
- -1:16, 0:32>>;
- {ok, #{leader_node := LeaderNode, replica_nodes := Replicas}} ->
- LeaderIndex = case NodesInfo of
- #{LeaderNode := NodeInfo} ->
- {{index, LeaderIdx}, {host, _}, {port, _}} = NodeInfo,
- LeaderIdx;
- _ ->
- -1
- end,
- {ReplicasBinary, ReplicasCount} = lists:foldl(fun(Replica, {Bin, Count}) ->
- case NodesInfo of
- #{Replica := NI} ->
- {{index, ReplicaIndex}, {host, _}, {port, _}} = NI,
- {<<Bin/binary, ReplicaIndex:16>>, Count + 1};
- _ ->
- {Bin, Count}
- end
-
+ rabbit_global_counters:increase_protocol_counter(stream,
+ ?STREAM_DOES_NOT_EXIST,
+ 1),
+ {?RESPONSE_CODE_STREAM_DOES_NOT_EXIST, <<0:32>>}
+ end,
- end, {<<>>, 0}, Replicas),
- <<Acc/binary, StreamLength:16, Stream:StreamLength/binary, ?RESPONSE_CODE_OK:16,
- LeaderIndex:16, ReplicasCount:32, ReplicasBinary/binary>>
- end
+ Frame =
+ <<?COMMAND_ROUTE:16,
+ ?VERSION_1:16,
+ CorrelationId:32,
+ ResponseCode:16,
+ StreamBin/binary>>,
+ FrameSize = byte_size(Frame),
+ Transport:send(S, <<FrameSize:32, Frame/binary>>),
+ {Connection, State};
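%% Illustrative aside, not part of the patch: the route (and partitions)
%% responses encode the stream list as a 32-bit count followed by
%% 16-bit-length-prefixed names, exactly as the fold above builds it. A
%% self-contained sketch of that encoding and its inverse (encode_streams/1 and
%% decode_streams/1,3 are hypothetical helpers):
encode_streams(Streams) ->
    lists:foldl(fun(Stream, Acc) ->
                        Size = byte_size(Stream),
                        <<Acc/binary, Size:16, Stream:Size/binary>>
                end,
                <<(length(Streams)):32>>, Streams).

decode_streams(<<Count:32, Rest/binary>>) ->
    decode_streams(Count, Rest, []).

decode_streams(0, _Rest, Acc) ->
    lists:reverse(Acc);
decode_streams(N, <<Size:16, Stream:Size/binary, Rest/binary>>, Acc) ->
    decode_streams(N - 1, Rest, [Stream | Acc]).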
+handle_frame_post_auth(Transport,
+ #stream_connection{socket = S,
+ virtual_host = VirtualHost} =
+ Connection,
+ State,
+ {request, CorrelationId, {partitions, SuperStream}}) ->
+ {ResponseCode, PartitionsBin} =
+ case rabbit_stream_manager:partitions(VirtualHost, SuperStream) of
+ {ok, []} ->
+ {?RESPONSE_CODE_OK, <<0:32>>};
+ {ok, Streams} ->
+ StreamCount = length(Streams),
+ Bin = lists:foldl(fun(Stream, Acc) ->
+ StreamSize = byte_size(Stream),
+ <<Acc/binary, StreamSize:16,
+ Stream:StreamSize/binary>>
+ end,
+ <<StreamCount:32>>, Streams),
+ {?RESPONSE_CODE_OK, Bin};
+ {error, _} ->
+ rabbit_global_counters:increase_protocol_counter(stream,
+ ?STREAM_DOES_NOT_EXIST,
+ 1),
+ {?RESPONSE_CODE_STREAM_DOES_NOT_EXIST, <<0:32>>}
+ end,
- end, <<StreamCount:32>>, Streams),
- Frame = <<?COMMAND_METADATA:16, ?VERSION_0:16, CorrelationId:32, BrokersBin/binary, MetadataBin/binary>>,
+ Frame =
+ <<?COMMAND_PARTITIONS:16,
+ ?VERSION_1:16,
+ CorrelationId:32,
+ ResponseCode:16,
+ PartitionsBin/binary>>,
FrameSize = byte_size(Frame),
Transport:send(S, <<FrameSize:32, Frame/binary>>),
- {Connection, State, Rest};
-handle_frame_post_auth(Transport, Connection, State,
- <<?COMMAND_CLOSE:16, ?VERSION_0:16, CorrelationId:32,
- ClosingCode:16, ClosingReasonLength:16, ClosingReason:ClosingReasonLength/binary>>, _Rest) ->
- rabbit_log:info("Received close command ~p ~p~n", [ClosingCode, ClosingReason]),
- Frame = <<?COMMAND_CLOSE:16, ?VERSION_0:16, CorrelationId:32, ?RESPONSE_CODE_OK:16>>,
- frame(Transport, Connection, Frame),
- {Connection#stream_connection{connection_step = closing}, State, <<>>}; %% we ignore any subsequent frames
-handle_frame_post_auth(_Transport, Connection, State, <<?COMMAND_HEARTBEAT:16, ?VERSION_0:16>>, Rest) ->
- rabbit_log:info("Received heartbeat frame post auth~n"),
- {Connection, State, Rest};
-handle_frame_post_auth(Transport, Connection, State, Frame, Rest) ->
- rabbit_log:warning("unknown frame ~p ~p, sending close command.~n", [Frame, Rest]),
+ {Connection, State};
+handle_frame_post_auth(Transport,
+ #stream_connection{socket = S} = Connection,
+ State,
+ {request, CorrelationId,
+ {close, ClosingCode, ClosingReason}}) ->
+ rabbit_log:debug("Stream protocol reader received close command "
+ "~p ~p",
+ [ClosingCode, ClosingReason]),
+ Frame =
+ rabbit_stream_core:frame({response, CorrelationId,
+ {close, ?RESPONSE_CODE_OK}}),
+ Transport:send(S, Frame),
+ {Connection#stream_connection{connection_step = closing},
+ State}; %% we ignore any subsequent frames
+handle_frame_post_auth(_Transport, Connection, State, heartbeat) ->
+ rabbit_log:debug("Received heartbeat frame post auth"),
+ {Connection, State};
+handle_frame_post_auth(Transport,
+ #stream_connection{socket = S} = Connection,
+ State,
+ Command) ->
+    rabbit_log:warning("Unknown command ~p, sending close command.",
+ [Command]),
CloseReason = <<"unknown frame">>,
- CloseReasonLength = byte_size(CloseReason),
- CloseFrame = <<?COMMAND_CLOSE:16, ?VERSION_0:16, 1:32, ?RESPONSE_CODE_UNKNOWN_FRAME:16,
- CloseReasonLength:16, CloseReason:CloseReasonLength/binary>>,
- frame(Transport, Connection, CloseFrame),
- {Connection#stream_connection{connection_step = close_sent}, State, Rest}.
-
-notify_connection_closed(#stream_connection{name = Name} = Connection, ConnectionState) ->
+ Frame =
+ rabbit_stream_core:frame({request, 1,
+ {close, ?RESPONSE_CODE_UNKNOWN_FRAME,
+ CloseReason}}),
+ send(Transport, S, Frame),
+ rabbit_global_counters:increase_protocol_counter(stream,
+ ?UNKNOWN_FRAME, 1),
+ {Connection#stream_connection{connection_step = close_sent}, State}.
+
+notify_connection_closed(#statem_data{connection =
+ #stream_connection{name = Name,
+ publishers =
+ Publishers} =
+ Connection,
+ connection_state =
+ #stream_connection_state{consumers =
+ Consumers} =
+ ConnectionState}) ->
rabbit_core_metrics:connection_closed(self()),
+ [rabbit_stream_metrics:consumer_cancelled(self(),
+ stream_r(S, Connection), SubId)
+ || #consumer{configuration =
+ #consumer_configuration{stream = S,
+ subscription_id = SubId}}
+ <- maps:values(Consumers)],
+ [rabbit_stream_metrics:publisher_deleted(self(),
+ stream_r(S, Connection), PubId)
+ || #publisher{stream = S, publisher_id = PubId}
+ <- maps:values(Publishers)],
ClientProperties = i(client_properties, Connection, ConnectionState),
- EventProperties = [{name, Name},
- {pid, self()},
- {node, node()},
- {client_properties, ClientProperties}],
+ EventProperties =
+ [{name, Name},
+ {pid, self()},
+ {node, node()},
+ {client_properties, ClientProperties}],
rabbit_event:notify(connection_closed,
- augment_infos_with_user_provided_connection_name(EventProperties, Connection)).
-
-handle_frame_post_close(_Transport, Connection, State,
- <<?COMMAND_CLOSE:16, ?VERSION_0:16, _CorrelationId:32, _ResponseCode:16>>, Rest) ->
- rabbit_log:info("Received close confirmation~n"),
- {Connection#stream_connection{connection_step = closing_done}, State, Rest};
-handle_frame_post_close(_Transport, Connection, State, <<?COMMAND_HEARTBEAT:16, ?VERSION_0:16>>, Rest) ->
- rabbit_log:info("Received heartbeat frame post close~n"),
- {Connection, State, Rest};
-handle_frame_post_close(_Transport, Connection, State, Frame, Rest) ->
- rabbit_log:warning("ignored frame on close ~p ~p.~n", [Frame, Rest]),
- {Connection, State, Rest}.
-
-clean_state_after_stream_deletion_or_failure(Stream, #stream_connection{stream_leaders = StreamLeaders, stream_subscriptions = StreamSubscriptions} = Connection,
- #stream_connection_state{consumers = Consumers} = State) ->
- case {maps:is_key(Stream, StreamSubscriptions), maps:is_key(Stream, StreamLeaders)} of
- {true, _} ->
- #{Stream := SubscriptionIds} = StreamSubscriptions,
- {cleaned, Connection#stream_connection{
- stream_leaders = maps:remove(Stream, StreamLeaders),
- stream_subscriptions = maps:remove(Stream, StreamSubscriptions)
- }, State#stream_connection_state{consumers = maps:without(SubscriptionIds, Consumers)}};
- {false, true} ->
- {cleaned, Connection#stream_connection{
- stream_leaders = maps:remove(Stream, StreamLeaders)
- }, State};
- {false, false} ->
- {not_cleaned, Connection, State}
+ augment_infos_with_user_provided_connection_name(EventProperties,
+ Connection)).
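%% Illustrative aside, not part of the patch: the comprehensions above rely on
%% the fact that a pattern on the left of `<-` both destructures and filters,
%% silently skipping elements that do not match. For example:
%%   [S || {consumer, S} <- [{consumer, <<"s1">>}, other, {consumer, <<"s2">>}]]
%% evaluates to [<<"s1">>, <<"s2">>].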
+
+handle_frame_post_close(_Transport,
+ Connection,
+ State,
+ {response, _CorrelationId, {close, _Code}}) ->
+ rabbit_log_connection:info("Received close confirmation from client"),
+ {Connection#stream_connection{connection_step = closing_done}, State};
+handle_frame_post_close(_Transport, Connection, State, heartbeat) ->
+ rabbit_log_connection:debug("Received heartbeat command post close"),
+ {Connection, State};
+handle_frame_post_close(_Transport, Connection, State, Command) ->
+    rabbit_log_connection:warning("Ignored command on close: ~p.",
+ [Command]),
+ {Connection, State}.
+
+stream_r(Stream, #stream_connection{virtual_host = VHost}) ->
+ #resource{name = Stream,
+ kind = queue,
+ virtual_host = VHost}.
+
+clean_state_after_stream_deletion_or_failure(Stream,
+ #stream_connection{stream_subscriptions
+ =
+ StreamSubscriptions,
+ publishers =
+ Publishers,
+ publisher_to_ids
+ =
+ PublisherToIds,
+ stream_leaders =
+ Leaders} =
+ C0,
+ #stream_connection_state{consumers
+ =
+ Consumers} =
+ S0) ->
+ {SubscriptionsCleaned, C1, S1} =
+ case stream_has_subscriptions(Stream, C0) of
+ true ->
+ #{Stream := SubscriptionIds} = StreamSubscriptions,
+ [rabbit_stream_metrics:consumer_cancelled(self(),
+ stream_r(Stream, C0),
+ SubId)
+ || SubId <- SubscriptionIds],
+ {true,
+ C0#stream_connection{stream_subscriptions =
+ maps:remove(Stream,
+ StreamSubscriptions)},
+ S0#stream_connection_state{consumers =
+ maps:without(SubscriptionIds,
+ Consumers)}};
+ false ->
+ {false, C0, S0}
+ end,
+ {PublishersCleaned, C2, S2} =
+ case stream_has_publishers(Stream, C1) of
+ true ->
+ {PurgedPubs, PurgedPubToIds} =
+ maps:fold(fun(PubId,
+ #publisher{stream = S, reference = Ref},
+ {Pubs, PubToIds}) ->
+ case S of
+ Stream ->
+ rabbit_stream_metrics:publisher_deleted(self(),
+ stream_r(S,
+ C1),
+ PubId),
+ {maps:remove(PubId, Pubs),
+ maps:remove({Stream, Ref}, PubToIds)};
+ _ -> {Pubs, PubToIds}
+ end
+ end,
+ {Publishers, PublisherToIds}, Publishers),
+ {true,
+ C1#stream_connection{publishers = PurgedPubs,
+ publisher_to_ids = PurgedPubToIds},
+ S1};
+ false ->
+ {false, C1, S1}
+ end,
+ {LeadersCleaned, Leaders1} =
+ case Leaders of
+ #{Stream := _} ->
+ {true, maps:remove(Stream, Leaders)};
+ _ ->
+ {false, Leaders}
+ end,
+ case SubscriptionsCleaned
+ orelse PublishersCleaned
+ orelse LeadersCleaned
+ of
+ true ->
+ C3 = demonitor_stream(Stream, C2),
+ {cleaned, C3#stream_connection{stream_leaders = Leaders1}, S2};
+ false ->
+ {not_cleaned, C2#stream_connection{stream_leaders = Leaders1}, S2}
end.
-lookup_leader(Stream, #stream_connection{stream_leaders = StreamLeaders, virtual_host = VirtualHost} = Connection) ->
+lookup_leader(Stream,
+ #stream_connection{stream_leaders = StreamLeaders,
+ virtual_host = VirtualHost} =
+ Connection) ->
case maps:get(Stream, StreamLeaders, undefined) of
undefined ->
case lookup_leader_from_manager(VirtualHost, Stream) of
- cluster_not_found ->
- cluster_not_found;
- LeaderPid ->
- Connection1 = maybe_monitor_stream(LeaderPid, Stream, Connection),
- {LeaderPid, Connection1#stream_connection{stream_leaders = StreamLeaders#{Stream => LeaderPid}}}
+ {error, Error} ->
+ {error, Error};
+ {ok, LeaderPid} ->
+ Connection1 =
+ maybe_monitor_stream(LeaderPid, Stream, Connection),
+ {LeaderPid,
+ Connection1#stream_connection{stream_leaders =
+ StreamLeaders#{Stream =>
+ LeaderPid}}}
end;
LeaderPid ->
{LeaderPid, Connection}
@@ -1114,161 +2564,477 @@ lookup_leader(Stream, #stream_connection{stream_leaders = StreamLeaders, virtual
lookup_leader_from_manager(VirtualHost, Stream) ->
rabbit_stream_manager:lookup_leader(VirtualHost, Stream).
-maybe_monitor_stream(Pid, Stream, #stream_connection{monitors = Monitors} = Connection) ->
+remove_subscription(SubscriptionId,
+ #stream_connection{stream_subscriptions =
+ StreamSubscriptions} =
+ Connection,
+ #stream_connection_state{consumers = Consumers} = State) ->
+ #{SubscriptionId := Consumer} = Consumers,
+ Stream =
+ Consumer#consumer.configuration#consumer_configuration.stream,
+ #{Stream := SubscriptionsForThisStream} = StreamSubscriptions,
+ SubscriptionsForThisStream1 =
+ lists:delete(SubscriptionId, SubscriptionsForThisStream),
+ StreamSubscriptions1 =
+ case length(SubscriptionsForThisStream1) of
+ 0 ->
+            % no more subscriptions for this stream
+ maps:remove(Stream, StreamSubscriptions);
+ _ ->
+ StreamSubscriptions#{Stream => SubscriptionsForThisStream1}
+ end,
+ Connection1 =
+ Connection#stream_connection{stream_subscriptions =
+ StreamSubscriptions1},
+ Consumers1 = maps:remove(SubscriptionId, Consumers),
+ Connection2 = maybe_clean_connection_from_stream(Stream, Connection1),
+ rabbit_stream_metrics:consumer_cancelled(self(),
+ stream_r(Stream, Connection2),
+ SubscriptionId),
+ {Connection2, State#stream_connection_state{consumers = Consumers1}}.
+
+maybe_clean_connection_from_stream(Stream,
+ #stream_connection{stream_leaders =
+ Leaders} =
+ Connection0) ->
+ Connection1 =
+ case {stream_has_publishers(Stream, Connection0),
+ stream_has_subscriptions(Stream, Connection0)}
+ of
+ {false, false} ->
+ demonitor_stream(Stream, Connection0);
+ _ ->
+ Connection0
+ end,
+ Connection1#stream_connection{stream_leaders =
+ maps:remove(Stream, Leaders)}.
+
+maybe_monitor_stream(Pid, Stream,
+ #stream_connection{monitors = Monitors} = Connection) ->
case lists:member(Stream, maps:values(Monitors)) of
true ->
Connection;
false ->
MonitorRef = monitor(process, Pid),
- Connection#stream_connection{monitors = maps:put(MonitorRef, Stream, Monitors)}
+ Connection#stream_connection{monitors =
+ maps:put(MonitorRef, Stream,
+ Monitors)}
end.
-demonitor_stream(Stream, #stream_connection{monitors = Monitors0} = Connection) ->
- Monitors = maps:fold(fun(MonitorRef, Strm, Acc) ->
- case Strm of
- Stream ->
- Acc;
- _ ->
- maps:put(MonitorRef, Strm, Acc)
-
- end
- end, #{}, Monitors0),
+demonitor_stream(Stream,
+ #stream_connection{monitors = Monitors0} = Connection) ->
+ Monitors =
+ maps:fold(fun(MonitorRef, Strm, Acc) ->
+ case Strm of
+ Stream ->
+ demonitor(MonitorRef, [flush]),
+ Acc;
+ _ -> maps:put(MonitorRef, Strm, Acc)
+ end
+ end,
+ #{}, Monitors0),
Connection#stream_connection{monitors = Monitors}.
-demonitor_all_streams(#stream_connection{monitors = Monitors} = Connection) ->
- lists:foreach(fun(MonitorRef) ->
- demonitor(MonitorRef, [flush])
- end, maps:keys(Monitors)),
- Connection#stream_connection{monitors = #{}}.
+stream_has_subscriptions(Stream,
+ #stream_connection{stream_subscriptions =
+ Subscriptions}) ->
+ case Subscriptions of
+ #{Stream := StreamSubscriptions}
+ when length(StreamSubscriptions) > 0 ->
+ true;
+ _ ->
+ false
+ end.
-frame(Transport, #stream_connection{socket = S}, Frame) ->
- FrameSize = byte_size(Frame),
- Transport:send(S, [<<FrameSize:32>>, Frame]).
+stream_has_publishers(Stream,
+ #stream_connection{publishers = Publishers}) ->
+ lists:any(fun(#publisher{stream = S}) ->
+ case S of
+ Stream -> true;
+ _ -> false
+ end
+ end,
+ maps:values(Publishers)).
-response_ok(Transport, State, CommandId, CorrelationId) ->
- response(Transport, State, CommandId, CorrelationId, ?RESPONSE_CODE_OK).
+demonitor_all_streams(#stream_connection{monitors = Monitors} =
+ Connection) ->
+ lists:foreach(fun(MonitorRef) -> demonitor(MonitorRef, [flush]) end,
+ maps:keys(Monitors)),
+ Connection#stream_connection{monitors = #{}}.
-response(Transport, #stream_connection{socket = S}, CommandId, CorrelationId, ResponseCode) ->
- Transport:send(S, [<<?RESPONSE_FRAME_SIZE:32, CommandId:16, ?VERSION_0:16>>, <<CorrelationId:32>>, <<ResponseCode:16>>]).
+response_ok(Transport, State, Command, CorrelationId) ->
+ response(Transport, State, Command, CorrelationId, ?RESPONSE_CODE_OK).
+
+response(Transport,
+ #stream_connection{socket = S},
+ Command,
+ CorrelationId,
+ ResponseCode)
+ when is_atom(Command) ->
+ send(Transport, S,
+ rabbit_stream_core:frame({response, CorrelationId,
+ {Command, ResponseCode}})).
subscription_exists(StreamSubscriptions, SubscriptionId) ->
- SubscriptionIds = lists:flatten(maps:values(StreamSubscriptions)),
+ SubscriptionIds =
+ lists:flatten(
+ maps:values(StreamSubscriptions)),
lists:any(fun(Id) -> Id =:= SubscriptionId end, SubscriptionIds).
-send_file_callback(Transport, #consumer{socket = S, subscription_id = SubscriptionId}, Counter) ->
- fun(Size) ->
- FrameSize = 2 + 2 + 1 + Size,
- FrameBeginning = <<FrameSize:32, ?COMMAND_DELIVER:16, ?VERSION_0:16, SubscriptionId:8/unsigned>>,
- Transport:send(S, FrameBeginning),
- atomics:add(Counter, 1, Size)
+send_file_callback(Transport,
+ #consumer{configuration =
+ #consumer_configuration{socket = S,
+ subscription_id =
+ SubscriptionId,
+ counters = Counters}},
+ Counter) ->
+ fun(#{chunk_id := FirstOffsetInChunk, num_entries := NumEntries},
+ Size) ->
+ FrameSize = 2 + 2 + 1 + Size,
+ FrameBeginning =
+ <<FrameSize:32,
+ ?REQUEST:1,
+ ?COMMAND_DELIVER:15,
+ ?VERSION_1:16,
+ SubscriptionId:8/unsigned>>,
+ Transport:send(S, FrameBeginning),
+ atomics:add(Counter, 1, Size),
+ increase_messages_consumed(Counters, NumEntries),
+ set_consumer_offset(Counters, FirstOffsetInChunk)
end.
send_chunks(Transport, #consumer{credit = Credit} = State, Counter) ->
send_chunks(Transport, State, Credit, Counter).
-send_chunks(_Transport, #consumer{segment = Segment}, 0, _Counter) ->
- {{segment, Segment}, {credit, 0}};
-send_chunks(Transport, #consumer{segment = Segment} = State, Credit, Counter) ->
- send_chunks(Transport, State, Segment, Credit, true, Counter).
-
-send_chunks(_Transport, _State, Segment, 0 = _Credit, _Retry, _Counter) ->
+send_chunks(_Transport, #consumer{log = Log}, 0, _Counter) ->
+ {{segment, Log}, {credit, 0}};
+send_chunks(Transport,
+ #consumer{log = Log} = State,
+ Credit,
+ Counter) ->
+ send_chunks(Transport, State, Log, Credit, true, Counter).
+
+send_chunks(_Transport,
+ _State,
+ Segment,
+ 0 = _Credit,
+ _Retry,
+ _Counter) ->
{{segment, Segment}, {credit, 0}};
-send_chunks(Transport, #consumer{socket = S} = State, Segment, Credit, Retry, Counter) ->
- case osiris_log:send_file(S, Segment, send_file_callback(Transport, State, Counter)) of
- {ok, Segment1} ->
- send_chunks(
- Transport,
+send_chunks(Transport,
+ #consumer{configuration = #consumer_configuration{socket = S}} =
State,
- Segment1,
- Credit - 1,
- true,
- Counter
- );
+ Segment,
+ Credit,
+ Retry,
+ Counter) ->
+ case osiris_log:send_file(S, Segment,
+ send_file_callback(Transport, State, Counter))
+ of
+ {ok, Segment1} ->
+ send_chunks(Transport, State, Segment1, Credit - 1, true, Counter);
+ {error, closed} ->
+ {error, closed};
+ {error, enotconn} ->
+ {error, closed};
+ {error, Reason} ->
+ {error, Reason};
{end_of_stream, Segment1} ->
case Retry of
true ->
timer:sleep(1),
- send_chunks(Transport, State, Segment1, Credit, false, Counter);
+ send_chunks(Transport,
+ State,
+ Segment1,
+ Credit,
+ false,
+ Counter);
false ->
- #consumer{member_pid = LocalMember} = State,
- osiris:register_offset_listener(LocalMember, osiris_log:next_offset(Segment1)),
+ #consumer{configuration =
+ #consumer_configuration{member_pid =
+ LocalMember}} =
+ State,
+ osiris:register_offset_listener(LocalMember,
+ osiris_log:next_offset(Segment1)),
{{segment, Segment1}, {credit, Credit}}
end
end.
-emit_stats(Connection, ConnectionState) ->
- [{_, Pid}, {_, Recv_oct}, {_, Send_oct}, {_, Reductions}] = I
- = infos(?SIMPLE_METRICS, Connection, ConnectionState),
+emit_stats(#stream_connection{publishers = Publishers} = Connection,
+ #stream_connection_state{consumers = Consumers} = ConnectionState) ->
+ [{_, Pid}, {_, Recv_oct}, {_, Send_oct}, {_, Reductions}] =
+ I = infos(?SIMPLE_METRICS, Connection, ConnectionState),
Infos = infos(?OTHER_METRICS, Connection, ConnectionState),
rabbit_core_metrics:connection_stats(Pid, Infos),
- rabbit_core_metrics:connection_stats(Pid, Recv_oct, Send_oct, Reductions),
+ rabbit_core_metrics:connection_stats(Pid,
+ Recv_oct,
+ Send_oct,
+ Reductions),
rabbit_event:notify(connection_stats, Infos ++ I),
- Connection1 = rabbit_event:reset_stats_timer(Connection, #stream_connection.stats_timer),
+ [rabbit_stream_metrics:consumer_updated(self(),
+ stream_r(S, Connection),
+ Id,
+ Credit,
+ messages_consumed(Counters),
+ consumer_offset(Counters),
+ consumer_i(offset_lag, Consumer),
+ Properties)
+ || #consumer{configuration =
+ #consumer_configuration{stream = S,
+ subscription_id = Id,
+ counters = Counters,
+ properties = Properties},
+ credit = Credit} =
+ Consumer
+ <- maps:values(Consumers)],
+ [rabbit_stream_metrics:publisher_updated(self(),
+ stream_r(S, Connection),
+ Id,
+ PubReference,
+ messages_published(Counters),
+ messages_confirmed(Counters),
+ messages_errored(Counters))
+ || #publisher{stream = S,
+ publisher_id = Id,
+ reference = PubReference,
+ message_counters = Counters}
+ <- maps:values(Publishers)],
+ Connection1 =
+ rabbit_event:reset_stats_timer(Connection,
+ #stream_connection.stats_timer),
ensure_stats_timer(Connection1).
ensure_stats_timer(Connection = #stream_connection{}) ->
- rabbit_event:ensure_stats_timer(Connection, #stream_connection.stats_timer, emit_stats).
+ rabbit_event:ensure_stats_timer(Connection,
+ #stream_connection.stats_timer, emit_stats).
+
+in_vhost(_Pid, undefined) ->
+ true;
+in_vhost(Pid, VHost) ->
+ case info(Pid, [vhost]) of
+ [{vhost, VHost}] ->
+ true;
+ _ ->
+ false
+ end.
+
+consumers_info(Pid, InfoItems) ->
+ case InfoItems -- ?CONSUMER_INFO_ITEMS of
+ [] ->
+ gen_server2:call(Pid, {consumers_info, InfoItems});
+ UnknownItems ->
+ throw({bad_argument, UnknownItems})
+ end.
+
+consumers_infos(Items,
+ #stream_connection_state{consumers = Consumers}) ->
+ [[{Item, consumer_i(Item, Consumer)} || Item <- Items]
+ || Consumer <- maps:values(Consumers)].
+
+consumer_i(subscription_id,
+ #consumer{configuration =
+ #consumer_configuration{subscription_id = SubId}}) ->
+ SubId;
+consumer_i(credits, #consumer{credit = Credits}) ->
+ Credits;
+consumer_i(messages_consumed,
+ #consumer{configuration =
+ #consumer_configuration{counters = Counters}}) ->
+ messages_consumed(Counters);
+consumer_i(offset,
+ #consumer{configuration =
+ #consumer_configuration{counters = Counters}}) ->
+ consumer_offset(Counters);
+consumer_i(offset_lag,
+ #consumer{configuration =
+ #consumer_configuration{counters = Counters},
+ log = Log}) ->
+ stream_stored_offset(Log) - consumer_offset(Counters);
+consumer_i(connection_pid, _) ->
+ self();
+consumer_i(properties,
+ #consumer{configuration =
+ #consumer_configuration{properties = Properties}}) ->
+ Properties;
+consumer_i(stream,
+ #consumer{configuration =
+ #consumer_configuration{stream = Stream}}) ->
+ Stream.
+
+publishers_info(Pid, InfoItems) ->
+ case InfoItems -- ?PUBLISHER_INFO_ITEMS of
+ [] ->
+ gen_server2:call(Pid, {publishers_info, InfoItems});
+ UnknownItems ->
+ throw({bad_argument, UnknownItems})
+ end.
+
+publishers_infos(Items,
+ #stream_connection{publishers = Publishers}) ->
+ [[{Item, publisher_i(Item, Publisher)} || Item <- Items]
+ || Publisher <- maps:values(Publishers)].
+
+publisher_i(stream, #publisher{stream = S}) ->
+ S;
+publisher_i(connection_pid, _) ->
+ self();
+publisher_i(publisher_id, #publisher{publisher_id = Id}) ->
+ Id;
+publisher_i(reference, #publisher{reference = undefined}) ->
+ <<"">>;
+publisher_i(reference, #publisher{reference = Ref}) ->
+ Ref;
+publisher_i(messages_published,
+ #publisher{message_counters = Counters}) ->
+ messages_published(Counters);
+publisher_i(messages_confirmed,
+ #publisher{message_counters = Counters}) ->
+ messages_confirmed(Counters);
+publisher_i(messages_errored,
+ #publisher{message_counters = Counters}) ->
+ messages_errored(Counters).
info(Pid, InfoItems) ->
case InfoItems -- ?INFO_ITEMS of
[] ->
- gen_server2:call(Pid, {info, InfoItems});
- UnknownItems -> throw({bad_argument, UnknownItems})
+ gen_server2:call(Pid, {info, InfoItems}, infinity);
+ UnknownItems ->
+ throw({bad_argument, UnknownItems})
end.
-infos(Items, Connection, State) -> [{Item, i(Item, Connection, State)} || Item <- Items].
+infos(Items, Connection, State) ->
+ [{Item, i(Item, Connection, State)} || Item <- Items].
-i(pid, _, _) -> self();
-i(node, _, _) -> node();
-i(SockStat, #stream_connection{socket = Sock, send_file_oct = Counter}, _) when
- SockStat =:= send_oct -> % Number of bytes sent from the socket.
+i(pid, _, _) ->
+ self();
+i(node, _, _) ->
+ node();
+i(SockStat,
+ #stream_connection{socket = Sock, send_file_oct = Counter}, _)
+ when SockStat =:= send_oct -> % Number of bytes sent from the socket.
case rabbit_net:getstat(Sock, [SockStat]) of
- {ok, [{_, N}]} when is_number(N) -> N + atomics:get(Counter, 1);
- _ -> 0 + atomics:get(Counter, 1)
+ {ok, [{_, N}]} when is_number(N) ->
+ N + atomics:get(Counter, 1);
+ _ ->
+ 0 + atomics:get(Counter, 1)
end;
-i(SockStat, #stream_connection{socket = Sock}, _) when
- SockStat =:= recv_oct; % Number of bytes received by the socket.
- SockStat =:= recv_cnt; % Number of packets received by the socket.
- SockStat =:= send_cnt; % Number of packets sent from the socket.
- SockStat =:= send_pend -> % Number of bytes waiting to be sent by the socket.
+i(SockStat, #stream_connection{socket = Sock}, _)
+ when SockStat =:= recv_oct; % Number of bytes received by the socket.
+ SockStat =:= recv_cnt; % Number of packets received by the socket.
+ SockStat =:= send_cnt; % Number of packets sent from the socket.
+ SockStat
+ =:= send_pend -> % Number of bytes waiting to be sent by the socket.
case rabbit_net:getstat(Sock, [SockStat]) of
- {ok, [{_, N}]} when is_number(N) -> N;
- _ -> 0
+ {ok, [{_, N}]} when is_number(N) ->
+ N;
+ _ ->
+ 0
end;
i(reductions, _, _) ->
{reductions, Reductions} = erlang:process_info(self(), reductions),
Reductions;
i(garbage_collection, _, _) ->
rabbit_misc:get_gc_info(self());
-i(state, Connection, ConnectionState) -> i(connection_state, Connection, ConnectionState);
-i(timeout, Connection, ConnectionState) -> i(heartbeat, Connection, ConnectionState);
-i(name, Connection, ConnectionState) -> i(conn_name, Connection, ConnectionState);
-i(conn_name, #stream_connection{name = Name}, _) -> Name;
-i(port, #stream_connection{port = Port}, _) -> Port;
-i(peer_port, #stream_connection{peer_port = PeerPort}, _) -> PeerPort;
-i(host, #stream_connection{host = Host}, _) -> Host;
-i(peer_host, #stream_connection{peer_host = PeerHost}, _) -> PeerHost;
-i(ssl, _, _) -> false;
-i(peer_cert_subject, _, _) -> '';
-i(peer_cert_issuer, _, _) -> '';
-i(peer_cert_validity, _, _) -> '';
-i(ssl_protocol, _, _) -> '';
-i(ssl_key_exchange, _, _) -> '';
-i(ssl_cipher, _, _) -> '';
-i(ssl_hash, _, _) -> '';
-i(channels, _, _) -> 0;
-i(protocol, _, _) -> {<<"stream">>, ""};
-i(user_who_performed_action, Connection, ConnectionState) -> i(user, Connection, ConnectionState);
-i(user, #stream_connection{user = U}, _) -> U#user.username;
-i(vhost, #stream_connection{virtual_host = VirtualHost}, _) -> VirtualHost;
-i(subscriptions, _, #stream_connection_state{consumers = Consumers}) -> maps:size(Consumers);
-i(connection_state, _Connection, #stream_connection_state{blocked = true}) -> blocked;
-i(connection_state, _Connection, #stream_connection_state{blocked = false}) -> running;
-i(auth_mechanism, #stream_connection{auth_mechanism = none}, _) -> none;
-i(auth_mechanism, #stream_connection{auth_mechanism = {Name, _Mod}}, _) -> Name;
-i(heartbeat, #stream_connection{heartbeat = Heartbeat}, _) -> Heartbeat;
-i(frame_max, #stream_connection{frame_max = FrameMax}, _) -> FrameMax;
-i(channel_max, _, _) -> 0;
-i(client_properties, #stream_connection{client_properties = CP}, _) -> rabbit_misc:to_amqp_table(CP);
-i(connected_at, #stream_connection{connected_at = T}, _) -> T;
-i(Item, #stream_connection{}, _) -> throw({bad_argument, Item}).
\ No newline at end of file
+i(state, Connection, ConnectionState) ->
+ i(connection_state, Connection, ConnectionState);
+i(timeout, Connection, ConnectionState) ->
+ i(heartbeat, Connection, ConnectionState);
+i(name, Connection, ConnectionState) ->
+ i(conn_name, Connection, ConnectionState);
+i(conn_name, #stream_connection{name = Name}, _) ->
+ Name;
+i(port, #stream_connection{port = Port}, _) ->
+ Port;
+i(peer_port, #stream_connection{peer_port = PeerPort}, _) ->
+ PeerPort;
+i(host, #stream_connection{host = Host}, _) ->
+ Host;
+i(peer_host, #stream_connection{peer_host = PeerHost}, _) ->
+ PeerHost;
+i(ssl, #stream_connection{socket = Socket, proxy_socket = ProxySock},
+ _) ->
+ rabbit_net:proxy_ssl_info(Socket, ProxySock) /= nossl;
+i(peer_cert_subject, S, _) ->
+ cert_info(fun rabbit_ssl:peer_cert_subject/1, S);
+i(peer_cert_issuer, S, _) ->
+ cert_info(fun rabbit_ssl:peer_cert_issuer/1, S);
+i(peer_cert_validity, S, _) ->
+ cert_info(fun rabbit_ssl:peer_cert_validity/1, S);
+i(ssl_protocol, S, _) ->
+ ssl_info(fun({P, _}) -> P end, S);
+i(ssl_key_exchange, S, _) ->
+ ssl_info(fun({_, {K, _, _}}) -> K end, S);
+i(ssl_cipher, S, _) ->
+ ssl_info(fun({_, {_, C, _}}) -> C end, S);
+i(ssl_hash, S, _) ->
+ ssl_info(fun({_, {_, _, H}}) -> H end, S);
+i(channels, _, _) ->
+ 0;
+i(protocol, _, _) ->
+ <<"stream">>;
+i(user_who_performed_action, Connection, ConnectionState) ->
+ i(user, Connection, ConnectionState);
+i(user, #stream_connection{user = U}, _) ->
+ U#user.username;
+i(vhost, #stream_connection{virtual_host = VirtualHost}, _) ->
+ VirtualHost;
+i(subscriptions, _,
+ #stream_connection_state{consumers = Consumers}) ->
+ maps:size(Consumers);
+i(connection_state, _Connection,
+ #stream_connection_state{blocked = true}) ->
+ blocked;
+i(connection_state, _Connection,
+ #stream_connection_state{blocked = false}) ->
+ running;
+i(auth_mechanism, #stream_connection{auth_mechanism = none}, _) ->
+ none;
+i(auth_mechanism, #stream_connection{auth_mechanism = {Name, _Mod}},
+ _) ->
+ Name;
+i(heartbeat, #stream_connection{heartbeat = Heartbeat}, _) ->
+ Heartbeat;
+i(frame_max, #stream_connection{frame_max = FrameMax}, _) ->
+ FrameMax;
+i(channel_max, _, _) ->
+ 0;
+i(client_properties, #stream_connection{client_properties = CP}, _) ->
+ rabbit_misc:to_amqp_table(CP);
+i(connected_at, #stream_connection{connected_at = T}, _) ->
+ T;
+i(Item, #stream_connection{}, _) ->
+ throw({bad_argument, Item}).
+
+-spec send(module(), rabbit_net:socket(), iodata()) -> ok.
+send(Transport, Socket, Data) when is_atom(Transport) ->
+ Transport:send(Socket, Data).
+
+cert_info(F, #stream_connection{socket = Sock}) ->
+ case rabbit_net:peercert(Sock) of
+ nossl ->
+ '';
+ {error, _} ->
+ '';
+ {ok, Cert} ->
+ list_to_binary(F(Cert))
+ end.
+
+ssl_info(F,
+ #stream_connection{socket = Sock, proxy_socket = ProxySock}) ->
+ case rabbit_net:proxy_ssl_info(Sock, ProxySock) of
+ nossl ->
+ '';
+ {error, _} ->
+ '';
+ {ok, Items} ->
+ P = proplists:get_value(protocol, Items),
+ #{cipher := C,
+ key_exchange := K,
+ mac := H} =
+ proplists:get_value(selected_cipher_suite, Items),
+ F({P, {K, C, H}})
+ end.
+
+get_chunk_selector(Properties) ->
+ binary_to_atom(maps:get(<<"chunk_selector">>, Properties,
+ <<"user_data">>)).
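%% Illustrative aside, not part of the patch: get_chunk_selector/1 above falls
%% back to <<"user_data">> when the subscription properties carry no
%% "chunk_selector" entry, so for example:
%%   get_chunk_selector(#{})                                  -> user_data
%%   get_chunk_selector(#{<<"chunk_selector">> => <<"all">>}) -> all
%% binary_to_atom/1 (OTP 23+) assumes UTF-8 input.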
diff --git a/deps/rabbitmq_stream/src/rabbit_stream_sup.erl b/deps/rabbitmq_stream/src/rabbit_stream_sup.erl
index b331b47356..dc9d528fe8 100644
--- a/deps/rabbitmq_stream/src/rabbit_stream_sup.erl
+++ b/deps/rabbitmq_stream/src/rabbit_stream_sup.erl
@@ -11,51 +11,117 @@
%% The Original Code is RabbitMQ.
%%
%% The Initial Developer of the Original Code is Pivotal Software, Inc.
-%% Copyright (c) 2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_stream_sup).
+
-behaviour(supervisor).
-export([start_link/0]).
-export([init/1]).
--include("rabbit_stream.hrl").
+-include_lib("rabbitmq_stream_common/include/rabbit_stream.hrl").
start_link() ->
supervisor:start_link({local, ?MODULE}, ?MODULE, []).
init([]) ->
{ok, Listeners} = application:get_env(rabbitmq_stream, tcp_listeners),
- NumTcpAcceptors = application:get_env(rabbitmq_stream, num_tcp_acceptors, 10),
- {ok, SocketOpts} = application:get_env(rabbitmq_stream, tcp_listen_options),
- Nodes = rabbit_mnesia:cluster_nodes(all),
+ NumTcpAcceptors =
+ application:get_env(rabbitmq_stream, num_tcp_acceptors, 10),
+ SocketOpts =
+ application:get_env(rabbitmq_stream, tcp_listen_options, []),
+
+ {ok, SslListeners0} =
+ application:get_env(rabbitmq_stream, ssl_listeners),
+ SslSocketOpts =
+ application:get_env(rabbitmq_stream, ssl_listen_options, []),
+ {SslOpts, NumSslAcceptors, SslListeners} =
+ case SslListeners0 of
+ [] ->
+ {none, 0, []};
+ _ ->
+ {rabbit_networking:ensure_ssl(),
+ application:get_env(rabbitmq_stream, num_ssl_acceptors, 10),
+ case rabbit_networking:poodle_check('STREAM') of
+ ok ->
+ SslListeners0;
+ danger ->
+ []
+ end}
+ end,
+
+ Nodes = rabbit_nodes:all(),
OsirisConf = #{nodes => Nodes},
- ServerConfiguration = #{
- initial_credits => application:get_env(rabbitmq_stream, initial_credits, ?DEFAULT_INITIAL_CREDITS),
- credits_required_for_unblocking => application:get_env(rabbitmq_stream, credits_required_for_unblocking, ?DEFAULT_CREDITS_REQUIRED_FOR_UNBLOCKING),
- frame_max => application:get_env(rabbit_stream, frame_max, ?DEFAULT_FRAME_MAX),
- heartbeat => application:get_env(rabbit_stream, heartbeat, ?DEFAULT_HEARTBEAT)
- },
+ ServerConfiguration =
+ #{initial_credits =>
+ application:get_env(rabbitmq_stream, initial_credits,
+ ?DEFAULT_INITIAL_CREDITS),
+ credits_required_for_unblocking =>
+ application:get_env(rabbitmq_stream,
+ credits_required_for_unblocking,
+ ?DEFAULT_CREDITS_REQUIRED_FOR_UNBLOCKING),
+ frame_max =>
+ application:get_env(rabbit_stream, frame_max, ?DEFAULT_FRAME_MAX),
+ heartbeat =>
+ application:get_env(rabbit_stream, heartbeat,
+ ?DEFAULT_HEARTBEAT)},
+
+ StreamManager =
+ #{id => rabbit_stream_manager,
+ type => worker,
+ start => {rabbit_stream_manager, start_link, [OsirisConf]}},
- StreamManager = #{id => rabbit_stream_manager,
- type => worker,
- start => {rabbit_stream_manager, start_link, [OsirisConf]}},
+ MetricsGc =
+ #{id => rabbit_stream_metrics_gc_sup,
+ type => worker,
+ start => {rabbit_stream_metrics_gc, start_link, []}},
- {ok, {{one_for_all, 10, 10},
- [StreamManager] ++
- listener_specs(fun tcp_listener_spec/1,
- [SocketOpts, ServerConfiguration, NumTcpAcceptors], Listeners)}}.
+ {ok,
+ {{one_for_all, 10, 10},
+ [StreamManager, MetricsGc]
+ ++ listener_specs(fun tcp_listener_spec/1,
+ [SocketOpts, ServerConfiguration, NumTcpAcceptors],
+ Listeners)
+ ++ listener_specs(fun ssl_listener_spec/1,
+ [SslSocketOpts,
+ SslOpts,
+ ServerConfiguration,
+ NumSslAcceptors],
+ SslListeners)}}.
listener_specs(Fun, Args, Listeners) ->
- [Fun([Address | Args]) ||
- Listener <- Listeners,
+ [Fun([Address | Args])
+ || Listener <- Listeners,
Address <- rabbit_networking:tcp_listener_addresses(Listener)].
-tcp_listener_spec([Address, SocketOpts, Configuration, NumAcceptors]) ->
- rabbit_networking:tcp_listener_spec(
- rabbit_stream_listener_sup, Address, SocketOpts,
- ranch_tcp, rabbit_stream_connection_sup, Configuration,
- stream, NumAcceptors, "Stream TCP listener").
+tcp_listener_spec([Address,
+ SocketOpts,
+ Configuration,
+ NumAcceptors]) ->
+ rabbit_networking:tcp_listener_spec(rabbit_stream_listener_sup,
+ Address,
+ SocketOpts,
+ ranch_tcp,
+ rabbit_stream_connection_sup,
+ Configuration#{transport => tcp},
+ stream,
+ NumAcceptors,
+ "Stream TCP listener").
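+%% Like tcp_listener_spec/1, but appends the TLS options, uses ranch_ssl and
+%% tags the configuration with transport => ssl.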
+ssl_listener_spec([Address,
+ SocketOpts,
+ SslOpts,
+ Configuration,
+ NumAcceptors]) ->
+ rabbit_networking:tcp_listener_spec(rabbit_stream_listener_sup,
+ Address,
+ SocketOpts ++ SslOpts,
+ ranch_ssl,
+ rabbit_stream_connection_sup,
+ Configuration#{transport => ssl},
+ 'stream/ssl',
+ NumAcceptors,
+ "Stream TLS listener").
diff --git a/deps/rabbitmq_stream/src/rabbit_stream_utils.erl b/deps/rabbitmq_stream/src/rabbit_stream_utils.erl
index c20aacb12c..37545424c8 100644
--- a/deps/rabbitmq_stream/src/rabbit_stream_utils.erl
+++ b/deps/rabbitmq_stream/src/rabbit_stream_utils.erl
@@ -11,115 +11,232 @@
%% The Original Code is RabbitMQ.
%%
%% The Initial Developer of the Original Code is Pivotal Software, Inc.
-%% Copyright (c) 2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_stream_utils).
%% API
--export([enforce_correct_stream_name/1, write_messages/3, parse_map/2,
- auth_mechanisms/1, auth_mechanism_to_module/2,
- check_configure_permitted/3, check_write_permitted/3, check_read_permitted/3,
- extract_stream_list/2]).
+-export([enforce_correct_name/1,
+ write_messages/4,
+ parse_map/2,
+ auth_mechanisms/1,
+ auth_mechanism_to_module/2,
+ check_configure_permitted/3,
+ check_write_permitted/3,
+ check_read_permitted/3,
+ extract_stream_list/2,
+ sort_partitions/1,
+ strip_cr_lf/1]).
-define(MAX_PERMISSION_CACHE_SIZE, 12).
-enforce_correct_stream_name(Name) ->
- % from rabbit_channel
- StrippedName = binary:replace(Name, [<<"\n">>, <<"\r">>], <<"">>, [global]),
- case check_name(StrippedName) of
- ok ->
- {ok, StrippedName};
- error ->
- error
- end.
+-include_lib("rabbit_common/include/rabbit.hrl").
+
+enforce_correct_name(Name) ->
+ % from rabbit_channel
+ StrippedName =
+ binary:replace(Name, [<<"\n">>, <<"\r">>], <<"">>, [global]),
+ case check_name(StrippedName) of
+ ok ->
+ {ok, StrippedName};
+ error ->
+ error
+ end.
check_name(<<"amq.", _/binary>>) ->
- error;
+ error;
check_name(<<"">>) ->
- error;
+ error;
check_name(_Name) ->
- ok.
-
-write_messages(_ClusterLeader, _PublisherId, <<>>) ->
- ok;
-write_messages(ClusterLeader, PublisherId, <<PublishingId:64, 0:1, MessageSize:31, Message:MessageSize/binary, Rest/binary>>) ->
- % FIXME handle write error
- ok = osiris:write(ClusterLeader, {PublisherId, PublishingId}, Message),
- write_messages(ClusterLeader, PublisherId, Rest);
-write_messages(ClusterLeader, PublisherId, <<PublishingId:64, 1:1, CompressionType:3, _Unused:4, MessageCount:16, BatchSize:32, Batch:BatchSize/binary, Rest/binary>>) ->
- % FIXME handle write error
- ok = osiris:write(ClusterLeader, {PublisherId, PublishingId}, {batch, MessageCount, CompressionType, Batch}),
- write_messages(ClusterLeader, PublisherId, Rest).
-
+ ok.
+
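+%% Publish frames carry a sequence of entries; each entry is either a single
+%% message (<<PublishingId:64, 0:1, Size:31, Payload:Size/binary>>) or a
+%% sub-entry batch (<<PublishingId:64, 1:1, CompressionType:3, _:4,
+%% MessageCount:16, UncompressedSize:32, Size:32, Batch:Size/binary>>).
+%% The clauses are duplicated for writes with and without a publisher
+%% reference (second argument).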
+write_messages(_ClusterLeader, undefined, _PublisherId, <<>>) ->
+ ok;
+write_messages(ClusterLeader,
+ undefined,
+ PublisherId,
+ <<PublishingId:64,
+ 0:1,
+ MessageSize:31,
+ Message:MessageSize/binary,
+ Rest/binary>>) ->
+ % FIXME handle write error
+ ok =
+ osiris:write(ClusterLeader,
+ undefined,
+ {PublisherId, PublishingId},
+ Message),
+ write_messages(ClusterLeader, undefined, PublisherId, Rest);
+write_messages(ClusterLeader,
+ undefined,
+ PublisherId,
+ <<PublishingId:64,
+ 1:1,
+ CompressionType:3,
+ _Unused:4,
+ MessageCount:16,
+ UncompressedSize:32,
+ BatchSize:32,
+ Batch:BatchSize/binary,
+ Rest/binary>>) ->
+ % FIXME handle write error
+ ok =
+ osiris:write(ClusterLeader,
+ undefined,
+ {PublisherId, PublishingId},
+ {batch,
+ MessageCount,
+ CompressionType,
+ UncompressedSize,
+ Batch}),
+ write_messages(ClusterLeader, undefined, PublisherId, Rest);
+write_messages(_ClusterLeader, _PublisherRef, _PublisherId, <<>>) ->
+ ok;
+write_messages(ClusterLeader,
+ PublisherRef,
+ PublisherId,
+ <<PublishingId:64,
+ 0:1,
+ MessageSize:31,
+ Message:MessageSize/binary,
+ Rest/binary>>) ->
+ % FIXME handle write error
+ ok = osiris:write(ClusterLeader, PublisherRef, PublishingId, Message),
+ write_messages(ClusterLeader, PublisherRef, PublisherId, Rest);
+write_messages(ClusterLeader,
+ PublisherRef,
+ PublisherId,
+ <<PublishingId:64,
+ 1:1,
+ CompressionType:3,
+ _Unused:4,
+ MessageCount:16,
+ UncompressedSize:32,
+ BatchSize:32,
+ Batch:BatchSize/binary,
+ Rest/binary>>) ->
+ % FIXME handle write error
+ ok =
+ osiris:write(ClusterLeader,
+ PublisherRef,
+ PublishingId,
+ {batch,
+ MessageCount,
+ CompressionType,
+ UncompressedSize,
+ Batch}),
+ write_messages(ClusterLeader, PublisherRef, PublisherId, Rest).
parse_map(<<>>, _Count) ->
- {#{}, <<>>};
+ {#{}, <<>>};
parse_map(Content, 0) ->
- {#{}, Content};
+ {#{}, Content};
parse_map(Arguments, Count) ->
- parse_map(#{}, Arguments, Count).
+ parse_map(#{}, Arguments, Count).
parse_map(Acc, <<>>, _Count) ->
- {Acc, <<>>};
+ {Acc, <<>>};
parse_map(Acc, Content, 0) ->
- {Acc, Content};
-parse_map(Acc, <<KeySize:16, Key:KeySize/binary, ValueSize:16, Value:ValueSize/binary, Rest/binary>>, Count) ->
- parse_map(maps:put(Key, Value, Acc), Rest, Count - 1).
+ {Acc, Content};
+parse_map(Acc,
+ <<KeySize:16,
+ Key:KeySize/binary,
+ ValueSize:16,
+ Value:ValueSize/binary,
+ Rest/binary>>,
+ Count) ->
+ parse_map(maps:put(Key, Value, Acc), Rest, Count - 1).
auth_mechanisms(Sock) ->
- {ok, Configured} = application:get_env(rabbit, auth_mechanisms),
- [rabbit_data_coercion:to_binary(Name) || {Name, Module} <- rabbit_registry:lookup_all(auth_mechanism),
- Module:should_offer(Sock), lists:member(Name, Configured)].
+ {ok, Configured} = application:get_env(rabbit, auth_mechanisms),
+ [rabbit_data_coercion:to_binary(Name)
+ || {Name, Module} <- rabbit_registry:lookup_all(auth_mechanism),
+ Module:should_offer(Sock), lists:member(Name, Configured)].
auth_mechanism_to_module(TypeBin, Sock) ->
- case rabbit_registry:binary_to_type(TypeBin) of
- {error, not_found} ->
- rabbit_log:warning("Unknown authentication mechanism '~p'~n", [TypeBin]),
- {error, not_found};
- T ->
- case {lists:member(TypeBin, rabbit_stream_utils:auth_mechanisms(Sock)),
- rabbit_registry:lookup_module(auth_mechanism, T)} of
- {true, {ok, Module}} ->
- {ok, Module};
- _ ->
- rabbit_log:warning("Invalid authentication mechanism '~p'~n", [T]),
- {error, invalid}
- end
- end.
+ case rabbit_registry:binary_to_type(TypeBin) of
+ {error, not_found} ->
+ rabbit_log:warning("Unknown authentication mechanism '~p'",
+ [TypeBin]),
+ {error, not_found};
+ T ->
+ case {lists:member(TypeBin,
+ rabbit_stream_utils:auth_mechanisms(Sock)),
+ rabbit_registry:lookup_module(auth_mechanism, T)}
+ of
+ {true, {ok, Module}} ->
+ {ok, Module};
+ _ ->
+ rabbit_log:warning("Invalid authentication mechanism '~p'",
+ [T]),
+ {error, invalid}
+ end
+ end.
check_resource_access(User, Resource, Perm, Context) ->
- V = {Resource, Context, Perm},
-
- Cache = case get(permission_cache) of
- undefined -> [];
- Other -> Other
- end,
- case lists:member(V, Cache) of
- true -> ok;
- false ->
- try
- rabbit_access_control:check_resource_access(
- User, Resource, Perm, Context),
- CacheTail = lists:sublist(Cache, ?MAX_PERMISSION_CACHE_SIZE - 1),
- put(permission_cache, [V | CacheTail]),
- ok
- catch
- exit:_ ->
- error
- end
- end.
+ V = {Resource, Context, Perm},
+
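+    %% Cache positive authorisation results in the process dictionary,
+    %% bounded to ?MAX_PERMISSION_CACHE_SIZE entries.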
+ Cache =
+ case get(permission_cache) of
+ undefined ->
+ [];
+ Other ->
+ Other
+ end,
+ case lists:member(V, Cache) of
+ true ->
+ ok;
+ false ->
+ try
+ rabbit_access_control:check_resource_access(User,
+ Resource,
+ Perm,
+ Context),
+ CacheTail =
+ lists:sublist(Cache, ?MAX_PERMISSION_CACHE_SIZE - 1),
+ put(permission_cache, [V | CacheTail]),
+ ok
+ catch
+ exit:_ ->
+ error
+ end
+ end.
check_configure_permitted(Resource, User, Context) ->
- check_resource_access(User, Resource, configure, Context).
+ check_resource_access(User, Resource, configure, Context).
check_write_permitted(Resource, User, Context) ->
- check_resource_access(User, Resource, write, Context).
+ check_resource_access(User, Resource, write, Context).
check_read_permitted(Resource, User, Context) ->
- check_resource_access(User, Resource, read, Context).
+ check_resource_access(User, Resource, read, Context).
extract_stream_list(<<>>, Streams) ->
- Streams;
-extract_stream_list(<<Length:16, Stream:Length/binary, Rest/binary>>, Streams) ->
-    extract_stream_list(Rest, [Stream | Streams]).
\ No newline at end of file
+ Streams;
+extract_stream_list(<<Length:16, Stream:Length/binary, Rest/binary>>,
+ Streams) ->
+ extract_stream_list(Rest, [Stream | Streams]).
+
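+%% Sort the partitions of a super stream by their numeric
+%% "x-stream-partition-order" binding argument; bindings without the
+%% argument sort last.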
+-spec sort_partitions([#binding{}]) -> [#binding{}].
+sort_partitions(Partitions) ->
+ lists:sort(fun(#binding{args = Args1}, #binding{args = Args2}) ->
+ Arg1 =
+ rabbit_misc:table_lookup(Args1,
+ <<"x-stream-partition-order">>),
+ Arg2 =
+ rabbit_misc:table_lookup(Args2,
+ <<"x-stream-partition-order">>),
+ case {Arg1, Arg2} of
+ {{_, Order1}, {_, Order2}} ->
+ rabbit_data_coercion:to_integer(Order1)
+ =< rabbit_data_coercion:to_integer(Order2);
+ {undefined, {_, _Order2}} -> false;
+ {{_, _Order1}, undefined} -> true;
+ _ -> true
+ end
+ end,
+ Partitions).
+
+strip_cr_lf(NameBin) ->
+ binary:replace(NameBin, [<<"\n">>, <<"\r">>], <<"">>, [global]).
diff --git a/deps/rabbitmq_stream/test/command_SUITE.erl b/deps/rabbitmq_stream/test/command_SUITE.erl
deleted file mode 100644
index 41ab5904ff..0000000000
--- a/deps/rabbitmq_stream/test/command_SUITE.erl
+++ /dev/null
@@ -1,136 +0,0 @@
-%% This Source Code Form is subject to the terms of the Mozilla Public
-%% License, v. 2.0. If a copy of the MPL was not distributed with this
-%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
-%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
-%%
-
--module(command_SUITE).
--compile([export_all]).
-
--include_lib("common_test/include/ct.hrl").
--include_lib("eunit/include/eunit.hrl").
--include_lib("amqp_client/include/amqp_client.hrl").
--include("rabbit_stream.hrl").
-
-
--define(COMMAND, 'Elixir.RabbitMQ.CLI.Ctl.Commands.ListStreamConnectionsCommand').
-
-all() ->
- [
- {group, non_parallel_tests}
- ].
-
-groups() ->
- [
- {non_parallel_tests, [], [
- merge_defaults,
- run
- ]}
- ].
-
-init_per_suite(Config) ->
- Config1 = rabbit_ct_helpers:set_config(Config,
- [{rmq_nodename_suffix, ?MODULE}]),
- rabbit_ct_helpers:log_environment(),
- rabbit_ct_helpers:run_setup_steps(Config1,
- rabbit_ct_broker_helpers:setup_steps()).
-
-end_per_suite(Config) ->
- rabbit_ct_helpers:run_teardown_steps(Config,
- rabbit_ct_broker_helpers:teardown_steps()).
-
-init_per_group(_, Config) ->
- Config.
-
-end_per_group(_, Config) ->
- Config.
-
-init_per_testcase(Testcase, Config) ->
- rabbit_ct_helpers:testcase_started(Config, Testcase).
-
-end_per_testcase(Testcase, Config) ->
- rabbit_ct_helpers:testcase_finished(Config, Testcase).
-
-merge_defaults(_Config) ->
- {[<<"conn_name">>], #{verbose := false}} =
- ?COMMAND:merge_defaults([], #{}),
-
- {[<<"other_key">>], #{verbose := true}} =
- ?COMMAND:merge_defaults([<<"other_key">>], #{verbose => true}),
-
- {[<<"other_key">>], #{verbose := false}} =
- ?COMMAND:merge_defaults([<<"other_key">>], #{verbose => false}).
-
-
-run(Config) ->
-
- Node = rabbit_ct_broker_helpers:get_node_config(Config, 0, nodename),
- Opts = #{node => Node, timeout => 10000, verbose => false},
-
- %% No connections
- [] = 'Elixir.Enum':to_list(?COMMAND:run([], Opts)),
-
- StreamPort = rabbit_stream_SUITE:get_stream_port(Config),
-
- S1 = start_stream_connection(StreamPort),
- ct:sleep(100),
-
- [[{conn_name, _}]] =
- 'Elixir.Enum':to_list(?COMMAND:run([<<"conn_name">>], Opts)),
-
- S2 = start_stream_connection(StreamPort),
- ct:sleep(100),
-
- [[{conn_name, _}], [{conn_name, _}]] =
- 'Elixir.Enum':to_list(?COMMAND:run([<<"conn_name">>], Opts)),
-
- Port = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_amqp),
- start_amqp_connection(network, Node, Port),
-
- %% There are still just two connections
- [[{conn_name, _}], [{conn_name, _}]] =
- 'Elixir.Enum':to_list(?COMMAND:run([<<"conn_name">>], Opts)),
-
- start_amqp_connection(direct, Node, Port),
-
- %% Still two MQTT connections, one direct AMQP 0-9-1 connection
- [[{conn_name, _}], [{conn_name, _}]] =
- 'Elixir.Enum':to_list(?COMMAND:run([<<"conn_name">>], Opts)),
-
- %% Verbose returns all keys
- Infos = lists:map(fun(El) -> atom_to_binary(El, utf8) end, ?INFO_ITEMS),
- AllKeys = 'Elixir.Enum':to_list(?COMMAND:run(Infos, Opts)),
- AllKeys = 'Elixir.Enum':to_list(?COMMAND:run([], Opts#{verbose => true})),
-
- %% There are two connections
- [First, _Second] = AllKeys,
-
- %% Keys are INFO_ITEMS
- KeysCount = length(?INFO_ITEMS),
- KeysCount = length(First),
-
- {Keys, _} = lists:unzip(First),
-
- [] = Keys -- ?INFO_ITEMS,
- [] = ?INFO_ITEMS -- Keys,
-
- rabbit_stream_SUITE:test_close(S1),
- rabbit_stream_SUITE:test_close(S2),
- ok.
-
-start_stream_connection(Port) ->
- {ok, S} = gen_tcp:connect("localhost", Port, [{active, false},
- {mode, binary}]),
- rabbit_stream_SUITE:test_peer_properties(S),
- rabbit_stream_SUITE:test_authenticate(S),
- S.
-
-start_amqp_connection(Type, Node, Port) ->
- Params = amqp_params(Type, Node, Port),
- {ok, _Connection} = amqp_connection:start(Params).
-
-amqp_params(network, _, Port) ->
- #amqp_params_network{port = Port};
-amqp_params(direct, Node, _) ->
- #amqp_params_direct{node = Node}.
diff --git a/deps/rabbitmq_stream/test/commands_SUITE.erl b/deps/rabbitmq_stream/test/commands_SUITE.erl
new file mode 100644
index 0000000000..902bfee3c8
--- /dev/null
+++ b/deps/rabbitmq_stream/test/commands_SUITE.erl
@@ -0,0 +1,574 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(commands_SUITE).
+
+-compile(nowarn_export_all).
+-compile([export_all]).
+
+% -include_lib("common_test/include/ct.hrl").
+-include_lib("eunit/include/eunit.hrl").
+-include_lib("amqp_client/include/amqp_client.hrl").
+-include_lib("rabbitmq_ct_helpers/include/rabbit_assert.hrl").
+-include_lib("rabbitmq_stream_common/include/rabbit_stream.hrl").
+
+-define(WAIT, 5000).
+-define(COMMAND_LIST_CONNECTIONS,
+ 'Elixir.RabbitMQ.CLI.Ctl.Commands.ListStreamConnectionsCommand').
+-define(COMMAND_LIST_CONSUMERS,
+ 'Elixir.RabbitMQ.CLI.Ctl.Commands.ListStreamConsumersCommand').
+-define(COMMAND_LIST_PUBLISHERS,
+ 'Elixir.RabbitMQ.CLI.Ctl.Commands.ListStreamPublishersCommand').
+-define(COMMAND_ADD_SUPER_STREAM,
+ 'Elixir.RabbitMQ.CLI.Ctl.Commands.AddSuperStreamCommand').
+-define(COMMAND_DELETE_SUPER_STREAM,
+ 'Elixir.RabbitMQ.CLI.Ctl.Commands.DeleteSuperStreamCommand').
+
+all() ->
+ [{group, list_connections},
+ {group, list_consumers},
+ {group, list_publishers},
+ {group, super_streams}].
+
+groups() ->
+ [{list_connections, [],
+ [list_connections_merge_defaults, list_connections_run,
+ list_tls_connections_run]},
+ {list_consumers, [],
+ [list_consumers_merge_defaults, list_consumers_run]},
+ {list_publishers, [],
+ [list_publishers_merge_defaults, list_publishers_run]},
+ {super_streams, [],
+ [add_super_stream_merge_defaults,
+ add_super_stream_validate,
+ delete_super_stream_merge_defaults,
+ delete_super_stream_validate,
+ add_delete_super_stream_run]}].
+
+init_per_suite(Config) ->
+ case rabbit_ct_helpers:is_mixed_versions() of
+ true ->
+ {skip,
+ "mixed version clusters are not supported for "
+ "this suite"};
+ _ ->
+ Config1 =
+ rabbit_ct_helpers:set_config(Config,
+ [{rmq_nodename_suffix, ?MODULE}]),
+ Config2 =
+ rabbit_ct_helpers:set_config(Config1,
+ {rabbitmq_ct_tls_verify,
+ verify_none}),
+ rabbit_ct_helpers:log_environment(),
+ rabbit_ct_helpers:run_setup_steps(Config2,
+ rabbit_ct_broker_helpers:setup_steps())
+ end.
+
+end_per_suite(Config) ->
+ rabbit_ct_helpers:run_teardown_steps(Config,
+ rabbit_ct_broker_helpers:teardown_steps()).
+
+init_per_group(_, Config) ->
+ Config.
+
+end_per_group(_, Config) ->
+ Config.
+
+init_per_testcase(Testcase, Config) ->
+ rabbit_ct_helpers:testcase_started(Config, Testcase).
+
+end_per_testcase(Testcase, Config) ->
+ rabbit_ct_helpers:testcase_finished(Config, Testcase).
+
+list_connections_merge_defaults(_Config) ->
+ {[<<"conn_name">>], #{verbose := false}} =
+ ?COMMAND_LIST_CONNECTIONS:merge_defaults([], #{}),
+
+ {[<<"other_key">>], #{verbose := true}} =
+ ?COMMAND_LIST_CONNECTIONS:merge_defaults([<<"other_key">>],
+ #{verbose => true}),
+
+ {[<<"other_key">>], #{verbose := false}} =
+ ?COMMAND_LIST_CONNECTIONS:merge_defaults([<<"other_key">>],
+ #{verbose => false}).
+
+list_connections_run(Config) ->
+ Node = rabbit_ct_broker_helpers:get_node_config(Config, 0, nodename),
+ Opts =
+ #{node => Node,
+ timeout => 10000,
+ verbose => false},
+
+ %% No connections
+ [] = to_list(?COMMAND_LIST_CONNECTIONS:run([], Opts)),
+
+ StreamPort = rabbit_stream_SUITE:get_stream_port(Config),
+
+ {S1, C1} = start_stream_connection(StreamPort),
+ ?awaitMatch(1, connection_count(Config), ?WAIT),
+
+ [[{conn_name, _}]] =
+ to_list(?COMMAND_LIST_CONNECTIONS:run([<<"conn_name">>], Opts)),
+ [[{ssl, false}]] =
+ to_list(?COMMAND_LIST_CONNECTIONS:run([<<"ssl">>], Opts)),
+
+ {S2, C2} = start_stream_connection(StreamPort),
+ ?awaitMatch(2, connection_count(Config), ?WAIT),
+
+ [[{conn_name, _}], [{conn_name, _}]] =
+ to_list(?COMMAND_LIST_CONNECTIONS:run([<<"conn_name">>], Opts)),
+
+ Port =
+ rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_amqp),
+ start_amqp_connection(network, Node, Port),
+
+ %% There are still just two connections
+ [[{conn_name, _}], [{conn_name, _}]] =
+ to_list(?COMMAND_LIST_CONNECTIONS:run([<<"conn_name">>], Opts)),
+
+ start_amqp_connection(direct, Node, Port),
+
+ %% Still two stream connections, one direct AMQP 0-9-1 connection
+ [[{conn_name, _}], [{conn_name, _}]] =
+ to_list(?COMMAND_LIST_CONNECTIONS:run([<<"conn_name">>], Opts)),
+
+ %% Verbose returns all keys
+ Infos =
+ lists:map(fun(El) -> atom_to_binary(El, utf8) end, ?INFO_ITEMS),
+ AllKeys = to_list(?COMMAND_LIST_CONNECTIONS:run(Infos, Opts)),
+ Verbose =
+ to_list(?COMMAND_LIST_CONNECTIONS:run([], Opts#{verbose => true})),
+ ?assertEqual(AllKeys, Verbose),
+
+ %% There are two connections
+ [First, _Second] = AllKeys,
+
+ %% Keys are INFO_ITEMS
+ ?assertEqual(length(?INFO_ITEMS), length(First)),
+
+ {Keys, _} = lists:unzip(First),
+
+ ?assertEqual([], Keys -- ?INFO_ITEMS),
+ ?assertEqual([], ?INFO_ITEMS -- Keys),
+
+ rabbit_stream_SUITE:test_close(gen_tcp, S1, C1),
+ rabbit_stream_SUITE:test_close(gen_tcp, S2, C2),
+ ok.
+
+list_tls_connections_run(Config) ->
+ Node = rabbit_ct_broker_helpers:get_node_config(Config, 0, nodename),
+ Opts =
+ #{node => Node,
+ timeout => 10000,
+ verbose => false},
+
+ %% No connections
+ [] = to_list(?COMMAND_LIST_CONNECTIONS:run([], Opts)),
+
+ StreamTlsPort = rabbit_stream_SUITE:get_stream_port_tls(Config),
+ application:ensure_all_started(ssl),
+
+ {S1, C1} = start_stream_tls_connection(StreamTlsPort),
+ ?awaitMatch(1, connection_count(Config), ?WAIT),
+
+ [[{conn_name, _}]] =
+ to_list(?COMMAND_LIST_CONNECTIONS:run([<<"conn_name">>], Opts)),
+ [[{ssl, true}]] =
+ to_list(?COMMAND_LIST_CONNECTIONS:run([<<"ssl">>], Opts)),
+
+ rabbit_stream_SUITE:test_close(ssl, S1, C1),
+ ok.
+
+list_consumers_merge_defaults(_Config) ->
+ DefaultItems =
+ [rabbit_data_coercion:to_binary(Item)
+ || Item <- ?CONSUMER_INFO_ITEMS],
+ {DefaultItems, #{verbose := false}} =
+ ?COMMAND_LIST_CONSUMERS:merge_defaults([], #{}),
+
+ {[<<"other_key">>], #{verbose := true}} =
+ ?COMMAND_LIST_CONSUMERS:merge_defaults([<<"other_key">>],
+ #{verbose => true}),
+
+ {[<<"other_key">>], #{verbose := false}} =
+ ?COMMAND_LIST_CONSUMERS:merge_defaults([<<"other_key">>],
+ #{verbose => false}).
+
+list_consumers_run(Config) ->
+ Node = rabbit_ct_broker_helpers:get_node_config(Config, 0, nodename),
+ Opts =
+ #{node => Node,
+ timeout => 10000,
+ verbose => false,
+ vhost => <<"/">>},
+
+ %% No connections, no consumers
+ [] = to_list(?COMMAND_LIST_CONSUMERS:run([], Opts)),
+
+ StreamPort = rabbit_stream_SUITE:get_stream_port(Config),
+ {S1, C1} = start_stream_connection(StreamPort),
+ ?awaitMatch(1, connection_count(Config), ?WAIT),
+
+ Stream = <<"list_consumers_run">>,
+ C1_1 = create_stream(S1, Stream, C1),
+ SubId = 42,
+ C1_2 = subscribe(S1, SubId, Stream, C1_1),
+
+ ?awaitMatch(1, consumer_count(Config), ?WAIT),
+
+ {S2, C2} = start_stream_connection(StreamPort),
+ ?awaitMatch(2, connection_count(Config), ?WAIT),
+ C2_1 = subscribe(S2, SubId, Stream, C2),
+
+ ?awaitMatch(2, consumer_count(Config), ?WAIT),
+
+ %% Verbose returns all keys
+ InfoItems = ?CONSUMER_INFO_ITEMS,
+ Infos = lists:map(fun(El) -> atom_to_binary(El, utf8) end, InfoItems),
+ AllKeys = to_list(?COMMAND_LIST_CONSUMERS:run(Infos, Opts)),
+ Verbose =
+ to_list(?COMMAND_LIST_CONSUMERS:run([], Opts#{verbose => true})),
+ ?assertEqual(AllKeys, Verbose),
+ %% There are two consumers
+ [[First], [_Second]] = AllKeys,
+
+ %% Keys are info items
+ ?assertEqual(length(InfoItems), length(First)),
+
+ {Keys, _} = lists:unzip(First),
+
+ ?assertEqual([], Keys -- InfoItems),
+ ?assertEqual([], InfoItems -- Keys),
+
+ C1_3 = delete_stream(S1, Stream, C1_2),
+ % metadata_update_stream_deleted(S1, Stream),
+ metadata_update_stream_deleted(S2, Stream, C2_1),
+ close(S1, C1_3),
+ close(S2, C2_1),
+ ?awaitMatch(0, consumer_count(Config), ?WAIT),
+ ok.
+
+list_publishers_merge_defaults(_Config) ->
+ DefaultItems =
+ [rabbit_data_coercion:to_binary(Item)
+ || Item <- ?PUBLISHER_INFO_ITEMS],
+ {DefaultItems, #{verbose := false}} =
+ ?COMMAND_LIST_PUBLISHERS:merge_defaults([], #{}),
+
+ {[<<"other_key">>], #{verbose := true}} =
+ ?COMMAND_LIST_PUBLISHERS:merge_defaults([<<"other_key">>],
+ #{verbose => true}),
+
+ {[<<"other_key">>], #{verbose := false}} =
+ ?COMMAND_LIST_PUBLISHERS:merge_defaults([<<"other_key">>],
+ #{verbose => false}).
+
+list_publishers_run(Config) ->
+ Node = rabbit_ct_broker_helpers:get_node_config(Config, 0, nodename),
+ Opts =
+ #{node => Node,
+ timeout => 10000,
+ verbose => false,
+ vhost => <<"/">>},
+
+ %% No connections, no publishers
+ [] = to_list(?COMMAND_LIST_PUBLISHERS:run([], Opts)),
+
+ StreamPort = rabbit_stream_SUITE:get_stream_port(Config),
+ {S1, C1} = start_stream_connection(StreamPort),
+ ?awaitMatch(1, connection_count(Config), ?WAIT),
+
+ Stream = <<"list_publishers_run">>,
+ C1_1 = create_stream(S1, Stream, C1),
+ PubId = 42,
+ C1_2 = declare_publisher(S1, PubId, Stream, C1_1),
+
+ ?awaitMatch(1, publisher_count(Config), ?WAIT),
+
+ {S2, C2} = start_stream_connection(StreamPort),
+ ?awaitMatch(2, connection_count(Config), ?WAIT),
+ C2_1 = declare_publisher(S2, PubId, Stream, C2),
+
+ ?awaitMatch(2, publisher_count(Config), ?WAIT),
+
+ %% Verbose returns all keys
+ InfoItems = ?PUBLISHER_INFO_ITEMS,
+ Infos = lists:map(fun(El) -> atom_to_binary(El, utf8) end, InfoItems),
+ AllKeys = to_list(?COMMAND_LIST_PUBLISHERS:run(Infos, Opts)),
+ Verbose =
+ to_list(?COMMAND_LIST_PUBLISHERS:run([], Opts#{verbose => true})),
+ ?assertEqual(AllKeys, Verbose),
+    %% There are two publishers
+ [[First], [_Second]] = AllKeys,
+
+ %% Keys are info items
+ ?assertEqual(length(InfoItems), length(First)),
+
+ {Keys, _} = lists:unzip(First),
+
+ ?assertEqual([], Keys -- InfoItems),
+ ?assertEqual([], InfoItems -- Keys),
+
+ C1_3 = delete_stream(S1, Stream, C1_2),
+ % metadata_update_stream_deleted(S1, Stream),
+ C2_2 = metadata_update_stream_deleted(S2, Stream, C2_1),
+ close(S1, C1_3),
+ close(S2, C2_2),
+ ?awaitMatch(0, publisher_count(Config), ?WAIT),
+ ok.
+
+add_super_stream_merge_defaults(_Config) ->
+ ?assertMatch({[<<"super-stream">>],
+ #{partitions := 3, vhost := <<"/">>}},
+ ?COMMAND_ADD_SUPER_STREAM:merge_defaults([<<"super-stream">>],
+ #{})),
+
+ ?assertMatch({[<<"super-stream">>],
+ #{partitions := 5, vhost := <<"/">>}},
+ ?COMMAND_ADD_SUPER_STREAM:merge_defaults([<<"super-stream">>],
+ #{partitions => 5})),
+
+ DefaultWithRoutingKeys =
+ ?COMMAND_ADD_SUPER_STREAM:merge_defaults([<<"super-stream">>],
+ #{routing_keys =>
+ <<"amer,emea,apac">>}),
+ ?assertMatch({[<<"super-stream">>],
+ #{routing_keys := <<"amer,emea,apac">>, vhost := <<"/">>}},
+ DefaultWithRoutingKeys),
+
+ {_, Opts} = DefaultWithRoutingKeys,
+ ?assertEqual(false, maps:is_key(partitions, Opts)).
+
+add_super_stream_validate(_Config) ->
+ ?assertMatch({validation_failure, not_enough_args},
+ ?COMMAND_ADD_SUPER_STREAM:validate([], #{})),
+ ?assertMatch({validation_failure, too_many_args},
+ ?COMMAND_ADD_SUPER_STREAM:validate([<<"a">>, <<"b">>], #{})),
+ ?assertMatch({validation_failure, _},
+ ?COMMAND_ADD_SUPER_STREAM:validate([<<"a">>],
+ #{partitions => 1,
+ routing_keys =>
+ <<"a,b,c">>})),
+ ?assertMatch({validation_failure, _},
+ ?COMMAND_ADD_SUPER_STREAM:validate([<<"a">>],
+ #{partitions => 0})),
+ ?assertEqual(ok,
+ ?COMMAND_ADD_SUPER_STREAM:validate([<<"a">>],
+ #{partitions => 5})),
+ ?assertEqual(ok,
+ ?COMMAND_ADD_SUPER_STREAM:validate([<<"a">>],
+ #{routing_keys =>
+ <<"a,b,c">>})),
+
+ [case Expected of
+ ok ->
+ ?assertEqual(ok,
+ ?COMMAND_ADD_SUPER_STREAM:validate([<<"a">>], Opts));
+ error ->
+ ?assertMatch({validation_failure, _},
+ ?COMMAND_ADD_SUPER_STREAM:validate([<<"a">>], Opts))
+ end
+ || {Opts, Expected}
+ <- [{#{max_length_bytes => 1000}, ok},
+ {#{max_length_bytes => <<"1000">>}, ok},
+ {#{max_length_bytes => <<"100gb">>}, ok},
+ {#{max_length_bytes => <<"50mb">>}, ok},
+ {#{max_length_bytes => <<"50bm">>}, error},
+ {#{max_age => <<"PT10M">>}, ok},
+ {#{max_age => <<"P5DT8H">>}, ok},
+ {#{max_age => <<"foo">>}, error},
+ {#{stream_max_segment_size_bytes => 1000}, ok},
+ {#{stream_max_segment_size_bytes => <<"1000">>}, ok},
+ {#{stream_max_segment_size_bytes => <<"100gb">>}, ok},
+ {#{stream_max_segment_size_bytes => <<"50mb">>}, ok},
+ {#{stream_max_segment_size_bytes => <<"50bm">>}, error},
+ {#{leader_locator => <<"client-local">>}, ok},
+ {#{leader_locator => <<"least-leaders">>}, ok},
+ {#{leader_locator => <<"random">>}, ok},
+ {#{leader_locator => <<"foo">>}, error},
+ {#{initial_cluster_size => <<"1">>}, ok},
+ {#{initial_cluster_size => <<"2">>}, ok},
+ {#{initial_cluster_size => <<"3">>}, ok},
+ {#{initial_cluster_size => <<"0">>}, error},
+ {#{initial_cluster_size => <<"-1">>}, error},
+ {#{initial_cluster_size => <<"foo">>}, error}]],
+ ok.
+
+delete_super_stream_merge_defaults(_Config) ->
+ ?assertMatch({[<<"super-stream">>], #{vhost := <<"/">>}},
+ ?COMMAND_DELETE_SUPER_STREAM:merge_defaults([<<"super-stream">>],
+ #{})),
+ ok.
+
+delete_super_stream_validate(_Config) ->
+ ?assertMatch({validation_failure, not_enough_args},
+ ?COMMAND_DELETE_SUPER_STREAM:validate([], #{})),
+ ?assertMatch({validation_failure, too_many_args},
+ ?COMMAND_DELETE_SUPER_STREAM:validate([<<"a">>, <<"b">>],
+ #{})),
+ ?assertEqual(ok, ?COMMAND_ADD_SUPER_STREAM:validate([<<"a">>], #{})),
+ ok.
+
+add_delete_super_stream_run(Config) ->
+ Node = rabbit_ct_broker_helpers:get_node_config(Config, 0, nodename),
+ Opts =
+ #{node => Node,
+ timeout => 10000,
+ vhost => <<"/">>},
+
+ % with number of partitions
+ ?assertMatch({ok, _},
+ ?COMMAND_ADD_SUPER_STREAM:run([<<"invoices">>],
+ maps:merge(#{partitions => 3},
+ Opts))),
+ ?assertEqual({ok,
+ [<<"invoices-0">>, <<"invoices-1">>, <<"invoices-2">>]},
+ partitions(Config, <<"invoices">>)),
+ ?assertMatch({ok, _},
+ ?COMMAND_DELETE_SUPER_STREAM:run([<<"invoices">>], Opts)),
+ ?assertEqual({error, stream_not_found},
+ partitions(Config, <<"invoices">>)),
+
+ % with routing keys
+ ?assertMatch({ok, _},
+ ?COMMAND_ADD_SUPER_STREAM:run([<<"invoices">>],
+ maps:merge(#{routing_keys =>
+ <<" amer,emea , apac">>},
+ Opts))),
+ ?assertEqual({ok,
+ [<<"invoices-amer">>, <<"invoices-emea">>,
+ <<"invoices-apac">>]},
+ partitions(Config, <<"invoices">>)),
+ ?assertMatch({ok, _},
+ ?COMMAND_DELETE_SUPER_STREAM:run([<<"invoices">>], Opts)),
+ ?assertEqual({error, stream_not_found},
+ partitions(Config, <<"invoices">>)),
+
+ % with arguments
+ ExtraOptions =
+ #{partitions => 3,
+ max_length_bytes => <<"50mb">>,
+ max_age => <<"PT10M">>,
+ stream_max_segment_size_bytes => <<"1mb">>,
+ leader_locator => <<"random">>,
+ initial_cluster_size => <<"1">>},
+
+ ?assertMatch({ok, _},
+ ?COMMAND_ADD_SUPER_STREAM:run([<<"invoices">>],
+ maps:merge(ExtraOptions, Opts))),
+
+ {ok, Q} = queue_lookup(Config, <<"invoices-0">>),
+ Args = amqqueue:get_arguments(Q),
+ ?assertMatch({_, <<"random">>},
+ rabbit_misc:table_lookup(Args, <<"x-queue-leader-locator">>)),
+ ?assertMatch({_, 1},
+ rabbit_misc:table_lookup(Args, <<"x-initial-cluster-size">>)),
+ ?assertMatch({_, 1000000},
+ rabbit_misc:table_lookup(Args,
+ <<"x-stream-max-segment-size-bytes">>)),
+ ?assertMatch({_, <<"600s">>},
+ rabbit_misc:table_lookup(Args, <<"x-max-age">>)),
+ ?assertMatch({_, 50000000},
+ rabbit_misc:table_lookup(Args, <<"x-max-length-bytes">>)),
+ ?assertMatch({_, <<"stream">>},
+ rabbit_misc:table_lookup(Args, <<"x-queue-type">>)),
+
+ ?assertMatch({ok, _},
+ ?COMMAND_DELETE_SUPER_STREAM:run([<<"invoices">>], Opts)),
+
+ ok.
+
+partitions(Config, Name) ->
+ rabbit_ct_broker_helpers:rpc(Config,
+ 0,
+ rabbit_stream_manager,
+ partitions,
+ [<<"/">>, Name]).
+
+create_stream(S, Stream, C0) ->
+ rabbit_stream_SUITE:test_create_stream(gen_tcp, S, Stream, C0).
+
+subscribe(S, SubId, Stream, C) ->
+ rabbit_stream_SUITE:test_subscribe(gen_tcp, S, SubId, Stream, C).
+
+declare_publisher(S, PubId, Stream, C) ->
+ rabbit_stream_SUITE:test_declare_publisher(gen_tcp,
+ S,
+ PubId,
+ Stream,
+ C).
+
+delete_stream(S, Stream, C) ->
+ rabbit_stream_SUITE:test_delete_stream(gen_tcp, S, Stream, C).
+
+metadata_update_stream_deleted(S, Stream, C) ->
+ rabbit_stream_SUITE:test_metadata_update_stream_deleted(gen_tcp,
+ S,
+ Stream,
+ C).
+
+close(S, C) ->
+ rabbit_stream_SUITE:test_close(gen_tcp, S, C).
+
+options(Config) ->
+ Node = rabbit_ct_broker_helpers:get_node_config(Config, 0, nodename),
+ Opts =
+ #{node => Node,
+ timeout => 10000,
+ verbose => false,
+ vhost => <<"/">>}, %% just for list_consumers and list_publishers
+ Opts.
+
+to_list(CommandRun) ->
+ 'Elixir.Enum':to_list(CommandRun).
+
+command_result_count(CommandRun) ->
+ length(to_list(CommandRun)).
+
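+%% Counting helpers polled via ?awaitMatch/3 to wait for connections,
+%% consumers and publishers to show up in the CLI command output.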
+connection_count(Config) ->
+ command_result_count(?COMMAND_LIST_CONNECTIONS:run([<<"conn_name">>],
+ options(Config))).
+
+consumer_count(Config) ->
+ command_result_count(?COMMAND_LIST_CONSUMERS:run([<<"stream">>],
+ options(Config))).
+
+publisher_count(Config) ->
+ command_result_count(?COMMAND_LIST_PUBLISHERS:run([<<"stream">>],
+ options(Config))).
+
+start_stream_connection(Port) ->
+ start_stream_connection(gen_tcp, Port).
+
+start_stream_tls_connection(Port) ->
+ start_stream_connection(ssl, Port).
+
+start_stream_connection(Transport, Port) ->
+ {ok, S} =
+ Transport:connect("localhost", Port,
+ [{active, false}, {mode, binary}]),
+ C0 = rabbit_stream_core:init(0),
+ C1 = rabbit_stream_SUITE:test_peer_properties(Transport, S, C0),
+ C = rabbit_stream_SUITE:test_authenticate(Transport, S, C1),
+ {S, C}.
+
+start_amqp_connection(Type, Node, Port) ->
+ Params = amqp_params(Type, Node, Port),
+ {ok, _Connection} = amqp_connection:start(Params).
+
+amqp_params(network, _, Port) ->
+ #amqp_params_network{port = Port};
+amqp_params(direct, Node, _) ->
+ #amqp_params_direct{node = Node}.
+
+queue_lookup(Config, Q) ->
+ QueueName = rabbit_misc:r(<<"/">>, queue, Q),
+ rabbit_ct_broker_helpers:rpc(Config,
+ 0,
+ rabbit_amqqueue,
+ lookup,
+ [QueueName]).
diff --git a/deps/rabbitmq_stream/test/config_schema_SUITE.erl b/deps/rabbitmq_stream/test/config_schema_SUITE.erl
index a298811541..fe2daa2da0 100644
--- a/deps/rabbitmq_stream/test/config_schema_SUITE.erl
+++ b/deps/rabbitmq_stream/test/config_schema_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(config_schema_SUITE).
@@ -10,44 +10,47 @@
-compile(export_all).
all() ->
- [
- run_snippets
- ].
+ [run_snippets].
%% -------------------------------------------------------------------
%% Testsuite setup/teardown.
%% -------------------------------------------------------------------
init_per_suite(Config) ->
- rabbit_ct_helpers:log_environment(),
- Config1 = rabbit_ct_helpers:run_setup_steps(Config),
- rabbit_ct_config_schema:init_schemas(rabbitmq_stream, Config1).
+ rabbit_ct_helpers:log_environment(),
+ Config1 = rabbit_ct_helpers:run_setup_steps(Config),
+ rabbit_ct_config_schema:init_schemas(rabbitmq_stream, Config1).
end_per_suite(Config) ->
- rabbit_ct_helpers:run_teardown_steps(Config).
+ rabbit_ct_helpers:run_teardown_steps(Config).
init_per_testcase(Testcase, Config) ->
- rabbit_ct_helpers:testcase_started(Config, Testcase),
- Config1 = rabbit_ct_helpers:set_config(Config, [
- {rmq_nodename_suffix, Testcase}
- ]),
- rabbit_ct_helpers:run_steps(Config1,
- rabbit_ct_broker_helpers:setup_steps() ++
- rabbit_ct_client_helpers:setup_steps()).
+ rabbit_ct_helpers:testcase_started(Config, Testcase),
+ Config1 =
+ rabbit_ct_helpers:set_config(Config,
+ [{rmq_nodename_suffix, Testcase}]),
+ rabbit_ct_helpers:run_steps(Config1,
+ rabbit_ct_broker_helpers:setup_steps()
+ ++ rabbit_ct_client_helpers:setup_steps()).
end_per_testcase(Testcase, Config) ->
- Config1 = rabbit_ct_helpers:run_steps(Config,
- rabbit_ct_client_helpers:teardown_steps() ++
- rabbit_ct_broker_helpers:teardown_steps()),
- rabbit_ct_helpers:testcase_finished(Config1, Testcase).
+ Config1 =
+ rabbit_ct_helpers:run_steps(Config,
+ rabbit_ct_client_helpers:teardown_steps()
+ ++ rabbit_ct_broker_helpers:teardown_steps()),
+ rabbit_ct_helpers:testcase_finished(Config1, Testcase).
%% -------------------------------------------------------------------
%% Testcases.
%% -------------------------------------------------------------------
run_snippets(Config) ->
- ok = rabbit_ct_broker_helpers:rpc(Config, 0,
- ?MODULE, run_snippets1, [Config]).
+ ok =
+ rabbit_ct_broker_helpers:rpc(Config,
+ 0,
+ ?MODULE,
+ run_snippets1,
+ [Config]).
run_snippets1(Config) ->
- rabbit_ct_config_schema:run_snippets(Config).
+ rabbit_ct_config_schema:run_snippets(Config).
diff --git a/deps/rabbitmq_stream/test/config_schema_SUITE_data/rabbitmq_stream.snippets b/deps/rabbitmq_stream/test/config_schema_SUITE_data/rabbitmq_stream.snippets
index 8f60ef9710..eb7f81b55b 100644
--- a/deps/rabbitmq_stream/test/config_schema_SUITE_data/rabbitmq_stream.snippets
+++ b/deps/rabbitmq_stream/test/config_schema_SUITE_data/rabbitmq_stream.snippets
@@ -3,14 +3,14 @@
[{rabbitmq_stream,[{tcp_listeners,[12345]}]}],
[rabbitmq_stream]},
{listeners_ip,
- "stream.listeners.tcp.1 = 127.0.0.1:5555
- stream.listeners.tcp.2 = ::1:5555",
- [{rabbitmq_stream,[{tcp_listeners,[{"127.0.0.1",5555},{"::1",5555}]}]}],
+ "stream.listeners.tcp.1 = 127.0.0.1:5552
+ stream.listeners.tcp.2 = ::1:5552",
+ [{rabbitmq_stream,[{tcp_listeners,[{"127.0.0.1",5552},{"::1",5552}]}]}],
[rabbitmq_stream]},
{listener_tcp_options,
- "stream.listeners.tcp.1 = 127.0.0.1:5555
- stream.listeners.tcp.2 = ::1:5555
+ "stream.listeners.tcp.1 = 127.0.0.1:5552
+ stream.listeners.tcp.2 = ::1:5552
stream.tcp_listen_options.backlog = 2048
stream.tcp_listen_options.recbuf = 8192
@@ -25,8 +25,8 @@
",
[{rabbitmq_stream,[
{tcp_listeners,[
- {"127.0.0.1",5555},
- {"::1",5555}
+ {"127.0.0.1",5552},
+ {"::1",5552}
]}
, {tcp_listen_options, [
{backlog, 2048},
@@ -54,9 +54,13 @@
[rabbitmq_stream]},
{advertised_host_port,
"stream.advertised_host = some-host
- stream.advertised_port = 5556",
+ stream.advertised_tls_host = some-other-host
+ stream.advertised_port = 5556
+ stream.advertised_tls_port = 5553",
[{rabbitmq_stream,[{advertised_host, <<"some-host">>},
- {advertised_port, 5556}]}],
+ {advertised_tls_host, <<"some-other-host">>},
+ {advertised_port, 5556},
+ {advertised_tls_port, 5553}]}],
[rabbitmq_stream]},
{credits,
"stream.frame_max = 2097152
@@ -70,4 +74,4 @@
[{rabbitmq_stream,[{initial_credits, 100000},
{credits_required_for_unblocking, 25000}]}],
[rabbitmq_stream]}
-].
\ No newline at end of file
+].
diff --git a/deps/rabbitmq_stream/test/rabbit_stream_SUITE.erl b/deps/rabbitmq_stream/test/rabbit_stream_SUITE.erl
index 4197b1de71..bb40d7d9bf 100644
--- a/deps/rabbitmq_stream/test/rabbit_stream_SUITE.erl
+++ b/deps/rabbitmq_stream/test/rabbit_stream_SUITE.erl
@@ -11,51 +11,99 @@
%% The Original Code is RabbitMQ.
%%
%% The Initial Developer of the Original Code is Pivotal Software, Inc.
-%% Copyright (c) 2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_stream_SUITE).
--include_lib("common_test/include/ct.hrl").
--include("rabbit_stream.hrl").
+% -include_lib("common_test/include/ct.hrl").
+-include_lib("eunit/include/eunit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
+-include_lib("rabbitmq_ct_helpers/include/rabbit_assert.hrl").
+-include_lib("rabbitmq_stream_common/include/rabbit_stream.hrl").
+-include("rabbit_stream_metrics.hrl").
+
+-compile(nowarn_export_all).
-compile(export_all).
+-define(WAIT, 5000).
+
all() ->
- [
- {group, single_node},
- {group, cluster}
- ].
+ [{group, single_node}, {group, single_node_1}, {group, cluster}].
groups() ->
- [
- {single_node, [], [test_stream]},
- {cluster, [], [test_stream, java]}
- ].
+ [{single_node, [],
+ [test_stream,
+ test_stream_tls,
+ test_gc_consumers,
+ test_gc_publishers,
+ unauthenticated_client_rejected_tcp_connected,
+ timeout_tcp_connected,
+ unauthenticated_client_rejected_peer_properties_exchanged,
+ timeout_peer_properties_exchanged,
+ unauthenticated_client_rejected_authenticating,
+ timeout_authenticating,
+ timeout_close_sent]},
+ %% Run `test_global_counters` on its own so the global metrics are
+ %% initialised to 0 for each testcase
+ {single_node_1, [], [test_global_counters]},
+ {cluster, [], [test_stream, test_stream_tls, java]}].
init_per_suite(Config) ->
- rabbit_ct_helpers:log_environment(),
- Config.
+ case rabbit_ct_helpers:is_mixed_versions() of
+ true ->
+ {skip, "mixed version clusters are not supported"};
+ _ ->
+ rabbit_ct_helpers:log_environment(),
+ Config
+ end.
end_per_suite(Config) ->
Config.
-init_per_group(single_node, Config) ->
- Config1 = rabbit_ct_helpers:set_config(Config, [{rmq_nodes_clustered, false}]),
- rabbit_ct_helpers:run_setup_steps(Config1,
- rabbit_ct_broker_helpers:setup_steps());
+init_per_group(Group, Config)
+ when Group == single_node orelse Group == single_node_1 ->
+ Config1 =
+ rabbit_ct_helpers:set_config(Config, [{rmq_nodes_clustered, false}]),
+ Config2 =
+ rabbit_ct_helpers:set_config(Config1,
+ {rabbitmq_ct_tls_verify, verify_none}),
+ Config3 =
+ rabbit_ct_helpers:set_config(Config2, {rabbitmq_stream, verify_none}),
+ rabbit_ct_helpers:run_setup_steps(Config3,
+ [fun(StepConfig) ->
+ rabbit_ct_helpers:merge_app_env(StepConfig,
+ {rabbit,
+ [{core_metrics_gc_interval,
+ 1000}]})
+ end,
+ fun(StepConfig) ->
+ rabbit_ct_helpers:merge_app_env(StepConfig,
+ {rabbitmq_stream,
+ [{connection_negotiation_step_timeout,
+ 500}]})
+ end]
+ ++ rabbit_ct_broker_helpers:setup_steps());
init_per_group(cluster = Group, Config) ->
- Config1 = rabbit_ct_helpers:set_config(Config, [{rmq_nodes_clustered, true}]),
- Config2 = rabbit_ct_helpers:set_config(Config1,
- [{rmq_nodes_count, 3},
- {rmq_nodename_suffix, Group},
- {tcp_ports_base}]),
- rabbit_ct_helpers:run_setup_steps(Config2,
- [fun(StepConfig) ->
- rabbit_ct_helpers:merge_app_env(StepConfig,
- {aten, [{poll_interval, 1000}]})
- end] ++
- rabbit_ct_broker_helpers:setup_steps());
+ Config1 =
+ rabbit_ct_helpers:set_config(Config, [{rmq_nodes_clustered, true}]),
+ Config2 =
+ rabbit_ct_helpers:set_config(Config1,
+ [{rmq_nodes_count, 3},
+ {rmq_nodename_suffix, Group},
+ {tcp_ports_base}]),
+ Config3 =
+ rabbit_ct_helpers:set_config(Config2,
+ {rabbitmq_ct_tls_verify, verify_none}),
+ rabbit_ct_helpers:run_setup_steps(Config3,
+ [fun(StepConfig) ->
+ rabbit_ct_helpers:merge_app_env(StepConfig,
+ {aten,
+ [{poll_interval,
+ 1000}]})
+ end]
+ ++ rabbit_ct_broker_helpers:setup_steps());
init_per_group(_, Config) ->
rabbit_ct_helpers:run_setup_steps(Config).
@@ -63,7 +111,7 @@ end_per_group(java, Config) ->
rabbit_ct_helpers:run_teardown_steps(Config);
end_per_group(_, Config) ->
rabbit_ct_helpers:run_steps(Config,
- rabbit_ct_broker_helpers:teardown_steps()).
+ rabbit_ct_broker_helpers:teardown_steps()).
init_per_testcase(_TestCase, Config) ->
Config.
@@ -71,25 +119,180 @@ init_per_testcase(_TestCase, Config) ->
end_per_testcase(_Test, _Config) ->
ok.
+test_global_counters(Config) ->
+ test_server(gen_tcp, Config),
+ ?assertEqual(#{publishers => 0,
+ consumers => 0,
+ messages_confirmed_total => 2,
+ messages_received_confirm_total => 2,
+ messages_received_total => 2,
+ messages_routed_total => 0,
+ messages_unroutable_dropped_total => 0,
+ messages_unroutable_returned_total => 0,
+ stream_error_access_refused_total => 0,
+ stream_error_authentication_failure_total => 0,
+ stream_error_frame_too_large_total => 0,
+ stream_error_internal_error_total => 0,
+ stream_error_precondition_failed_total => 0,
+ stream_error_publisher_does_not_exist_total => 0,
+ stream_error_sasl_authentication_failure_loopback_total => 0,
+ stream_error_sasl_challenge_total => 0,
+ stream_error_sasl_error_total => 0,
+ stream_error_sasl_mechanism_not_supported_total => 0,
+ stream_error_stream_already_exists_total => 0,
+ stream_error_stream_does_not_exist_total => 0,
+ stream_error_stream_not_available_total => 1,
+ stream_error_subscription_id_already_exists_total => 0,
+ stream_error_subscription_id_does_not_exist_total => 0,
+ stream_error_unknown_frame_total => 0,
+ stream_error_vhost_access_failure_total => 0},
+ get_global_counters(Config)),
+ ok.
+
test_stream(Config) ->
- Port = get_stream_port(Config),
- test_server(Port),
+ test_server(gen_tcp, Config),
+ ok.
+
+test_stream_tls(Config) ->
+ test_server(ssl, Config),
+ ok.
+
+test_gc_consumers(Config) ->
+ Pid = spawn(fun() -> ok end),
+ rabbit_ct_broker_helpers:rpc(Config,
+ 0,
+ rabbit_stream_metrics,
+ consumer_created,
+ [Pid,
+ #resource{name = <<"test">>,
+ kind = queue,
+ virtual_host = <<"/">>},
+ 0,
+ 10,
+ 0,
+ 0,
+ 0,
+ #{}]),
+ ?awaitMatch(0, consumer_count(Config), ?WAIT),
+ ok.
+
+test_gc_publishers(Config) ->
+ Pid = spawn(fun() -> ok end),
+ rabbit_ct_broker_helpers:rpc(Config,
+ 0,
+ rabbit_stream_metrics,
+ publisher_created,
+ [Pid,
+ #resource{name = <<"test">>,
+ kind = queue,
+ virtual_host = <<"/">>},
+ 0,
+ <<"ref">>]),
+ ?awaitMatch(0, publisher_count(Config), ?WAIT),
ok.
+unauthenticated_client_rejected_tcp_connected(Config) ->
+ Port = get_stream_port(Config),
+ {ok, S} =
+ gen_tcp:connect("localhost", Port, [{active, false}, {mode, binary}]),
+ ?assertEqual(ok, gen_tcp:send(S, <<"invalid data">>)),
+ ?assertEqual(closed, wait_for_socket_close(gen_tcp, S, 1)).
+
+timeout_tcp_connected(Config) ->
+ Port = get_stream_port(Config),
+ {ok, S} =
+ gen_tcp:connect("localhost", Port, [{active, false}, {mode, binary}]),
+ ?assertEqual(closed, wait_for_socket_close(gen_tcp, S, 1)).
+
+unauthenticated_client_rejected_peer_properties_exchanged(Config) ->
+ Port = get_stream_port(Config),
+ {ok, S} =
+ gen_tcp:connect("localhost", Port, [{active, false}, {mode, binary}]),
+ C0 = rabbit_stream_core:init(0),
+ test_peer_properties(gen_tcp, S, C0),
+ ?assertEqual(ok, gen_tcp:send(S, <<"invalid data">>)),
+ ?assertEqual(closed, wait_for_socket_close(gen_tcp, S, 1)).
+
+timeout_peer_properties_exchanged(Config) ->
+ Port = get_stream_port(Config),
+ {ok, S} =
+ gen_tcp:connect("localhost", Port, [{active, false}, {mode, binary}]),
+ C0 = rabbit_stream_core:init(0),
+ test_peer_properties(gen_tcp, S, C0),
+ ?assertEqual(closed, wait_for_socket_close(gen_tcp, S, 1)).
+
+unauthenticated_client_rejected_authenticating(Config) ->
+ Port = get_stream_port(Config),
+ {ok, S} =
+ gen_tcp:connect("localhost", Port, [{active, false}, {mode, binary}]),
+ C0 = rabbit_stream_core:init(0),
+ test_peer_properties(gen_tcp, S, C0),
+ SaslHandshakeFrame =
+ rabbit_stream_core:frame({request, 1, sasl_handshake}),
+ ?assertEqual(ok, gen_tcp:send(S, SaslHandshakeFrame)),
+ ?awaitMatch({error, closed}, gen_tcp:send(S, <<"invalid data">>),
+ ?WAIT).
+
+timeout_authenticating(Config) ->
+ Port = get_stream_port(Config),
+ {ok, S} =
+ gen_tcp:connect("localhost", Port, [{active, false}, {mode, binary}]),
+ C0 = rabbit_stream_core:init(0),
+ test_peer_properties(gen_tcp, S, C0),
+ _Frame = rabbit_stream_core:frame({request, 1, sasl_handshake}),
+ ?assertEqual(closed, wait_for_socket_close(gen_tcp, S, 1)).
+
+timeout_close_sent(Config) ->
+ Port = get_stream_port(Config),
+ {ok, S} =
+ gen_tcp:connect("localhost", Port, [{active, false}, {mode, binary}]),
+ C0 = rabbit_stream_core:init(0),
+ C1 = test_peer_properties(gen_tcp, S, C0),
+ C2 = test_authenticate(gen_tcp, S, C1),
+ % Trigger rabbit_stream_reader to transition to state close_sent
+ NonExistentCommand = 999,
+ IOData = <<?REQUEST:1, NonExistentCommand:15, ?VERSION_1:16>>,
+ Size = iolist_size(IOData),
+ Frame = [<<Size:32>> | IOData],
+ ok = gen_tcp:send(S, Frame),
+ {{request, _CorrelationID,
+ {close, ?RESPONSE_CODE_UNKNOWN_FRAME, <<"unknown frame">>}},
+ _Config} =
+ receive_commands(gen_tcp, S, C2),
+ % Now, rabbit_stream_reader is in state close_sent.
+ ?assertEqual(closed, wait_for_socket_close(gen_tcp, S, 1)).
+
+consumer_count(Config) ->
+ ets_count(Config, ?TABLE_CONSUMER).
+
+publisher_count(Config) ->
+ ets_count(Config, ?TABLE_PUBLISHER).
+
+ets_count(Config, Table) ->
+ Info = rabbit_ct_broker_helpers:rpc(Config, 0, ets, info, [Table]),
+ rabbit_misc:pget(size, Info).
+
java(Config) ->
StreamPortNode1 = get_stream_port(Config, 0),
StreamPortNode2 = get_stream_port(Config, 1),
+ StreamPortTlsNode1 = get_stream_port_tls(Config, 0),
+ StreamPortTlsNode2 = get_stream_port_tls(Config, 1),
Node1Name = get_node_name(Config, 0),
Node2Name = get_node_name(Config, 1),
RabbitMqCtl = get_rabbitmqctl(Config),
DataDir = rabbit_ct_helpers:get_config(Config, data_dir),
- MakeResult = rabbit_ct_helpers:make(Config, DataDir, ["tests",
- {"NODE1_STREAM_PORT=~b", [StreamPortNode1]},
- {"NODE1_NAME=~p", [Node1Name]},
- {"NODE2_NAME=~p", [Node2Name]},
- {"NODE2_STREAM_PORT=~b", [StreamPortNode2]},
- {"RABBITMQCTL=~p", [RabbitMqCtl]}
- ]),
+ MakeResult =
+ rabbit_ct_helpers:make(Config, DataDir,
+ ["tests",
+ {"NODE1_STREAM_PORT=~b", [StreamPortNode1]},
+ {"NODE1_STREAM_PORT_TLS=~b",
+ [StreamPortTlsNode1]},
+ {"NODE1_NAME=~p", [Node1Name]},
+ {"NODE2_NAME=~p", [Node2Name]},
+ {"NODE2_STREAM_PORT=~b", [StreamPortNode2]},
+ {"NODE2_STREAM_PORT_TLS=~b",
+ [StreamPortTlsNode2]},
+ {"RABBITMQCTL=~p", [RabbitMqCtl]}]),
{ok, _} = MakeResult.
get_rabbitmqctl(Config) ->
@@ -99,7 +302,15 @@ get_stream_port(Config) ->
get_stream_port(Config, 0).
get_stream_port(Config, Node) ->
- rabbit_ct_broker_helpers:get_node_config(Config, Node, tcp_port_stream).
+ rabbit_ct_broker_helpers:get_node_config(Config, Node,
+ tcp_port_stream).
+
+get_stream_port_tls(Config) ->
+ get_stream_port_tls(Config, 0).
+
+get_stream_port_tls(Config, Node) ->
+ rabbit_ct_broker_helpers:get_node_config(Config, Node,
+ tcp_port_stream_tls).
get_node_name(Config) ->
get_node_name(Config, 0).
@@ -107,160 +318,218 @@ get_node_name(Config) ->
get_node_name(Config, Node) ->
rabbit_ct_broker_helpers:get_node_config(Config, Node, nodename).
-test_server(Port) ->
- {ok, S} = gen_tcp:connect("localhost", Port, [{active, false},
- {mode, binary}]),
- test_peer_properties(S),
- test_authenticate(S),
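+%% Exercises the happy path over the given transport: handshake, stream
+%% creation, publisher declaration, publishing with confirms, subscription
+%% and delivery, stream deletion and connection close.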
+test_server(Transport, Config) ->
+ Port =
+ case Transport of
+ gen_tcp ->
+ get_stream_port(Config);
+ ssl ->
+ application:ensure_all_started(ssl),
+ get_stream_port_tls(Config)
+ end,
+ {ok, S} =
+ Transport:connect("localhost", Port,
+ [{active, false}, {mode, binary}]),
+ C0 = rabbit_stream_core:init(0),
+ C1 = test_peer_properties(Transport, S, C0),
+ C2 = test_authenticate(Transport, S, C1),
Stream = <<"stream1">>,
- test_create_stream(S, Stream),
+ C3 = test_create_stream(Transport, S, Stream, C2),
+ PublisherId = 42,
+ ?assertMatch(#{publishers := 0}, get_global_counters(Config)),
+ C4 = test_declare_publisher(Transport, S, PublisherId, Stream, C3),
+ ?awaitMatch(#{publishers := 1}, get_global_counters(Config), ?WAIT),
Body = <<"hello">>,
- test_publish_confirm(S, Stream, Body),
+ C5 = test_publish_confirm(Transport, S, PublisherId, Body, C4),
+ C6 = test_publish_confirm(Transport, S, PublisherId, Body, C5),
SubscriptionId = 42,
- Rest = test_subscribe(S, SubscriptionId, Stream),
- test_deliver(S, Rest, SubscriptionId, Body),
- test_delete_stream(S, Stream),
- test_metadata_update_stream_deleted(S, Stream),
- test_close(S),
- closed = wait_for_socket_close(S, 10),
+ ?assertMatch(#{consumers := 0}, get_global_counters(Config)),
+ C7 = test_subscribe(Transport, S, SubscriptionId, Stream, C6),
+ ?awaitMatch(#{consumers := 1}, get_global_counters(Config), ?WAIT),
+ C8 = test_deliver(Transport, S, SubscriptionId, 0, Body, C7),
+ C9 = test_deliver(Transport, S, SubscriptionId, 1, Body, C8),
+ C10 = test_delete_stream(Transport, S, Stream, C9),
+ _C11 = test_close(Transport, S, C10),
+ closed = wait_for_socket_close(Transport, S, 10),
ok.
-test_peer_properties(S) ->
- PeerPropertiesFrame = <<?COMMAND_PEER_PROPERTIES:16, ?VERSION_0:16, 1:32, 0:32>>,
- PeerPropertiesFrameSize = byte_size(PeerPropertiesFrame),
- gen_tcp:send(S, <<PeerPropertiesFrameSize:32, PeerPropertiesFrame/binary>>),
- {ok, <<_Size:32, ?COMMAND_PEER_PROPERTIES:16, ?VERSION_0:16, 1:32, ?RESPONSE_CODE_OK:16, _Rest/binary>>} = gen_tcp:recv(S, 0, 5000).
-
-test_authenticate(S) ->
- SaslHandshakeFrame = <<?COMMAND_SASL_HANDSHAKE:16, ?VERSION_0:16, 1:32>>,
- SaslHandshakeFrameSize = byte_size(SaslHandshakeFrame),
- gen_tcp:send(S, <<SaslHandshakeFrameSize:32, SaslHandshakeFrame/binary>>),
+test_peer_properties(Transport, S, C0) ->
+ PeerPropertiesFrame =
+ rabbit_stream_core:frame({request, 1, {peer_properties, #{}}}),
+ ok = Transport:send(S, PeerPropertiesFrame),
+ {Cmd, C} = receive_commands(Transport, S, C0),
+ ?assertMatch({response, 1, {peer_properties, ?RESPONSE_CODE_OK, _}},
+ Cmd),
+ C.
+
+test_authenticate(Transport, S, C0) ->
+ SaslHandshakeFrame =
+ rabbit_stream_core:frame({request, 1, sasl_handshake}),
+ ok = Transport:send(S, SaslHandshakeFrame),
Plain = <<"PLAIN">>,
AmqPlain = <<"AMQPLAIN">>,
- {ok, SaslAvailable} = gen_tcp:recv(S, 0, 5000),
- %% mechanisms order is not deterministic, so checking both orders
- ok = case SaslAvailable of
- <<31:32, ?COMMAND_SASL_HANDSHAKE:16, ?VERSION_0:16, 1:32, ?RESPONSE_CODE_OK:16, 2:32,
- 5:16, Plain:5/binary, 8:16, AmqPlain:8/binary>> ->
- ok;
- <<31:32, ?COMMAND_SASL_HANDSHAKE:16, ?VERSION_0:16, 1:32, ?RESPONSE_CODE_OK:16, 2:32,
- 8:16, AmqPlain:8/binary, 5:16, Plain:5/binary>> ->
- ok;
- _ ->
- failed
- end,
+ {Cmd, C1} = receive_commands(Transport, S, C0),
+ case Cmd of
+ {response, _, {sasl_handshake, ?RESPONSE_CODE_OK, Mechanisms}} ->
+ ?assertEqual([AmqPlain, Plain], lists:sort(Mechanisms));
+ _ ->
+ ct:fail("invalid cmd ~p", [Cmd])
+ end,
Username = <<"guest">>,
Password = <<"guest">>,
Null = 0,
PlainSasl = <<Null:8, Username/binary, Null:8, Password/binary>>,
- PlainSaslSize = byte_size(PlainSasl),
-
- SaslAuthenticateFrame = <<?COMMAND_SASL_AUTHENTICATE:16, ?VERSION_0:16, 2:32,
- 5:16, Plain/binary, PlainSaslSize:32, PlainSasl/binary>>,
-
- SaslAuthenticateFrameSize = byte_size(SaslAuthenticateFrame),
- gen_tcp:send(S, <<SaslAuthenticateFrameSize:32, SaslAuthenticateFrame/binary>>),
+ SaslAuthenticateFrame =
+ rabbit_stream_core:frame({request, 2,
+ {sasl_authenticate, Plain, PlainSasl}}),
+ ok = Transport:send(S, SaslAuthenticateFrame),
+ {SaslAuth, C2} = receive_commands(Transport, S, C1),
+ {response, 2, {sasl_authenticate, ?RESPONSE_CODE_OK}} = SaslAuth,
+ {Tune, C3} = receive_commands(Transport, S, C2),
+ {tune, ?DEFAULT_FRAME_MAX, ?DEFAULT_HEARTBEAT} = Tune,
- {ok, <<10:32, ?COMMAND_SASL_AUTHENTICATE:16, ?VERSION_0:16, 2:32, ?RESPONSE_CODE_OK:16, RestTune/binary>>} = gen_tcp:recv(S, 0, 5000),
-
- TuneExpected = <<12:32, ?COMMAND_TUNE:16, ?VERSION_0:16, ?DEFAULT_FRAME_MAX:32, ?DEFAULT_HEARTBEAT:32>>,
- case RestTune of
- <<>> ->
- {ok, TuneExpected} = gen_tcp:recv(S, 0, 5000);
- TuneReceived ->
- TuneExpected = TuneReceived
- end,
-
- TuneFrame = <<?COMMAND_TUNE:16, ?VERSION_0:16, ?DEFAULT_FRAME_MAX:32, 0:32>>,
- TuneFrameSize = byte_size(TuneFrame),
- gen_tcp:send(S, <<TuneFrameSize:32, TuneFrame/binary>>),
+ TuneFrame =
+ rabbit_stream_core:frame({response, 0,
+ {tune, ?DEFAULT_FRAME_MAX, 0}}),
+ ok = Transport:send(S, TuneFrame),
VirtualHost = <<"/">>,
- VirtualHostLength = byte_size(VirtualHost),
- OpenFrame = <<?COMMAND_OPEN:16, ?VERSION_0:16, 3:32, VirtualHostLength:16, VirtualHost/binary>>,
- OpenFrameSize = byte_size(OpenFrame),
- gen_tcp:send(S, <<OpenFrameSize:32, OpenFrame/binary>>),
- {ok, <<10:32, ?COMMAND_OPEN:16, ?VERSION_0:16, 3:32, ?RESPONSE_CODE_OK:16>>} = gen_tcp:recv(S, 0, 5000).
-
-
-test_create_stream(S, Stream) ->
- StreamSize = byte_size(Stream),
- CreateStreamFrame = <<?COMMAND_CREATE_STREAM:16, ?VERSION_0:16, 1:32, StreamSize:16, Stream:StreamSize/binary, 0:32>>,
- FrameSize = byte_size(CreateStreamFrame),
- gen_tcp:send(S, <<FrameSize:32, CreateStreamFrame/binary>>),
- {ok, <<_Size:32, ?COMMAND_CREATE_STREAM:16, ?VERSION_0:16, 1:32, ?RESPONSE_CODE_OK:16>>} = gen_tcp:recv(S, 0, 5000).
-
-test_delete_stream(S, Stream) ->
- StreamSize = byte_size(Stream),
- DeleteStreamFrame = <<?COMMAND_DELETE_STREAM:16, ?VERSION_0:16, 1:32, StreamSize:16, Stream:StreamSize/binary>>,
- FrameSize = byte_size(DeleteStreamFrame),
- gen_tcp:send(S, <<FrameSize:32, DeleteStreamFrame/binary>>),
- ResponseFrameSize = 10,
- {ok, <<ResponseFrameSize:32, ?COMMAND_DELETE_STREAM:16, ?VERSION_0:16, 1:32, ?RESPONSE_CODE_OK:16>>} = gen_tcp:recv(S, 4 + 10, 5000).
-
-test_publish_confirm(S, Stream, Body) ->
+ OpenFrame =
+ rabbit_stream_core:frame({request, 3, {open, VirtualHost}}),
+ ok = Transport:send(S, OpenFrame),
+ {{response, 3, {open, ?RESPONSE_CODE_OK, _ConnectionProperties}},
+ C4} =
+ receive_commands(Transport, S, C3),
+ C4.
+
+test_create_stream(Transport, S, Stream, C0) ->
+ CreateStreamFrame =
+ rabbit_stream_core:frame({request, 1, {create_stream, Stream, #{}}}),
+ ok = Transport:send(S, CreateStreamFrame),
+ {Cmd, C} = receive_commands(Transport, S, C0),
+ ?assertMatch({response, 1, {create_stream, ?RESPONSE_CODE_OK}}, Cmd),
+ C.
+
+test_delete_stream(Transport, S, Stream, C0) ->
+ DeleteStreamFrame =
+ rabbit_stream_core:frame({request, 1, {delete_stream, Stream}}),
+ ok = Transport:send(S, DeleteStreamFrame),
+ {Cmd, C1} = receive_commands(Transport, S, C0),
+ ?assertMatch({response, 1, {delete_stream, ?RESPONSE_CODE_OK}}, Cmd),
+ test_metadata_update_stream_deleted(Transport, S, Stream, C1).
+
+test_metadata_update_stream_deleted(Transport, S, Stream, C0) ->
+ {Meta, C1} = receive_commands(Transport, S, C0),
+ {metadata_update, Stream, ?RESPONSE_CODE_STREAM_NOT_AVAILABLE} = Meta,
+ C1.
+
+test_declare_publisher(Transport, S, PublisherId, Stream, C0) ->
+ DeclarePublisherFrame =
+ rabbit_stream_core:frame({request, 1,
+ {declare_publisher,
+ PublisherId,
+ <<>>,
+ Stream}}),
+ ok = Transport:send(S, DeclarePublisherFrame),
+ {Cmd, C} = receive_commands(Transport, S, C0),
+ ?assertMatch({response, 1, {declare_publisher, ?RESPONSE_CODE_OK}},
+ Cmd),
+ C.
+
+test_publish_confirm(Transport, S, PublisherId, Body, C0) ->
BodySize = byte_size(Body),
- StreamSize = byte_size(Stream),
- PublishFrame = <<?COMMAND_PUBLISH:16, ?VERSION_0:16, StreamSize:16, Stream:StreamSize/binary, 42:8, 1:32, 1:64, BodySize:32, Body:BodySize/binary>>,
- FrameSize = byte_size(PublishFrame),
- gen_tcp:send(S, <<FrameSize:32, PublishFrame/binary>>),
- {ok, <<_Size:32, ?COMMAND_PUBLISH_CONFIRM:16, ?VERSION_0:16, 42:8, 1:32, 1:64>>} = gen_tcp:recv(S, 0, 5000).
-
-test_subscribe(S, SubscriptionId, Stream) ->
- StreamSize = byte_size(Stream),
- SubscribeFrame = <<?COMMAND_SUBSCRIBE:16, ?VERSION_0:16, 1:32, SubscriptionId:8, StreamSize:16, Stream:StreamSize/binary,
- ?OFFSET_TYPE_OFFSET:16, 0:64, 10:16>>,
- FrameSize = byte_size(SubscribeFrame),
- gen_tcp:send(S, <<FrameSize:32, SubscribeFrame/binary>>),
- Res = gen_tcp:recv(S, 0, 5000),
- {ok, <<_Size:32, ?COMMAND_SUBSCRIBE:16, ?VERSION_0:16, 1:32, ?RESPONSE_CODE_OK:16, Rest/binary>>} = Res,
- Rest.
-
-test_deliver(S, Rest, SubscriptionId, Body) ->
- BodySize = byte_size(Body),
- Frame = read_frame(S, Rest),
- <<54:32, ?COMMAND_DELIVER:16, ?VERSION_0:16, SubscriptionId:8, 5:4/unsigned, 0:4/unsigned, 0:8,
- 1:16, 1:32,
- _Timestamp:64, _Epoch:64, 0:64, _Crc:32, _DataLength:32,
- 0:1, BodySize:31/unsigned, Body/binary>> = Frame.
-
-test_metadata_update_stream_deleted(S, Stream) ->
- StreamSize = byte_size(Stream),
- {ok, <<15:32, ?COMMAND_METADATA_UPDATE:16, ?VERSION_0:16, ?RESPONSE_CODE_STREAM_NOT_AVAILABLE:16, StreamSize:16, Stream/binary>>} = gen_tcp:recv(S, 0, 5000).
-
-test_close(S) ->
+ Messages = [<<1:64, 0:1, BodySize:31, Body:BodySize/binary>>],
+ PublishFrame =
+ rabbit_stream_core:frame({publish, PublisherId, 1, Messages}),
+ ok = Transport:send(S, PublishFrame),
+ {Cmd, C} = receive_commands(Transport, S, C0),
+ ?assertMatch({publish_confirm, PublisherId, [1]}, Cmd),
+ C.
+
+test_subscribe(Transport, S, SubscriptionId, Stream, C0) ->
+ SubCmd =
+ {request, 1,
+ {subscribe,
+ SubscriptionId,
+ Stream,
+ 0,
+ 10,
+ #{<<"random">> => <<"thing">>}}},
+ SubscribeFrame = rabbit_stream_core:frame(SubCmd),
+ ok = Transport:send(S, SubscribeFrame),
+ {Cmd, C} = receive_commands(Transport, S, C0),
+ ?assertMatch({response, 1, {subscribe, ?RESPONSE_CODE_OK}}, Cmd),
+ C.
+
+test_deliver(Transport, S, SubscriptionId, COffset, Body, C0) ->
+    ct:pal("test_deliver", []),
+ {{deliver, SubscriptionId, Chunk}, C} =
+ receive_commands(Transport, S, C0),
+ <<5:4/unsigned,
+ 0:4/unsigned,
+ 0:8,
+ 1:16,
+ 1:32,
+ _Timestamp:64,
+ _Epoch:64,
+ COffset:64,
+ _Crc:32,
+ _DataLength:32,
+ _TrailerLength:32,
+ _ReservedBytes:32,
+ 0:1,
+ BodySize:31/unsigned,
+ Body:BodySize/binary>> =
+ Chunk,
+ C.
+
+test_close(Transport, S, C0) ->
CloseReason = <<"OK">>,
- CloseReasonSize = byte_size(CloseReason),
- CloseFrame = <<?COMMAND_CLOSE:16, ?VERSION_0:16, 1:32, ?RESPONSE_CODE_OK:16, CloseReasonSize:16, CloseReason/binary>>,
- CloseFrameSize = byte_size(CloseFrame),
- gen_tcp:send(S, <<CloseFrameSize:32, CloseFrame/binary>>),
- {ok, <<10:32, ?COMMAND_CLOSE:16, ?VERSION_0:16, 1:32, ?RESPONSE_CODE_OK:16>>} = gen_tcp:recv(S, 0, 5000).
-
-wait_for_socket_close(_S, 0) ->
+ CloseFrame =
+ rabbit_stream_core:frame({request, 1,
+ {close, ?RESPONSE_CODE_OK, CloseReason}}),
+ ok = Transport:send(S, CloseFrame),
+ {{response, 1, {close, ?RESPONSE_CODE_OK}}, C} =
+ receive_commands(Transport, S, C0),
+ C.
+
+wait_for_socket_close(_Transport, _S, 0) ->
not_closed;
-wait_for_socket_close(S, Attempt) ->
- case gen_tcp:recv(S, 0, 1000) of
+wait_for_socket_close(Transport, S, Attempt) ->
+ case Transport:recv(S, 0, 1000) of
{error, timeout} ->
- wait_for_socket_close(S, Attempt - 1);
+ wait_for_socket_close(Transport, S, Attempt - 1);
{error, closed} ->
closed
end.
-read_frame(S, Buffer) ->
- inet:setopts(S, [{active, once}]),
- receive
- {tcp, S, Received} ->
- Data = <<Buffer/binary, Received/binary>>,
- case Data of
- <<Size:32, _Body:Size/binary>> ->
- Data;
- _ ->
- read_frame(S, Data)
- end
- after
- 1000 ->
- inet:setopts(S, [{active, false}]),
- Buffer
- end. \ No newline at end of file
+receive_commands(Transport, S, C0) ->
+ case rabbit_stream_core:next_command(C0) of
+ empty ->
+ case Transport:recv(S, 0, 5000) of
+ {ok, Data} ->
+ C1 = rabbit_stream_core:incoming_data(Data, C0),
+ case rabbit_stream_core:next_command(C1) of
+ empty ->
+ {ok, Data2} = Transport:recv(S, 0, 5000),
+ rabbit_stream_core:next_command(
+ rabbit_stream_core:incoming_data(Data2, C1));
+ Res ->
+ Res
+ end;
+ {error, Err} ->
+ ct:fail("error receiving data ~w", [Err])
+ end;
+ Res ->
+ Res
+ end.
+
+get_global_counters(Config) ->
+ maps:get([{protocol, stream}],
+ rabbit_ct_broker_helpers:rpc(Config,
+ 0,
+ rabbit_global_counters,
+ overview,
+ [])).
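
For reference, a minimal sketch (not part of the patch) of how the rewritten helpers above are meant to chain together. It assumes S is a socket that has already gone through the peer-properties/SASL/tune/open sequence, Transport is gen_tcp, C0 is the rabbit_stream_core parser state returned by that setup, and the stream name, publisher id 42 and subscription id 1 are arbitrary:

roundtrip_example(Transport, S, C0) ->
    Stream = <<"example-stream">>,
    Body = <<"hello">>,
    C1 = test_create_stream(Transport, S, Stream, C0),
    C2 = test_declare_publisher(Transport, S, 42, Stream, C1),
    C3 = test_publish_confirm(Transport, S, 42, Body, C2),
    C4 = test_subscribe(Transport, S, 1, Stream, C3),
    %% the first delivered chunk starts at offset 0
    C5 = test_deliver(Transport, S, 1, 0, Body, C4),
    %% test_delete_stream/4 also consumes the metadata_update sent to the subscriber
    C6 = test_delete_stream(Transport, S, Stream, C5),
    test_close(Transport, S, C6).
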
diff --git a/deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/.mvn/wrapper/maven-wrapper.jar b/deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/.mvn/wrapper/maven-wrapper.jar
new file mode 100644
index 0000000000..2cc7d4a55c
--- /dev/null
+++ b/deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/.mvn/wrapper/maven-wrapper.jar
Binary files differ
diff --git a/deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/Makefile b/deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/Makefile
index 89be00931c..0b3647e9e9 100644
--- a/deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/Makefile
+++ b/deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/Makefile
@@ -2,9 +2,11 @@ export PATH :=$(CURDIR):$(PATH)
HOSTNAME := $(shell hostname)
MVN_FLAGS += -Dhostname=$(HOSTNAME) \
-Dnode1.stream.port=$(NODE1_STREAM_PORT) \
+ -Dnode1.stream.port.tls=$(NODE1_STREAM_PORT_TLS) \
-Dnode1.name=$(NODE1_NAME) \
-Dnode2.name=$(NODE2_NAME) \
-Dnode2.stream.port=$(NODE2_STREAM_PORT) \
+ -Dnode2.stream.port.tls=$(NODE2_STREAM_PORT_TLS) \
-Drabbitmqctl.bin=$(RABBITMQCTL)
.PHONY: tests clean
@@ -12,7 +14,7 @@ MVN_FLAGS += -Dhostname=$(HOSTNAME) \
tests:
# Note: to run a single test
# @mvnw -q $(MVN_FLAGS) -Dtest=StreamTest#metadataOnClusterShouldReturnLeaderAndReplicas test
- @mvnw $(MVN_FLAGS) test
+ @mvnw -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn -B $(MVN_FLAGS) test
clean:
@mvnw clean
diff --git a/deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/pom.xml b/deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/pom.xml
index aa27c29baf..7dcf3ea494 100644
--- a/deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/pom.xml
+++ b/deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/pom.xml
@@ -26,12 +26,10 @@
</developers>
<properties>
- <stream-client.version>0.1.0-SNAPSHOT</stream-client.version>
- <proton-j.version>0.33.6</proton-j.version>
- <junit.jupiter.version>5.7.0</junit.jupiter.version>
- <assertj.version>3.17.2</assertj.version>
- <mockito.version>3.5.11</mockito.version>
- <logback.version>1.2.3</logback.version>
+ <stream-client.version>[0.5.0-SNAPSHOT,1.0-SNAPSHOT)</stream-client.version>
+ <junit.jupiter.version>5.8.2</junit.jupiter.version>
+ <assertj.version>3.21.0</assertj.version>
+ <logback.version>1.2.7</logback.version>
<maven.compiler.plugin.version>3.8.1</maven.compiler.plugin.version>
<maven-surefire-plugin.version>2.22.2</maven-surefire-plugin.version>
<spotless.version>2.2.0</spotless.version>
@@ -47,12 +45,6 @@
</dependency>
<dependency>
- <groupId>org.apache.qpid</groupId>
- <artifactId>proton-j</artifactId>
- <version>${proton-j.version}</version>
- </dependency>
-
- <dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<version>${junit.jupiter.version}</version>
@@ -74,13 +66,6 @@
</dependency>
<dependency>
- <groupId>org.mockito</groupId>
- <artifactId>mockito-core</artifactId>
- <version>${mockito.version}</version>
- <scope>test</scope>
- </dependency>
-
- <dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>${logback.version}</version>
@@ -140,4 +125,4 @@
</repositories>
-</project> \ No newline at end of file
+</project>
diff --git a/deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/src/test/java/com/rabbitmq/stream/ClusterSizeTest.java b/deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/src/test/java/com/rabbitmq/stream/ClusterSizeTest.java
index 993c19b852..6ecf2b4ae4 100644
--- a/deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/src/test/java/com/rabbitmq/stream/ClusterSizeTest.java
+++ b/deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/src/test/java/com/rabbitmq/stream/ClusterSizeTest.java
@@ -11,11 +11,14 @@
// The Original Code is RabbitMQ.
//
// The Initial Developer of the Original Code is Pivotal Software, Inc.
-// Copyright (c) 2020 VMware, Inc. or its affiliates. All rights reserved.
+// Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
//
package com.rabbitmq.stream;
+import static com.rabbitmq.stream.TestUtils.ResponseConditions.ko;
+import static com.rabbitmq.stream.TestUtils.ResponseConditions.ok;
+import static com.rabbitmq.stream.TestUtils.ResponseConditions.responseCode;
import static org.assertj.core.api.Assertions.assertThat;
import com.rabbitmq.stream.impl.Client;
@@ -40,24 +43,29 @@ public class ClusterSizeTest {
String s = UUID.randomUUID().toString();
Response response =
client.create(s, Collections.singletonMap("initial-cluster-size", clusterSize));
- assertThat(response.isOk()).isFalse();
- assertThat(response.getResponseCode()).isEqualTo(Constants.RESPONSE_CODE_PRECONDITION_FAILED);
+ assertThat(response).is(ko()).has(responseCode(Constants.RESPONSE_CODE_PRECONDITION_FAILED));
}
@ParameterizedTest
@CsvSource({"1,1", "2,2", "3,3", "5,3"})
- void clusterSizeShouldReflectOnMetadata(String requestedClusterSize, int expectedClusterSize) {
+ void clusterSizeShouldReflectOnMetadata(String requestedClusterSize, int expectedClusterSize)
+ throws InterruptedException {
Client client = cf.get(new Client.ClientParameters().port(TestUtils.streamPortNode1()));
String s = UUID.randomUUID().toString();
try {
Response response =
client.create(s, Collections.singletonMap("initial-cluster-size", requestedClusterSize));
- assertThat(response.isOk()).isTrue();
+ assertThat(response).is(ok());
StreamMetadata metadata = client.metadata(s).get(s);
assertThat(metadata).isNotNull();
assertThat(metadata.getResponseCode()).isEqualTo(Constants.RESPONSE_CODE_OK);
- int actualClusterSize = metadata.getLeader() == null ? 0 : 1 + metadata.getReplicas().size();
- assertThat(actualClusterSize).isEqualTo(expectedClusterSize);
+ TestUtils.waitUntil(
+ () -> {
+ StreamMetadata m = client.metadata(s).get(s);
+                assertThat(m).isNotNull();
+ int actualClusterSize = m.getLeader() == null ? 0 : 1 + m.getReplicas().size();
+ return actualClusterSize == expectedClusterSize;
+ });
} finally {
client.delete(s);
}
diff --git a/deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/src/test/java/com/rabbitmq/stream/FailureTest.java b/deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/src/test/java/com/rabbitmq/stream/FailureTest.java
index c7a390f00d..bf47ad01ee 100644
--- a/deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/src/test/java/com/rabbitmq/stream/FailureTest.java
+++ b/deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/src/test/java/com/rabbitmq/stream/FailureTest.java
@@ -11,16 +11,21 @@
// The Original Code is RabbitMQ.
//
// The Initial Developer of the Original Code is Pivotal Software, Inc.
-// Copyright (c) 2020 VMware, Inc. or its affiliates. All rights reserved.
+// Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
//
package com.rabbitmq.stream;
+import static com.rabbitmq.stream.TestUtils.ResponseConditions.ok;
+import static com.rabbitmq.stream.TestUtils.waitAtMost;
+import static com.rabbitmq.stream.TestUtils.waitUntil;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.fail;
import com.rabbitmq.stream.codec.WrapperMessageBuilder;
import com.rabbitmq.stream.impl.Client;
+import com.rabbitmq.stream.impl.Client.ClientParameters;
+import com.rabbitmq.stream.impl.Client.Response;
import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.util.*;
@@ -31,10 +36,14 @@ import java.util.concurrent.atomic.AtomicReference;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@ExtendWith(TestUtils.StreamTestInfrastructureExtension.class)
public class FailureTest {
+ private static final Logger LOGGER = LoggerFactory.getLogger(FailureTest.class);
+
TestUtils.ClientFactory cf;
String stream;
ExecutorService executorService;
@@ -62,8 +71,12 @@ public class FailureTest {
Client.StreamMetadata streamMetadata = metadata.get(stream);
assertThat(streamMetadata).isNotNull();
+ waitUntil(() -> client.metadata(stream).get(stream).getReplicas().size() == 2);
+
+ streamMetadata = client.metadata(stream).get(stream);
assertThat(streamMetadata.getLeader().getPort()).isEqualTo(TestUtils.streamPortNode1());
assertThat(streamMetadata.getReplicas()).isNotEmpty();
+
Client.Broker replica = streamMetadata.getReplicas().get(0);
assertThat(replica.getPort()).isNotEqualTo(TestUtils.streamPortNode1());
@@ -79,8 +92,8 @@ public class FailureTest {
(publisherId, publishingId) -> confirmLatch.get().countDown()));
String message = "all nodes available";
messages.add(message);
+ publisher.declarePublisher((byte) 1, null, stream);
publisher.publish(
- stream,
(byte) 1,
Collections.singletonList(
publisher.messageBuilder().addData(message.getBytes(StandardCharsets.UTF_8)).build()));
@@ -98,8 +111,8 @@ public class FailureTest {
assertThat(metadataLatch.await(10, TimeUnit.SECONDS)).isTrue();
- // wait until there's a new leader
- TestUtils.waitAtMost(
+ // wait until there's a new leader
+ waitAtMost(
Duration.ofSeconds(10),
() -> {
Client.StreamMetadata m = publisher.metadata(stream).get(stream);
@@ -109,8 +122,9 @@ public class FailureTest {
confirmLatch.set(new CountDownLatch(1));
message = "2 nodes available";
messages.add(message);
+
+ publisher.declarePublisher((byte) 1, null, stream);
publisher.publish(
- stream,
(byte) 1,
Collections.singletonList(
publisher
@@ -124,10 +138,12 @@ public class FailureTest {
}
// wait until all the replicas are there
- TestUtils.waitAtMost(
- Duration.ofSeconds(5),
+ waitAtMost(
+ Duration.ofSeconds(10),
() -> {
+ LOGGER.info("Getting metadata for {}", stream);
Client.StreamMetadata m = publisher.metadata(stream).get(stream);
+ LOGGER.info("Metadata for {} (expecting 2 replicas): {}", stream, m);
return m.getReplicas().size() == 2;
});
@@ -135,26 +151,25 @@ public class FailureTest {
message = "all nodes are back";
messages.add(message);
publisher.publish(
- stream,
(byte) 1,
Collections.singletonList(
publisher.messageBuilder().addData(message.getBytes(StandardCharsets.UTF_8)).build()));
assertThat(confirmLatch.get().await(10, TimeUnit.SECONDS)).isTrue();
confirmLatch.set(null);
- CountDownLatch consumeLatch = new CountDownLatch(2);
+ CountDownLatch consumeLatch = new CountDownLatch(messages.size());
Set<String> bodies = ConcurrentHashMap.newKeySet();
Client consumer =
cf.get(
new Client.ClientParameters()
.port(TestUtils.streamPortNode1())
.messageListener(
- (subscriptionId, offset, msg) -> {
+ (subscriptionId, offset, chunkTimestamp, msg) -> {
bodies.add(new String(msg.getBodyAsBinary(), StandardCharsets.UTF_8));
consumeLatch.countDown();
}));
- TestUtils.waitAtMost(
+ waitAtMost(
Duration.ofSeconds(5),
() -> {
Client.Response response =
@@ -162,9 +177,7 @@ public class FailureTest {
return response.isOk();
});
assertThat(consumeLatch.await(10, TimeUnit.SECONDS)).isTrue();
- assertThat(bodies)
- .hasSize(3)
- .contains("all nodes available", "2 nodes available", "all nodes are back");
+ assertThat(bodies).hasSameSizeAs(messages).containsAll(messages);
}
@Test
@@ -211,7 +224,7 @@ public class FailureTest {
cf.get(new Client.ClientParameters().port(TestUtils.streamPortNode2()));
// wait until there's a new leader
try {
- TestUtils.waitAtMost(
+ waitAtMost(
Duration.ofSeconds(5),
() -> {
Client.StreamMetadata m = locator.metadata(stream).get(stream);
@@ -233,6 +246,7 @@ public class FailureTest {
generation.incrementAndGet();
published.clear();
+ newPublisher.declarePublisher((byte) 1, null, stream);
publisher.set(newPublisher);
connected.set(true);
@@ -249,6 +263,7 @@ public class FailureTest {
.shutdownListener(shutdownListener)
.publishConfirmListener(publishConfirmListener));
+ client.declarePublisher((byte) 1, null, stream);
publisher.set(client);
AtomicBoolean keepPublishing = new AtomicBoolean(true);
@@ -270,10 +285,7 @@ public class FailureTest {
.build();
try {
long publishingId =
- publisher
- .get()
- .publish(stream, (byte) 1, Collections.singletonList(message))
- .get(0);
+ publisher.get().publish((byte) 1, Collections.singletonList(message)).get(0);
published.put(publishingId, message);
} catch (Exception e) {
// keep going
@@ -307,7 +319,7 @@ public class FailureTest {
Client metadataClient = cf.get(new Client.ClientParameters().port(TestUtils.streamPortNode2()));
// wait until all the replicas are there
- TestUtils.waitAtMost(
+ waitAtMost(
Duration.ofSeconds(5),
() -> {
Client.StreamMetadata m = metadataClient.metadata(stream).get(stream);
@@ -333,7 +345,7 @@ public class FailureTest {
(client1, subscriptionId, offset, messageCount, dataSize) ->
client1.credit(subscriptionId, 1))
.messageListener(
- (subscriptionId, offset, message) -> {
+ (subscriptionId, offset, chunkTimestamp, message) -> {
consumed.add(message);
generations.add((Long) message.getApplicationProperties().get("generation"));
if (consumed.size() == confirmed.size()) {
@@ -343,7 +355,7 @@ public class FailureTest {
Client.Response response =
consumer.subscribe((byte) 1, stream, OffsetSpecification.first(), 10);
- assertThat(response.isOk()).isTrue();
+ assertThat(response).is(ok());
assertThat(consumedLatch.await(5, TimeUnit.SECONDS)).isTrue();
assertThat(generations).hasSize(2).contains(0L, 1L);
@@ -365,6 +377,10 @@ public class FailureTest {
Client.StreamMetadata streamMetadata = metadata.get(stream);
assertThat(streamMetadata).isNotNull();
+ waitUntil(() -> metadataClient.metadata(stream).get(stream).getReplicas().size() == 2);
+
+ metadata = metadataClient.metadata(stream);
+ streamMetadata = metadata.get(stream);
assertThat(streamMetadata.getLeader()).isNotNull();
assertThat(streamMetadata.getLeader().getPort()).isEqualTo(TestUtils.streamPortNode1());
@@ -389,6 +405,7 @@ public class FailureTest {
.port(streamMetadata.getLeader().getPort())
.publishConfirmListener(publishConfirmListener));
+ publisher.declarePublisher((byte) 1, null, stream);
AtomicLong generation = new AtomicLong(0);
AtomicLong sequence = new AtomicLong(0);
AtomicBoolean keepPublishing = new AtomicBoolean(true);
@@ -408,7 +425,7 @@ public class FailureTest {
.build();
try {
long publishingId =
- publisher.publish(stream, (byte) 1, Collections.singletonList(message)).get(0);
+ publisher.publish((byte) 1, Collections.singletonList(message)).get(0);
published.put(publishingId, message);
} catch (Exception e) {
// keep going
@@ -430,7 +447,7 @@ public class FailureTest {
Set<Long> generations = ConcurrentHashMap.newKeySet();
Set<Long> consumedIds = ConcurrentHashMap.newKeySet();
Client.MessageListener messageListener =
- (subscriptionId, offset, message) -> {
+ (subscriptionId, offset, chunkTimestamp, message) -> {
consumed.add(message);
generations.add((Long) message.getApplicationProperties().get("generation"));
consumedIds.add(message.getProperties().getMessageIdAsLong());
@@ -484,7 +501,7 @@ public class FailureTest {
Client.Response response =
consumer.subscribe((byte) 1, stream, OffsetSpecification.first(), 10);
- assertThat(response.isOk()).isTrue();
+ assertThat(response).is(ok());
// let's publish for a bit of time
Thread.sleep(2000);
@@ -508,8 +525,8 @@ public class FailureTest {
confirmedCount = confirmed.size();
// wait until all the replicas are there
- TestUtils.waitAtMost(
- Duration.ofSeconds(5),
+ waitAtMost(
+ Duration.ofSeconds(10),
() -> {
Client.StreamMetadata m = metadataClient.metadata(stream).get(stream);
return m.getReplicas().size() == 2;
@@ -522,9 +539,9 @@ public class FailureTest {
keepPublishing.set(false);
- assertThat(publishingLatch.await(5, TimeUnit.SECONDS)).isTrue();
+ assertThat(publishingLatch.await(10, TimeUnit.SECONDS)).isTrue();
- TestUtils.waitAtMost(Duration.ofSeconds(5), () -> consumed.size() >= confirmed.size());
+ waitAtMost(Duration.ofSeconds(10), () -> consumed.size() >= confirmed.size());
assertThat(generations).hasSize(2).contains(0L, 1L);
assertThat(consumed).hasSizeGreaterThanOrEqualTo(confirmed.size());
@@ -538,4 +555,33 @@ public class FailureTest {
confirmedIds.forEach(confirmedId -> assertThat(consumedIds).contains(confirmedId));
}
+
+ @Test
+ void declarePublisherShouldNotReturnStreamDoesNotExistOnRestart() throws Exception {
+ try {
+ Host.rabbitmqctl("stop_app");
+ } finally {
+ Host.rabbitmqctl("start_app");
+ }
+ AtomicReference<Client> client = new AtomicReference<>();
+ waitUntil(
+ () -> {
+ try {
+ client.set(cf.get(new ClientParameters().port(TestUtils.streamPortNode1())));
+ } catch (Exception e) {
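+            // the broker may still be starting; ignore the error and let waitUntil retry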
+
+ }
+ return client.get() != null;
+ });
+ Set<Short> responseCodes = ConcurrentHashMap.newKeySet();
+
+ waitUntil(
+ () -> {
+ Response response = client.get().declarePublisher((byte) 0, null, stream);
+ responseCodes.add(response.getResponseCode());
+ return response.isOk();
+ });
+
+ assertThat(responseCodes).doesNotContain(Constants.RESPONSE_CODE_STREAM_DOES_NOT_EXIST);
+ }
}
diff --git a/deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/src/test/java/com/rabbitmq/stream/Host.java b/deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/src/test/java/com/rabbitmq/stream/Host.java
index 0134038a8b..a816805041 100644
--- a/deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/src/test/java/com/rabbitmq/stream/Host.java
+++ b/deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/src/test/java/com/rabbitmq/stream/Host.java
@@ -11,7 +11,7 @@
// The Original Code is RabbitMQ.
//
// The Initial Developer of the Original Code is Pivotal Software, Inc.
-// Copyright (c) 2020 VMware, Inc. or its affiliates. All rights reserved.
+// Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
//
package com.rabbitmq.stream;
@@ -94,20 +94,23 @@ public class Host {
}
public static String node1name() {
- try {
- return System.getProperty(
- "node1.name", "rabbit-1@" + InetAddress.getLocalHost().getHostName());
- } catch (UnknownHostException e) {
- throw new RuntimeException(e);
- }
+ return System.getProperty("node1.name", "rabbit-1@" + hostname());
}
public static String node2name() {
+ return System.getProperty("node2.name", "rabbit-2@" + hostname());
+ }
+
+ public static String hostname() {
try {
- return System.getProperty(
- "node2.name", "rabbit-2@" + InetAddress.getLocalHost().getHostName());
+ return InetAddress.getLocalHost().getHostName();
} catch (UnknownHostException e) {
- throw new RuntimeException(e);
+ try {
+ Process process = executeCommand("hostname");
+ return capture(process.getInputStream()).trim();
+ } catch (Exception ex) {
+ throw new RuntimeException(ex);
+ }
}
}
diff --git a/deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/src/test/java/com/rabbitmq/stream/LeaderLocatorTest.java b/deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/src/test/java/com/rabbitmq/stream/LeaderLocatorTest.java
index 5dc2256643..7d387c09c2 100644
--- a/deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/src/test/java/com/rabbitmq/stream/LeaderLocatorTest.java
+++ b/deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/src/test/java/com/rabbitmq/stream/LeaderLocatorTest.java
@@ -11,11 +11,14 @@
// The Original Code is RabbitMQ.
//
// The Initial Developer of the Original Code is Pivotal Software, Inc.
-// Copyright (c) 2020 VMware, Inc. or its affiliates. All rights reserved.
+// Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
//
package com.rabbitmq.stream;
+import static com.rabbitmq.stream.TestUtils.ResponseConditions.ko;
+import static com.rabbitmq.stream.TestUtils.ResponseConditions.ok;
+import static com.rabbitmq.stream.TestUtils.ResponseConditions.responseCode;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.assertj.core.api.Assertions.assertThat;
@@ -47,8 +50,7 @@ public class LeaderLocatorTest {
Client client = cf.get(new Client.ClientParameters().port(TestUtils.streamPortNode1()));
String s = UUID.randomUUID().toString();
Response response = client.create(s, Collections.singletonMap("queue-leader-locator", "foo"));
- assertThat(response.isOk()).isFalse();
- assertThat(response.getResponseCode()).isEqualTo(Constants.RESPONSE_CODE_PRECONDITION_FAILED);
+ assertThat(response).is(ko()).has(responseCode(Constants.RESPONSE_CODE_PRECONDITION_FAILED));
}
@Test
@@ -60,7 +62,7 @@ public class LeaderLocatorTest {
try {
Response response =
client.create(s, Collections.singletonMap("queue-leader-locator", "client-local"));
- assertThat(response.isOk()).isTrue();
+ assertThat(response).is(ok());
StreamMetadata metadata = client.metadata(s).get(s);
assertThat(metadata).isNotNull();
assertThat(metadata.getResponseCode()).isEqualTo(Constants.RESPONSE_CODE_OK);
@@ -136,7 +138,7 @@ public class LeaderLocatorTest {
Response response =
client.create(
s, Collections.singletonMap("queue-leader-locator", "least-leaders"));
- assertThat(response.isOk()).isTrue();
+ assertThat(response).is(ok());
createdStreams.add(s);
});
diff --git a/deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/src/test/java/com/rabbitmq/stream/StreamTest.java b/deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/src/test/java/com/rabbitmq/stream/StreamTest.java
index 08024a12bf..b02057ba35 100644
--- a/deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/src/test/java/com/rabbitmq/stream/StreamTest.java
+++ b/deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/src/test/java/com/rabbitmq/stream/StreamTest.java
@@ -11,7 +11,7 @@
// The Original Code is RabbitMQ.
//
// The Initial Developer of the Original Code is Pivotal Software, Inc.
-// Copyright (c) 2020 VMware, Inc. or its affiliates. All rights reserved.
+// Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
//
package com.rabbitmq.stream;
@@ -82,7 +82,10 @@ public class StreamTest {
Client client = cf.get(new Client.ClientParameters().port(TestUtils.streamPortNode1()));
Map<String, Client.StreamMetadata> metadata = client.metadata(stream);
assertThat(metadata).hasSize(1).containsKey(stream);
- Client.StreamMetadata streamMetadata = metadata.get(stream);
+
+ TestUtils.waitUntil(() -> client.metadata(stream).get(stream).getReplicas().size() == 2);
+
+ Client.StreamMetadata streamMetadata = client.metadata(stream).get(stream);
CountDownLatch publishingLatch = new CountDownLatch(messageCount);
Client publisher =
@@ -92,11 +95,11 @@ public class StreamTest {
.publishConfirmListener(
(publisherId, publishingId) -> publishingLatch.countDown()));
+ publisher.declarePublisher((byte) 1, null, stream);
IntStream.range(0, messageCount)
.forEach(
i ->
publisher.publish(
- stream,
(byte) 1,
Collections.singletonList(
publisher
@@ -116,7 +119,7 @@ public class StreamTest {
(client1, subscriptionId, offset, messageCount1, dataSize) ->
client1.credit(subscriptionId, 10))
.messageListener(
- (subscriptionId, offset, message) -> {
+ (subscriptionId, offset, chunkTimestamp, message) -> {
bodies.add(new String(message.getBodyAsBinary(), StandardCharsets.UTF_8));
consumingLatch.countDown();
}));
@@ -129,13 +132,13 @@ public class StreamTest {
}
@Test
- void metadataOnClusterShouldReturnLeaderAndReplicas() {
+ void metadataOnClusterShouldReturnLeaderAndReplicas() throws InterruptedException {
Client client = cf.get(new Client.ClientParameters().port(TestUtils.streamPortNode1()));
Map<String, Client.StreamMetadata> metadata = client.metadata(stream);
assertThat(metadata).hasSize(1).containsKey(stream);
- Client.StreamMetadata streamMetadata = metadata.get(stream);
- assertThat(streamMetadata.getResponseCode()).isEqualTo(Constants.RESPONSE_CODE_OK);
- assertThat(streamMetadata.getReplicas()).hasSize(2);
+ assertThat(metadata.get(stream).getResponseCode()).isEqualTo(Constants.RESPONSE_CODE_OK);
+
+ TestUtils.waitUntil(() -> client.metadata(stream).get(stream).getReplicas().size() == 2);
BiConsumer<Client.Broker, Client.Broker> assertNodesAreDifferent =
(node, anotherNode) -> {
@@ -143,6 +146,8 @@ public class StreamTest {
assertThat(node.getPort()).isNotEqualTo(anotherNode.getPort());
};
+ Client.StreamMetadata streamMetadata = client.metadata(stream).get(stream);
+
streamMetadata
.getReplicas()
.forEach(replica -> assertNodesAreDifferent.accept(replica, streamMetadata.getLeader()));
diff --git a/deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/src/test/java/com/rabbitmq/stream/TestUtils.java b/deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/src/test/java/com/rabbitmq/stream/TestUtils.java
index c49a8d5832..03015a0c44 100644
--- a/deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/src/test/java/com/rabbitmq/stream/TestUtils.java
+++ b/deps/rabbitmq_stream/test/rabbit_stream_SUITE_data/src/test/java/com/rabbitmq/stream/TestUtils.java
@@ -11,38 +11,47 @@
// The Original Code is RabbitMQ.
//
// The Initial Developer of the Original Code is Pivotal Software, Inc.
-// Copyright (c) 2020 VMware, Inc. or its affiliates. All rights reserved.
+// Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
//
package com.rabbitmq.stream;
+import static com.rabbitmq.stream.TestUtils.ResponseConditions.ok;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.fail;
import com.rabbitmq.stream.impl.Client;
+import com.rabbitmq.stream.impl.Client.Response;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import java.lang.reflect.Field;
+import java.lang.reflect.Method;
import java.time.Duration;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.BooleanSupplier;
+import org.assertj.core.api.Condition;
+import org.junit.jupiter.api.TestInfo;
import org.junit.jupiter.api.extension.*;
public class TestUtils {
static int streamPortNode1() {
- String port = System.getProperty("node1.stream.port", "5555");
+ String port = System.getProperty("node1.stream.port", "5552");
return Integer.valueOf(port);
}
static int streamPortNode2() {
- String port = System.getProperty("node2.stream.port", "5556");
+ String port = System.getProperty("node2.stream.port", "5552");
return Integer.valueOf(port);
}
+ static void waitUntil(BooleanSupplier condition) throws InterruptedException {
+ waitAtMost(Duration.ofSeconds(10), condition);
+ }
+
static void waitAtMost(Duration duration, BooleanSupplier condition) throws InterruptedException {
if (condition.getAsBoolean()) {
return;
@@ -92,7 +101,7 @@ public class TestUtils {
try {
Field streamField = context.getTestInstance().get().getClass().getDeclaredField("stream");
streamField.setAccessible(true);
- String stream = UUID.randomUUID().toString();
+ String stream = streamName(context);
streamField.set(context.getTestInstance().get(), stream);
Client client =
new Client(
@@ -100,7 +109,7 @@ public class TestUtils {
.eventLoopGroup(eventLoopGroup(context))
.port(streamPortNode1()));
Client.Response response = client.create(stream);
- assertThat(response.isOk()).isTrue();
+ assertThat(response).is(ok());
client.close();
store(context).put("testMethodStream", stream);
} catch (NoSuchFieldException e) {
@@ -130,7 +139,7 @@ public class TestUtils {
.eventLoopGroup(eventLoopGroup(context))
.port(streamPortNode1()));
Client.Response response = client.delete(stream);
- assertThat(response.isOk()).isTrue();
+ assertThat(response).is(ok());
client.close();
store(context).remove("testMethodStream");
} catch (NoSuchFieldException e) {
@@ -150,6 +159,21 @@ public class TestUtils {
}
}
+ static String streamName(TestInfo info) {
+ return streamName(info.getTestClass().get(), info.getTestMethod().get());
+ }
+
+ private static String streamName(ExtensionContext context) {
+ return streamName(context.getTestInstance().get().getClass(), context.getTestMethod().get());
+ }
+
+ private static String streamName(Class<?> testClass, Method testMethod) {
+ String uuid = UUID.randomUUID().toString();
+ return String.format(
+ "%s_%s%s",
+ testClass.getSimpleName(), testMethod.getName(), uuid.substring(uuid.length() / 2));
+ }
+
static class ClientFactory {
private final EventLoopGroup eventLoopGroup;
@@ -176,4 +200,22 @@ public class TestUtils {
}
}
}
+
+ static class ResponseConditions {
+
+ static Condition<Response> ok() {
+ return new Condition<>(Response::isOk, "Response should be OK");
+ }
+
+ static Condition<Response> ko() {
+      return new Condition<>(response -> !response.isOk(), "Response should not be OK");
+ }
+
+ static Condition<Response> responseCode(short expectedResponse) {
+ return new Condition<>(
+ response -> response.getResponseCode() == expectedResponse,
+ "response code %d",
+ expectedResponse);
+ }
+ }
}
diff --git a/deps/rabbitmq_stream/test/rabbit_stream_manager_SUITE.erl b/deps/rabbitmq_stream/test/rabbit_stream_manager_SUITE.erl
new file mode 100644
index 0000000000..397b9f6d53
--- /dev/null
+++ b/deps/rabbitmq_stream/test/rabbit_stream_manager_SUITE.erl
@@ -0,0 +1,180 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(rabbit_stream_manager_SUITE).
+
+-include_lib("eunit/include/eunit.hrl").
+-include_lib("common_test/include/ct.hrl").
+
+-compile(export_all).
+
+all() ->
+ [{group, non_parallel_tests}].
+
+groups() ->
+ [{non_parallel_tests, [], [manage_super_stream, lookup_leader]}].
+
+%% -------------------------------------------------------------------
+%% Testsuite setup/teardown.
+%% -------------------------------------------------------------------
+
+init_per_suite(Config) ->
+ case rabbit_ct_helpers:is_mixed_versions() of
+ true ->
+ {skip, "mixed version clusters are not supported"};
+ _ ->
+ rabbit_ct_helpers:log_environment(),
+ Config
+ end.
+
+end_per_suite(Config) ->
+ Config.
+
+init_per_group(_, Config) ->
+ Config1 =
+ rabbit_ct_helpers:set_config(Config, [{rmq_nodes_clustered, false}]),
+ Config2 =
+ rabbit_ct_helpers:set_config(Config1,
+ {rabbitmq_ct_tls_verify, verify_none}),
+ Config3 =
+ rabbit_ct_helpers:set_config(Config2, {rabbitmq_stream, verify_none}),
+ rabbit_ct_helpers:run_setup_steps(Config3,
+ [fun(StepConfig) ->
+ rabbit_ct_helpers:merge_app_env(StepConfig,
+ {rabbit,
+ [{core_metrics_gc_interval,
+ 1000}]})
+ end,
+ fun(StepConfig) ->
+ rabbit_ct_helpers:merge_app_env(StepConfig,
+ {rabbitmq_stream,
+ [{connection_negotiation_step_timeout,
+ 500}]})
+ end]
+ ++ rabbit_ct_broker_helpers:setup_steps()).
+
+end_per_group(_, Config) ->
+ rabbit_ct_helpers:run_steps(Config,
+ rabbit_ct_broker_helpers:teardown_steps()).
+
+init_per_testcase(Testcase, Config) ->
+ rabbit_ct_helpers:testcase_started(Config, Testcase).
+
+end_per_testcase(Testcase, Config) ->
+ rabbit_ct_helpers:testcase_finished(Config, Testcase).
+
+%% -------------------------------------------------------------------
+%% Testcases.
+%% -------------------------------------------------------------------
+
+lookup_leader(Config) ->
+ Stream = <<"stream_manager_lookup_leader_stream">>,
+ ?assertMatch({ok, _}, create_stream(Config, Stream)),
+
+ {ok, Pid} = lookup_leader(Config, Stream),
+ ?assert(is_pid(Pid)),
+
+ ?assertEqual({error, not_found}, lookup_leader(Config, <<"foo">>)),
+
+ ?assertEqual({ok, deleted}, delete_stream(Config, Stream)).
+
+manage_super_stream(Config) ->
+ % create super stream
+ ?assertEqual(ok,
+ create_super_stream(Config,
+ <<"invoices">>,
+ [<<"invoices-0">>, <<"invoices-1">>,
+ <<"invoices-2">>],
+ [<<"0">>, <<"1">>, <<"2">>])),
+ % get the correct partitions
+ ?assertEqual({ok,
+ [<<"invoices-0">>, <<"invoices-1">>, <<"invoices-2">>]},
+ partitions(Config, <<"invoices">>)),
+
+ [?assertEqual({ok, [Partition]},
+ route(Config, RoutingKey, <<"invoices">>))
+ || {Partition, RoutingKey}
+ <- [{<<"invoices-0">>, <<"0">>}, {<<"invoices-1">>, <<"1">>},
+ {<<"invoices-2">>, <<"2">>}]],
+
+ % get an error if trying to re-create it
+ ?assertMatch({error, _},
+ create_super_stream(Config,
+ <<"invoices">>,
+ [<<"invoices-0">>, <<"invoices-1">>,
+ <<"invoices-2">>],
+ [<<"0">>, <<"1">>, <<"2">>])),
+
+ % can delete it
+ ?assertEqual(ok, delete_super_stream(Config, <<"invoices">>)),
+
+ % create a stream with the same name as a potential partition
+ ?assertMatch({ok, _}, create_stream(Config, <<"invoices-1">>)),
+
+ % cannot create the super stream because a partition already exists
+ ?assertMatch({error, _},
+ create_super_stream(Config,
+ <<"invoices">>,
+ [<<"invoices-0">>, <<"invoices-1">>,
+ <<"invoices-2">>],
+ [<<"0">>, <<"1">>, <<"2">>])),
+
+ ok.
+
+create_super_stream(Config, Name, Partitions, RKs) ->
+ rabbit_ct_broker_helpers:rpc(Config,
+ 0,
+ rabbit_stream_manager,
+ create_super_stream,
+ [<<"/">>,
+ Name,
+ Partitions,
+ #{},
+ RKs,
+ <<"guest">>]).
+
+delete_super_stream(Config, Name) ->
+ rabbit_ct_broker_helpers:rpc(Config,
+ 0,
+ rabbit_stream_manager,
+ delete_super_stream,
+ [<<"/">>, Name, <<"guest">>]).
+
+create_stream(Config, Name) ->
+ rabbit_ct_broker_helpers:rpc(Config,
+ 0,
+ rabbit_stream_manager,
+ create,
+ [<<"/">>, Name, [], <<"guest">>]).
+
+delete_stream(Config, Name) ->
+ rabbit_ct_broker_helpers:rpc(Config,
+ 0,
+ rabbit_stream_manager,
+ delete,
+ [<<"/">>, Name, <<"guest">>]).
+
+lookup_leader(Config, Name) ->
+ rabbit_ct_broker_helpers:rpc(Config,
+ 0,
+ rabbit_stream_manager,
+ lookup_leader,
+ [<<"/">>, Name]).
+
+partitions(Config, Name) ->
+ rabbit_ct_broker_helpers:rpc(Config,
+ 0,
+ rabbit_stream_manager,
+ partitions,
+ [<<"/">>, Name]).
+
+route(Config, RoutingKey, SuperStream) ->
+ rabbit_ct_broker_helpers:rpc(Config,
+ 0,
+ rabbit_stream_manager,
+ route,
+ [RoutingKey, <<"/">>, SuperStream]).
diff --git a/deps/rabbitmq_stream/test/rabbit_stream_utils_SUITE.erl b/deps/rabbitmq_stream/test/rabbit_stream_utils_SUITE.erl
new file mode 100644
index 0000000000..6008918ffa
--- /dev/null
+++ b/deps/rabbitmq_stream/test/rabbit_stream_utils_SUITE.erl
@@ -0,0 +1,73 @@
+-module(rabbit_stream_utils_SUITE).
+
+-compile(nowarn_export_all).
+-compile(export_all).
+
+-include_lib("eunit/include/eunit.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
+
+%%%===================================================================
+%%% Common Test callbacks
+%%%===================================================================
+
+all() ->
+ [{group, tests}].
+
+suite() ->
+ [{timetrap, {seconds, 30}}].
+
+groups() ->
+ [{tests, [], [sort_partitions]}].
+
+init_per_suite(Config) ->
+ Config.
+
+end_per_suite(_Config) ->
+ ok.
+
+group(_GroupName) ->
+ [].
+
+init_per_group(_GroupName, Config) ->
+ Config.
+
+end_per_group(_GroupName, _Config) ->
+ ok.
+
+init_per_testcase(_TestCase, Config) ->
+ Config.
+
+end_per_testcase(_TestCase, _Config) ->
+ ok.
+
+%%%===================================================================
+%%% Test cases
+%%%===================================================================
+
+sort_partitions(_Config) ->
+ [] = rabbit_stream_utils:sort_partitions([]),
+ ?assertEqual([<<"a">>, <<"b">>, <<"c">>],
+ [S
+ || #binding{destination = #resource{name = S}}
+ <- rabbit_stream_utils:sort_partitions([binding(<<"c">>,
+ 2),
+ binding(<<"b">>,
+ 1),
+ binding(<<"a">>,
+ 0)])]),
+ ?assertEqual([<<"a">>, <<"c">>, <<"no-order-field">>],
+ [S
+ || #binding{destination = #resource{name = S}}
+ <- rabbit_stream_utils:sort_partitions([binding(<<"c">>,
+ 10),
+ binding(<<"no-order-field">>),
+ binding(<<"a">>,
+ 0)])]),
+ ok.
+
+binding(Destination, Order) ->
+ #binding{destination = #resource{name = Destination},
+ args = [{<<"x-stream-partition-order">>, signedint, Order}]}.
+
+binding(Destination) ->
+ #binding{destination = #resource{name = Destination}, args = []}.
diff --git a/deps/rabbitmq_stream_common/.gitignore b/deps/rabbitmq_stream_common/.gitignore
new file mode 100644
index 0000000000..30a1e0bed5
--- /dev/null
+++ b/deps/rabbitmq_stream_common/.gitignore
@@ -0,0 +1,56 @@
+.eunit
+*.o
+*.beam
+*.plt
+erl_crash.dump
+.concrete/DEV_MODE
+
+# rebar 2.x
+.rebar
+rel/example_project
+ebin/*.beam
+deps
+
+# rebar 3
+.rebar3
+_build/
+_checkouts/
+
+erl_crash.dump
+.sw?
+.*.sw?
+*.beam
+/.erlang.mk/
+/cover/
+/deps/
+/ebin/
+/logs/
+/plugins/
+/xrefr
+elvis
+callgrind*
+ct.coverdata
+test/ct.cover.spec
+_build
+
+rabbitmq_stream_common.d
+*.plt
+*.d
+
+*.jar
+
+
+*~
+.sw?
+.*.sw?
+*.beam
+*.class
+*.dat
+*.dump
+*.iml
+*.ipr
+*.iws
+.DS_Store
+\#~
+/.idea/
+/deps/
diff --git a/deps/rabbitmq_stream_common/BUILD.bazel b/deps/rabbitmq_stream_common/BUILD.bazel
new file mode 100644
index 0000000000..4e8240fcb8
--- /dev/null
+++ b/deps/rabbitmq_stream_common/BUILD.bazel
@@ -0,0 +1,37 @@
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze")
+load(
+ "//:rabbitmq.bzl",
+ "RABBITMQ_DIALYZER_OPTS",
+ "assert_suites",
+ "rabbitmq_lib",
+ "rabbitmq_suite",
+)
+
+APP_NAME = "rabbitmq_stream_common"
+
+APP_DESCRIPTION = "RabbitMQ Stream Common"
+
+rabbitmq_lib(
+ app_description = APP_DESCRIPTION,
+ app_name = APP_NAME,
+)
+
+xref(tags = ["xref"])
+
+dialyze(
+ dialyzer_opts = RABBITMQ_DIALYZER_OPTS,
+ plt = "//:base_plt",
+ tags = ["dialyze"],
+)
+
+suites = [
+ rabbitmq_suite(
+ name = "rabbit_stream_core_SUITE",
+ ),
+]
+
+assert_suites(
+ suites,
+ glob(["test/**/*_SUITE.erl"]),
+)
diff --git a/deps/rabbitmq_stream_common/CODE_OF_CONDUCT.md b/deps/rabbitmq_stream_common/CODE_OF_CONDUCT.md
new file mode 100644
index 0000000000..1f6ef1c576
--- /dev/null
+++ b/deps/rabbitmq_stream_common/CODE_OF_CONDUCT.md
@@ -0,0 +1,44 @@
+# Contributor Code of Conduct
+
+As contributors and maintainers of this project, and in the interest of fostering an open
+and welcoming community, we pledge to respect all people who contribute through reporting
+issues, posting feature requests, updating documentation, submitting pull requests or
+patches, and other activities.
+
+We are committed to making participation in this project a harassment-free experience for
+everyone, regardless of level of experience, gender, gender identity and expression,
+sexual orientation, disability, personal appearance, body size, race, ethnicity, age,
+religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+ * The use of sexualized language or imagery
+ * Personal attacks
+ * Trolling or insulting/derogatory comments
+ * Public or private harassment
+ * Publishing others' private information, such as physical or electronic addresses,
+ without explicit permission
+ * Other unethical or unprofessional conduct
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments,
+commits, code, wiki edits, issues, and other contributions that are not aligned to this
+Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors
+that they deem inappropriate, threatening, offensive, or harmful.
+
+By adopting this Code of Conduct, project maintainers commit themselves to fairly and
+consistently applying these principles to every aspect of managing this project. Project
+maintainers who do not follow or enforce the Code of Conduct may be permanently removed
+from the project team.
+
+This Code of Conduct applies both within project spaces and in public spaces when an
+individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
+contacting a project maintainer at [info@rabbitmq.com](mailto:info@rabbitmq.com). All complaints will
+be reviewed and investigated and will result in a response that is deemed necessary and
+appropriate to the circumstances. Maintainers are obligated to maintain confidentiality
+with regard to the reporter of an incident.
+
+This Code of Conduct is adapted from the
+[Contributor Covenant](http://contributor-covenant.org), version 1.3.0, available at
+[contributor-covenant.org/version/1/3/0/](http://contributor-covenant.org/version/1/3/0/)
diff --git a/deps/rabbitmq_stream_common/CONTRIBUTING.md b/deps/rabbitmq_stream_common/CONTRIBUTING.md
new file mode 100644
index 0000000000..339d097deb
--- /dev/null
+++ b/deps/rabbitmq_stream_common/CONTRIBUTING.md
@@ -0,0 +1,38 @@
+## Overview
+
+RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
+Pull requests are the primary place to discuss code changes.
+
+## How to Contribute
+
+The process is fairly standard:
+
+ * Fork the repository or repositories you plan on contributing to
+ * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
+ * `cd umbrella`, `make co`
+ * Create a branch with a descriptive name in the relevant repositories
+ * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
+ * Submit pull requests with an explanation of what has been changed and **why**
+ * Submit a filled out and signed [Contributor Agreement](https://cla.pivotal.io/) if needed (see below)
+ * Be patient. We will get to your pull request eventually
+
+If what you are going to work on is a substantial change, please first ask the core team
+for their opinion on the [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Code of Conduct
+
+See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
+
+
+## Contributor Agreement
+
+If you want to contribute a non-trivial change, please submit a signed copy of our
+[Contributor Agreement](https://cla.pivotal.io/) around the time
+you submit your pull request. This will make it much easier (in some cases, possible)
+for the RabbitMQ team at Pivotal to merge your contribution.
+
+
+## Where to Ask Questions
+
+If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/deps/rabbitmq_stream_common/LICENSE b/deps/rabbitmq_stream_common/LICENSE
new file mode 100644
index 0000000000..669a2bf450
--- /dev/null
+++ b/deps/rabbitmq_stream_common/LICENSE
@@ -0,0 +1,5 @@
+This package, the RabbitMQ server, is licensed under the MPL 2.0. For the
+MPL 2.0, please see LICENSE-MPL-RabbitMQ.
+
+If you have any questions regarding licensing, please contact us at
+info@rabbitmq.com. \ No newline at end of file
diff --git a/packaging/debs/Debian/debian/copyright b/deps/rabbitmq_stream_common/LICENSE-MPL-RabbitMQ
index b5b083d80a..c4b20dbc66 100644
--- a/packaging/debs/Debian/debian/copyright
+++ b/deps/rabbitmq_stream_common/LICENSE-MPL-RabbitMQ
@@ -1,17 +1,3 @@
-Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
-Upstream-Name: rabbitmq-server
-Upstream-Contact: Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-Source: https://github.com/rabbitmq/rabbitmq-server
-
-Files: *
-Copyright: 2007-2020 VMware, Inc. or its affiliates.
-License: MPL-2.0
-
-Files: deps/rabbit_common/src/rabbit_numerical.erl deps/rabbit_common/src/rabbit_http_util.erl
-Copyright: 2007 Mochi Media, Inc.
-License: MIT
-
-License: MPL-2.0
Mozilla Public License Version 2.0
==================================
@@ -378,28 +364,7 @@ file, then You may include the notice in a location (such as a LICENSE
file in a relevant directory) where a recipient would be likely to look
for such a notice.
-Copyright (c) 2007-2020 VMware, Inc. or its affiliates.
-
-License: MIT
- This is the MIT license
- .
- Copyright (c) 2007 Mochi Media, Inc
- .
- Permission is hereby granted, free of charge, to any person obtaining
- a copy of this software and associated documentation files (the
- "Software"), to deal in the Software without restriction, including
- without limitation the rights to use, copy, modify, merge, publish,
- distribute, sublicense, and/or sell copies of the Software, and to
- permit persons to whom the Software is furnished to do so, subject to
- the following conditions
- :
- The above copyright notice and this permission notice shall be included
- in all copies or substantial portions of the Software
- .
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
- MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
- IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
- CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
- TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
- SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+The Original Code is RabbitMQ.
+
+The Initial Developer of the Original Code is Pivotal Software, Inc.
+Copyright (c) 2020 VMware, Inc or its affiliates. All rights reserved. \ No newline at end of file
diff --git a/deps/rabbitmq_stream_common/Makefile b/deps/rabbitmq_stream_common/Makefile
new file mode 100644
index 0000000000..726ede0394
--- /dev/null
+++ b/deps/rabbitmq_stream_common/Makefile
@@ -0,0 +1,24 @@
+PROJECT = rabbitmq_stream_common
+PROJECT_DESCRIPTION = RabbitMQ Stream Common
+PROJECT_MOD = rabbit_stream_common
+
+define PROJECT_ENV
+[
+]
+endef
+
+
+DEPS =
+TEST_DEPS = rabbitmq_ct_helpers rabbitmq_ct_client_helpers
+
+DEP_EARLY_PLUGINS = rabbit_common/mk/rabbitmq-early-plugin.mk
+DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
+
+# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be
+# reviewed and merged.
+
+ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
+ERLANG_MK_COMMIT = rabbitmq-tmp
+
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
diff --git a/deps/rabbitmq_stream_common/README.adoc b/deps/rabbitmq_stream_common/README.adoc
new file mode 100644
index 0000000000..4260d12d44
--- /dev/null
+++ b/deps/rabbitmq_stream_common/README.adoc
@@ -0,0 +1,23 @@
+= RabbitMQ Stream Common Plugin
+
+== Project Maturity
+
+The project is in early stages of development and is considered experimental.
+It is not ready for production use.
+
+== Support
+
+* For questions: https://groups.google.com/forum/#!forum/rabbitmq-users[RabbitMQ Users]
+* For bugs and feature requests: https://github.com/rabbitmq/rabbitmq-server/issues[GitHub Issues]
+
+The project is currently under development; there is no guarantee yet that it will be maintained and supported
+in the future (read: you are welcome to experiment with it and give feedback, but please do not base
+your whole business on it).
+
+== Licensing
+
+Released under the link:LICENSE-MPL-RabbitMQ[MPL 2.0].
+
+== Copyright
+
+(c) 2020-2021 VMware, Inc. or its affiliates. \ No newline at end of file
diff --git a/deps/rabbitmq_stream_common/include/rabbit_stream.hrl b/deps/rabbitmq_stream_common/include/rabbit_stream.hrl
new file mode 100644
index 0000000000..e552cc7a02
--- /dev/null
+++ b/deps/rabbitmq_stream_common/include/rabbit_stream.hrl
@@ -0,0 +1,112 @@
+-define(COMMAND_DECLARE_PUBLISHER, 1).
+-define(COMMAND_PUBLISH, 2).
+-define(COMMAND_PUBLISH_CONFIRM, 3).
+-define(COMMAND_PUBLISH_ERROR, 4).
+-define(COMMAND_QUERY_PUBLISHER_SEQUENCE, 5).
+-define(COMMAND_DELETE_PUBLISHER, 6).
+-define(COMMAND_SUBSCRIBE, 7).
+-define(COMMAND_DELIVER, 8).
+-define(COMMAND_CREDIT, 9).
+-define(COMMAND_STORE_OFFSET, 10).
+-define(COMMAND_QUERY_OFFSET, 11).
+-define(COMMAND_UNSUBSCRIBE, 12).
+-define(COMMAND_CREATE_STREAM, 13).
+-define(COMMAND_DELETE_STREAM, 14).
+-define(COMMAND_METADATA, 15).
+-define(COMMAND_METADATA_UPDATE, 16).
+-define(COMMAND_PEER_PROPERTIES, 17).
+-define(COMMAND_SASL_HANDSHAKE, 18).
+-define(COMMAND_SASL_AUTHENTICATE, 19).
+-define(COMMAND_TUNE, 20).
+-define(COMMAND_OPEN, 21).
+-define(COMMAND_CLOSE, 22).
+-define(COMMAND_HEARTBEAT, 23).
+-define(COMMAND_ROUTE, 24).
+-define(COMMAND_PARTITIONS, 25).
+
+-define(REQUEST, 0).
+-define(RESPONSE, 1).
+
+-define(VERSION_1, 1).
+
+-define(RESPONSE_CODE_OK, 1).
+-define(RESPONSE_CODE_STREAM_DOES_NOT_EXIST, 2).
+-define(RESPONSE_CODE_SUBSCRIPTION_ID_ALREADY_EXISTS, 3).
+-define(RESPONSE_CODE_SUBSCRIPTION_ID_DOES_NOT_EXIST, 4).
+-define(RESPONSE_CODE_STREAM_ALREADY_EXISTS, 5).
+-define(RESPONSE_CODE_STREAM_NOT_AVAILABLE, 6).
+-define(RESPONSE_SASL_MECHANISM_NOT_SUPPORTED, 7).
+-define(RESPONSE_AUTHENTICATION_FAILURE, 8).
+-define(RESPONSE_SASL_ERROR, 9).
+-define(RESPONSE_SASL_CHALLENGE, 10).
+-define(RESPONSE_SASL_AUTHENTICATION_FAILURE_LOOPBACK, 11).
+-define(RESPONSE_VHOST_ACCESS_FAILURE, 12).
+-define(RESPONSE_CODE_UNKNOWN_FRAME, 13).
+-define(RESPONSE_CODE_FRAME_TOO_LARGE, 14).
+-define(RESPONSE_CODE_INTERNAL_ERROR, 15).
+-define(RESPONSE_CODE_ACCESS_REFUSED, 16).
+-define(RESPONSE_CODE_PRECONDITION_FAILED, 17).
+-define(RESPONSE_CODE_PUBLISHER_DOES_NOT_EXIST, 18).
+-define(RESPONSE_CODE_NO_OFFSET, 19).
+
+
+-define(OFFSET_TYPE_FIRST, 1).
+-define(OFFSET_TYPE_LAST, 2).
+-define(OFFSET_TYPE_NEXT, 3).
+-define(OFFSET_TYPE_OFFSET, 4).
+-define(OFFSET_TYPE_TIMESTAMP, 5).
+
+-define(DEFAULT_INITIAL_CREDITS, 50000).
+-define(DEFAULT_CREDITS_REQUIRED_FOR_UNBLOCKING, 12500).
+-define(DEFAULT_FRAME_MAX, 1048576). %% 1 MiB
+-define(DEFAULT_HEARTBEAT, 60). %% 60 seconds
+
+-define(STREAM_QUEUE_TYPE, rabbit_stream_queue).
+
+-define(INFO_ITEMS,
+ [conn_name,
+ port,
+ peer_port,
+ host,
+ peer_cert_issuer,
+ peer_cert_subject,
+ peer_cert_validity,
+ peer_host,
+ user,
+ vhost,
+ subscriptions,
+ ssl,
+ ssl_cipher,
+ ssl_hash,
+ ssl_key_exchange,
+ ssl_protocol,
+ connection_state,
+ auth_mechanism,
+ heartbeat,
+ frame_max,
+ client_properties,
+ connected_at
+ ]).
+
+-define(CONSUMER_INFO_ITEMS, [
+ connection_pid,
+ subscription_id,
+ stream,
+ messages_consumed,
+ offset,
+ offset_lag,
+ credits,
+ properties
+ ]).
+
+-define(PUBLISHER_INFO_ITEMS, [
+ connection_pid,
+ publisher_id,
+ stream,
+ reference,
+ messages_published,
+ messages_confirmed,
+ messages_errored
+ ]).
+
+-define(STREAM_GUIDE_URL, <<"https://rabbitmq.com/stream.html">>).
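
To illustrate how code that includes this header is expected to use these macros, here is a small hypothetical sketch (not part of the patch): a function that maps a parsed create_stream response, as produced by rabbit_stream_core, onto Erlang terms. The function name and the include_lib path are assumptions.

-include_lib("rabbitmq_stream_common/include/rabbit_stream.hrl").

%% Hypothetical helper: turn a {response, _, {create_stream, Code}} command
%% into an Erlang-friendly result using the response code macros above.
create_stream_result({response, _CorrelationId, {create_stream, ?RESPONSE_CODE_OK}}) ->
    ok;
create_stream_result({response, _CorrelationId,
                      {create_stream, ?RESPONSE_CODE_STREAM_ALREADY_EXISTS}}) ->
    {error, stream_already_exists};
create_stream_result({response, _CorrelationId, {create_stream, Code}}) ->
    {error, {unexpected_response_code, Code}}.
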
diff --git a/deps/rabbitmq_stream_common/rebar.config b/deps/rabbitmq_stream_common/rebar.config
new file mode 100644
index 0000000000..17a6ccdc63
--- /dev/null
+++ b/deps/rabbitmq_stream_common/rebar.config
@@ -0,0 +1,12 @@
+{plugins, [rebar3_format]}.
+
+{format, [
+ {files, ["src/*.erl", "test/*.erl"]},
+ {formatter, default_formatter},
+ {options, #{
+ paper => 80,
+ ribbon => 70,
+ inline_attributes => {when_under, 1},
+ inline_items => {when_under, 4}
+ }}
+]}. \ No newline at end of file
diff --git a/deps/rabbitmq_stream_common/src/rabbit_stream_core.erl b/deps/rabbitmq_stream_common/src/rabbit_stream_core.erl
new file mode 100644
index 0000000000..d16b887df1
--- /dev/null
+++ b/deps/rabbitmq_stream_common/src/rabbit_stream_core.erl
@@ -0,0 +1,1005 @@
+-module(rabbit_stream_core).
+
+-include("rabbit_stream.hrl").
+
+-export([init/1,
+ incoming_data/2,
+ next_command/1,
+ all_commands/1,
+ frame/1,
+ parse_command/1]).
+
+%% holds static or rarely changing fields
+-record(cfg, {}).
+-record(?MODULE,
+ {cfg :: #cfg{},
+ frames = [] :: [iodata()],
+ %% partial data
+ data ::
+ undefined |
+ %% this is only if the binary is smaller than 4 bytes
+ binary() |
+ {RemainingBytes :: non_neg_integer(), iodata()},
+ commands = queue:new() :: queue:queue(command())}).
+
+-opaque state() :: #?MODULE{}.
+
+%% for parsing
+-define(STRING(Size, Str), Size:16, Str:Size / binary).
+%% for pickling
+-define(STRING(Str), (byte_size(Str)):16, Str / binary).
+-define(DATASTR(Str), (byte_size(Str)):32, Str / binary).
+
+-export_type([state/0]).
+
+-type correlation_id() :: non_neg_integer().
+%% publishing sequence number
+-type publishing_id() :: non_neg_integer().
+-type publisher_id() :: 0..255.
+-type subscription_id() :: 0..255.
+-type writer_ref() :: binary().
+-type stream_name() :: binary().
+-type offset_spec() :: osiris:offset_spec().
+-type response_code() ::
+ ?RESPONSE_CODE_OK |
+ ?RESPONSE_CODE_STREAM_DOES_NOT_EXIST |
+ ?RESPONSE_CODE_SUBSCRIPTION_ID_ALREADY_EXISTS |
+ ?RESPONSE_CODE_SUBSCRIPTION_ID_DOES_NOT_EXIST |
+ ?RESPONSE_SASL_MECHANISM_NOT_SUPPORTED |
+ ?RESPONSE_AUTHENTICATION_FAILURE |
+ ?RESPONSE_SASL_ERROR |
+ ?RESPONSE_SASL_CHALLENGE |
+ ?RESPONSE_SASL_AUTHENTICATION_FAILURE_LOOPBACK |
+ ?RESPONSE_VHOST_ACCESS_FAILURE |
+ ?RESPONSE_CODE_UNKNOWN_FRAME |
+ ?RESPONSE_CODE_FRAME_TOO_LARGE |
+ ?RESPONSE_CODE_INTERNAL_ERROR |
+ ?RESPONSE_CODE_ACCESS_REFUSED |
+ ?RESPONSE_CODE_PRECONDITION_FAILED |
+ ?RESPONSE_CODE_PUBLISHER_DOES_NOT_EXIST |
+ ?RESPONSE_CODE_NO_OFFSET.
+-type error_code() :: response_code().
+-type sequence() :: non_neg_integer().
+-type credit() :: non_neg_integer().
+-type offset_ref() :: binary().
+-type endpoint() :: {Host :: binary(), Port :: non_neg_integer()}.
+-type command() ::
+ {publish,
+ publisher_id(),
+ MessageCount :: non_neg_integer(),
+ Payload :: binary() | iolist()} |
+ {publish_confirm, publisher_id(), [publishing_id()]} |
+ {publish_error, publisher_id(), error_code(), [publishing_id()]} |
+ {deliver, subscription_id(), Chunk :: binary()} |
+ {credit, subscription_id(), Credit :: non_neg_integer()} |
+ {metadata_update, stream_name(), response_code()} |
+ {store_offset, offset_ref(), stream_name(), osiris:offset()} |
+ heartbeat |
+ {tune, FrameMax :: non_neg_integer(),
+ HeartBeat :: non_neg_integer()} |
+ {request, correlation_id(),
+ {declare_publisher, publisher_id(), writer_ref(), stream_name()} |
+ {query_publisher_sequence, writer_ref(), stream_name()} |
+ {delete_publisher, publisher_id()} |
+ {subscribe,
+ subscription_id(),
+ stream_name(),
+ offset_spec(),
+ credit(),
+ Properties :: #{binary() => binary()}} |
+ {query_offset, offset_ref(), stream_name()} |
+ {unsubscribe, subscription_id()} |
+ {create_stream, stream_name(), Args :: #{binary() => binary()}} |
+ {delete_stream, stream_name()} |
+ {metadata, [stream_name()]} |
+ {peer_properties, #{binary() => binary()}} |
+ sasl_handshake |
+ {sasl_authenticate, Mechanism :: binary(), SaslFragment :: binary()} |
+ {open, VirtualHost :: binary()} |
+ {close, Code :: non_neg_integer(), Reason :: binary()} |
+ {route, RoutingKey :: binary(), SuperStream :: binary()} |
+ {partitions, SuperStream :: binary()}} |
+ {response, correlation_id(),
+ {declare_publisher |
+ delete_publisher |
+ subscribe |
+ unsubscribe |
+ create_stream |
+ delete_stream |
+ close |
+ sasl_authenticate,
+ response_code()} |
+ {query_publisher_sequence, response_code(), sequence()} |
+ {open, response_code(), #{binary() => binary()}} |
+ {query_offset, response_code(), osiris:offset()} |
+ {metadata, Endpoints :: [endpoint()],
+ Metadata ::
+ #{stream_name() =>
+ stream_not_found | stream_not_available |
+ {Leader :: endpoint() | undefined, Replicas :: [endpoint()]}}} |
+ {peer_properties, response_code(), #{binary() => binary()}} |
+ {sasl_handshake, response_code(), Mechanisms :: [binary()]} |
+ {sasl_authenticate, response_code(), Challenge :: binary()} |
+ {tune, FrameMax :: non_neg_integer(),
+ HeartBeat :: non_neg_integer()} |
+ {credit, response_code(), subscription_id()} |
+ {route, response_code(), stream_name()} |
+ {partitions, response_code(), [stream_name()]}} |
+ {unknown, binary()}.
+
+-spec init(term()) -> state().
+init(_) ->
+ #?MODULE{cfg = #cfg{}}.
+
+-spec next_command(state()) -> {command(), state()} | empty.
+next_command(#?MODULE{commands = Commands0} = State) ->
+ case queue:out(Commands0) of
+ {{value, Cmd}, Commands} ->
+ {Cmd, State#?MODULE{commands = Commands}};
+ {empty, _} ->
+ empty
+ end.
+
+-spec all_commands(state()) -> {[command()], state()}.
+all_commands(#?MODULE{commands = Commands0} = State) ->
+ {queue:to_list(Commands0), State#?MODULE{commands = queue:new()}}.
+
+%% splits incoming socket data into complete frames and queues the parsed commands on the returned state
+-spec incoming_data(binary(), state()) -> state().
+%% TODO: check max frame size
+incoming_data(<<>>,
+ #?MODULE{frames = Frames, commands = Commands} = State) ->
+ State#?MODULE{frames = [], commands = parse_frames(Frames, Commands)};
+incoming_data(<<Size:32, Frame:Size/binary, Rem/binary>>,
+ #?MODULE{frames = Frames, data = undefined} = State) ->
+ incoming_data(Rem,
+ State#?MODULE{frames = [Frame | Frames], data = undefined});
+incoming_data(<<Size:32, Rem/binary>>,
+ #?MODULE{frames = Frames,
+ data = undefined,
+ commands = Commands} =
+ State) ->
+ %% not enough data to complete frame, stash and await more data
+ State#?MODULE{frames = [],
+ data = {Size - byte_size(Rem), Rem},
+ commands = parse_frames(Frames, Commands)};
+incoming_data(Data,
+ #?MODULE{frames = Frames,
+ data = undefined,
+ commands = Commands} =
+ State)
+ when byte_size(Data) < 4 ->
+ %% not enough data to even know the size required
+ %% just stash binary and hit last clause next
+ State#?MODULE{frames = [],
+ data = Data,
+ commands = parse_frames(Frames, Commands)};
+incoming_data(Data,
+ #?MODULE{frames = Frames,
+ data = {Size, Partial},
+ commands = Commands} =
+ State) ->
+ case Data of
+ <<Part:Size/binary, Rem/binary>> ->
+ incoming_data(Rem,
+ State#?MODULE{frames =
+ [append_data(Partial, Part)
+ | Frames],
+ data = undefined});
+ Rem ->
+ State#?MODULE{frames = [],
+ data =
+ {Size - byte_size(Rem),
+ append_data(Partial, Rem)},
+ commands = parse_frames(Frames, Commands)}
+ end;
+incoming_data(Data, #?MODULE{data = Partial} = State)
+ when is_binary(Partial) ->
+ incoming_data(<<Partial/binary, Data/binary>>,
+ State#?MODULE{data = undefined}).
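+
+%% A minimal usage sketch of the API above; `Socket', `Data' and
+%% `handle_commands/2' are hypothetical names used only for illustration
+%% and are not part of this module:
+%%
+%%   State0 = rabbit_stream_core:init(undefined),
+%%   receive
+%%       {tcp, Socket, Data} ->
+%%           State1 = rabbit_stream_core:incoming_data(Data, State0),
+%%           {Commands, State2} = rabbit_stream_core:all_commands(State1),
+%%           %% each element of Commands is a command() such as heartbeat
+%%           %% or {request, CorrelationId, Body}
+%%           handle_commands(Commands, State2)
+%%   end.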
+
+parse_frames(Frames, Queue) ->
+ lists:foldr(fun(Frame, Acc) -> queue:in(parse_command(Frame), Acc)
+ end,
+ Queue, Frames).
+
+-spec frame(command()) -> iodata().
+frame({publish_confirm, PublisherId, PublishingIds}) ->
+ PubIds =
+ lists:foldl(fun(PublishingId, Acc) -> <<Acc/binary, PublishingId:64>>
+ end,
+ <<>>, PublishingIds),
+ PublishingIdCount = length(PublishingIds),
+ wrap_in_frame([<<?REQUEST:1,
+ ?COMMAND_PUBLISH_CONFIRM:15,
+ ?VERSION_1:16,
+ PublisherId:8,
+ PublishingIdCount:32>>,
+ PubIds]);
+frame({publish, PublisherId, MessageCount, Payload}) ->
+ wrap_in_frame([<<?REQUEST:1,
+ ?COMMAND_PUBLISH:15,
+ ?VERSION_1:16,
+ PublisherId:8,
+ MessageCount:32>>,
+ Payload]);
+frame({deliver, SubscriptionId, Chunk}) ->
+ wrap_in_frame([<<?REQUEST:1,
+ ?COMMAND_DELIVER:15,
+ ?VERSION_1:16,
+ SubscriptionId:8>>,
+ Chunk]);
+frame({metadata_update, Stream, ResponseCode}) ->
+ StreamSize = byte_size(Stream),
+ wrap_in_frame(<<?REQUEST:1,
+ ?COMMAND_METADATA_UPDATE:15,
+ ?VERSION_1:16,
+ ResponseCode:16,
+ StreamSize:16,
+ Stream/binary>>);
+frame({store_offset, Reference, Stream, Offset}) ->
+ ReferenceSize = byte_size(Reference),
+ StreamSize = byte_size(Stream),
+ wrap_in_frame(<<?REQUEST:1,
+ ?COMMAND_STORE_OFFSET:15,
+ ?VERSION_1:16,
+ ReferenceSize:16,
+ Reference/binary,
+ StreamSize:16,
+ Stream/binary,
+ Offset:64>>);
+frame(heartbeat) ->
+ wrap_in_frame(<<?REQUEST:1, ?COMMAND_HEARTBEAT:15, ?VERSION_1:16>>);
+frame({credit, SubscriptionId, Credit}) ->
+ wrap_in_frame(<<?REQUEST:1,
+ ?COMMAND_CREDIT:15,
+ ?VERSION_1:16,
+ SubscriptionId:8,
+ Credit:16/signed>>);
+frame({tune, FrameMax, Heartbeat}) ->
+ %% tune can also be a response, which is weird
+ wrap_in_frame(<<?REQUEST:1,
+ ?COMMAND_TUNE:15,
+ ?VERSION_1:16,
+ FrameMax:32,
+ Heartbeat:32>>);
+frame({publish_error, PublisherId, ErrCode, PublishingIds}) ->
+ Details =
+ iolist_to_binary(lists:foldr(fun(PubId, Acc) ->
+ [<<PubId:64, ErrCode:16>> | Acc]
+ end,
+ [], PublishingIds)),
+ wrap_in_frame(<<?REQUEST:1,
+ ?COMMAND_PUBLISH_ERROR:15,
+ ?VERSION_1:16,
+ PublisherId:8,
+ (length(PublishingIds)):32,
+ Details/binary>>);
+frame({request, CorrelationId, Body}) ->
+ {CmdTag, BodyBin} = request_body(Body),
+ CmdId = command_id(CmdTag),
+ wrap_in_frame([<<?REQUEST:1,
+ CmdId:15,
+ ?VERSION_1:16,
+ CorrelationId:32>>,
+ BodyBin]);
+frame({response, _CorrelationId, {credit, Code, SubscriptionId}}) ->
+ %% special case: the credit response does not write a correlation id!
+ wrap_in_frame(<<?RESPONSE:1,
+ ?COMMAND_CREDIT:15,
+ ?VERSION_1:16,
+ Code:16,
+ SubscriptionId:8>>);
+frame({response, CorrelationId, {Tag, Code}})
+ when is_integer(Code) andalso is_atom(Tag) ->
+ %% standard response without payload
+ CmdId = command_id(Tag),
+ wrap_in_frame(<<?RESPONSE:1,
+ CmdId:15,
+ ?VERSION_1:16,
+ CorrelationId:32,
+ Code:16>>);
+frame({response, _Corr, {tune, FrameMax, Heartbeat}}) ->
+ wrap_in_frame(<<?RESPONSE:1,
+ ?COMMAND_TUNE:15,
+ ?VERSION_1:16,
+ FrameMax:32,
+ Heartbeat:32>>);
+frame({response, Corr, {open, ResponseCode, ConnectionProperties}}) ->
+ ConnPropsCount = map_size(ConnectionProperties),
+ ConnectionPropertiesBin =
+ case ConnPropsCount of
+ 0 ->
+ <<>>;
+ _ ->
+ PropsBin = generate_map(ConnectionProperties),
+ [<<ConnPropsCount:32>>, PropsBin]
+ end,
+ wrap_in_frame([<<?RESPONSE:1,
+ ?COMMAND_OPEN:15,
+ ?VERSION_1:16,
+ Corr:32,
+ ResponseCode:16>>,
+ ConnectionPropertiesBin]);
+frame({response, CorrelationId, Body}) ->
+ {CommandId, BodyBin} = response_body(Body),
+ wrap_in_frame([<<?RESPONSE:1,
+ CommandId:15,
+ ?VERSION_1:16,
+ CorrelationId:32>>,
+ BodyBin]);
+frame(Command) ->
+ exit({not_impl, Command}).
+
+response_body({peer_properties, Code, Props}) ->
+ Init = <<Code:16, (maps:size(Props)):32>>,
+ {command_id(peer_properties),
+ maps:fold(fun(Key, Value, Acc) ->
+ KeySize = byte_size(Key),
+ ValueSize = byte_size(Value),
+ <<Acc/binary,
+ KeySize:16,
+ Key:KeySize/binary,
+ ValueSize:16,
+ Value:ValueSize/binary>>
+ end,
+ Init, Props)};
+response_body({sasl_handshake, Code, Mechanisms}) ->
+ MechanismsBin =
+ lists:foldl(fun(M, Acc) ->
+ Size = byte_size(M),
+ <<Acc/binary, Size:16, M:Size/binary>>
+ end,
+ <<>>, Mechanisms),
+ MechanismsCount = length(Mechanisms),
+ {command_id(sasl_handshake),
+ <<Code:16, MechanismsCount:32, MechanismsBin/binary>>};
+response_body({sasl_authenticate = Tag, Code, Challenge}) ->
+ {command_id(Tag),
+ case Challenge of
+ <<>> ->
+ <<Code:16>>;
+ _ ->
+ <<Code:16, ?STRING(Challenge)>>
+ end};
+response_body({query_publisher_sequence = Tag, Code, Sequence}) ->
+ {command_id(Tag), <<Code:16, Sequence:64>>};
+response_body({query_offset = Tag, Code, Offset}) ->
+ {command_id(Tag), <<Code:16, Offset:64>>};
+response_body({metadata = Tag, Endpoints, Metadata}) ->
+ NumEps = length(Endpoints),
+ {_, EndpointsBin} =
+ lists:foldl(fun({Host, Port}, {Index, Acc}) ->
+ HostLength = byte_size(Host),
+ {Index + 1,
+ <<Acc/binary,
+ Index:16,
+ HostLength:16,
+ Host:HostLength/binary,
+ Port:32>>}
+ end,
+ {0, <<NumEps:32>>}, Endpoints),
+ MetadataBin =
+ maps:fold(fun (Stream, Info, Acc) when is_atom(Info) ->
+ Code =
+ case Info of
+ stream_not_found ->
+ ?RESPONSE_CODE_STREAM_DOES_NOT_EXIST;
+ stream_not_available ->
+ ?RESPONSE_CODE_STREAM_NOT_AVAILABLE
+ end,
+ StreamLength = byte_size(Stream),
+ [<<StreamLength:16,
+ Stream/binary,
+ Code:16,
+ (-1):16,
+ 0:32>>
+ | Acc];
+ (Stream, {LeaderEp, ReplicaEps}, Acc) ->
+ LeaderIndex = element_index(LeaderEp, Endpoints),
+ ReplicasBin =
+ iolist_to_binary(lists:foldr(fun(Repl, A) ->
+ I =
+ element_index(Repl,
+ Endpoints),
+ [<<I:16>> | A]
+ end,
+ [], ReplicaEps)),
+ StreamLength = byte_size(Stream),
+ ReplicasCount = length(ReplicaEps),
+ [<<StreamLength:16,
+ Stream/binary,
+ ?RESPONSE_CODE_OK:16,
+ LeaderIndex:16,
+ ReplicasCount:32,
+ ReplicasBin/binary>>
+ | Acc]
+ end,
+ [], Metadata),
+
+ NumStreams = map_size(Metadata),
+ {command_id(Tag), [EndpointsBin, <<NumStreams:32>>, MetadataBin]};
+response_body({route = Tag, Code, Stream}) ->
+ {command_id(Tag), <<Code:16, ?STRING(Stream)>>};
+response_body({partitions = Tag, Code, Streams}) ->
+ StreamsBin = [<<?STRING(Stream)>> || Stream <- Streams],
+ {command_id(Tag), [<<Code:16, (length(Streams)):32>>, StreamsBin]}.
+
+request_body({declare_publisher = Tag,
+ PublisherId,
+ WriterRef,
+ Stream}) ->
+ {Tag, <<PublisherId:8, ?STRING(WriterRef), ?STRING(Stream)>>};
+request_body({query_publisher_sequence = Tag, WriterRef, Stream}) ->
+ {Tag, <<?STRING(WriterRef), ?STRING(Stream)>>};
+request_body({delete_publisher = Tag, PublisherId}) ->
+ {Tag, <<PublisherId:8>>};
+request_body({subscribe,
+ SubscriptionId,
+ Stream,
+ OffsetSpec,
+ Credit}) ->
+ request_body({subscribe,
+ SubscriptionId,
+ Stream,
+ OffsetSpec,
+ Credit,
+ #{}});
+request_body({subscribe = Tag,
+ SubscriptionId,
+ Stream,
+ OffsetSpec,
+ Credit,
+ Properties}) ->
+ Data =
+ case OffsetSpec of
+ first ->
+ <<?OFFSET_TYPE_FIRST:16, Credit:16>>;
+ last ->
+ <<?OFFSET_TYPE_LAST:16, Credit:16>>;
+ next ->
+ <<?OFFSET_TYPE_NEXT:16, Credit:16>>;
+ Offset when is_integer(Offset) ->
+ <<?OFFSET_TYPE_OFFSET:16, Offset:64/unsigned, Credit:16>>;
+ {timestamp, Timestamp} ->
+ <<?OFFSET_TYPE_TIMESTAMP:16, Timestamp:64/signed, Credit:16>>
+ end,
+ PropertiesBin =
+ case map_size(Properties) of
+ 0 ->
+ <<>>;
+ _ ->
+ PropsBin = generate_map(Properties),
+ [<<(map_size(Properties)):32>>, PropsBin]
+ end,
+ {Tag,
+ [<<SubscriptionId:8, ?STRING(Stream), Data/binary>> | PropertiesBin]};
+request_body({store_offset = Tag, OffsetRef, Stream, Offset}) ->
+ {Tag, <<?STRING(OffsetRef), ?STRING(Stream), Offset:64>>};
+request_body({query_offset = Tag, OffsetRef, Stream}) ->
+ {Tag, <<?STRING(OffsetRef), ?STRING(Stream)>>};
+request_body({unsubscribe = Tag, SubscriptionId}) ->
+ {Tag, <<SubscriptionId:8>>};
+request_body({create_stream = Tag, Stream, Args}) ->
+ ArgsBin = generate_map(Args),
+ {Tag, [<<?STRING(Stream), (map_size(Args)):32>>, ArgsBin]};
+request_body({delete_stream = Tag, Stream}) ->
+ {Tag, <<?STRING(Stream)>>};
+request_body({metadata = Tag, Streams}) ->
+ StreamsBin =
+ lists:foldr(fun(Stream, Acc) -> [<<?STRING(Stream)>> | Acc] end, [],
+ Streams),
+ {Tag, [<<(length(Streams)):32>>, StreamsBin]};
+request_body({peer_properties = Tag, Props}) ->
+ PropsBin = generate_map(Props),
+ {Tag, [<<(map_size(Props)):32>>, PropsBin]};
+request_body(sasl_handshake = Tag) ->
+ {Tag, <<>>};
+request_body({sasl_authenticate = Tag, Mechanism, SaslBin}) ->
+ {Tag,
+ case SaslBin of
+ <<>> ->
+ <<?STRING(Mechanism), (-1):32/signed>>;
+ _ ->
+ <<?STRING(Mechanism), ?DATASTR(SaslBin)>>
+ end};
+request_body({open = Tag, Vhost}) ->
+ {Tag, <<?STRING(Vhost)>>};
+request_body({close = Tag, Code, Reason}) ->
+ {Tag, <<Code:16, ?STRING(Reason)>>};
+request_body({route = Tag, RoutingKey, SuperStream}) ->
+ {Tag, <<?STRING(RoutingKey), ?STRING(SuperStream)>>};
+request_body({partitions = Tag, SuperStream}) ->
+ {Tag, <<?STRING(SuperStream)>>}.
+
+append_data(Prev, Data) when is_binary(Prev) ->
+ [Prev, Data];
+append_data(Prev, Data) when is_list(Prev) ->
+ Prev ++ [Data].
+
+wrap_in_frame(IOData) ->
+ Size = iolist_size(IOData),
+ [<<Size:32>> | IOData].
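+
+%% Illustration: every frame on the wire is the body prefixed by its size
+%% as a 32-bit big-endian integer. For example, frame(heartbeat) builds a
+%% 4-byte body, so the complete frame is <<0,0,0,4>> followed by the
+%% request flag, the heartbeat command id and the protocol version.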
+
+parse_command(<<?REQUEST:1, _:15, _/binary>> = Bin) ->
+ parse_request(Bin);
+parse_command(<<?RESPONSE:1, _:15, _/binary>> = Bin) ->
+ parse_response(Bin);
+parse_command(Data) when is_list(Data) ->
+ %% TODO: most commands are rare or small and likely to be a single
+ %% binary; however, publish and deliver should be parsed from the
+ %% iodata rather than being turned into a binary
+ parse_command(iolist_to_binary(Data)).
+
+-spec parse_request(binary()) -> command().
+parse_request(<<?REQUEST:1,
+ ?COMMAND_PUBLISH:15,
+ ?VERSION_1:16,
+ PublisherId:8/unsigned,
+ MessageCount:32,
+ Messages/binary>>) ->
+ {publish, PublisherId, MessageCount, Messages};
+parse_request(<<?REQUEST:1,
+ ?COMMAND_PUBLISH_CONFIRM:15,
+ ?VERSION_1:16,
+ PublisherId:8,
+ _Count:32,
+ PublishingIds/binary>>) ->
+ {publish_confirm, PublisherId, list_of_longs(PublishingIds)};
+parse_request(<<?REQUEST:1,
+ ?COMMAND_DELIVER:15,
+ ?VERSION_1:16,
+ SubscriptionId:8,
+ Chunk/binary>>) ->
+ {deliver, SubscriptionId, Chunk};
+parse_request(<<?REQUEST:1,
+ ?COMMAND_CREDIT:15,
+ ?VERSION_1:16,
+ SubscriptionId:8,
+ Credit:16/signed>>) ->
+ {credit, SubscriptionId, Credit};
+parse_request(<<?REQUEST:1,
+ ?COMMAND_PUBLISH_ERROR:15,
+ ?VERSION_1:16,
+ PublisherId:8,
+ _Count:32,
+ DetailsBin/binary>>) ->
+ %% TODO: change protocol to match
+ [{_, ErrCode} | _] = Details = list_of_longcodes(DetailsBin),
+ {PublishingIds, _} = lists:unzip(Details),
+ {publish_error, PublisherId, ErrCode, PublishingIds};
+parse_request(<<?REQUEST:1,
+ ?COMMAND_METADATA_UPDATE:15,
+ ?VERSION_1:16,
+ ResponseCode:16,
+ StreamSize:16,
+ Stream:StreamSize/binary>>) ->
+ {metadata_update, Stream, ResponseCode};
+parse_request(<<?REQUEST:1,
+ ?COMMAND_STORE_OFFSET:15,
+ ?VERSION_1:16,
+ ?STRING(RefSize, OffsetRef),
+ ?STRING(SSize, Stream),
+ Offset:64>>) ->
+ {store_offset, OffsetRef, Stream, Offset};
+parse_request(<<?REQUEST:1, ?COMMAND_HEARTBEAT:15, ?VERSION_1:16>>) ->
+ heartbeat;
+parse_request(<<?REQUEST:1,
+ ?COMMAND_DECLARE_PUBLISHER:15,
+ ?VERSION_1:16,
+ CorrelationId:32,
+ PublisherId:8,
+ ?STRING(WriterRefSize, WriterRef),
+ ?STRING(StreamSize, Stream)>>) ->
+ request(CorrelationId,
+ {declare_publisher, PublisherId, WriterRef, Stream});
+parse_request(<<?REQUEST:1,
+ ?COMMAND_QUERY_PUBLISHER_SEQUENCE:15,
+ ?VERSION_1:16,
+ CorrelationId:32,
+ ?STRING(WSize, WriterReference),
+ ?STRING(SSize, Stream)>>) ->
+ request(CorrelationId,
+ {query_publisher_sequence, WriterReference, Stream});
+parse_request(<<?REQUEST:1,
+ ?COMMAND_DELETE_PUBLISHER:15,
+ ?VERSION_1:16,
+ CorrelationId:32,
+ PublisherId:8>>) ->
+ request(CorrelationId, {delete_publisher, PublisherId});
+parse_request(<<?REQUEST:1,
+ ?COMMAND_SUBSCRIBE:15,
+ ?VERSION_1:16,
+ CorrelationId:32,
+ SubscriptionId:8,
+ ?STRING(StreamSize, Stream),
+ OffsetType:16/signed,
+ OffsetCreditProperties/binary>>) ->
+ {OffsetSpec, Credit, PropsBin} =
+ case OffsetType of
+ ?OFFSET_TYPE_FIRST ->
+ <<Crdt:16, PropertiesBin/binary>> = OffsetCreditProperties,
+ {first, Crdt, PropertiesBin};
+ ?OFFSET_TYPE_LAST ->
+ <<Crdt:16, PropertiesBin/binary>> = OffsetCreditProperties,
+ {last, Crdt, PropertiesBin};
+ ?OFFSET_TYPE_NEXT ->
+ <<Crdt:16, PropertiesBin/binary>> = OffsetCreditProperties,
+ {next, Crdt, PropertiesBin};
+ ?OFFSET_TYPE_OFFSET ->
+ <<Offset:64/unsigned, Crdt:16, PropertiesBin/binary>> =
+ OffsetCreditProperties,
+ {Offset, Crdt, PropertiesBin};
+ ?OFFSET_TYPE_TIMESTAMP ->
+ <<Timestamp:64/signed, Crdt:16, PropertiesBin/binary>> =
+ OffsetCreditProperties,
+ {{timestamp, Timestamp}, Crdt, PropertiesBin}
+ end,
+ Properties =
+ case PropsBin of
+ <<>> ->
+ #{};
+ <<_Count:32, Bin/binary>> ->
+ parse_map(Bin, #{});
+ _ ->
+ logger:warning("Incorrect binary for subscription properties: ~w",
+ [PropsBin]),
+ #{}
+ end,
+ request(CorrelationId,
+ {subscribe,
+ SubscriptionId,
+ Stream,
+ OffsetSpec,
+ Credit,
+ Properties});
+parse_request(<<?REQUEST:1,
+ ?COMMAND_QUERY_OFFSET:15,
+ ?VERSION_1:16,
+ CorrelationId:32,
+ ?STRING(RefSize, OffsetRef),
+ ?STRING(SSize, Stream)>>) ->
+ request(CorrelationId, {query_offset, OffsetRef, Stream});
+parse_request(<<?REQUEST:1,
+ ?COMMAND_UNSUBSCRIBE:15,
+ ?VERSION_1:16,
+ CorrelationId:32,
+ SubscriptionId:8>>) ->
+ request(CorrelationId, {unsubscribe, SubscriptionId});
+parse_request(<<?REQUEST:1,
+ ?COMMAND_CREATE_STREAM:15,
+ ?VERSION_1:16,
+ CorrelationId:32,
+ ?STRING(StreamSize, Stream),
+ _ArgumentsCount:32,
+ ArgumentsBinary/binary>>) ->
+ Args = parse_map(ArgumentsBinary, #{}),
+ request(CorrelationId, {create_stream, Stream, Args});
+parse_request(<<?REQUEST:1,
+ ?COMMAND_DELETE_STREAM:15,
+ ?VERSION_1:16,
+ CorrelationId:32,
+ ?STRING(StreamSize, Stream)>>) ->
+ request(CorrelationId, {delete_stream, Stream});
+parse_request(<<?REQUEST:1,
+ ?COMMAND_METADATA:15,
+ ?VERSION_1:16,
+ CorrelationId:32,
+ _StreamCount:32,
+ BinaryStreams/binary>>) ->
+ Streams = list_of_strings(BinaryStreams),
+ request(CorrelationId, {metadata, Streams});
+parse_request(<<?REQUEST:1,
+ ?COMMAND_PEER_PROPERTIES:15,
+ ?VERSION_1:16,
+ CorrelationId:32,
+ _PropertiesCount:32,
+ PropertiesBinary/binary>>) ->
+ Props = parse_map(PropertiesBinary, #{}),
+ request(CorrelationId, {peer_properties, Props});
+parse_request(<<?REQUEST:1,
+ ?COMMAND_SASL_HANDSHAKE:15,
+ ?VERSION_1:16,
+ CorrelationId:32>>) ->
+ request(CorrelationId, sasl_handshake);
+parse_request(<<?REQUEST:1,
+ ?COMMAND_SASL_AUTHENTICATE:15,
+ ?VERSION_1:16,
+ CorrelationId:32,
+ ?STRING(MechanismSize, Mechanism),
+ SaslFragment/binary>>) ->
+ SaslBin =
+ case SaslFragment of
+ <<(-1):32/signed>> ->
+ <<>>;
+ <<SaslBinaryLength:32, SaslBinary:SaslBinaryLength/binary>> ->
+ SaslBinary
+ end,
+ request(CorrelationId, {sasl_authenticate, Mechanism, SaslBin});
+parse_request(<<?REQUEST:1,
+ ?COMMAND_TUNE:15,
+ ?VERSION_1:16,
+ FrameMax:32,
+ Heartbeat:32>>) ->
+ {tune, FrameMax, Heartbeat};
+parse_request(<<?REQUEST:1,
+ ?COMMAND_OPEN:15,
+ ?VERSION_1:16,
+ CorrelationId:32,
+ ?STRING(VhostSize, VirtualHost)>>) ->
+ request(CorrelationId, {open, VirtualHost});
+parse_request(<<?REQUEST:1,
+ ?COMMAND_CLOSE:15,
+ ?VERSION_1:16,
+ CorrelationId:32,
+ CloseCode:16,
+ ?STRING(ReasonSize, Reason)>>) ->
+ request(CorrelationId, {close, CloseCode, Reason});
+parse_request(<<?REQUEST:1,
+ ?COMMAND_ROUTE:15,
+ ?VERSION_1:16,
+ CorrelationId:32,
+ ?STRING(RKeySize, RoutingKey),
+ ?STRING(StreamSize, SuperStream)>>) ->
+ request(CorrelationId, {route, RoutingKey, SuperStream});
+parse_request(<<?REQUEST:1,
+ ?COMMAND_PARTITIONS:15,
+ ?VERSION_1:16,
+ CorrelationId:32,
+ ?STRING(StreamSize, SuperStream)>>) ->
+ request(CorrelationId, {partitions, SuperStream});
+parse_request(Bin) ->
+ {unknown, Bin}.
+
+parse_response(<<?RESPONSE:1,
+ CommandId:15,
+ ?VERSION_1:16,
+ CorrelationId:32,
+ ResponseCode:16>>) ->
+ {response, CorrelationId,
+ {parse_command_id(CommandId), ResponseCode}};
+parse_response(<<?RESPONSE:1,
+ ?COMMAND_TUNE:15,
+ ?VERSION_1:16,
+ FrameMax:32,
+ Heartbeat:32>>) ->
+ %% fake correlation id
+ {response, 0, {tune, FrameMax, Heartbeat}};
+parse_response(<<?RESPONSE:1,
+ CommandId:15,
+ ?VERSION_1:16,
+ CorrelationId:32,
+ Data/binary>>) ->
+ {response, CorrelationId, parse_response_body(CommandId, Data)};
+parse_response(<<?RESPONSE:1,
+ ?COMMAND_CREDIT:15,
+ ?VERSION_1:16,
+ ResponseCode:16,
+ SubscriptionId:8>>) ->
+ {response, 0, {credit, ResponseCode, SubscriptionId}};
+parse_response(Bin) ->
+ {unknown, Bin}.
+
+parse_response_body(?COMMAND_OPEN, <<ResponseCode:16>>) ->
+ {open, ResponseCode, #{}};
+parse_response_body(?COMMAND_OPEN,
+ <<ResponseCode:16, _ConnectionPropertiesCount:32,
+ ConnectionProperties/binary>>) ->
+ {open, ResponseCode, parse_map(ConnectionProperties, #{})};
+parse_response_body(?COMMAND_QUERY_PUBLISHER_SEQUENCE,
+ <<ResponseCode:16, Sequence:64>>) ->
+ {query_publisher_sequence, ResponseCode, Sequence};
+parse_response_body(?COMMAND_QUERY_OFFSET,
+ <<ResponseCode:16, Offset:64>>) ->
+ {query_offset, ResponseCode, Offset};
+parse_response_body(?COMMAND_METADATA,
+ <<NumNodes:32, Data/binary>>) ->
+ {NodesLookup, <<_:32, MetadataBin/binary>>} =
+ parse_nodes(Data, NumNodes, #{}),
+ Nodes = maps:values(NodesLookup),
+ Metadata = parse_meta(MetadataBin, NodesLookup, #{}),
+ {metadata, Nodes, Metadata};
+parse_response_body(?COMMAND_PEER_PROPERTIES,
+ <<ResponseCode:16, _Count:32, PropertiesBin/binary>>) ->
+ Props = parse_map(PropertiesBin, #{}),
+ {peer_properties, ResponseCode, Props};
+parse_response_body(?COMMAND_SASL_HANDSHAKE,
+ <<ResponseCode:16, _Count:32, MechanismsBin/binary>>) ->
+ Props = list_of_strings(MechanismsBin),
+ {sasl_handshake, ResponseCode, Props};
+parse_response_body(?COMMAND_SASL_AUTHENTICATE,
+ <<ResponseCode:16, ChallengeBin/binary>>) ->
+ Challenge =
+ case ChallengeBin of
+ <<?STRING(CSize, Chall)>> ->
+ Chall;
+ <<>> ->
+ <<>>
+ end,
+ {sasl_authenticate, ResponseCode, Challenge};
+parse_response_body(?COMMAND_ROUTE,
+ <<ResponseCode:16, ?STRING(StreamSize, Stream)>>) ->
+ {route, ResponseCode, Stream};
+parse_response_body(?COMMAND_PARTITIONS,
+ <<ResponseCode:16, _Count:32, PartitionsBin/binary>>) ->
+ Partitions = list_of_strings(PartitionsBin),
+ {partitions, ResponseCode, Partitions}.
+
+request(Corr, Cmd) ->
+ {request, Corr, Cmd}.
+
+parse_meta(<<>>, _Nodes, Acc) ->
+ Acc;
+parse_meta(<<?STRING(StreamSize, Stream),
+ Code:16,
+ LeaderIndex:16,
+ ReplicaCount:32,
+ ReplicaIndexBin:(ReplicaCount * 2)/binary,
+ Rem/binary>>,
+ Nodes, Acc) ->
+ StreamDetail =
+ case Code of
+ ?RESPONSE_CODE_OK ->
+ %% TODO: 65535 is the magic value for a leader that is not found;
+ %% everything else should crash
+ Leader = maps:get(LeaderIndex, Nodes, undefined),
+ Replicas = maps:with(list_of_shorts(ReplicaIndexBin), Nodes),
+ {Leader, maps:values(Replicas)};
+ ?RESPONSE_CODE_STREAM_DOES_NOT_EXIST ->
+ stream_not_found;
+ ?RESPONSE_CODE_STREAM_NOT_AVAILABLE ->
+ stream_not_available
+ end,
+ parse_meta(Rem, Nodes, Acc#{Stream => StreamDetail}).
+
+parse_nodes(Rem, 0, Acc) ->
+ {Acc, Rem};
+parse_nodes(<<Index:16,
+ ?STRING(HostSize, Host),
+ Port:32,
+ Rem/binary>>,
+ C, Acc) ->
+ parse_nodes(Rem, C - 1, Acc#{Index => {Host, Port}}).
+
+parse_map(<<>>, Acc) ->
+ Acc;
+parse_map(<<?STRING(KeySize, Key), ?STRING(ValSize, Value),
+ Rem/binary>>,
+ Acc) ->
+ parse_map(Rem, Acc#{Key => Value}).
+
+generate_map(Map) ->
+ maps:fold(fun(K, V, Acc) -> [<<?STRING(K), ?STRING(V)>> | Acc] end,
+ [], Map).
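+
+%% Illustration: maps travel on the wire as a 32-bit entry count (written
+%% and consumed by the callers of generate_map/1 and parse_map/2) followed
+%% by 16-bit-length-prefixed key and value binaries; e.g.
+%% #{<<"k">> => <<"v">>} encodes as <<1:32, 1:16, "k", 1:16, "v">>.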
+
+list_of_strings(<<>>) ->
+ [];
+list_of_strings(<<?STRING(Size, String), Rem/binary>>) ->
+ [String | list_of_strings(Rem)].
+
+list_of_longs(<<>>) ->
+ [];
+list_of_longs(<<I:64, Rem/binary>>) ->
+ [I | list_of_longs(Rem)].
+
+list_of_shorts(<<>>) ->
+ [];
+list_of_shorts(<<I:16, Rem/binary>>) ->
+ [I | list_of_shorts(Rem)].
+
+list_of_longcodes(<<>>) ->
+ [];
+list_of_longcodes(<<I:64, C:16, Rem/binary>>) ->
+ [{I, C} | list_of_longcodes(Rem)].
+
+command_id(declare_publisher) ->
+ ?COMMAND_DECLARE_PUBLISHER;
+command_id(publish) ->
+ ?COMMAND_PUBLISH;
+command_id(publish_confirm) ->
+ ?COMMAND_PUBLISH_CONFIRM;
+command_id(publish_error) ->
+ ?COMMAND_PUBLISH_ERROR;
+command_id(query_publisher_sequence) ->
+ ?COMMAND_QUERY_PUBLISHER_SEQUENCE;
+command_id(delete_publisher) ->
+ ?COMMAND_DELETE_PUBLISHER;
+command_id(subscribe) ->
+ ?COMMAND_SUBSCRIBE;
+command_id(deliver) ->
+ ?COMMAND_DELIVER;
+command_id(credit) ->
+ ?COMMAND_CREDIT;
+command_id(store_offset) ->
+ ?COMMAND_STORE_OFFSET;
+command_id(query_offset) ->
+ ?COMMAND_QUERY_OFFSET;
+command_id(unsubscribe) ->
+ ?COMMAND_UNSUBSCRIBE;
+command_id(create_stream) ->
+ ?COMMAND_CREATE_STREAM;
+command_id(delete_stream) ->
+ ?COMMAND_DELETE_STREAM;
+command_id(metadata) ->
+ ?COMMAND_METADATA;
+command_id(metadata_update) ->
+ ?COMMAND_METADATA_UPDATE;
+command_id(peer_properties) ->
+ ?COMMAND_PEER_PROPERTIES;
+command_id(sasl_handshake) ->
+ ?COMMAND_SASL_HANDSHAKE;
+command_id(sasl_authenticate) ->
+ ?COMMAND_SASL_AUTHENTICATE;
+command_id(tune) ->
+ ?COMMAND_TUNE;
+command_id(open) ->
+ ?COMMAND_OPEN;
+command_id(close) ->
+ ?COMMAND_CLOSE;
+command_id(heartbeat) ->
+ ?COMMAND_HEARTBEAT;
+command_id(route) ->
+ ?COMMAND_ROUTE;
+command_id(partitions) ->
+ ?COMMAND_PARTITIONS.
+
+parse_command_id(?COMMAND_DECLARE_PUBLISHER) ->
+ declare_publisher;
+parse_command_id(?COMMAND_PUBLISH) ->
+ publish;
+parse_command_id(?COMMAND_PUBLISH_CONFIRM) ->
+ publish_confirm;
+parse_command_id(?COMMAND_PUBLISH_ERROR) ->
+ publish_error;
+parse_command_id(?COMMAND_QUERY_PUBLISHER_SEQUENCE) ->
+ query_publisher_sequence;
+parse_command_id(?COMMAND_DELETE_PUBLISHER) ->
+ delete_publisher;
+parse_command_id(?COMMAND_SUBSCRIBE) ->
+ subscribe;
+parse_command_id(?COMMAND_DELIVER) ->
+ deliver;
+parse_command_id(?COMMAND_CREDIT) ->
+ credit;
+parse_command_id(?COMMAND_STORE_OFFSET) ->
+ store_offset;
+parse_command_id(?COMMAND_QUERY_OFFSET) ->
+ query_offset;
+parse_command_id(?COMMAND_UNSUBSCRIBE) ->
+ unsubscribe;
+parse_command_id(?COMMAND_CREATE_STREAM) ->
+ create_stream;
+parse_command_id(?COMMAND_DELETE_STREAM) ->
+ delete_stream;
+parse_command_id(?COMMAND_METADATA) ->
+ metadata;
+parse_command_id(?COMMAND_METADATA_UPDATE) ->
+ metadata_update;
+parse_command_id(?COMMAND_PEER_PROPERTIES) ->
+ peer_properties;
+parse_command_id(?COMMAND_SASL_HANDSHAKE) ->
+ sasl_handshake;
+parse_command_id(?COMMAND_SASL_AUTHENTICATE) ->
+ sasl_authenticate;
+parse_command_id(?COMMAND_TUNE) ->
+ tune;
+parse_command_id(?COMMAND_OPEN) ->
+ open;
+parse_command_id(?COMMAND_CLOSE) ->
+ close;
+parse_command_id(?COMMAND_HEARTBEAT) ->
+ heartbeat;
+parse_command_id(?COMMAND_ROUTE) ->
+ route;
+parse_command_id(?COMMAND_PARTITIONS) ->
+ partitions.
+
+element_index(Element, List) ->
+ element_index(Element, List, 0).
+
+element_index(Element, [Element | _], N) ->
+ N;
+element_index(Element, [_ | List], N) ->
+ element_index(Element, List, N + 1);
+element_index(_, _, _) ->
+ -1.
diff --git a/deps/rabbitmq_stream_common/test/rabbit_stream_core_SUITE.erl b/deps/rabbitmq_stream_common/test/rabbit_stream_core_SUITE.erl
new file mode 100644
index 0000000000..74b0cc1d60
--- /dev/null
+++ b/deps/rabbitmq_stream_common/test/rabbit_stream_core_SUITE.erl
@@ -0,0 +1,168 @@
+-module(rabbit_stream_core_SUITE).
+
+-compile(nowarn_export_all).
+-compile(export_all).
+
+%% Common Test callbacks
+% -include_lib("proper/include/proper.hrl").
+% -include_lib("common_test/include/ct.hrl").
+
+-include("rabbit_stream.hrl").
+
+%%%===================================================================
+%%% Common Test callbacks
+%%%===================================================================
+
+all() ->
+ [{group, tests}].
+
+suite() ->
+ [{timetrap, {seconds, 30}}].
+
+groups() ->
+ [{tests, [],
+ [roundtrip, roundtrip_metadata, roundtrip_metadata_no_leader]}].
+
+init_per_suite(Config) ->
+ Config.
+
+end_per_suite(_Config) ->
+ ok.
+
+group(_GroupName) ->
+ [].
+
+init_per_group(_GroupName, Config) ->
+ Config.
+
+end_per_group(_GroupName, _Config) ->
+ ok.
+
+init_per_testcase(_TestCase, Config) ->
+ Config.
+
+end_per_testcase(_TestCase, _Config) ->
+ ok.
+
+%%%===================================================================
+%%% Test cases
+%%%===================================================================
+
+roundtrip(_Config) ->
+ test_roundtrip({publish, 42, 1, <<"payload">>}),
+ test_roundtrip({publish_confirm, 42, [1, 2, 3]}),
+
+ test_roundtrip({publish_error,
+ 42,
+ ?RESPONSE_CODE_STREAM_DOES_NOT_EXIST,
+ [2, 3, 4]}),
+ test_roundtrip({deliver, 53, <<"chunk">>}),
+ test_roundtrip({credit, 53, 12}),
+ test_roundtrip({metadata_update, <<"stream1">>,
+ ?RESPONSE_VHOST_ACCESS_FAILURE}),
+ test_roundtrip({store_offset, <<"offset_ref">>, <<"stream">>, 12}),
+ test_roundtrip(heartbeat),
+ test_roundtrip({tune, 53, 12}),
+ %% REQUESTS
+ test_roundtrip({request, 99,
+ {declare_publisher,
+ 42,
+ <<"writer_ref">>,
+ <<"stream_name">>}}),
+ test_roundtrip({request, 99,
+ {query_publisher_sequence, <<"writer_ref">>,
+ <<"stream_name">>}}),
+ test_roundtrip({request, 99, {delete_publisher, 42}}),
+ [test_roundtrip({request, 99,
+ {subscribe, 53, <<"stream_name">>, Spec, 12, #{}}})
+ || Spec
+ <- [last,
+ next,
+ first,
+ 65432,
+ {timestamp, erlang:system_time(millisecond)}]],
+ test_roundtrip({request, 99,
+ {query_offset, <<"offset_ref">>, <<"stream">>}}),
+ test_roundtrip({request, 99, {unsubscribe, 53}}),
+ Args = #{<<"arg1">> => <<"arg1_value">>},
+ test_roundtrip({request, 99,
+ {create_stream, <<"stream_name">>, Args}}),
+ test_roundtrip({request, 99,
+ {create_stream, <<"stream_name">>, #{}}}),
+ test_roundtrip({request, 99, {delete_stream, <<"stream_name">>}}),
+ test_roundtrip({request, 99,
+ {metadata, [<<"stream1">>, <<"stream2">>]}}),
+ test_roundtrip({request, 99,
+ {peer_properties, #{<<"k1">> => <<"v1">>}}}),
+ test_roundtrip({request, 99, sasl_handshake}),
+ test_roundtrip({request, 99,
+ {sasl_authenticate, <<"mechanism">>, <<>>}}),
+ test_roundtrip({request, 99,
+ {sasl_authenticate, <<"mechanism">>, <<"challenge">>}}),
+ test_roundtrip({request, 99, {open, <<"vhost">>}}),
+ test_roundtrip({request, 99, {close, 99, <<"reason">>}}),
+ test_roundtrip({request, 99, {route, <<"rkey.*">>, <<"exchange">>}}),
+ test_roundtrip({request, 99, {partitions, <<"super stream">>}}),
+ %% RESPONSES
+ [test_roundtrip({response, 99, {Tag, 53}})
+ || Tag
+ <- [declare_publisher,
+ delete_publisher,
+ subscribe,
+ unsubscribe,
+ create_stream,
+ delete_stream,
+ open,
+ close]],
+
+ test_roundtrip({response, 99, {query_publisher_sequence, 98, 1234}}),
+ test_roundtrip({response, 99, {query_offset, 1, 12}}),
+
+ test_roundtrip({response, 99,
+ {peer_properties, 1, #{<<"k1">> => <<"v1">>}}}),
+
+ test_roundtrip({response, 99,
+ {sasl_handshake, 1, [<<"m1">>, <<"m2">>]}}),
+ test_roundtrip({response, 99,
+ {sasl_authenticate, 1, <<"challenge">>}}),
+ test_roundtrip({response, 0, {tune, 10000, 12345}}),
+ % %% NB: does not write correlation id
+ test_roundtrip({response, 0, {credit, 98, 200}}),
+ % %% TODO should route return a list of routed streams?
+ test_roundtrip({response, 99, {route, 1, <<"stream_name">>}}),
+ test_roundtrip({response, 99,
+ {partitions, 1, [<<"stream1">>, <<"stream2">>]}}),
+ ok.
+
+roundtrip_metadata(_Config) ->
+ Host1 = {<<"host1">>, 1234},
+ Host2 = {<<"host2">>, 1235},
+ Host3 = {<<"host3">>, 1236},
+ Endpoints = [Host1, Host2, Host3],
+ Metadata =
+ #{<<"stream1">> => {Host1, [Host2, Host3]},
+ <<"stream2">> => stream_not_found,
+ <<"stream3">> => stream_not_available},
+ test_roundtrip({response, 1, {metadata, Endpoints, Metadata}}),
+ ok.
+
+roundtrip_metadata_no_leader(_Config) ->
+ Host1 = {<<"host1">>, 1234},
+ Host2 = {<<"host2">>, 1235},
+ Host3 = {<<"host3">>, 1236},
+ Endpoints = [Host1, Host2, Host3],
+ Metadata =
+ #{<<"stream1">> => {undefined, [Host2, Host3]},
+ <<"stream2">> => stream_not_found,
+ <<"stream3">> => stream_not_available},
+ Cmd = {response, 1, {metadata, Endpoints, Metadata}},
+ test_roundtrip(Cmd),
+ ok.
+
+test_roundtrip(Cmd) ->
+ Init = rabbit_stream_core:init(undefined),
+ Frame = iolist_to_binary(rabbit_stream_core:frame(Cmd)),
+ {[Cmd], _} =
+ rabbit_stream_core:all_commands(
+ rabbit_stream_core:incoming_data(Frame, Init)),
+ ok.
diff --git a/deps/rabbitmq_stream_management/.gitignore b/deps/rabbitmq_stream_management/.gitignore
new file mode 100644
index 0000000000..2fa1c77e87
--- /dev/null
+++ b/deps/rabbitmq_stream_management/.gitignore
@@ -0,0 +1,60 @@
+.eunit
+*.o
+*.beam
+*.plt
+erl_crash.dump
+.concrete/DEV_MODE
+
+# rebar 2.x
+.rebar
+rel/example_project
+ebin/*.beam
+deps
+
+# rebar 3
+.rebar3
+_build/
+_checkouts/
+
+erl_crash.dump
+.sw?
+.*.sw?
+*.beam
+/.erlang.mk/
+/cover/
+/deps/
+/ebin/
+/escript/
+/escript.lock
+/logs/
+/plugins/
+/plugins.lock
+/sbin/
+/sbin.lock
+/xrefr
+elvis
+callgrind*
+ct.coverdata
+test/ct.cover.spec
+_build
+
+rabbitmq_stream.d
+*.plt
+*.d
+
+*.jar
+
+
+*~
+.sw?
+.*.sw?
+*.beam
+*.class
+*.dat
+*.dump
+*.iml
+*.ipr
+*.iws
+.DS_Store
+\#~
+/.idea/
diff --git a/deps/rabbitmq_stream_management/BUILD.bazel b/deps/rabbitmq_stream_management/BUILD.bazel
new file mode 100644
index 0000000000..e629fbd7a6
--- /dev/null
+++ b/deps/rabbitmq_stream_management/BUILD.bazel
@@ -0,0 +1,73 @@
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze")
+load("//:rabbitmq_home.bzl", "rabbitmq_home")
+load("//:rabbitmq_run.bzl", "rabbitmq_run")
+load(
+ "//:rabbitmq.bzl",
+ "RABBITMQ_DIALYZER_OPTS",
+ "assert_suites",
+ "rabbitmq_integration_suite",
+ "rabbitmq_lib",
+)
+
+APP_NAME = "rabbitmq_stream_management"
+
+APP_DESCRIPTION = "RabbitMQ Stream Management"
+
+APP_MODULE = "rabbit_stream_management"
+
+BUILD_DEPS = [
+ "//deps/amqp_client:bazel_erlang_lib",
+ "//deps/rabbit_common:bazel_erlang_lib",
+ "//deps/rabbitmq_management_agent:bazel_erlang_lib",
+]
+
+DEPS = [
+ "//deps/rabbitmq_management:bazel_erlang_lib",
+ "//deps/rabbitmq_stream:bazel_erlang_lib",
+ "//deps/rabbit:bazel_erlang_lib",
+]
+
+rabbitmq_lib(
+ app_description = APP_DESCRIPTION,
+ app_module = APP_MODULE,
+ app_name = APP_NAME,
+ build_deps = BUILD_DEPS,
+ deps = DEPS,
+)
+
+xref(tags = ["xref"])
+
+dialyze(
+ dialyzer_opts = RABBITMQ_DIALYZER_OPTS,
+ plt = "//:base_plt",
+ tags = ["dialyze"],
+)
+
+rabbitmq_home(
+ name = "broker-for-tests-home",
+ plugins = [
+ "//deps/rabbit:bazel_erlang_lib",
+ "//deps/rabbitmq_amqp1_0:bazel_erlang_lib",
+ ":bazel_erlang_lib",
+ ],
+)
+
+rabbitmq_run(
+ name = "rabbitmq-for-tests-run",
+ home = ":broker-for-tests-home",
+)
+
+PACKAGE = "deps/rabbitmq_stream_management"
+
+suites = [
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "http_SUITE",
+ ),
+]
+
+assert_suites(
+ suites,
+ glob(["test/**/*_SUITE.erl"]),
+)
diff --git a/deps/rabbitmq_stream_management/CODE_OF_CONDUCT.md b/deps/rabbitmq_stream_management/CODE_OF_CONDUCT.md
new file mode 100644
index 0000000000..1f6ef1c576
--- /dev/null
+++ b/deps/rabbitmq_stream_management/CODE_OF_CONDUCT.md
@@ -0,0 +1,44 @@
+# Contributor Code of Conduct
+
+As contributors and maintainers of this project, and in the interest of fostering an open
+and welcoming community, we pledge to respect all people who contribute through reporting
+issues, posting feature requests, updating documentation, submitting pull requests or
+patches, and other activities.
+
+We are committed to making participation in this project a harassment-free experience for
+everyone, regardless of level of experience, gender, gender identity and expression,
+sexual orientation, disability, personal appearance, body size, race, ethnicity, age,
+religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+ * The use of sexualized language or imagery
+ * Personal attacks
+ * Trolling or insulting/derogatory comments
+ * Public or private harassment
+ * Publishing other's private information, such as physical or electronic addresses,
+ without explicit permission
+ * Other unethical or unprofessional conduct
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments,
+commits, code, wiki edits, issues, and other contributions that are not aligned to this
+Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors
+that they deem inappropriate, threatening, offensive, or harmful.
+
+By adopting this Code of Conduct, project maintainers commit themselves to fairly and
+consistently applying these principles to every aspect of managing this project. Project
+maintainers who do not follow or enforce the Code of Conduct may be permanently removed
+from the project team.
+
+This Code of Conduct applies both within project spaces and in public spaces when an
+individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
+contacting a project maintainer at [info@rabbitmq.com](mailto:info@rabbitmq.com). All complaints will
+be reviewed and investigated and will result in a response that is deemed necessary and
+appropriate to the circumstances. Maintainers are obligated to maintain confidentiality
+with regard to the reporter of an incident.
+
+This Code of Conduct is adapted from the
+[Contributor Covenant](http://contributor-covenant.org), version 1.3.0, available at
+[contributor-covenant.org/version/1/3/0/](http://contributor-covenant.org/version/1/3/0/)
diff --git a/deps/rabbitmq_stream_management/CONTRIBUTING.md b/deps/rabbitmq_stream_management/CONTRIBUTING.md
new file mode 100644
index 0000000000..339d097deb
--- /dev/null
+++ b/deps/rabbitmq_stream_management/CONTRIBUTING.md
@@ -0,0 +1,38 @@
+## Overview
+
+RabbitMQ projects use pull requests to discuss, collaborate on, and accept code contributions.
+Pull requests are the primary place for discussing code changes.
+
+## How to Contribute
+
+The process is fairly standard:
+
+ * Fork the repository or repositories you plan on contributing to
+ * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella)
+ * `cd umbrella`, `make co`
+ * Create a branch with a descriptive name in the relevant repositories
+ * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
+ * Submit pull requests with an explanation of what has been changed and **why**
+ * Submit a filled out and signed [Contributor Agreement](https://cla.pivotal.io/) if needed (see below)
+ * Be patient. We will get to your pull request eventually
+
+If what you are going to work on is a substantial change, please first ask the core team
+for their opinion on the [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Code of Conduct
+
+See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
+
+
+## Contributor Agreement
+
+If you want to contribute a non-trivial change, please submit a signed copy of our
+[Contributor Agreement](https://cla.pivotal.io/) around the time
+you submit your pull request. This will make it much easier (in some cases, possible)
+for the RabbitMQ team at Pivotal to merge your contribution.
+
+
+## Where to Ask Questions
+
+If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
diff --git a/deps/rabbitmq_stream_management/LICENSE b/deps/rabbitmq_stream_management/LICENSE
new file mode 100644
index 0000000000..669a2bf450
--- /dev/null
+++ b/deps/rabbitmq_stream_management/LICENSE
@@ -0,0 +1,5 @@
+This package, the RabbitMQ server, is licensed under the MPL 2.0. For the
+MPL 2.0, please see LICENSE-MPL-RabbitMQ.
+
+If you have any questions regarding licensing, please contact us at
+info@rabbitmq.com. \ No newline at end of file
diff --git a/deps/rabbitmq_stream_management/LICENSE-MPL-RabbitMQ b/deps/rabbitmq_stream_management/LICENSE-MPL-RabbitMQ
new file mode 100644
index 0000000000..c4b20dbc66
--- /dev/null
+++ b/deps/rabbitmq_stream_management/LICENSE-MPL-RabbitMQ
@@ -0,0 +1,370 @@
+Mozilla Public License Version 2.0
+==================================
+
+1. Definitions
+--------------
+
+1.1. "Contributor"
+ means each individual or legal entity that creates, contributes to
+ the creation of, or owns Covered Software.
+
+1.2. "Contributor Version"
+ means the combination of the Contributions of others (if any) used
+ by a Contributor and that particular Contributor's Contribution.
+
+1.3. "Contribution"
+ means Covered Software of a particular Contributor.
+
+1.4. "Covered Software"
+ means Source Code Form to which the initial Contributor has attached
+ the notice in Exhibit A, the Executable Form of such Source Code
+ Form, and Modifications of such Source Code Form, in each case
+ including portions thereof.
+
+1.5. "Incompatible With Secondary Licenses"
+ means
+
+ (a) that the initial Contributor has attached the notice described
+ in Exhibit B to the Covered Software; or
+
+ (b) that the Covered Software was made available under the terms of
+ version 1.1 or earlier of the License, but not also under the
+ terms of a Secondary License.
+
+1.6. "Executable Form"
+ means any form of the work other than Source Code Form.
+
+1.7. "Larger Work"
+ means a work that combines Covered Software with other material, in
+ a separate file or files, that is not Covered Software.
+
+1.8. "License"
+ means this document.
+
+1.9. "Licensable"
+ means having the right to grant, to the maximum extent possible,
+ whether at the time of the initial grant or subsequently, any and
+ all of the rights conveyed by this License.
+
+1.10. "Modifications"
+ means any of the following:
+
+ (a) any file in Source Code Form that results from an addition to,
+ deletion from, or modification of the contents of Covered
+ Software; or
+
+ (b) any new file in Source Code Form that contains any Covered
+ Software.
+
+1.11. "Patent Claims" of a Contributor
+ means any patent claim(s), including without limitation, method,
+ process, and apparatus claims, in any patent Licensable by such
+ Contributor that would be infringed, but for the grant of the
+ License, by the making, using, selling, offering for sale, having
+ made, import, or transfer of either its Contributions or its
+ Contributor Version.
+
+1.12. "Secondary License"
+ means either the GNU General Public License, Version 2.0, the GNU
+ Lesser General Public License, Version 2.1, the GNU Affero General
+ Public License, Version 3.0, or any later versions of those
+ licenses.
+
+1.13. "Source Code Form"
+ means the form of the work preferred for making modifications.
+
+1.14. "You" (or "Your")
+ means an individual or a legal entity exercising rights under this
+ License. For legal entities, "You" includes any entity that
+ controls, is controlled by, or is under common control with You. For
+ purposes of this definition, "control" means (a) the power, direct
+ or indirect, to cause the direction or management of such entity,
+ whether by contract or otherwise, or (b) ownership of more than
+ fifty percent (50%) of the outstanding shares or beneficial
+ ownership of such entity.
+
+2. License Grants and Conditions
+--------------------------------
+
+2.1. Grants
+
+Each Contributor hereby grants You a world-wide, royalty-free,
+non-exclusive license:
+
+(a) under intellectual property rights (other than patent or trademark)
+ Licensable by such Contributor to use, reproduce, make available,
+ modify, display, perform, distribute, and otherwise exploit its
+ Contributions, either on an unmodified basis, with Modifications, or
+ as part of a Larger Work; and
+
+(b) under Patent Claims of such Contributor to make, use, sell, offer
+ for sale, have made, import, and otherwise transfer either its
+ Contributions or its Contributor Version.
+
+2.2. Effective Date
+
+The licenses granted in Section 2.1 with respect to any Contribution
+become effective for each Contribution on the date the Contributor first
+distributes such Contribution.
+
+2.3. Limitations on Grant Scope
+
+The licenses granted in this Section 2 are the only rights granted under
+this License. No additional rights or licenses will be implied from the
+distribution or licensing of Covered Software under this License.
+Notwithstanding Section 2.1(b) above, no patent license is granted by a
+Contributor:
+
+(a) for any code that a Contributor has removed from Covered Software;
+ or
+
+(b) for infringements caused by: (i) Your and any other third party's
+ modifications of Covered Software, or (ii) the combination of its
+ Contributions with other software (except as part of its Contributor
+ Version); or
+
+(c) under Patent Claims infringed by Covered Software in the absence of
+ its Contributions.
+
+This License does not grant any rights in the trademarks, service marks,
+or logos of any Contributor (except as may be necessary to comply with
+the notice requirements in Section 3.4).
+
+2.4. Subsequent Licenses
+
+No Contributor makes additional grants as a result of Your choice to
+distribute the Covered Software under a subsequent version of this
+License (see Section 10.2) or under the terms of a Secondary License (if
+permitted under the terms of Section 3.3).
+
+2.5. Representation
+
+Each Contributor represents that the Contributor believes its
+Contributions are its original creation(s) or it has sufficient rights
+to grant the rights to its Contributions conveyed by this License.
+
+2.6. Fair Use
+
+This License is not intended to limit any rights You have under
+applicable copyright doctrines of fair use, fair dealing, or other
+equivalents.
+
+2.7. Conditions
+
+Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
+in Section 2.1.
+
+3. Responsibilities
+-------------------
+
+3.1. Distribution of Source Form
+
+All distribution of Covered Software in Source Code Form, including any
+Modifications that You create or to which You contribute, must be under
+the terms of this License. You must inform recipients that the Source
+Code Form of the Covered Software is governed by the terms of this
+License, and how they can obtain a copy of this License. You may not
+attempt to alter or restrict the recipients' rights in the Source Code
+Form.
+
+3.2. Distribution of Executable Form
+
+If You distribute Covered Software in Executable Form then:
+
+(a) such Covered Software must also be made available in Source Code
+ Form, as described in Section 3.1, and You must inform recipients of
+ the Executable Form how they can obtain a copy of such Source Code
+ Form by reasonable means in a timely manner, at a charge no more
+ than the cost of distribution to the recipient; and
+
+(b) You may distribute such Executable Form under the terms of this
+ License, or sublicense it under different terms, provided that the
+ license for the Executable Form does not attempt to limit or alter
+ the recipients' rights in the Source Code Form under this License.
+
+3.3. Distribution of a Larger Work
+
+You may create and distribute a Larger Work under terms of Your choice,
+provided that You also comply with the requirements of this License for
+the Covered Software. If the Larger Work is a combination of Covered
+Software with a work governed by one or more Secondary Licenses, and the
+Covered Software is not Incompatible With Secondary Licenses, this
+License permits You to additionally distribute such Covered Software
+under the terms of such Secondary License(s), so that the recipient of
+the Larger Work may, at their option, further distribute the Covered
+Software under the terms of either this License or such Secondary
+License(s).
+
+3.4. Notices
+
+You may not remove or alter the substance of any license notices
+(including copyright notices, patent notices, disclaimers of warranty,
+or limitations of liability) contained within the Source Code Form of
+the Covered Software, except that You may alter any license notices to
+the extent required to remedy known factual inaccuracies.
+
+3.5. Application of Additional Terms
+
+You may choose to offer, and to charge a fee for, warranty, support,
+indemnity or liability obligations to one or more recipients of Covered
+Software. However, You may do so only on Your own behalf, and not on
+behalf of any Contributor. You must make it absolutely clear that any
+such warranty, support, indemnity, or liability obligation is offered by
+You alone, and You hereby agree to indemnify every Contributor for any
+liability incurred by such Contributor as a result of warranty, support,
+indemnity or liability terms You offer. You may include additional
+disclaimers of warranty and limitations of liability specific to any
+jurisdiction.
+
+4. Inability to Comply Due to Statute or Regulation
+---------------------------------------------------
+
+If it is impossible for You to comply with any of the terms of this
+License with respect to some or all of the Covered Software due to
+statute, judicial order, or regulation then You must: (a) comply with
+the terms of this License to the maximum extent possible; and (b)
+describe the limitations and the code they affect. Such description must
+be placed in a text file included with all distributions of the Covered
+Software under this License. Except to the extent prohibited by statute
+or regulation, such description must be sufficiently detailed for a
+recipient of ordinary skill to be able to understand it.
+
+5. Termination
+--------------
+
+5.1. The rights granted under this License will terminate automatically
+if You fail to comply with any of its terms. However, if You become
+compliant, then the rights granted under this License from a particular
+Contributor are reinstated (a) provisionally, unless and until such
+Contributor explicitly and finally terminates Your grants, and (b) on an
+ongoing basis, if such Contributor fails to notify You of the
+non-compliance by some reasonable means prior to 60 days after You have
+come back into compliance. Moreover, Your grants from a particular
+Contributor are reinstated on an ongoing basis if such Contributor
+notifies You of the non-compliance by some reasonable means, this is the
+first time You have received notice of non-compliance with this License
+from such Contributor, and You become compliant prior to 30 days after
+Your receipt of the notice.
+
+5.2. If You initiate litigation against any entity by asserting a patent
+infringement claim (excluding declaratory judgment actions,
+counter-claims, and cross-claims) alleging that a Contributor Version
+directly or indirectly infringes any patent, then the rights granted to
+You by any and all Contributors for the Covered Software under Section
+2.1 of this License shall terminate.
+
+5.3. In the event of termination under Sections 5.1 or 5.2 above, all
+end user license agreements (excluding distributors and resellers) which
+have been validly granted by You or Your distributors under this License
+prior to termination shall survive termination.
+
+************************************************************************
+* *
+* 6. Disclaimer of Warranty *
+* ------------------------- *
+* *
+* Covered Software is provided under this License on an "as is" *
+* basis, without warranty of any kind, either expressed, implied, or *
+* statutory, including, without limitation, warranties that the *
+* Covered Software is free of defects, merchantable, fit for a *
+* particular purpose or non-infringing. The entire risk as to the *
+* quality and performance of the Covered Software is with You. *
+* Should any Covered Software prove defective in any respect, You *
+* (not any Contributor) assume the cost of any necessary servicing, *
+* repair, or correction. This disclaimer of warranty constitutes an *
+* essential part of this License. No use of any Covered Software is *
+* authorized under this License except under this disclaimer. *
+* *
+************************************************************************
+
+************************************************************************
+* *
+* 7. Limitation of Liability *
+* -------------------------- *
+* *
+* Under no circumstances and under no legal theory, whether tort *
+* (including negligence), contract, or otherwise, shall any *
+* Contributor, or anyone who distributes Covered Software as *
+* permitted above, be liable to You for any direct, indirect, *
+* special, incidental, or consequential damages of any character *
+* including, without limitation, damages for lost profits, loss of *
+* goodwill, work stoppage, computer failure or malfunction, or any *
+* and all other commercial damages or losses, even if such party *
+* shall have been informed of the possibility of such damages. This *
+* limitation of liability shall not apply to liability for death or *
+* personal injury resulting from such party's negligence to the *
+* extent applicable law prohibits such limitation. Some *
+* jurisdictions do not allow the exclusion or limitation of *
+* incidental or consequential damages, so this exclusion and *
+* limitation may not apply to You. *
+* *
+************************************************************************
+
+8. Litigation
+-------------
+
+Any litigation relating to this License may be brought only in the
+courts of a jurisdiction where the defendant maintains its principal
+place of business and such litigation shall be governed by laws of that
+jurisdiction, without reference to its conflict-of-law provisions.
+Nothing in this Section shall prevent a party's ability to bring
+cross-claims or counter-claims.
+
+9. Miscellaneous
+----------------
+
+This License represents the complete agreement concerning the subject
+matter hereof. If any provision of this License is held to be
+unenforceable, such provision shall be reformed only to the extent
+necessary to make it enforceable. Any law or regulation which provides
+that the language of a contract shall be construed against the drafter
+shall not be used to construe this License against a Contributor.
+
+10. Versions of the License
+---------------------------
+
+10.1. New Versions
+
+Mozilla Foundation is the license steward. Except as provided in Section
+10.3, no one other than the license steward has the right to modify or
+publish new versions of this License. Each version will be given a
+distinguishing version number.
+
+10.2. Effect of New Versions
+
+You may distribute the Covered Software under the terms of the version
+of the License under which You originally received the Covered Software,
+or under the terms of any subsequent version published by the license
+steward.
+
+10.3. Modified Versions
+
+If you create software not governed by this License, and you want to
+create a new license for such software, you may create and use a
+modified version of this License if you rename the license and remove
+any references to the name of the license steward (except to note that
+such modified license differs from this License).
+
+10.4. Distributing Source Code Form that is Incompatible With Secondary
+Licenses
+
+If You choose to distribute Source Code Form that is Incompatible With
+Secondary Licenses under the terms of this version of the License, the
+notice described in Exhibit B of this License must be attached.
+
+Exhibit A - Source Code Form License Notice
+-------------------------------------------
+
+ This Source Code Form is subject to the terms of the Mozilla Public
+ License, v. 2.0. If a copy of the MPL was not distributed with this
+ file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+If it is not possible or desirable to put the notice in a particular
+file, then You may include the notice in a location (such as a LICENSE
+file in a relevant directory) where a recipient would be likely to look
+for such a notice.
+
+The Original Code is RabbitMQ.
+
+The Initial Developer of the Original Code is Pivotal Software, Inc.
+Copyright (c) 2020 VMware, Inc or its affiliates. All rights reserved. \ No newline at end of file
diff --git a/deps/rabbitmq_stream_management/Makefile b/deps/rabbitmq_stream_management/Makefile
new file mode 100644
index 0000000000..1d66057f23
--- /dev/null
+++ b/deps/rabbitmq_stream_management/Makefile
@@ -0,0 +1,24 @@
+PROJECT = rabbitmq_stream_management
+PROJECT_DESCRIPTION = RabbitMQ Stream Management
+PROJECT_MOD = rabbit_stream_management
+
+define PROJECT_ENV
+[
+]
+endef
+
+
+DEPS = rabbit rabbitmq_management rabbitmq_stream
+TEST_DEPS = rabbitmq_ct_helpers rabbitmq_ct_client_helpers
+
+DEP_EARLY_PLUGINS = rabbit_common/mk/rabbitmq-early-plugin.mk
+DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
+
+# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be
+# reviewed and merged.
+
+ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
+ERLANG_MK_COMMIT = rabbitmq-tmp
+
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
diff --git a/deps/rabbitmq_stream_management/README.adoc b/deps/rabbitmq_stream_management/README.adoc
new file mode 100644
index 0000000000..54c691dd16
--- /dev/null
+++ b/deps/rabbitmq_stream_management/README.adoc
@@ -0,0 +1,23 @@
+= RabbitMQ Stream Management Plugin
+
+== Project Maturity
+
+The project is in early stages of development and is considered experimental.
+It is not ready for production use.
+
+== Support
+
+* For questions: https://groups.google.com/forum/#!forum/rabbitmq-users[RabbitMQ Users]
+* For bugs and feature requests: https://github.com/rabbitmq/rabbitmq-server/issues[GitHub Issues]
+
+The project is currently under development, and there is no guarantee yet that it will be maintained and supported
+in the future (read: you are welcome to experiment with it and give feedback, but please do not base
+your whole business on it).
+
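+== Usage
+
+A minimal sketch, assuming the standard RabbitMQ CLI tools, the default management
+listener on `localhost:15672`, and default `guest` credentials; the `/api/stream/...`
+paths are assumed to be where the management plugin mounts this plugin's HTTP endpoints:
+
+[source,shell]
+----
+# the stream_queue feature flag must be enabled for the endpoints to be registered
+rabbitmq-plugins enable rabbitmq_stream_management
+
+# list stream connections over the HTTP API
+curl -u guest:guest http://localhost:15672/api/stream/connections
+----
+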
+== Licensing
+
+Released under the link:LICENSE-MPL-RabbitMQ[MPL 2.0].
+
+== Copyright
+
+(c) 2020-2021 VMware, Inc. or its affiliates. \ No newline at end of file
diff --git a/deps/rabbitmq_stream_management/priv/www/js/stream.js b/deps/rabbitmq_stream_management/priv/www/js/stream.js
new file mode 100644
index 0000000000..5dfbae9943
--- /dev/null
+++ b/deps/rabbitmq_stream_management/priv/www/js/stream.js
@@ -0,0 +1,83 @@
+dispatcher_add(function(sammy) {
+ sammy.get('#/stream/connections', function() {
+ renderStreamConnections();
+ });
+ sammy.get('#/stream/connections/:vhost/:name', function() {
+ var vhost = esc(this.params['vhost']);
+ var name = esc(this.params['name']);
+ render({'connection': {path: '/stream/connections/'+ vhost + '/' + name,
+ options: {ranges: ['data-rates-conn']}},
+ 'consumers': '/stream/connections/' + vhost + '/' + name + '/consumers',
+ 'publishers': '/stream/connections/' + vhost + '/' + name + '/publishers'},
+ 'streamConnection', '#/stream/connections');
+ });
+ // not exactly dispatcher stuff, but we have to make sure this is called before
+ // HTTP requests are made in case of refresh of the queue page
+ QUEUE_EXTRA_CONTENT_REQUESTS.push(function(vhost, queue) {
+ return {'extra_stream_publishers' : '/stream/publishers/' + esc(vhost) + '/' + esc(queue)};
+ });
+ QUEUE_EXTRA_CONTENT.push(function(queue, extraContent) {
+ if (is_stream(queue)) {
+ var publishers = extraContent['extra_stream_publishers'];
+ if (publishers !== undefined) {
+ return '<div class="section-hidden"><h2 class="updatable">Stream publishers (' + Object.keys(publishers).length +')</h2><div class="hider updatable">' +
+ format('streamPublishersList', {'publishers': publishers, 'mode': 'queue'}) +
+ '</div></div>';
+ } else {
+ return '';
+ }
+ } else {
+ return '';
+ }
+ });
+});
+
+NAVIGATION['Stream'] = ['#/stream/connections', "monitoring"];
+
+var ALL_STREAM_CONNECTION_COLUMNS =
+ {'Overview': [['user', 'User name', true],
+ ['state', 'State', true]],
+ 'Details': [['ssl', 'TLS', true],
+ ['ssl_info', 'TLS details', false],
+ ['protocol', 'Protocol', true],
+ ['frame_max', 'Frame max', false],
+ ['auth_mechanism', 'Auth mechanism', false],
+ ['client', 'Client', false]],
+ 'Network': [['from_client', 'From client', true],
+ ['to_client', 'To client', true],
+ ['heartbeat', 'Heartbeat', false],
+ ['connected_at', 'Connected at', false]]};
+
+var DISABLED_STATS_STREAM_CONNECTION_COLUMNS =
+ {'Overview': [['user', 'User name', true],
+ ['state', 'State', true]]};
+
+COLUMNS['streamConnections'] = disable_stats ? DISABLED_STATS_STREAM_CONNECTION_COLUMNS : ALL_STREAM_CONNECTION_COLUMNS;
+
+function renderStreamConnections() {
+ render({'connections': {path: url_pagination_template_context('stream/connections', 'streamConnections', 1, 100),
+ options: {sort:true}}},
+ 'streamConnections', '#/stream/connections');
+}
+
+function link_stream_conn(vhost, name) {
+ return _link_to(short_conn(name), '#/stream/connections/' + esc(vhost) + '/' + esc(name));
+}
+
+RENDER_CALLBACKS['streamConnections'] = function() { renderStreamConnections() };
+
+CONSUMER_OWNER_FORMATTERS.push({
+ order: 0, formatter: function(consumer) {
+ if (consumer.consumer_tag.startsWith('stream.subid-')) {
+ return link_stream_conn(
+ consumer.queue.vhost,
+ consumer.channel_details.connection_name
+ );
+ } else {
+ return undefined;
+ }
+ }
+});
+
+CONSUMER_OWNER_FORMATTERS.sort(CONSUMER_OWNER_FORMATTERS_COMPARATOR);
+
diff --git a/deps/rabbitmq_stream_management/priv/www/js/tmpl/streamConnection.ejs b/deps/rabbitmq_stream_management/priv/www/js/tmpl/streamConnection.ejs
new file mode 100644
index 0000000000..571293bf48
--- /dev/null
+++ b/deps/rabbitmq_stream_management/priv/www/js/tmpl/streamConnection.ejs
@@ -0,0 +1,197 @@
+<h1>Connection <%= fmt_string(connection.name) %> <%= fmt_maybe_vhost(connection.vhost) %></h1>
+
+<% if (!disable_stats) { %>
+<div class="section">
+<h2>Overview</h2>
+<div class="hider updatable">
+ <%= data_rates('data-rates-conn', connection, 'Data rates') %>
+
+<h3>Details</h3>
+<table class="facts facts-l">
+<% if (nodes_interesting) { %>
+<tr>
+ <th>Node</th>
+ <td><%= fmt_node(connection.node) %></td>
+</tr>
+<% } %>
+
+<% if (connection.client_properties.connection_name) { %>
+<tr>
+ <th>Client-provided name</th>
+ <td><%= fmt_string(connection.client_properties.connection_name) %></td>
+</tr>
+<% } %>
+
+<tr>
+ <th>Username</th>
+ <td><%= fmt_string(connection.user) %></td>
+</tr>
+<tr>
+ <th>Protocol</th>
+ <td><%= connection.protocol %></td>
+</tr>
+<tr>
+ <th>Connected at</th>
+ <td><%= fmt_timestamp(connection.connected_at) %></td>
+</tr>
+
+<% if (connection.ssl) { %>
+<tr>
+ <th>SSL</th>
+ <td><%= fmt_boolean(connection.ssl) %></td>
+</tr>
+<% } %>
+
+<% if (connection.auth_mechanism) { %>
+<tr>
+ <th>Authentication</th>
+ <td><%= connection.auth_mechanism %></td>
+</tr>
+<% } %>
+</table>
+
+<% if (connection.state) { %>
+<table class="facts">
+<tr>
+ <th>State</th>
+ <td><%= fmt_object_state(connection) %></td>
+</tr>
+<tr>
+ <th>Heartbeat</th>
+ <td><%= fmt_time(connection.timeout, 's') %></td>
+</tr>
+<tr>
+ <th>Frame max</th>
+ <td><%= connection.frame_max %> bytes</td>
+</tr>
+</table>
+
+<% } %>
+
+</div>
+</div>
+
+<% if (connection.ssl) { %>
+<div class="section">
+<h2>SSL</h2>
+<div class="hider">
+
+<table class="facts">
+ <tr>
+ <th>Protocol Version</th>
+ <td><%= connection.ssl_protocol %></td>
+ </tr>
+ <tr>
+ <th>Key Exchange Algorithm</th>
+ <td><%= connection.ssl_key_exchange %></td>
+ </tr>
+ <tr>
+ <th>Cipher Algorithm</th>
+ <td><%= connection.ssl_cipher %></td>
+ </tr>
+ <tr>
+ <th>Hash Algorithm</th>
+ <td><%= connection.ssl_hash %></td>
+ </tr>
+</table>
+
+<% if (connection.peer_cert_issuer != '') { %>
+<table class="facts">
+ <tr>
+ <th>Peer Certificate Issuer</th>
+ <td><%= connection.peer_cert_issuer %></td>
+ </tr>
+ <tr>
+ <th>Peer Certificate Subject</th>
+ <td><%= connection.peer_cert_subject %></td>
+ </tr>
+ <tr>
+ <th>Peer Certificate Validity</th>
+ <td><%= connection.peer_cert_validity %></td>
+ </tr>
+</table>
+<% } %>
+</div>
+</div>
+<% } %>
+
+<div class="section">
+ <h2 class="updatable">Publishers (<%=(publishers.length)%>) </h2>
+ <div class="hider updatable">
+ <%= format('streamPublishersList', {'publishers': publishers, 'mode' : 'connection'}) %>
+ </div>
+</div>
+
+<div class="section">
+ <h2 class="updatable" >Consumers (<%=(consumers.length)%>)</h2>
+ <div class="hider updatable">
+ <%= format('streamConsumersList', {'consumers': consumers}) %>
+ </div>
+</div>
+
+<% if (properties_size(connection.client_properties) > 0) { %>
+<div class="section-hidden">
+<h2>Client properties</h2>
+<div class="hider">
+<%= fmt_table_long(connection.client_properties) %>
+</div>
+</div>
+<% } %>
+
+<% if(connection.reductions || connection.garbage_collection) { %>
+<div class="section-hidden">
+<h2>Runtime Metrics (Advanced)</h2>
+ <div class="hider updatable">
+ <%= data_reductions('reductions-rates-conn', connection) %>
+ <table class="facts">
+ <% if (connection.garbage_collection.min_bin_vheap_size) { %>
+ <tr>
+ <th>Minimum binary virtual heap size in words (min_bin_vheap_size)</th>
+ <td><%= connection.garbage_collection.min_bin_vheap_size %></td>
+ </tr>
+ <% } %>
+
+ <% if (connection.garbage_collection.min_heap_size) { %>
+ <tr>
+ <th>Minimum heap size in words (min_heap_size)</th>
+ <td><%= connection.garbage_collection.min_heap_size %></td>
+ </tr>
+ <% } %>
+
+ <% if (connection.garbage_collection.fullsweep_after) { %>
+ <tr>
+ <th>Maximum generational collections before fullsweep (fullsweep_after)</th>
+ <td><%= connection.garbage_collection.fullsweep_after %></td>
+ </tr>
+ <% } %>
+
+ <% if (connection.garbage_collection.minor_gcs) { %>
+ <tr>
+ <th>Number of minor GCs (minor_gcs)</th>
+ <td><%= connection.garbage_collection.minor_gcs %></td>
+ </tr>
+ <% } %>
+ </table>
+ </div>
+</div>
+
+<% } %>
+<% } %>
+
+<div class="section-hidden">
+ <h2>Close this connection</h2>
+ <div class="hider">
+ <form action="#/connections" method="delete" class="confirm">
+ <input type="hidden" name="name" value="<%= fmt_string(connection.name) %>"/>
+ <table class="form">
+ <tr>
+ <th><label>Reason:</label></th>
+ <td>
+ <input type="text" name="reason" value="Closed via management plugin" class="wide"/>
+ </td>
+ </tr>
+ </table>
+ <input type="submit" value="Force Close"/>
+ </form>
+ </div>
+</div>
diff --git a/deps/rabbitmq_stream_management/priv/www/js/tmpl/streamConnections.ejs b/deps/rabbitmq_stream_management/priv/www/js/tmpl/streamConnections.ejs
new file mode 100644
index 0000000000..43a2665a1b
--- /dev/null
+++ b/deps/rabbitmq_stream_management/priv/www/js/tmpl/streamConnections.ejs
@@ -0,0 +1,142 @@
+<div class="section">
+ <%= paginate_ui(connections, 'streamConnections', 'stream connections') %>
+</div>
+<div class="updatable">
+<% if (connections.items.length > 0) { %>
+<table class="list">
+ <thead>
+ <tr>
+ <%= group_heading('streamConnections', 'Overview', [vhosts_interesting, nodes_interesting, true]) %>
+ <% if (!disable_stats) { %>
+ <%= group_heading('streamConnections', 'Details', []) %>
+ <%= group_heading('streamConnections', 'Network', []) %>
+ <% } %>
+ <th class="plus-minus"><span class="popup-options-link" title="Click to change columns" type="columns" for="streamConnections">+/-</span></th>
+ </tr>
+ <tr>
+<% if (vhosts_interesting) { %>
+ <th><%= fmt_sort('Virtual host', 'vhost') %></th>
+<% } %>
+<% if(disable_stats) { %>
+ <th><%= fmt_sort('Name', 'name') %></th>
+<% } else { %>
+ <th><%= fmt_sort('Name', 'client_properties.connection_name') %></th>
+<% } %>
+<% if (nodes_interesting) { %>
+ <th><%= fmt_sort('Node', 'node') %></th>
+<% } %>
+<% if (show_column('streamConnections', 'user')) { %>
+ <th><%= fmt_sort('User name', 'user') %></th>
+<% } %>
+<% if (!disable_stats) { %>
+<% if (show_column('streamConnections', 'state')) { %>
+ <th><%= fmt_sort('State', 'state') %></th>
+<% } %>
+<% if (show_column('streamConnections', 'ssl')) { %>
+ <th><%= fmt_sort('SSL / TLS', 'ssl') %></th>
+<% } %>
+<% if (show_column('streamConnections', 'ssl_info')) { %>
+ <th>SSL Details</th>
+<% } %>
+<% if (show_column('streamConnections', 'protocol')) { %>
+ <th><%= fmt_sort('Protocol', 'protocol') %></th>
+<% } %>
+<% if (show_column('streamConnections', 'frame_max')) { %>
+ <th><%= fmt_sort('Frame max', 'frame_max') %></th>
+<% } %>
+<% if (show_column('streamConnections', 'auth_mechanism')) { %>
+ <th><%= fmt_sort('Auth mechanism', 'auth_mechanism') %></th>
+<% } %>
+<% if (show_column('streamConnections', 'client')) { %>
+ <th><%= fmt_sort('Client', 'properties') %></th>
+<% } %>
+<% if (show_column('streamConnections', 'from_client')) { %>
+ <th><%= fmt_sort('From client', 'recv_oct_details.rate') %></th>
+<% } %>
+<% if (show_column('streamConnections', 'to_client')) { %>
+ <th><%= fmt_sort('To client', 'send_oct_details.rate') %></th>
+<% } %>
+<% if (show_column('streamConnections', 'heartbeat')) { %>
+ <th><%= fmt_sort('Heartbeat', 'timeout') %></th>
+<% } %>
+<% if (show_column('streamConnections', 'connected_at')) { %>
+ <th><%= fmt_sort('Connected at', 'connected_at') %></th>
+<% } %>
+<% } %>
+ </tr>
+ </thead>
+ <tbody>
+<%
+ for (var i = 0; i < connections.items.length; i++) {
+ var connection = connections.items[i];
+%>
+ <tr<%= alt_rows(i)%>>
+<% if (vhosts_interesting) { %>
+ <td><%= fmt_string(connection.vhost) %></td>
+<% } %>
+<% if(connection.client_properties) { %>
+ <td>
+ <%= link_stream_conn(connection.vhost, connection.name) %>
+ <sub><%= fmt_string(short_conn(connection.client_properties.connection_name)) %></sub>
+ </td>
+<% } else { %>
+ <td><%= link_stream_conn(connection.vhost, connection.name) %></td>
+<% } %>
+<% if (nodes_interesting) { %>
+ <td><%= fmt_node(connection.node) %></td>
+<% } %>
+<% if (show_column('streamConnections', 'user')) { %>
+ <td class="c"><%= fmt_string(connection.user) %></td>
+<% } %>
+<% if (!disable_stats) { %>
+<% if (show_column('streamConnections', 'state')) { %>
+ <td><%= fmt_object_state(connection) %></td>
+<% } %>
+<% if (show_column('streamConnections', 'ssl')) { %>
+ <td class="c"><%= fmt_boolean(connection.ssl, '') %></td>
+<% } %>
+<% if (show_column('streamConnections', 'ssl_info')) { %>
+ <td>
+ <% if (connection.ssl) { %>
+ <%= connection.ssl_protocol %>
+ <sub>
+ <%= connection.ssl_key_exchange %>
+ <%= connection.ssl_cipher %>
+ <%= connection.ssl_hash %>
+ </sub>
+ <% } %>
+ </td>
+<% } %>
+<% if (show_column('streamConnections', 'protocol')) { %>
+ <td class="c"><%= connection.protocol %></td>
+<% } %>
+<% if (show_column('streamConnections', 'frame_max')) { %>
+ <td class="r"><%= fmt_string(connection.frame_max, '') %></td>
+<% } %>
+<% if (show_column('streamConnections', 'auth_mechanism')) { %>
+ <td class="c"><%= fmt_string(connection.auth_mechanism, '') %></td>
+<% } %>
+<% if (show_column('streamConnections', 'client')) { %>
+ <td><%= fmt_client_name(connection.client_properties) %></td>
+<% } %>
+<% if (show_column('streamConnections', 'from_client')) { %>
+ <td><%= fmt_detail_rate_bytes(connection, 'recv_oct') %></td>
+<% } %>
+<% if (show_column('streamConnections', 'to_client')) { %>
+ <td><%= fmt_detail_rate_bytes(connection, 'send_oct') %></td>
+<% } %>
+<% if (show_column('streamConnections', 'heartbeat')) { %>
+ <td class="r"><%= fmt_time(connection.timeout, 's') %></td>
+<% } %>
+<% if (show_column('streamConnections', 'connected_at')) { %>
+ <td><%= fmt_timestamp_mini(connection.connected_at) %></td>
+<% } %>
+ <% } %>
+ </tr>
+ <% } %>
+ </tbody>
+</table>
+<% } else { %>
+ <p>... no connections ...</p>
+<% } %>
+</div> \ No newline at end of file
diff --git a/deps/rabbitmq_stream_management/priv/www/js/tmpl/streamConsumersList.ejs b/deps/rabbitmq_stream_management/priv/www/js/tmpl/streamConsumersList.ejs
new file mode 100644
index 0000000000..a91c0a0f0d
--- /dev/null
+++ b/deps/rabbitmq_stream_management/priv/www/js/tmpl/streamConsumersList.ejs
@@ -0,0 +1,29 @@
+<% if (consumers.length > 0) { %>
+ <table class="list">
+ <thead>
+ <tr>
+ <th>Subscription ID</th>
+ <th>Stream</th>
+ <th>Messages Consumed</th>
+ <th>Offset</th>
+ <th>Offset Lag</th>
+ <th>Credits</th>
+ </tr>
+ </thead>
+<%
+ for (var i = 0; i < consumers.length; i++) {
+ var consumer = consumers[i];
+%>
+ <tr<%= alt_rows(i) %>>
+ <td><%= consumer.subscription_id %></td>
+ <td><%= link_queue(consumer.queue.vhost, consumer.queue.name) %></td>
+ <td class="c"><%= consumer.consumed %></td>
+ <td class="c"><%= consumer.offset %></td>
+ <td class="c"><%= consumer.offset_lag %></td>
+ <td class="c"><%= consumer.credits %></td>
+ </tr>
+<% } %>
+ </table>
+<% } else { %>
+ <p>... no consumers ...</p>
+<% } %>
diff --git a/deps/rabbitmq_stream_management/priv/www/js/tmpl/streamPublishersList.ejs b/deps/rabbitmq_stream_management/priv/www/js/tmpl/streamPublishersList.ejs
new file mode 100644
index 0000000000..903889175a
--- /dev/null
+++ b/deps/rabbitmq_stream_management/priv/www/js/tmpl/streamPublishersList.ejs
@@ -0,0 +1,41 @@
+<% if (publishers.length > 0) { %>
+ <table class="list">
+ <thead>
+ <tr>
+<% if (mode == 'queue') { %>
+ <th>Connection</th>
+ <th>ID</th>
+ <th>Reference</th>
+<% } else { %>
+ <th>ID</th>
+ <th>Reference</th>
+ <th>Queue</th>
+<% } %>
+ <th>Messages Published</th>
+ <th>Messages Confirmed</th>
+ <th>Messages Errored</th>
+ </tr>
+ </thead>
+<%
+ for (var i = 0; i < publishers.length; i++) {
+ var publisher = publishers[i];
+%>
+ <tr<%= alt_rows(i) %>>
+<% if (mode == 'queue') { %>
+ <td><%= link_stream_conn(publisher.queue.vhost, publisher.connection_details.name) %></td>
+ <td><%= publisher.publisher_id %></td>
+ <td class="c"><%= fmt_string(publisher.reference) %></td>
+<% } else { %>
+ <td><%= publisher.publisher_id %></td>
+ <td class="c"><%= fmt_string(publisher.reference) %></td>
+ <td><%= link_queue(publisher.queue.vhost, publisher.queue.name) %></td>
+<% } %>
+ <td class="c"><%= publisher.published %></td>
+ <td class="c"><%= publisher.confirmed %></td>
+ <td class="c"><%= publisher.errored %></td>
+ </tr>
+<% } %>
+ </table>
+<% } else { %>
+ <p>... no publishers ...</p>
+<% } %>
diff --git a/deps/rabbitmq_stream_management/rebar.config b/deps/rabbitmq_stream_management/rebar.config
new file mode 100644
index 0000000000..17a6ccdc63
--- /dev/null
+++ b/deps/rabbitmq_stream_management/rebar.config
@@ -0,0 +1,12 @@
+{plugins, [rebar3_format]}.
+
+{format, [
+ {files, ["src/*.erl", "test/*.erl"]},
+ {formatter, default_formatter},
+ {options, #{
+ paper => 80,
+ ribbon => 70,
+ inline_attributes => {when_under, 1},
+ inline_items => {when_under, 4}
+ }}
+]}. \ No newline at end of file
diff --git a/deps/rabbitmq_stream_management/src/rabbit_stream_connection_consumers_mgmt.erl b/deps/rabbitmq_stream_management/src/rabbit_stream_connection_consumers_mgmt.erl
new file mode 100644
index 0000000000..f7627ed131
--- /dev/null
+++ b/deps/rabbitmq_stream_management/src/rabbit_stream_connection_consumers_mgmt.erl
@@ -0,0 +1,72 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(rabbit_stream_connection_consumers_mgmt).
+
+-behaviour(rabbit_mgmt_extension).
+
+-export([dispatcher/0,
+ web_ui/0]).
+-export([init/2,
+ to_json/2,
+ content_types_provided/2,
+ is_authorized/2]).
+-export([resource_exists/2]).
+-export([variances/2]).
+
+-include_lib("rabbitmq_management_agent/include/rabbit_mgmt_records.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
+
+dispatcher() ->
+ case rabbit_stream_management_utils:is_feature_flag_enabled() of
+ true -> [{"/stream/connections/:vhost/:connection/consumers", ?MODULE, []}];
+ false -> []
+ end.
+
+
+web_ui() ->
+ [].
+
+%%--------------------------------------------------------------------
+init(Req, _State) ->
+ {cowboy_rest,
+ rabbit_mgmt_headers:set_common_permission_headers(Req, ?MODULE),
+ #context{}}.
+
+variances(Req, Context) ->
+ {[<<"accept-encoding">>, <<"origin">>], Req, Context}.
+
+content_types_provided(ReqData, Context) ->
+ {rabbit_mgmt_util:responder_map(to_json), ReqData, Context}.
+
+resource_exists(ReqData, Context) ->
+ case rabbit_mgmt_wm_connection:conn(ReqData) of
+ error ->
+ {false, ReqData, Context};
+ not_found ->
+ {false, ReqData, Context};
+ _Conn ->
+ {true, ReqData, Context}
+ end.
+
+to_json(ReqData, Context) ->
+ Pid = proplists:get_value(pid,
+ rabbit_mgmt_wm_connection:conn(ReqData)),
+ Consumers =
+ rabbit_mgmt_format:strip_pids(
+ rabbit_stream_mgmt_db:get_connection_consumers(Pid)),
+ rabbit_mgmt_util:reply_list(Consumers, ReqData, Context).
+
+is_authorized(ReqData, Context) ->
+ try
+ rabbit_mgmt_util:is_authorized_user(ReqData, Context,
+ rabbit_mgmt_wm_connection:conn(ReqData))
+ catch
+ {error, invalid_range_parameters, Reason} ->
+ rabbit_mgmt_util:bad_request(iolist_to_binary(Reason), ReqData,
+ Context)
+ end.
diff --git a/deps/rabbitmq_stream_management/src/rabbit_stream_connection_mgmt.erl b/deps/rabbitmq_stream_management/src/rabbit_stream_connection_mgmt.erl
new file mode 100644
index 0000000000..47085d52f0
--- /dev/null
+++ b/deps/rabbitmq_stream_management/src/rabbit_stream_connection_mgmt.erl
@@ -0,0 +1,159 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(rabbit_stream_connection_mgmt).
+
+-behaviour(rabbit_mgmt_extension).
+
+-export([dispatcher/0,
+ web_ui/0]).
+-export([init/2,
+ resource_exists/2,
+ to_json/2,
+ content_types_provided/2,
+ is_authorized/2,
+ allowed_methods/2,
+ delete_resource/2]).
+-export([variances/2]).
+
+-include_lib("rabbitmq_management_agent/include/rabbit_mgmt_records.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
+
+dispatcher() ->
+ case rabbit_stream_management_utils:is_feature_flag_enabled() of
+ true -> [{"/stream/connections/:vhost/:connection", ?MODULE, []}];
+ false -> []
+ end.
+
+
+web_ui() ->
+ [].
+
+%%--------------------------------------------------------------------
+
+init(Req, _State) ->
+ {cowboy_rest,
+ rabbit_mgmt_headers:set_common_permission_headers(Req, ?MODULE),
+ #context{}}.
+
+variances(Req, Context) ->
+ {[<<"accept-encoding">>, <<"origin">>], Req, Context}.
+
+content_types_provided(ReqData, Context) ->
+ {rabbit_mgmt_util:responder_map(to_json), ReqData, Context}.
+
+allowed_methods(ReqData, Context) ->
+ {[<<"HEAD">>, <<"GET">>, <<"DELETE">>, <<"OPTIONS">>], ReqData,
+ Context}.
+
+resource_exists(ReqData, Context) ->
+ case conn(ReqData) of
+ not_found ->
+ {false, ReqData, Context};
+ _Conn ->
+ {true, ReqData, Context}
+ end.
+
+to_json(ReqData, Context) ->
+ case rabbit_mgmt_util:disable_stats(ReqData) of
+ false ->
+ rabbit_mgmt_util:reply(
+ maps:from_list(
+ rabbit_mgmt_format:strip_pids(conn_stats(ReqData))),
+ ReqData, Context);
+ true ->
+ rabbit_mgmt_util:reply([{name,
+ rabbit_mgmt_util:id(connection, ReqData)}],
+ ReqData, Context)
+ end.
+
+delete_resource(ReqData, Context) ->
+ case conn(ReqData) of
+ not_found ->
+ ok;
+ Conn ->
+ case proplists:get_value(pid, Conn) of
+ undefined ->
+ ok;
+ Pid when is_pid(Pid) ->
+ force_close_connection(ReqData, Pid)
+ end
+ end,
+ {true, ReqData, Context}.
+
+is_authorized(ReqData, Context) ->
+ try
+ rabbit_mgmt_util:is_authorized_user(ReqData, Context, conn(ReqData))
+ catch
+ {error, invalid_range_parameters, Reason} ->
+ rabbit_mgmt_util:bad_request(iolist_to_binary(Reason), ReqData,
+ Context)
+ end.
+
+%%--------------------------------------------------------------------
+
+conn(ReqData) ->
+ case rabbit_mgmt_util:disable_stats(ReqData) of
+ false ->
+ conn_stats(ReqData);
+ true ->
+ VHost = rabbit_mgmt_util:id(vhost, ReqData),
+ case rabbit_connection_tracking:lookup(
+ rabbit_mgmt_util:id(connection, ReqData))
+ of
+ #tracked_connection{name = Name,
+ pid = Pid,
+ username = Username,
+ type = Type,
+ protocol = <<"stream">>,
+ vhost = VHost} ->
+ [{name, Name}, {pid, Pid}, {user, Username}, {type, Type}];
+ #tracked_connection{} ->
+ not_found;
+ not_found ->
+ not_found
+ end
+ end.
+
+conn_stats(ReqData) ->
+ case rabbit_mgmt_db:get_connection(
+ rabbit_mgmt_util:id(connection, ReqData),
+ rabbit_mgmt_util:range_ceil(ReqData))
+ of
+ not_found ->
+ not_found;
+ Connection ->
+ VHost = rabbit_mgmt_util:id(vhost, ReqData),
+ case
+ {rabbit_stream_management_utils:is_stream_connection(Connection),
+ same_vhost(VHost, Connection)}
+ of
+ {true, true} ->
+ Connection;
+ _ ->
+ not_found
+ end
+ end.
+
+same_vhost(Vhost, Connection) ->
+ case lists:keyfind(vhost, 1, Connection) of
+ {vhost, Vhost} ->
+ true;
+ _ ->
+ false
+ end.
+
+force_close_connection(ReqData, Pid) ->
+ Reason =
+ case cowboy_req:header(<<"x-reason">>, ReqData) of
+ undefined ->
+ "Closed via management plugin";
+ V ->
+ binary_to_list(V)
+ end,
+ gen_server:call(Pid, {shutdown, Reason}, infinity),
+ ok.
diff --git a/deps/rabbitmq_stream_management/src/rabbit_stream_connection_publishers_mgmt.erl b/deps/rabbitmq_stream_management/src/rabbit_stream_connection_publishers_mgmt.erl
new file mode 100644
index 0000000000..82690468eb
--- /dev/null
+++ b/deps/rabbitmq_stream_management/src/rabbit_stream_connection_publishers_mgmt.erl
@@ -0,0 +1,72 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(rabbit_stream_connection_publishers_mgmt).
+
+-behaviour(rabbit_mgmt_extension).
+
+-export([dispatcher/0,
+ web_ui/0]).
+-export([init/2,
+ to_json/2,
+ content_types_provided/2,
+ is_authorized/2]).
+-export([resource_exists/2]).
+-export([variances/2]).
+
+-include_lib("rabbitmq_management_agent/include/rabbit_mgmt_records.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
+
+dispatcher() ->
+ case rabbit_stream_management_utils:is_feature_flag_enabled() of
+ true -> [{"/stream/connections/:vhost/:connection/publishers", ?MODULE, []}];
+ false -> []
+ end.
+
+
+web_ui() ->
+ [].
+
+%%--------------------------------------------------------------------
+init(Req, _State) ->
+ {cowboy_rest,
+ rabbit_mgmt_headers:set_common_permission_headers(Req, ?MODULE),
+ #context{}}.
+
+variances(Req, Context) ->
+ {[<<"accept-encoding">>, <<"origin">>], Req, Context}.
+
+content_types_provided(ReqData, Context) ->
+ {rabbit_mgmt_util:responder_map(to_json), ReqData, Context}.
+
+resource_exists(ReqData, Context) ->
+ case rabbit_mgmt_wm_connection:conn(ReqData) of
+ error ->
+ {false, ReqData, Context};
+ not_found ->
+ {false, ReqData, Context};
+ _Conn ->
+ {true, ReqData, Context}
+ end.
+
+to_json(ReqData, Context) ->
+ Pid = proplists:get_value(pid,
+ rabbit_mgmt_wm_connection:conn(ReqData)),
+ Publishers =
+ rabbit_mgmt_format:strip_pids(
+ rabbit_stream_mgmt_db:get_connection_publishers(Pid)),
+ rabbit_mgmt_util:reply_list(Publishers, ReqData, Context).
+
+is_authorized(ReqData, Context) ->
+ try
+ rabbit_mgmt_util:is_authorized_user(ReqData, Context,
+ rabbit_mgmt_wm_connection:conn(ReqData))
+ catch
+ {error, invalid_range_parameters, Reason} ->
+ rabbit_mgmt_util:bad_request(iolist_to_binary(Reason), ReqData,
+ Context)
+ end.
diff --git a/deps/rabbitmq_stream_management/src/rabbit_stream_connections_mgmt.erl b/deps/rabbitmq_stream_management/src/rabbit_stream_connections_mgmt.erl
new file mode 100644
index 0000000000..7b8279c1f2
--- /dev/null
+++ b/deps/rabbitmq_stream_management/src/rabbit_stream_connections_mgmt.erl
@@ -0,0 +1,77 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(rabbit_stream_connections_mgmt).
+
+-behaviour(rabbit_mgmt_extension).
+
+-export([dispatcher/0,
+ web_ui/0]).
+-export([init/2,
+ to_json/2,
+ content_types_provided/2,
+ is_authorized/2]).
+
+-include_lib("rabbitmq_management_agent/include/rabbit_mgmt_records.hrl").
+
+dispatcher() ->
+ case rabbit_stream_management_utils:is_feature_flag_enabled() of
+ true -> [{"/stream/connections", ?MODULE, []}];
+ false -> []
+ end.
+
+
+web_ui() ->
+ case rabbit_stream_management_utils:is_feature_flag_enabled() of
+ true -> [{javascript, <<"stream.js">>}];
+ false -> rabbit_log:warning(
+ "Unable to show the stream management plugin. The stream_queue feature flag is disabled. "++
+                   "Enable the stream_queue feature flag, then disable and re-enable the rabbitmq_stream_management plugin. "++
+ "See https://www.rabbitmq.com/feature-flags.html to learn more",
+ []),
+ []
+ end.
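+
+%% The warning above asks the operator to enable the feature flag and re-enable
+%% the plugin. A minimal sketch of the corresponding commands (assumption:
+%% standard rabbitmqctl / rabbitmq-plugins CLI tools):
+%%
+%%   rabbitmqctl enable_feature_flag stream_queue
+%%   rabbitmq-plugins disable rabbitmq_stream_management
+%%   rabbitmq-plugins enable rabbitmq_stream_management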
+
+%%--------------------------------------------------------------------
+
+init(Req, _Opts) ->
+ {cowboy_rest, rabbit_mgmt_cors:set_headers(Req, ?MODULE), #context{}}.
+
+content_types_provided(ReqData, Context) ->
+ {[{<<"application/json">>, to_json}], ReqData, Context}.
+
+to_json(ReqData, Context) ->
+ try
+ Connections = do_connections_query(ReqData, Context),
+ rabbit_mgmt_util:reply_list_or_paginate(Connections, ReqData, Context)
+ catch
+ {error, invalid_range_parameters, Reason} ->
+ rabbit_mgmt_util:bad_request(iolist_to_binary(Reason), ReqData,
+ Context)
+ end.
+
+is_authorized(ReqData, Context) ->
+ rabbit_mgmt_util:is_authorized(ReqData, Context).
+
+augmented(ReqData, Context) ->
+ rabbit_mgmt_util:filter_conn_ch_list(
+ rabbit_mgmt_db:get_all_connections(
+ rabbit_mgmt_util:range_ceil(ReqData)),
+ ReqData, Context).
+
+do_connections_query(ReqData, Context) ->
+ case rabbit_mgmt_util:disable_stats(ReqData) of
+ false ->
+ rabbit_stream_management_utils:keep_stream_connections(augmented(ReqData,
+ Context));
+ true ->
+ TrackedStreamConnections =
+ rabbit_stream_management_utils:keep_tracked_stream_connections(
+ rabbit_connection_tracking:list()),
+ rabbit_mgmt_util:filter_tracked_conn_list(TrackedStreamConnections,
+ ReqData, Context)
+ end.
diff --git a/deps/rabbitmq_stream_management/src/rabbit_stream_connections_vhost_mgmt.erl b/deps/rabbitmq_stream_management/src/rabbit_stream_connections_vhost_mgmt.erl
new file mode 100644
index 0000000000..8aadac2077
--- /dev/null
+++ b/deps/rabbitmq_stream_management/src/rabbit_stream_connections_vhost_mgmt.erl
@@ -0,0 +1,65 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(rabbit_stream_connections_vhost_mgmt).
+
+-behaviour(rabbit_mgmt_extension).
+
+-export([dispatcher/0,
+ web_ui/0]).
+-export([init/2,
+ to_json/2,
+ content_types_provided/2,
+ resource_exists/2,
+ is_authorized/2]).
+
+-include_lib("rabbitmq_management_agent/include/rabbit_mgmt_records.hrl").
+-include_lib("amqp_client/include/amqp_client.hrl").
+
+dispatcher() ->
+ case rabbit_stream_management_utils:is_feature_flag_enabled() of
+ true -> [{"/stream/connections/:vhost", ?MODULE, []}];
+ false -> []
+ end.
+
+web_ui() ->
+ [].
+
+%%--------------------------------------------------------------------
+
+init(Req, _Opts) ->
+ {cowboy_rest, rabbit_mgmt_cors:set_headers(Req, ?MODULE), #context{}}.
+
+content_types_provided(ReqData, Context) ->
+ {[{<<"application/json">>, to_json}], ReqData, Context}.
+
+resource_exists(ReqData, Context) ->
+ {rabbit_vhost:exists(
+ rabbit_mgmt_util:id(vhost, ReqData)),
+ ReqData, Context}.
+
+to_json(ReqData, Context) ->
+ try
+ rabbit_mgmt_util:reply_list(
+ rabbit_stream_management_utils:keep_stream_connections(augmented(ReqData,
+ Context)),
+ ReqData, Context)
+ catch
+ {error, invalid_range_parameters, Reason} ->
+ rabbit_mgmt_util:bad_request(iolist_to_binary(Reason), ReqData,
+ Context)
+ end.
+
+is_authorized(ReqData, Context) ->
+ rabbit_mgmt_util:is_authorized_vhost_visible_for_monitoring(ReqData,
+ Context).
+
+augmented(ReqData, Context) ->
+ rabbit_mgmt_util:filter_conn_ch_list(
+ rabbit_mgmt_db:get_all_connections(
+ rabbit_mgmt_util:range_ceil(ReqData)),
+ ReqData, Context).
diff --git a/deps/rabbitmq_stream_management/src/rabbit_stream_consumers_mgmt.erl b/deps/rabbitmq_stream_management/src/rabbit_stream_consumers_mgmt.erl
new file mode 100644
index 0000000000..6c4e8b0565
--- /dev/null
+++ b/deps/rabbitmq_stream_management/src/rabbit_stream_consumers_mgmt.erl
@@ -0,0 +1,85 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(rabbit_stream_consumers_mgmt).
+
+-behaviour(rabbit_mgmt_extension).
+
+-export([dispatcher/0,
+ web_ui/0]).
+-export([init/2,
+ resource_exists/2,
+ to_json/2,
+ content_types_provided/2,
+ is_authorized/2]).
+
+-include_lib("rabbitmq_management_agent/include/rabbit_mgmt_records.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
+
+dispatcher() ->
+ case rabbit_stream_management_utils:is_feature_flag_enabled() of
+ true -> [{"/stream/consumers", ?MODULE, []},
+ {"/stream/consumers/:vhost", ?MODULE, []}];
+ false -> []
+ end.
+
+web_ui() ->
+ [].
+
+%%--------------------------------------------------------------------
+
+init(Req, _Opts) ->
+ {cowboy_rest, rabbit_mgmt_cors:set_headers(Req, ?MODULE), #context{}}.
+
+content_types_provided(ReqData, Context) ->
+ {[{<<"application/json">>, to_json}], ReqData, Context}.
+
+resource_exists(ReqData, Context) ->
+ {case rabbit_mgmt_util:vhost(ReqData) of
+ not_found ->
+ false;
+ none ->
+ true; % none means `all`
+ _ ->
+ true
+ end,
+ ReqData, Context}.
+
+to_json(ReqData, Context = #context{user = User}) ->
+ case rabbit_mgmt_util:disable_stats(ReqData) of
+ false ->
+ Arg = case rabbit_mgmt_util:vhost(ReqData) of
+ none ->
+ all;
+ VHost ->
+ VHost
+ end,
+ Consumers =
+ rabbit_mgmt_format:strip_pids(
+ rabbit_stream_mgmt_db:get_all_consumers(Arg)),
+ rabbit_mgmt_util:reply_list(filter_user(Consumers, User),
+ [],
+ ReqData,
+ Context);
+ true ->
+ rabbit_mgmt_util:bad_request(<<"Stats in management UI are disabled on this node">>,
+ ReqData, Context)
+ end.
+
+is_authorized(ReqData, Context) ->
+ rabbit_mgmt_util:is_authorized(ReqData, Context).
+
+filter_user(List, #user{username = Username, tags = Tags}) ->
+ case rabbit_mgmt_util:is_monitor(Tags) of
+ true ->
+ List;
+ false ->
+ [I
+ || I <- List,
+ rabbit_misc:pget(user, rabbit_misc:pget(connection_details, I))
+ == Username]
+ end.
diff --git a/deps/rabbitmq_stream_management/src/rabbit_stream_management_utils.erl b/deps/rabbitmq_stream_management/src/rabbit_stream_management_utils.erl
new file mode 100644
index 0000000000..35112fd4f4
--- /dev/null
+++ b/deps/rabbitmq_stream_management/src/rabbit_stream_management_utils.erl
@@ -0,0 +1,38 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(rabbit_stream_management_utils).
+
+-export([keep_stream_connections/1,
+ keep_tracked_stream_connections/1,
+ is_stream_connection/1,
+ is_feature_flag_enabled/0]).
+
+-include_lib("rabbit_common/include/rabbit.hrl").
+
+keep_stream_connections(Connections) ->
+ lists:filter(fun is_stream_connection/1, Connections).
+
+is_stream_connection(Connection) ->
+ case lists:keyfind(protocol, 1, Connection) of
+ {protocol, <<"stream">>} ->
+ true;
+ _ ->
+ false
+ end.
+
+keep_tracked_stream_connections(Connections) ->
+ lists:filter(fun (#tracked_connection{protocol = <<"stream">>}) ->
+ true;
+ (_) ->
+ false
+ end,
+ Connections).
+
+is_feature_flag_enabled() ->
+ FeatureFlagsEnabled = rabbit_ff_registry:list(enabled),
+ maps:is_key(stream_queue, FeatureFlagsEnabled).
diff --git a/deps/rabbitmq_stream_management/src/rabbit_stream_mgmt_db.erl b/deps/rabbitmq_stream_management/src/rabbit_stream_mgmt_db.erl
new file mode 100644
index 0000000000..b5ac9fe987
--- /dev/null
+++ b/deps/rabbitmq_stream_management/src/rabbit_stream_mgmt_db.erl
@@ -0,0 +1,167 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(rabbit_stream_mgmt_db).
+
+-define(ENTITY_CONSUMER, consumer).
+-define(ENTITY_PUBLISHER, publisher).
+
+-include_lib("rabbitmq_stream/include/rabbit_stream_metrics.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
+
+-export([get_all_consumers/1,
+ get_all_publishers/1]).
+-export([entity_data/4]).
+-export([get_connection_consumers/1,
+ get_connection_publishers/1,
+ get_stream_publishers/1]).
+
+get_all_consumers(VHosts) ->
+ rabbit_mgmt_db:submit(fun(_Interval) -> consumers_stats(VHosts) end).
+
+get_all_publishers(VHosts) ->
+ rabbit_mgmt_db:submit(fun(_Interval) -> publishers_stats(VHosts) end).
+
+get_connection_consumers(ConnectionPid) when is_pid(ConnectionPid) ->
+ rabbit_mgmt_db:submit(fun(_Interval) ->
+ connection_consumers_stats(ConnectionPid)
+ end).
+
+get_connection_publishers(ConnectionPid) when is_pid(ConnectionPid) ->
+ rabbit_mgmt_db:submit(fun(_Interval) ->
+ connection_publishers_stats(ConnectionPid)
+ end).
+
+get_stream_publishers(QueueResource) ->
+ rabbit_mgmt_db:submit(fun(_Interval) ->
+ stream_publishers_stats(QueueResource)
+ end).
+
+consumers_stats(VHost) ->
+ Data =
+ rabbit_mgmt_db:get_data_from_nodes({rabbit_stream_mgmt_db,
+ entity_data,
+ [VHost, ?ENTITY_CONSUMER,
+ fun consumers_by_vhost/1]}),
+ [V || {_, V} <- maps:to_list(Data)].
+
+publishers_stats(VHost) ->
+ Data =
+ rabbit_mgmt_db:get_data_from_nodes({rabbit_stream_mgmt_db,
+ entity_data,
+ [VHost, ?ENTITY_PUBLISHER,
+ fun publishers_by_vhost/1]}),
+ [V || {_, V} <- maps:to_list(Data)].
+
+connection_consumers_stats(ConnectionPid) ->
+ Data =
+ rabbit_mgmt_db:get_data_from_nodes({rabbit_stream_mgmt_db,
+ entity_data,
+ [ConnectionPid, ?ENTITY_CONSUMER,
+ fun consumers_by_connection/1]}),
+ [V || {_, V} <- maps:to_list(Data)].
+
+connection_publishers_stats(ConnectionPid) ->
+ Data =
+ rabbit_mgmt_db:get_data_from_nodes({rabbit_stream_mgmt_db,
+ entity_data,
+ [ConnectionPid, ?ENTITY_PUBLISHER,
+ fun publishers_by_connection/1]}),
+ [V || {_, V} <- maps:to_list(Data)].
+
+stream_publishers_stats(Queue) ->
+ Data =
+ rabbit_mgmt_db:get_data_from_nodes({rabbit_stream_mgmt_db,
+ entity_data,
+ [Queue, ?ENTITY_PUBLISHER,
+ fun publishers_by_stream/1]}),
+ [V || {_, V} <- maps:to_list(Data)].
+
+entity_data(_Pid, Param, EntityType, QueryFun) ->
+ maps:from_list([begin
+ AugmentedPublisher = augment_entity(EntityType, P),
+ {P,
+ augment_connection_pid(AugmentedPublisher)
+ ++ AugmentedPublisher}
+ end
+ || P <- QueryFun(Param)]).
+
+augment_entity(?ENTITY_CONSUMER, {{Q, ConnPid, SubId}, Props}) ->
+ [{queue, format_resource(Q)}, {connection, ConnPid},
+ {subscription_id, SubId}
+ | Props];
+augment_entity(?ENTITY_PUBLISHER, {{Q, ConnPid, PubId}, Props}) ->
+ [{queue, format_resource(Q)}, {connection, ConnPid},
+ {publisher_id, PubId}
+ | Props].
+
+consumers_by_vhost(VHost) ->
+ ets:select(?TABLE_CONSUMER,
+ [{{{#resource{virtual_host = '$1', _ = '_'}, '_', '_'}, '_'},
+ [{'orelse', {'==', all, VHost}, {'==', VHost, '$1'}}],
+ ['$_']}]).
+
+publishers_by_vhost(VHost) ->
+ ets:select(?TABLE_PUBLISHER,
+ [{{{#resource{virtual_host = '$1', _ = '_'}, '_', '_'}, '_'},
+ [{'orelse', {'==', all, VHost}, {'==', VHost, '$1'}}],
+ ['$_']}]).
+
+consumers_by_connection(ConnectionPid) ->
+ get_entity_stats(?TABLE_CONSUMER, ConnectionPid).
+
+publishers_by_connection(ConnectionPid) ->
+ get_entity_stats(?TABLE_PUBLISHER, ConnectionPid).
+
+publishers_by_stream(QueueResource) ->
+ get_entity_stats_by_resource(?TABLE_PUBLISHER, QueueResource).
+
+get_entity_stats(Table, Id) ->
+ ets:select(Table, match_entity_spec(Id)).
+
+match_entity_spec(ConnectionId) ->
+ [{{{'_', '$1', '_'}, '_'}, [{'==', ConnectionId, '$1'}], ['$_']}].
+
+get_entity_stats_by_resource(Table, Resource) ->
+ ets:select(Table, match_entity_spec_by_resource(Resource)).
+
+match_entity_spec_by_resource(#resource{virtual_host = VHost,
+ name = Name}) ->
+ [{{{#resource{virtual_host = '$1',
+ name = '$2',
+ _ = '_'},
+ '_', '_'},
+ '_'},
+ [{'andalso', {'==', '$1', VHost}, {'==', Name, '$2'}}], ['$_']}].
+
+augment_connection_pid(Consumer) ->
+ Pid = rabbit_misc:pget(connection, Consumer),
+ Conn =
+ rabbit_mgmt_data:lookup_element(connection_created_stats, Pid, 3),
+ ConnDetails =
+ case Conn of
+ [] -> %% If the connection has just been opened, we might not yet have the data
+ [];
+ _ ->
+ [{name, rabbit_misc:pget(name, Conn)},
+ {user, rabbit_misc:pget(user, Conn)},
+ {node, rabbit_misc:pget(node, Conn)},
+ {peer_port, rabbit_misc:pget(peer_port, Conn)},
+ {peer_host, rabbit_misc:pget(peer_host, Conn)}]
+ end,
+ [{connection_details, ConnDetails}].
+
+format_resource(unknown) ->
+ unknown;
+format_resource(Res) ->
+ format_resource(name, Res).
+
+format_resource(_, unknown) ->
+ unknown;
+format_resource(NameAs,
+ #resource{name = Name, virtual_host = VHost}) ->
+ [{NameAs, Name}, {vhost, VHost}].
diff --git a/deps/rabbitmq_stream_management/src/rabbit_stream_publishers_mgmt.erl b/deps/rabbitmq_stream_management/src/rabbit_stream_publishers_mgmt.erl
new file mode 100644
index 0000000000..0ec2f5a761
--- /dev/null
+++ b/deps/rabbitmq_stream_management/src/rabbit_stream_publishers_mgmt.erl
@@ -0,0 +1,108 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(rabbit_stream_publishers_mgmt).
+
+-behaviour(rabbit_mgmt_extension).
+
+-export([dispatcher/0,
+ web_ui/0]).
+-export([init/2,
+ resource_exists/2,
+ to_json/2,
+ content_types_provided/2,
+ is_authorized/2]).
+
+-include_lib("rabbitmq_management_agent/include/rabbit_mgmt_records.hrl").
+-include_lib("rabbit_common/include/rabbit.hrl").
+
+dispatcher() ->
+ case rabbit_stream_management_utils:is_feature_flag_enabled() of
+ true -> [{"/stream/publishers", ?MODULE, []},
+ {"/stream/publishers/:vhost", ?MODULE, []},
+ {"/stream/publishers/:vhost/:queue", ?MODULE, []}];
+ false -> []
+ end.
+
+web_ui() ->
+ [].
+
+%%--------------------------------------------------------------------
+
+init(Req, _Opts) ->
+ {cowboy_rest, rabbit_mgmt_cors:set_headers(Req, ?MODULE), #context{}}.
+
+content_types_provided(ReqData, Context) ->
+ {[{<<"application/json">>, to_json}], ReqData, Context}.
+
+resource_exists(ReqData, Context) ->
+ {case rabbit_mgmt_util:vhost(ReqData) of
+ not_found ->
+ false;
+ none ->
+ true; % none means `all`
+ _ ->
+ case rabbit_mgmt_util:id(queue, ReqData) of
+ none ->
+ true;
+ _ ->
+ case rabbit_mgmt_wm_queue:queue(ReqData) of
+ not_found ->
+ false;
+ _ ->
+ true
+ end
+ end
+ end,
+ ReqData, Context}.
+
+to_json(ReqData, Context = #context{user = User}) ->
+ case rabbit_mgmt_util:disable_stats(ReqData) of
+ false ->
+ VHost =
+ case rabbit_mgmt_util:vhost(ReqData) of
+ none ->
+ all;
+ V ->
+ V
+ end,
+ Queue = rabbit_mgmt_util:id(queue, ReqData),
+ Publishers =
+ case {VHost, Queue} of
+ {VHost, none} ->
+ rabbit_mgmt_format:strip_pids(
+ rabbit_stream_mgmt_db:get_all_publishers(VHost));
+ {VHost, Q} ->
+ QueueResource =
+ #resource{virtual_host = VHost,
+ name = Q,
+ kind = queue},
+ rabbit_mgmt_format:strip_pids(
+ rabbit_stream_mgmt_db:get_stream_publishers(QueueResource))
+ end,
+ rabbit_mgmt_util:reply_list(filter_user(Publishers, User),
+ [],
+ ReqData,
+ Context);
+ true ->
+ rabbit_mgmt_util:bad_request(<<"Stats in management UI are disabled on this node">>,
+ ReqData, Context)
+ end.
+
+is_authorized(ReqData, Context) ->
+ rabbit_mgmt_util:is_authorized(ReqData, Context).
+
+filter_user(List, #user{username = Username, tags = Tags}) ->
+ case rabbit_mgmt_util:is_monitor(Tags) of
+ true ->
+ List;
+ false ->
+ [I
+ || I <- List,
+ rabbit_misc:pget(user, rabbit_misc:pget(connection_details, I))
+ == Username]
+ end.
diff --git a/deps/rabbitmq_stream_management/test/http_SUITE.erl b/deps/rabbitmq_stream_management/test/http_SUITE.erl
new file mode 100644
index 0000000000..83f288c0c6
--- /dev/null
+++ b/deps/rabbitmq_stream_management/test/http_SUITE.erl
@@ -0,0 +1,120 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(http_SUITE).
+
+-include_lib("common_test/include/ct.hrl").
+-include_lib("rabbit_common/include/rabbit_framing.hrl").
+-include_lib("rabbitmq_ct_helpers/include/rabbit_mgmt_test.hrl").
+
+-compile(export_all).
+
+all() ->
+ [{group, non_parallel_tests}].
+
+groups() ->
+ [{non_parallel_tests, [], [stream_management]}].
+
+%% -------------------------------------------------------------------
+%% Testsuite setup/teardown.
+%% -------------------------------------------------------------------
+
+init_per_suite(Config) ->
+ case rabbit_ct_helpers:is_mixed_versions() of
+ true ->
+ {skip, "suite is not mixed versions compatible"};
+ _ ->
+ rabbit_ct_helpers:log_environment(),
+ Config1 =
+ rabbit_ct_helpers:set_config(Config,
+ [{rmq_nodename_suffix, ?MODULE}]),
+ Config2 =
+ rabbit_ct_helpers:set_config(Config1,
+ {rabbitmq_ct_tls_verify,
+ verify_none}),
+ SetupStep =
+ fun(StepConfig) ->
+ rabbit_ct_helpers:merge_app_env(StepConfig,
+ {rabbit,
+ [{collect_statistics_interval,
+ 500}]})
+ end,
+ rabbit_ct_helpers:run_setup_steps(Config2,
+ [SetupStep]
+ ++ rabbit_ct_broker_helpers:setup_steps()
+ ++ rabbit_ct_client_helpers:setup_steps())
+ end.
+
+end_per_suite(Config) ->
+ rabbit_ct_helpers:run_teardown_steps(Config,
+ rabbit_ct_client_helpers:teardown_steps()
+ ++ rabbit_ct_broker_helpers:teardown_steps()).
+
+init_per_group(_, Config) ->
+ Config.
+
+end_per_group(_, Config) ->
+ Config.
+
+init_per_testcase(Testcase, Config) ->
+ rabbit_ct_helpers:testcase_started(Config, Testcase).
+
+end_per_testcase(Testcase, Config) ->
+ rabbit_ct_helpers:testcase_finished(Config, Testcase).
+
+%% -------------------------------------------------------------------
+%% Testcases.
+%% -------------------------------------------------------------------
+
+stream_management(Config) ->
+ UserManagement = <<"user-management">>,
+ UserMonitoring = <<"user-monitoring">>,
+ Vhost1 = <<"vh1">>,
+ Vhost2 = <<"vh2">>,
+ rabbit_ct_broker_helpers:add_user(Config, UserManagement),
+ rabbit_ct_broker_helpers:set_user_tags(Config,
+ 0,
+ UserManagement,
+ [management]),
+ rabbit_ct_broker_helpers:add_user(Config, UserMonitoring),
+ rabbit_ct_broker_helpers:set_user_tags(Config,
+ 0,
+ UserMonitoring,
+ [monitoring]),
+ rabbit_ct_broker_helpers:add_vhost(Config, Vhost1),
+ rabbit_ct_broker_helpers:add_vhost(Config, Vhost2),
+
+ rabbit_ct_broker_helpers:set_full_permissions(Config, UserManagement,
+ Vhost1),
+ rabbit_ct_broker_helpers:set_full_permissions(Config, UserMonitoring,
+ Vhost1),
+ rabbit_ct_broker_helpers:set_full_permissions(Config, <<"guest">>,
+ Vhost1),
+ rabbit_ct_broker_helpers:set_full_permissions(Config, <<"guest">>,
+ Vhost2),
+
+ StreamPortNode = get_stream_port(Config),
+ StreamPortTlsNode = get_stream_port_tls(Config),
+ ManagementPortNode = get_management_port(Config),
+ DataDir = rabbit_ct_helpers:get_config(Config, data_dir),
+ MakeResult =
+ rabbit_ct_helpers:make(Config, DataDir,
+ ["tests",
+ {"STREAM_PORT=~b", [StreamPortNode]},
+ {"STREAM_PORT_TLS=~b", [StreamPortTlsNode]},
+ {"MANAGEMENT_PORT=~b", [ManagementPortNode]}]),
+ {ok, _} = MakeResult.
+
+get_stream_port(Config) ->
+ rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_stream).
+
+get_management_port(Config) ->
+ rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_mgmt).
+
+get_stream_port_tls(Config) ->
+ rabbit_ct_broker_helpers:get_node_config(Config, 0,
+ tcp_port_stream_tls).
diff --git a/deps/rabbitmq_stream_management/test/http_SUITE_data/.gitignore b/deps/rabbitmq_stream_management/test/http_SUITE_data/.gitignore
new file mode 100644
index 0000000000..4c70cdb707
--- /dev/null
+++ b/deps/rabbitmq_stream_management/test/http_SUITE_data/.gitignore
@@ -0,0 +1,3 @@
+/build/
+/lib/
+/target/
diff --git a/deps/rabbitmq_stream_management/test/http_SUITE_data/.mvn/wrapper/MavenWrapperDownloader.java b/deps/rabbitmq_stream_management/test/http_SUITE_data/.mvn/wrapper/MavenWrapperDownloader.java
new file mode 100644
index 0000000000..b901097f2d
--- /dev/null
+++ b/deps/rabbitmq_stream_management/test/http_SUITE_data/.mvn/wrapper/MavenWrapperDownloader.java
@@ -0,0 +1,117 @@
+/*
+ * Copyright 2007-present the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import java.net.*;
+import java.io.*;
+import java.nio.channels.*;
+import java.util.Properties;
+
+public class MavenWrapperDownloader {
+
+ private static final String WRAPPER_VERSION = "0.5.6";
+ /**
+ * Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided.
+ */
+ private static final String DEFAULT_DOWNLOAD_URL = "https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/"
+ + WRAPPER_VERSION + "/maven-wrapper-" + WRAPPER_VERSION + ".jar";
+
+ /**
+ * Path to the maven-wrapper.properties file, which might contain a downloadUrl property to
+ * use instead of the default one.
+ */
+ private static final String MAVEN_WRAPPER_PROPERTIES_PATH =
+ ".mvn/wrapper/maven-wrapper.properties";
+
+ /**
+ * Path where the maven-wrapper.jar will be saved to.
+ */
+ private static final String MAVEN_WRAPPER_JAR_PATH =
+ ".mvn/wrapper/maven-wrapper.jar";
+
+ /**
+ * Name of the property which should be used to override the default download url for the wrapper.
+ */
+ private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl";
+
+ public static void main(String args[]) {
+ System.out.println("- Downloader started");
+ File baseDirectory = new File(args[0]);
+ System.out.println("- Using base directory: " + baseDirectory.getAbsolutePath());
+
+ // If the maven-wrapper.properties exists, read it and check if it contains a custom
+ // wrapperUrl parameter.
+ File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH);
+ String url = DEFAULT_DOWNLOAD_URL;
+ if(mavenWrapperPropertyFile.exists()) {
+ FileInputStream mavenWrapperPropertyFileInputStream = null;
+ try {
+ mavenWrapperPropertyFileInputStream = new FileInputStream(mavenWrapperPropertyFile);
+ Properties mavenWrapperProperties = new Properties();
+ mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream);
+ url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url);
+ } catch (IOException e) {
+ System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'");
+ } finally {
+ try {
+ if(mavenWrapperPropertyFileInputStream != null) {
+ mavenWrapperPropertyFileInputStream.close();
+ }
+ } catch (IOException e) {
+ // Ignore ...
+ }
+ }
+ }
+ System.out.println("- Downloading from: " + url);
+
+ File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH);
+ if(!outputFile.getParentFile().exists()) {
+ if(!outputFile.getParentFile().mkdirs()) {
+ System.out.println(
+ "- ERROR creating output directory '" + outputFile.getParentFile().getAbsolutePath() + "'");
+ }
+ }
+ System.out.println("- Downloading to: " + outputFile.getAbsolutePath());
+ try {
+ downloadFileFromURL(url, outputFile);
+ System.out.println("Done");
+ System.exit(0);
+ } catch (Throwable e) {
+ System.out.println("- Error downloading");
+ e.printStackTrace();
+ System.exit(1);
+ }
+ }
+
+ private static void downloadFileFromURL(String urlString, File destination) throws Exception {
+ if (System.getenv("MVNW_USERNAME") != null && System.getenv("MVNW_PASSWORD") != null) {
+ String username = System.getenv("MVNW_USERNAME");
+ char[] password = System.getenv("MVNW_PASSWORD").toCharArray();
+ Authenticator.setDefault(new Authenticator() {
+ @Override
+ protected PasswordAuthentication getPasswordAuthentication() {
+ return new PasswordAuthentication(username, password);
+ }
+ });
+ }
+ URL website = new URL(urlString);
+ ReadableByteChannel rbc;
+ rbc = Channels.newChannel(website.openStream());
+ FileOutputStream fos = new FileOutputStream(destination);
+ fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
+ fos.close();
+ rbc.close();
+ }
+
+}
diff --git a/deps/rabbitmq_stream_management/test/http_SUITE_data/.mvn/wrapper/maven-wrapper.jar b/deps/rabbitmq_stream_management/test/http_SUITE_data/.mvn/wrapper/maven-wrapper.jar
new file mode 100644
index 0000000000..2cc7d4a55c
--- /dev/null
+++ b/deps/rabbitmq_stream_management/test/http_SUITE_data/.mvn/wrapper/maven-wrapper.jar
Binary files differ
diff --git a/deps/rabbitmq_stream_management/test/http_SUITE_data/.mvn/wrapper/maven-wrapper.properties b/deps/rabbitmq_stream_management/test/http_SUITE_data/.mvn/wrapper/maven-wrapper.properties
new file mode 100644
index 0000000000..642d572ce9
--- /dev/null
+++ b/deps/rabbitmq_stream_management/test/http_SUITE_data/.mvn/wrapper/maven-wrapper.properties
@@ -0,0 +1,2 @@
+distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.6.3/apache-maven-3.6.3-bin.zip
+wrapperUrl=https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar
diff --git a/deps/rabbitmq_stream_management/test/http_SUITE_data/Makefile b/deps/rabbitmq_stream_management/test/http_SUITE_data/Makefile
new file mode 100644
index 0000000000..dae43a1ad6
--- /dev/null
+++ b/deps/rabbitmq_stream_management/test/http_SUITE_data/Makefile
@@ -0,0 +1,15 @@
+export PATH :=$(CURDIR):$(PATH)
+HOSTNAME := $(shell hostname)
+MVN_FLAGS += -Dstream.port=$(STREAM_PORT) \
+ -Dstream.port.tls=$(STREAM_PORT_TLS) \
+ -Dmanagement.port=$(MANAGEMENT_PORT)
+
+.PHONY: tests clean
+
+tests:
+ # Note: to run a single test
+ # @mvnw -q $(MVN_FLAGS) -Dtest=StreamTest#metadataOnClusterShouldReturnLeaderAndReplicas test
+ @mvnw -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn -B $(MVN_FLAGS) test
+
+clean:
+ @mvnw clean
diff --git a/deps/rabbitmq_stream_management/test/http_SUITE_data/mvnw b/deps/rabbitmq_stream_management/test/http_SUITE_data/mvnw
new file mode 100755
index 0000000000..41c0f0c23d
--- /dev/null
+++ b/deps/rabbitmq_stream_management/test/http_SUITE_data/mvnw
@@ -0,0 +1,310 @@
+#!/bin/sh
+# ----------------------------------------------------------------------------
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ----------------------------------------------------------------------------
+
+# ----------------------------------------------------------------------------
+# Maven Start Up Batch script
+#
+# Required ENV vars:
+# ------------------
+# JAVA_HOME - location of a JDK home dir
+#
+# Optional ENV vars
+# -----------------
+# M2_HOME - location of maven2's installed home dir
+# MAVEN_OPTS - parameters passed to the Java VM when running Maven
+# e.g. to debug Maven itself, use
+# set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
+# MAVEN_SKIP_RC - flag to disable loading of mavenrc files
+# ----------------------------------------------------------------------------
+
+if [ -z "$MAVEN_SKIP_RC" ] ; then
+
+ if [ -f /etc/mavenrc ] ; then
+ . /etc/mavenrc
+ fi
+
+ if [ -f "$HOME/.mavenrc" ] ; then
+ . "$HOME/.mavenrc"
+ fi
+
+fi
+
+# OS specific support. $var _must_ be set to either true or false.
+cygwin=false;
+darwin=false;
+mingw=false
+case "`uname`" in
+ CYGWIN*) cygwin=true ;;
+ MINGW*) mingw=true;;
+ Darwin*) darwin=true
+ # Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home
+ # See https://developer.apple.com/library/mac/qa/qa1170/_index.html
+ if [ -z "$JAVA_HOME" ]; then
+ if [ -x "/usr/libexec/java_home" ]; then
+ export JAVA_HOME="`/usr/libexec/java_home`"
+ else
+ export JAVA_HOME="/Library/Java/Home"
+ fi
+ fi
+ ;;
+esac
+
+if [ -z "$JAVA_HOME" ] ; then
+ if [ -r /etc/gentoo-release ] ; then
+ JAVA_HOME=`java-config --jre-home`
+ fi
+fi
+
+if [ -z "$M2_HOME" ] ; then
+ ## resolve links - $0 may be a link to maven's home
+ PRG="$0"
+
+ # need this for relative symlinks
+ while [ -h "$PRG" ] ; do
+ ls=`ls -ld "$PRG"`
+ link=`expr "$ls" : '.*-> \(.*\)$'`
+ if expr "$link" : '/.*' > /dev/null; then
+ PRG="$link"
+ else
+ PRG="`dirname "$PRG"`/$link"
+ fi
+ done
+
+ saveddir=`pwd`
+
+ M2_HOME=`dirname "$PRG"`/..
+
+ # make it fully qualified
+ M2_HOME=`cd "$M2_HOME" && pwd`
+
+ cd "$saveddir"
+ # echo Using m2 at $M2_HOME
+fi
+
+# For Cygwin, ensure paths are in UNIX format before anything is touched
+if $cygwin ; then
+ [ -n "$M2_HOME" ] &&
+ M2_HOME=`cygpath --unix "$M2_HOME"`
+ [ -n "$JAVA_HOME" ] &&
+ JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
+ [ -n "$CLASSPATH" ] &&
+ CLASSPATH=`cygpath --path --unix "$CLASSPATH"`
+fi
+
+# For Mingw, ensure paths are in UNIX format before anything is touched
+if $mingw ; then
+ [ -n "$M2_HOME" ] &&
+ M2_HOME="`(cd "$M2_HOME"; pwd)`"
+ [ -n "$JAVA_HOME" ] &&
+ JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`"
+fi
+
+if [ -z "$JAVA_HOME" ]; then
+ javaExecutable="`which javac`"
+ if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then
+ # readlink(1) is not available as standard on Solaris 10.
+ readLink=`which readlink`
+ if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then
+ if $darwin ; then
+ javaHome="`dirname \"$javaExecutable\"`"
+ javaExecutable="`cd \"$javaHome\" && pwd -P`/javac"
+ else
+ javaExecutable="`readlink -f \"$javaExecutable\"`"
+ fi
+ javaHome="`dirname \"$javaExecutable\"`"
+ javaHome=`expr "$javaHome" : '\(.*\)/bin'`
+ JAVA_HOME="$javaHome"
+ export JAVA_HOME
+ fi
+ fi
+fi
+
+if [ -z "$JAVACMD" ] ; then
+ if [ -n "$JAVA_HOME" ] ; then
+ if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+ # IBM's JDK on AIX uses strange locations for the executables
+ JAVACMD="$JAVA_HOME/jre/sh/java"
+ else
+ JAVACMD="$JAVA_HOME/bin/java"
+ fi
+ else
+ JAVACMD="`which java`"
+ fi
+fi
+
+if [ ! -x "$JAVACMD" ] ; then
+ echo "Error: JAVA_HOME is not defined correctly." >&2
+ echo " We cannot execute $JAVACMD" >&2
+ exit 1
+fi
+
+if [ -z "$JAVA_HOME" ] ; then
+ echo "Warning: JAVA_HOME environment variable is not set."
+fi
+
+CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher
+
+# traverses directory structure from process work directory to filesystem root
+# first directory with .mvn subdirectory is considered project base directory
+find_maven_basedir() {
+
+ if [ -z "$1" ]
+ then
+ echo "Path not specified to find_maven_basedir"
+ return 1
+ fi
+
+ basedir="$1"
+ wdir="$1"
+ while [ "$wdir" != '/' ] ; do
+ if [ -d "$wdir"/.mvn ] ; then
+ basedir=$wdir
+ break
+ fi
+ # workaround for JBEAP-8937 (on Solaris 10/Sparc)
+ if [ -d "${wdir}" ]; then
+ wdir=`cd "$wdir/.."; pwd`
+ fi
+ # end of workaround
+ done
+ echo "${basedir}"
+}
+
+# concatenates all lines of a file
+concat_lines() {
+ if [ -f "$1" ]; then
+ echo "$(tr -s '\n' ' ' < "$1")"
+ fi
+}
+
+BASE_DIR=`find_maven_basedir "$(pwd)"`
+if [ -z "$BASE_DIR" ]; then
+ exit 1;
+fi
+
+##########################################################################################
+# Extension to allow automatically downloading the maven-wrapper.jar from Maven-central
+# This allows using the maven wrapper in projects that prohibit checking in binary data.
+##########################################################################################
+if [ -r "$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" ]; then
+ if [ "$MVNW_VERBOSE" = true ]; then
+ echo "Found .mvn/wrapper/maven-wrapper.jar"
+ fi
+else
+ if [ "$MVNW_VERBOSE" = true ]; then
+ echo "Couldn't find .mvn/wrapper/maven-wrapper.jar, downloading it ..."
+ fi
+ if [ -n "$MVNW_REPOURL" ]; then
+ jarUrl="$MVNW_REPOURL/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
+ else
+ jarUrl="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
+ fi
+ while IFS="=" read key value; do
+ case "$key" in (wrapperUrl) jarUrl="$value"; break ;;
+ esac
+ done < "$BASE_DIR/.mvn/wrapper/maven-wrapper.properties"
+ if [ "$MVNW_VERBOSE" = true ]; then
+ echo "Downloading from: $jarUrl"
+ fi
+ wrapperJarPath="$BASE_DIR/.mvn/wrapper/maven-wrapper.jar"
+ if $cygwin; then
+ wrapperJarPath=`cygpath --path --windows "$wrapperJarPath"`
+ fi
+
+ if command -v wget > /dev/null; then
+ if [ "$MVNW_VERBOSE" = true ]; then
+ echo "Found wget ... using wget"
+ fi
+ if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then
+ wget "$jarUrl" -O "$wrapperJarPath"
+ else
+ wget --http-user=$MVNW_USERNAME --http-password=$MVNW_PASSWORD "$jarUrl" -O "$wrapperJarPath"
+ fi
+ elif command -v curl > /dev/null; then
+ if [ "$MVNW_VERBOSE" = true ]; then
+ echo "Found curl ... using curl"
+ fi
+ if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then
+ curl -o "$wrapperJarPath" "$jarUrl" -f
+ else
+ curl --user $MVNW_USERNAME:$MVNW_PASSWORD -o "$wrapperJarPath" "$jarUrl" -f
+ fi
+
+ else
+ if [ "$MVNW_VERBOSE" = true ]; then
+ echo "Falling back to using Java to download"
+ fi
+ javaClass="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.java"
+ # For Cygwin, switch paths to Windows format before running javac
+ if $cygwin; then
+ javaClass=`cygpath --path --windows "$javaClass"`
+ fi
+ if [ -e "$javaClass" ]; then
+ if [ ! -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then
+ if [ "$MVNW_VERBOSE" = true ]; then
+ echo " - Compiling MavenWrapperDownloader.java ..."
+ fi
+ # Compiling the Java class
+ ("$JAVA_HOME/bin/javac" "$javaClass")
+ fi
+ if [ -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then
+ # Running the downloader
+ if [ "$MVNW_VERBOSE" = true ]; then
+ echo " - Running MavenWrapperDownloader.java ..."
+ fi
+ ("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$MAVEN_PROJECTBASEDIR")
+ fi
+ fi
+ fi
+fi
+##########################################################################################
+# End of extension
+##########################################################################################
+
+export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"}
+if [ "$MVNW_VERBOSE" = true ]; then
+ echo $MAVEN_PROJECTBASEDIR
+fi
+MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS"
+
+# For Cygwin, switch paths to Windows format before running java
+if $cygwin; then
+ [ -n "$M2_HOME" ] &&
+ M2_HOME=`cygpath --path --windows "$M2_HOME"`
+ [ -n "$JAVA_HOME" ] &&
+ JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"`
+ [ -n "$CLASSPATH" ] &&
+ CLASSPATH=`cygpath --path --windows "$CLASSPATH"`
+ [ -n "$MAVEN_PROJECTBASEDIR" ] &&
+ MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"`
+fi
+
+# Provide a "standardized" way to retrieve the CLI args that will
+# work with both Windows and non-Windows executions.
+MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $@"
+export MAVEN_CMD_LINE_ARGS
+
+WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
+
+exec "$JAVACMD" \
+ $MAVEN_OPTS \
+ -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \
+ "-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \
+ ${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@"
diff --git a/deps/rabbitmq_stream_management/test/http_SUITE_data/mvnw.cmd b/deps/rabbitmq_stream_management/test/http_SUITE_data/mvnw.cmd
new file mode 100644
index 0000000000..86115719e5
--- /dev/null
+++ b/deps/rabbitmq_stream_management/test/http_SUITE_data/mvnw.cmd
@@ -0,0 +1,182 @@
+@REM ----------------------------------------------------------------------------
+@REM Licensed to the Apache Software Foundation (ASF) under one
+@REM or more contributor license agreements. See the NOTICE file
+@REM distributed with this work for additional information
+@REM regarding copyright ownership. The ASF licenses this file
+@REM to you under the Apache License, Version 2.0 (the
+@REM "License"); you may not use this file except in compliance
+@REM with the License. You may obtain a copy of the License at
+@REM
+@REM http://www.apache.org/licenses/LICENSE-2.0
+@REM
+@REM Unless required by applicable law or agreed to in writing,
+@REM software distributed under the License is distributed on an
+@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+@REM KIND, either express or implied. See the License for the
+@REM specific language governing permissions and limitations
+@REM under the License.
+@REM ----------------------------------------------------------------------------
+
+@REM ----------------------------------------------------------------------------
+@REM Maven Start Up Batch script
+@REM
+@REM Required ENV vars:
+@REM JAVA_HOME - location of a JDK home dir
+@REM
+@REM Optional ENV vars
+@REM M2_HOME - location of maven2's installed home dir
+@REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands
+@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a keystroke before ending
+@REM MAVEN_OPTS - parameters passed to the Java VM when running Maven
+@REM e.g. to debug Maven itself, use
+@REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
+@REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files
+@REM ----------------------------------------------------------------------------
+
+@REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on'
+@echo off
+@REM set title of command window
+title %0
+@REM enable echoing by setting MAVEN_BATCH_ECHO to 'on'
+@if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO%
+
+@REM set %HOME% to equivalent of $HOME
+if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%")
+
+@REM Execute a user defined script before this one
+if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre
+@REM check for pre script, once with legacy .bat ending and once with .cmd ending
+if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat"
+if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd"
+:skipRcPre
+
+@setlocal
+
+set ERROR_CODE=0
+
+@REM To isolate internal variables from possible post scripts, we use another setlocal
+@setlocal
+
+@REM ==== START VALIDATION ====
+if not "%JAVA_HOME%" == "" goto OkJHome
+
+echo.
+echo Error: JAVA_HOME not found in your environment. >&2
+echo Please set the JAVA_HOME variable in your environment to match the >&2
+echo location of your Java installation. >&2
+echo.
+goto error
+
+:OkJHome
+if exist "%JAVA_HOME%\bin\java.exe" goto init
+
+echo.
+echo Error: JAVA_HOME is set to an invalid directory. >&2
+echo JAVA_HOME = "%JAVA_HOME%" >&2
+echo Please set the JAVA_HOME variable in your environment to match the >&2
+echo location of your Java installation. >&2
+echo.
+goto error
+
+@REM ==== END VALIDATION ====
+
+:init
+
+@REM Find the project base dir, i.e. the directory that contains the folder ".mvn".
+@REM Fallback to current working directory if not found.
+
+set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR%
+IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir
+
+set EXEC_DIR=%CD%
+set WDIR=%EXEC_DIR%
+:findBaseDir
+IF EXIST "%WDIR%"\.mvn goto baseDirFound
+cd ..
+IF "%WDIR%"=="%CD%" goto baseDirNotFound
+set WDIR=%CD%
+goto findBaseDir
+
+:baseDirFound
+set MAVEN_PROJECTBASEDIR=%WDIR%
+cd "%EXEC_DIR%"
+goto endDetectBaseDir
+
+:baseDirNotFound
+set MAVEN_PROJECTBASEDIR=%EXEC_DIR%
+cd "%EXEC_DIR%"
+
+:endDetectBaseDir
+
+IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig
+
+@setlocal EnableExtensions EnableDelayedExpansion
+for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a
+@endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS%
+
+:endReadAdditionalConfig
+
+SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe"
+set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar"
+set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
+
+set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
+
+FOR /F "tokens=1,2 delims==" %%A IN ("%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties") DO (
+ IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B
+)
+
+@REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central
+@REM This allows using the maven wrapper in projects that prohibit checking in binary data.
+if exist %WRAPPER_JAR% (
+ if "%MVNW_VERBOSE%" == "true" (
+ echo Found %WRAPPER_JAR%
+ )
+) else (
+ if not "%MVNW_REPOURL%" == "" (
+ SET DOWNLOAD_URL="%MVNW_REPOURL%/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
+ )
+ if "%MVNW_VERBOSE%" == "true" (
+ echo Couldn't find %WRAPPER_JAR%, downloading it ...
+ echo Downloading from: %DOWNLOAD_URL%
+ )
+
+ powershell -Command "&{"^
+ "$webclient = new-object System.Net.WebClient;"^
+ "if (-not ([string]::IsNullOrEmpty('%MVNW_USERNAME%') -and [string]::IsNullOrEmpty('%MVNW_PASSWORD%'))) {"^
+ "$webclient.Credentials = new-object System.Net.NetworkCredential('%MVNW_USERNAME%', '%MVNW_PASSWORD%');"^
+ "}"^
+ "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; $webclient.DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')"^
+ "}"
+ if "%MVNW_VERBOSE%" == "true" (
+ echo Finished downloading %WRAPPER_JAR%
+ )
+)
+@REM End of extension
+
+@REM Provide a "standardized" way to retrieve the CLI args that will
+@REM work with both Windows and non-Windows executions.
+set MAVEN_CMD_LINE_ARGS=%*
+
+%MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %*
+if ERRORLEVEL 1 goto error
+goto end
+
+:error
+set ERROR_CODE=1
+
+:end
+@endlocal & set ERROR_CODE=%ERROR_CODE%
+
+if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost
+@REM check for post script, once with legacy .bat ending and once with .cmd ending
+if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat"
+if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd"
+:skipRcPost
+
+@REM pause the script if MAVEN_BATCH_PAUSE is set to 'on'
+if "%MAVEN_BATCH_PAUSE%" == "on" pause
+
+if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE%
+
+exit /B %ERROR_CODE%
diff --git a/deps/rabbitmq_stream_management/test/http_SUITE_data/pom.xml b/deps/rabbitmq_stream_management/test/http_SUITE_data/pom.xml
new file mode 100644
index 0000000000..5ec43a9a0e
--- /dev/null
+++ b/deps/rabbitmq_stream_management/test/http_SUITE_data/pom.xml
@@ -0,0 +1,144 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+
+ <groupId>com.rabbitmq.stream</groupId>
+ <artifactId>rabbitmq-stream-management-tests</artifactId>
+ <version>1.0-SNAPSHOT</version>
+
+ <licenses>
+ <license>
+ <name>MPL 2.0</name>
+ <url>https://www.mozilla.org/en-US/MPL/2.0/</url>
+ <distribution>repo</distribution>
+ </license>
+ </licenses>
+
+ <developers>
+ <developer>
+ <email>info@rabbitmq.com</email>
+ <name>Team RabbitMQ</name>
+ <organization>VMware, Inc. or its affiliates.</organization>
+ <organizationUrl>https://rabbitmq.com</organizationUrl>
+ </developer>
+ </developers>
+
+ <properties>
+ <stream-client.version>[0.5.0-SNAPSHOT,1.0-SNAPSHOT)</stream-client.version>
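+ <!-- the range above resolves to the latest available stream-client version below 1.0 -->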
+ <junit.jupiter.version>5.8.2</junit.jupiter.version>
+ <assertj.version>3.21.0</assertj.version>
+ <okhttp.version>4.9.3</okhttp.version>
+ <gson.version>2.8.9</gson.version>
+ <logback.version>1.2.7</logback.version>
+ <maven.compiler.plugin.version>3.8.1</maven.compiler.plugin.version>
+ <maven-surefire-plugin.version>2.22.2</maven-surefire-plugin.version>
+ <spotless.version>2.2.0</spotless.version>
+ <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+ </properties>
+
+ <dependencies>
+
+ <dependency>
+ <groupId>com.rabbitmq</groupId>
+ <artifactId>stream-client</artifactId>
+ <version>${stream-client.version}</version>
+ </dependency>
+
+ <dependency>
+ <groupId>org.junit.jupiter</groupId>
+ <artifactId>junit-jupiter-engine</artifactId>
+ <version>${junit.jupiter.version}</version>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.junit.jupiter</groupId>
+ <artifactId>junit-jupiter-params</artifactId>
+ <version>${junit.jupiter.version}</version>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.assertj</groupId>
+ <artifactId>assertj-core</artifactId>
+ <version>${assertj.version}</version>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>com.squareup.okhttp3</groupId>
+ <artifactId>okhttp</artifactId>
+ <version>${okhttp.version}</version>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>com.google.code.gson</groupId>
+ <artifactId>gson</artifactId>
+ <version>${gson.version}</version>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>ch.qos.logback</groupId>
+ <artifactId>logback-classic</artifactId>
+ <version>${logback.version}</version>
+ <scope>test</scope>
+ </dependency>
+
+ </dependencies>
+
+ <build>
+
+ <plugins>
+
+ <plugin>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <version>${maven.compiler.plugin.version}</version>
+ <configuration>
+ <source>1.8</source>
+ <target>1.8</target>
+ <compilerArgs>
+ <arg>-Xlint:deprecation</arg>
+ <arg>-Xlint:unchecked</arg>
+ </compilerArgs>
+ </configuration>
+ </plugin>
+
+ <plugin>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <version>${maven-surefire-plugin.version}</version>
+ </plugin>
+
+ <plugin>
+ <groupId>com.diffplug.spotless</groupId>
+ <artifactId>spotless-maven-plugin</artifactId>
+ <version>${spotless.version}</version>
+ <configuration>
+ <java>
+ <googleJavaFormat>
+ <version>1.9</version>
+ <style>GOOGLE</style>
+ </googleJavaFormat>
+ </java>
+ </configuration>
+ </plugin>
+
+ </plugins>
+
+ </build>
+
+ <repositories>
+
+ <repository>
+ <id>ossrh</id>
+ <url>https://oss.sonatype.org/content/repositories/snapshots</url>
+ <snapshots><enabled>true</enabled></snapshots>
+ <releases><enabled>false</enabled></releases>
+ </repository>
+
+ </repositories>
+
+</project>
diff --git a/deps/rabbitmq_stream_management/test/http_SUITE_data/src/test/java/com/rabbitmq/stream/HttpTest.java b/deps/rabbitmq_stream_management/test/http_SUITE_data/src/test/java/com/rabbitmq/stream/HttpTest.java
new file mode 100644
index 0000000000..495c831b6a
--- /dev/null
+++ b/deps/rabbitmq_stream_management/test/http_SUITE_data/src/test/java/com/rabbitmq/stream/HttpTest.java
@@ -0,0 +1,903 @@
+// The contents of this file are subject to the Mozilla Public License
+// Version 2.0 (the "License"); you may not use this file except in
+// compliance with the License. You may obtain a copy of the License
+// at https://www.mozilla.org/en-US/MPL/2.0/
+//
+// Software distributed under the License is distributed on an "AS IS"
+// basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+// the License for the specific language governing rights and
+// limitations under the License.
+//
+// The Original Code is RabbitMQ.
+//
+// The Initial Developer of the Original Code is Pivotal Software, Inc.
+// Copyright (c) 2020 VMware, Inc. or its affiliates. All rights reserved.
+//
+
+package com.rabbitmq.stream;
+
+import static com.rabbitmq.stream.TestUtils.booleanFalse;
+import static com.rabbitmq.stream.TestUtils.booleanTrue;
+import static com.rabbitmq.stream.TestUtils.isNull;
+import static com.rabbitmq.stream.TestUtils.notNull;
+import static com.rabbitmq.stream.TestUtils.waitUntil;
+import static java.lang.String.format;
+import static org.assertj.core.api.Assertions.as;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+import static org.assertj.core.api.InstanceOfAssertFactories.MAP;
+
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.rabbitmq.stream.TestUtils.ClientFactory;
+import com.rabbitmq.stream.TestUtils.TrustEverythingTrustManager;
+import com.rabbitmq.stream.impl.Client;
+import com.rabbitmq.stream.impl.Client.ClientParameters;
+import io.netty.channel.EventLoopGroup;
+import io.netty.handler.ssl.SslContextBuilder;
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.nio.charset.StandardCharsets;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.UUID;
+import java.util.concurrent.Callable;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.function.Predicate;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+import java.util.stream.Stream;
+import okhttp3.OkHttpClient;
+import okhttp3.Request;
+import okhttp3.Response;
+import okhttp3.ResponseBody;
+import org.assertj.core.api.Condition;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.MethodSource;
+import org.junit.jupiter.params.provider.NullSource;
+import org.junit.jupiter.params.provider.ValueSource;
+
+@ExtendWith(TestUtils.StreamTestInfrastructureExtension.class)
+public class HttpTest {
+
+ static OkHttpClient httpClient = httpClient("guest");
+ static Gson gson = new GsonBuilder().create();
+ EventLoopGroup eventLoopGroup;
+ ClientFactory cf;
+ String stream;
+
+ static OkHttpClient httpClient(String usernamePassword) {
+ return new OkHttpClient.Builder()
+ .authenticator(TestUtils.authenticator(usernamePassword))
+ .build();
+ }
+
+ static String get(String endpoint) throws IOException {
+ return get(httpClient, endpoint);
+ }
+
+ static String get(OkHttpClient client, String endpoint) throws IOException {
+ Request request = new Request.Builder().url(url(endpoint)).build();
+ try (Response response = client.newCall(request).execute()) {
+ if (!response.isSuccessful()) throw new IOException("Unexpected code " + response);
+
+ ResponseBody body = response.body();
+ return body == null ? "" : body.string();
+ }
+ }
+
+ static String url(String endpoint) {
+ return "http://localhost:" + TestUtils.managementPort() + "/api" + endpoint;
+ }
+
+ @SuppressWarnings("unchecked")
+ static Map<String, String> connectionDetails(Map<String, Object> parent) {
+ return (Map<String, String>) parent.get("connection_details");
+ }
+
+ @SuppressWarnings("unchecked")
+ static Map<String, String> queue(Map<String, Object> parent) {
+ return (Map<String, String>) parent.get("queue");
+ }
+
+ static void delete(String endpoint) throws IOException {
+ Request request = new Request.Builder().delete().url(url(endpoint)).build();
+ try (Response response = httpClient.newCall(request).execute()) {
+ if (!response.isSuccessful()) throw new IOException("Unexpected code " + response);
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ static List<Map<String, Object>> toMaps(String json) {
+ return Arrays.asList(gson.fromJson(json, Map[].class));
+ }
+
+ @SuppressWarnings("unchecked")
+ static Map<String, Object> toMap(String json) {
+ return gson.fromJson(json, Map.class);
+ }
+
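+ // Rebuilds the connection name exactly as the HTTP API reports it: client address -> server address.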
+ static String connectionName(Client client) {
+ InetSocketAddress localAddress = (InetSocketAddress) client.localAddress();
+ InetSocketAddress remoteAddress = (InetSocketAddress) client.remoteAddress();
+ return format("127.0.0.1:%d -> 127.0.0.1:%d", localAddress.getPort(), remoteAddress.getPort());
+ }
+
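+ // Keeps only the entities (consumers or publishers) that belong to the given client's connection.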
+ static List<Map<String, Object>> entities(List<Map<String, Object>> entities, Client client) {
+ String connectionName = connectionName(client);
+ return entities.stream()
+ .filter(
+ c ->
+ c.get("connection_details") instanceof Map
+ && connectionName.equals(connectionDetails(c).get("name")))
+ .collect(Collectors.toList());
+ }
+
+ static List<Map<String, Object>> entities(
+ List<Map<String, Object>> entities, Predicate<Map<String, Object>> filter) {
+ return entities.stream().filter(filter).collect(Collectors.toList());
+ }
+
+ static Map<String, Object> entity(
+ List<Map<String, Object>> entities, Predicate<Map<String, Object>> filter) {
+ return entities.stream().filter(filter).findFirst().orElse(Collections.emptyMap());
+ }
+
+ static TestRequest[] requests(TestRequest... requests) {
+ return requests;
+ }
+
+ static TestRequest r(String endpoint, int expectedCount) {
+ return new TestRequest(endpoint, expectedCount);
+ }
+
+ static Stream<Map<String, String>> subscriptionProperties() {
+ return Stream.of(Collections.emptyMap(), map());
+ }
+
+ static Map<String, String> map() {
+ Map<String, String> map = new LinkedHashMap<>();
+ map.put("key1", "value1");
+ map.put("key2", "value2");
+ return map;
+ }
+
+ @Test
+ void connections() throws Exception {
+ Callable<List<Map<String, Object>>> request = () -> toMaps(get("/stream/connections"));
+ int initialCount = request.call().size();
+ String connectionProvidedName = UUID.randomUUID().toString();
+ AtomicBoolean closed = new AtomicBoolean(false);
+ Client client =
+ cf.get(
+ new ClientParameters()
+ .clientProperty("connection_name", connectionProvidedName)
+ .shutdownListener(shutdownContext -> closed.set(true)));
+
+ waitUntil(() -> request.call().size() == initialCount + 1);
+
+ Map<String, Object> c =
+ request.call().stream()
+ .filter(conn -> connectionProvidedName.equals(conn.get("user_provided_name")))
+ .findFirst()
+ .get();
+
+ String connectionName = connectionName(client);
+ assertThat(c).containsEntry("name", connectionName);
+
+ assertThat(c)
+ .hasEntrySatisfying("ssl", booleanFalse())
+ .hasEntrySatisfying("ssl_cipher", isNull())
+ .hasEntrySatisfying("ssl_hash", isNull())
+ .hasEntrySatisfying("ssl_key_exchange", isNull())
+ .hasEntrySatisfying("ssl_protocol", isNull());
+
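+ // "%2F" is the URL-encoded default virtual host "/".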
+ Callable<Map<String, Object>> cRequest =
+ () -> toMap(get("/stream/connections/%2F/" + connectionName));
+ // wait until some stats are in the response
+ waitUntil(() -> cRequest.call().containsKey("recv_oct_details"));
+ c = cRequest.call();
+
+ Condition<Object> mapCondition = new Condition<>(e -> e instanceof Map, "Must be a map");
+ assertThat(c)
+ .hasEntrySatisfying("recv_oct_details", mapCondition)
+ .hasEntrySatisfying("send_oct_details", mapCondition)
+ .hasEntrySatisfying("garbage_collection", mapCondition)
+ .hasEntrySatisfying("reductions_details", mapCondition);
+
+ assertThat(closed.get()).isFalse();
+ delete("/stream/connections/%2F/" + connectionName);
+ waitUntil(closed::get);
+
+ assertThatThrownBy(cRequest::call).isInstanceOf(IOException.class);
+ waitUntil(() -> request.call().size() == initialCount);
+ }
+
+ @Test
+ void tlsConnections() throws Exception {
+ Callable<List<Map<String, Object>>> request = () -> toMaps(get("/stream/connections"));
+ int initialCount = request.call().size();
+ String connectionProvidedName = UUID.randomUUID().toString();
+ try (Client client =
+ new Client(
+ new ClientParameters()
+ .eventLoopGroup(this.eventLoopGroup)
+ .sslContext(
+ SslContextBuilder.forClient()
+ .trustManager(new TrustEverythingTrustManager())
+ .build())
+ .port(TestUtils.streamPortTls())
+ .clientProperty("connection_name", connectionProvidedName))) {
+
+ waitUntil(() -> request.call().size() == initialCount + 1);
+
+ Map<String, Object> c =
+ request.call().stream()
+ .filter(conn -> connectionProvidedName.equals(conn.get("user_provided_name")))
+ .findFirst()
+ .get();
+
+ String connectionName = connectionName(client);
+ assertThat(c).containsEntry("name", connectionName);
+
+ assertThat(c)
+ .hasEntrySatisfying("ssl", booleanTrue())
+ .hasEntrySatisfying("ssl_cipher", notNull())
+ .hasEntrySatisfying("ssl_hash", notNull())
+ .hasEntrySatisfying("ssl_key_exchange", notNull())
+ .hasEntrySatisfying("ssl_protocol", notNull());
+ }
+ waitUntil(() -> request.call().size() == initialCount);
+ }
+
+ @Test
+ void connectionConsumers() throws Exception {
+ Callable<List<Map<String, Object>>> request = () -> toMaps(get("/stream/connections"));
+ int initialCount = request.call().size();
+ String s = UUID.randomUUID().toString();
+ Client c1 = cf.get(new ClientParameters().virtualHost("vh1"));
+ try {
+ c1.create(s);
+ assertThat(c1.subscribe((byte) 0, s, OffsetSpecification.first(), 10).isOk()).isTrue();
+ assertThat(c1.subscribe((byte) 1, s, OffsetSpecification.first(), 5).isOk()).isTrue();
+ Client c2 =
+ cf.get(
+ new ClientParameters()
+ .virtualHost("vh1")
+ .username("user-management")
+ .password("user-management"));
+ assertThat(c2.subscribe((byte) 0, s, OffsetSpecification.first(), 10).isOk()).isTrue();
+ waitUntil(() -> request.call().size() == initialCount + 2);
+
+ Callable<Map<String, Object>> cRequest =
+ () -> toMap(get("/stream/connections/vh1/" + connectionName(c1)));
+ // wait until some stats are in the response
+ waitUntil(() -> cRequest.call().containsKey("recv_oct_details"));
+
+ Callable<List<Map<String, Object>>> consumersRequest =
+ () -> toMaps(get("/stream/connections/vh1/" + connectionName(c1) + "/consumers"));
+ List<Map<String, Object>> consumers = consumersRequest.call();
+
+ assertThat(consumers).hasSize(2);
+ consumers.forEach(
+ c -> {
+ assertThat(c)
+ .containsKeys(
+ "subscription_id", "credits", "connection_details", "queue", "properties");
+ assertThat(c)
+ .extractingByKey("connection_details", as(MAP))
+ .containsValue(connectionName(c1));
+ });
+
+ consumersRequest =
+ () -> toMaps(get("/stream/connections/vh1/" + connectionName(c2) + "/consumers"));
+ consumers = consumersRequest.call();
+ assertThat(consumers).hasSize(1);
+ assertThat(consumers.get(0))
+ .extractingByKey("connection_details", as(MAP))
+ .containsValue(connectionName(c2));
+
+ assertThatThrownBy(
+ () ->
+ get(
+ httpClient("user-management"),
+ "/stream/connections/vh1/" + connectionName(c1) + "/consumers"))
+ .hasMessageContaining("401");
+ } finally {
+ c1.delete(s);
+ }
+ }
+
+ @Test
+ void connectionPublishers() throws Exception {
+ Callable<List<Map<String, Object>>> request = () -> toMaps(get("/stream/connections"));
+ int initialCount = request.call().size();
+ String s = UUID.randomUUID().toString();
+ Client c1 = cf.get(new ClientParameters().virtualHost("vh1"));
+ try {
+ c1.create(s);
+ assertThat(c1.declarePublisher((byte) 0, null, s).isOk()).isTrue();
+ assertThat(c1.declarePublisher((byte) 1, null, s).isOk()).isTrue();
+ Client c2 =
+ cf.get(
+ new ClientParameters()
+ .virtualHost("vh1")
+ .username("user-management")
+ .password("user-management"));
+ assertThat(c2.declarePublisher((byte) 0, null, s).isOk()).isTrue();
+ waitUntil(() -> request.call().size() == initialCount + 2);
+
+ Callable<Map<String, Object>> cRequest =
+ () -> toMap(get("/stream/connections/vh1/" + connectionName(c1)));
+ // wait until some stats are in the response
+ waitUntil(() -> cRequest.call().containsKey("recv_oct_details"));
+
+ Callable<List<Map<String, Object>>> publishersRequest =
+ () -> toMaps(get("/stream/connections/vh1/" + connectionName(c1) + "/publishers"));
+ List<Map<String, Object>> publishers = publishersRequest.call();
+
+ assertThat(publishers).hasSize(2);
+ publishers.forEach(
+ c -> {
+ assertThat(c)
+ .containsKeys(
+ "publisher_id",
+ "reference",
+ "published",
+ "confirmed",
+ "errored",
+ "connection_details",
+ "queue");
+ assertThat(c)
+ .extractingByKey("connection_details", as(MAP))
+ .containsValue(connectionName(c1));
+ });
+
+ publishersRequest =
+ () -> toMaps(get("/stream/connections/vh1/" + connectionName(c2) + "/publishers"));
+ publishers = publishersRequest.call();
+ assertThat(publishers).hasSize(1);
+ assertThat(publishers.get(0))
+ .extractingByKey("connection_details", as(MAP))
+ .containsValue(connectionName(c2));
+
+ assertThatThrownBy(
+ () ->
+ get(
+ httpClient("user-management"),
+ "/stream/connections/vh1/" + connectionName(c1) + "/publishers"))
+ .hasMessageContaining("401");
+ } finally {
+ c1.delete(s);
+ }
+ }
+
+ @Test
+ void publishers() throws Exception {
+ Callable<List<Map<String, Object>>> request = () -> toMaps(get("/stream/publishers"));
+ int initialCount = request.call().size();
+ String connectionProvidedName = UUID.randomUUID().toString();
+ AtomicBoolean closed = new AtomicBoolean(false);
+ Client client =
+ cf.get(
+ new ClientParameters()
+ .clientProperty("connection_name", connectionProvidedName)
+ .shutdownListener(shutdownContext -> closed.set(true)));
+
+ client.declarePublisher((byte) 0, null, stream);
+ waitUntil(() -> request.call().size() == initialCount + 1);
+ assertThat(toMaps(get("/stream/publishers/%2F"))).hasSize(1);
+ assertThat(toMaps(get("/stream/publishers/vh1"))).isEmpty();
+ waitUntil(() -> entities(request.call(), client).size() == 1);
+
+ Map<String, Object> publisher = entities(request.call(), client).get(0);
+ assertThat(publisher.get("reference").toString()).isEmpty();
+ assertThat(((Number) publisher.get("published")).intValue()).isEqualTo(0);
+ assertThat(((Number) publisher.get("confirmed")).intValue()).isEqualTo(0);
+ assertThat(((Number) publisher.get("errored")).intValue()).isEqualTo(0);
+ assertThat(((Number) publisher.get("publisher_id")).intValue()).isEqualTo(0);
+ assertThat(connectionDetails(publisher))
+ .containsEntry("name", connectionName(client))
+ .containsEntry("user", "guest")
+ .containsKey("node");
+ assertThat(queue(publisher)).containsEntry("name", stream).containsEntry("vhost", "/");
+
+ client.publish(
+ (byte) 0,
+ Collections.singletonList(
+ client.messageBuilder().addData("".getBytes(StandardCharsets.UTF_8)).build()));
+
+ waitUntil(
+ () -> ((Number) entities(request.call(), client).get(0).get("confirmed")).intValue() == 1);
+ publisher = entities(request.call(), client).get(0);
+ assertThat(((Number) publisher.get("published")).intValue()).isEqualTo(1);
+ assertThat(((Number) publisher.get("confirmed")).intValue()).isEqualTo(1);
+
+ client.declarePublisher((byte) 1, null, stream);
+ waitUntil(() -> entities(request.call(), client).size() == 2);
+
+ client.deletePublisher((byte) 0);
+ waitUntil(() -> entities(request.call(), client).size() == 1);
+ client.deletePublisher((byte) 1);
+ waitUntil(() -> entities(request.call(), client).isEmpty());
+ }
+
+ @Test
+ void publishersByStream() throws Exception {
+ Callable<List<Map<String, Object>>> request =
+ () -> toMaps(get("/stream/publishers/%2F/" + stream));
+ int initialCount = request.call().size();
+ String connectionProvidedName = UUID.randomUUID().toString();
+ AtomicBoolean closed = new AtomicBoolean(false);
+ Client client =
+ cf.get(
+ new ClientParameters()
+ .clientProperty("connection_name", connectionProvidedName)
+ .shutdownListener(shutdownContext -> closed.set(true)));
+
+ String otherStream = UUID.randomUUID().toString();
+ assertThat(client.create(otherStream).isOk()).isTrue();
+
+ client.declarePublisher((byte) 0, null, stream);
+ client.declarePublisher((byte) 1, null, otherStream);
+
+ waitUntil(() -> toMaps(get("/stream/publishers/%2F")).size() == initialCount + 2);
+ waitUntil(() -> request.call().size() == initialCount + 1);
+ waitUntil(() -> entities(request.call(), client).size() == 1);
+
+ Map<String, Object> publisher = entities(request.call(), client).get(0);
+ assertThat(connectionDetails(publisher))
+ .containsEntry("name", connectionName(client))
+ .containsEntry("user", "guest")
+ .containsKey("node");
+ assertThat(queue(publisher)).containsEntry("name", stream).containsEntry("vhost", "/");
+
+ Callable<List<Map<String, Object>>> requestOtherStream =
+ () -> toMaps(get("/stream/publishers/%2F/" + otherStream));
+ waitUntil(() -> entities(requestOtherStream.call(), client).size() == 1);
+
+ publisher = entities(requestOtherStream.call(), client).get(0);
+ assertThat(connectionDetails(publisher))
+ .containsEntry("name", connectionName(client))
+ .containsEntry("user", "guest")
+ .containsKey("node");
+ assertThat(queue(publisher)).containsEntry("name", otherStream).containsEntry("vhost", "/");
+
+ client.deletePublisher((byte) 0);
+ client.deletePublisher((byte) 1);
+ }
+
+ @ParameterizedTest
+ @ValueSource(strings = {"foo"})
+ @NullSource
+ void publisherReference(String reference) throws Exception {
+ Callable<List<Map<String, Object>>> request = () -> toMaps(get("/stream/publishers"));
+ int initialCount = request.call().size();
+ String connectionProvidedName = UUID.randomUUID().toString();
+ AtomicBoolean closed = new AtomicBoolean(false);
+ Client client =
+ cf.get(
+ new ClientParameters()
+ .clientProperty("connection_name", connectionProvidedName)
+ .shutdownListener(shutdownContext -> closed.set(true)));
+
+ client.declarePublisher((byte) 0, reference, stream);
+ waitUntil(() -> request.call().size() == initialCount + 1);
+ waitUntil(() -> entities(request.call(), client).size() == 1);
+
+ Map<String, Object> publisher = entities(request.call(), client).get(0);
+ String publisherReference = (String) publisher.get("reference");
+ if (reference == null || reference.isEmpty()) {
+ assertThat(publisherReference).isEmpty();
+ } else {
+ assertThat(publisher.get("reference").toString()).isEqualTo(reference);
+ }
+ }
+
+ @ParameterizedTest
+ @ValueSource(strings = {"foo"})
+ @NullSource
+ void publisherShouldBeDeletedAfterStreamDeletion(String reference) throws Exception {
+ Callable<List<Map<String, Object>>> request = () -> toMaps(get("/stream/publishers"));
+ int initialCount = request.call().size();
+ String connectionProvidedName = UUID.randomUUID().toString();
+ String s = UUID.randomUUID().toString();
+ AtomicBoolean closed = new AtomicBoolean(false);
+ Client client =
+ cf.get(
+ new ClientParameters()
+ .clientProperty("connection_name", connectionProvidedName)
+ .shutdownListener(shutdownContext -> closed.set(true)));
+
+ client.create(s);
+ client.declarePublisher((byte) 0, reference, s);
+ waitUntil(() -> request.call().size() == initialCount + 1);
+ waitUntil(() -> entities(request.call(), client).size() == 1);
+
+ client.delete(s);
+ waitUntil(() -> request.call().size() == initialCount);
+ }
+
+ @Test
+ void consumerShouldBeDeletedAfterStreamDeletion() throws Exception {
+ Callable<List<Map<String, Object>>> request = () -> toMaps(get("/stream/consumers"));
+ int initialCount = request.call().size();
+ String connectionProvidedName = UUID.randomUUID().toString();
+ String s = UUID.randomUUID().toString();
+ AtomicBoolean closed = new AtomicBoolean(false);
+ Client client =
+ cf.get(
+ new ClientParameters()
+ .clientProperty("connection_name", connectionProvidedName)
+ .shutdownListener(shutdownContext -> closed.set(true)));
+
+ client.create(s);
+ client.subscribe((byte) 0, s, OffsetSpecification.first(), 10);
+ waitUntil(() -> request.call().size() == initialCount + 1);
+ waitUntil(() -> entities(request.call(), client).size() == 1);
+
+ client.delete(s);
+ waitUntil(() -> request.call().size() == initialCount);
+ }
+
+ @ParameterizedTest
+ @MethodSource("subscriptionProperties")
+ void consumers(Map<String, String> subscriptionProperties) throws Exception {
+ Callable<List<Map<String, Object>>> request = () -> toMaps(get("/stream/consumers"));
+ int initialCount = request.call().size();
+ String connectionProvidedName = UUID.randomUUID().toString();
+ AtomicBoolean closed = new AtomicBoolean(false);
+ Client client =
+ cf.get(
+ new ClientParameters()
+ .clientProperty("connection_name", connectionProvidedName)
+ .chunkListener(
+ (client1, subscriptionId, offset, messageCount, dataSize) ->
+ client1.credit(subscriptionId, 1))
+ .shutdownListener(shutdownContext -> closed.set(true)));
+
+ client.subscribe((byte) 0, stream, OffsetSpecification.first(), 10, subscriptionProperties);
+ waitUntil(() -> request.call().size() == initialCount + 1);
+ waitUntil(() -> entities(request.call(), client).size() == 1);
+
+ Map<String, Object> consumer = entities(request.call(), client).get(0);
+ assertThat(((Number) consumer.get("credits")).intValue()).isEqualTo(10);
+ assertThat(((Number) consumer.get("consumed")).intValue()).isEqualTo(0);
+ assertThat(((Number) consumer.get("offset")).intValue()).isEqualTo(0);
+ assertThat(((Number) consumer.get("subscription_id")).intValue()).isEqualTo(0);
+ assertThat(consumer.get("properties")).isNotNull().isEqualTo(subscriptionProperties);
+
+ assertThat(connectionDetails(consumer))
+ .containsEntry("name", connectionName(client))
+ .containsEntry("user", "guest")
+ .containsKey("node");
+ assertThat(queue(consumer)).containsEntry("name", stream).containsEntry("vhost", "/");
+
+ client.subscribe((byte) 1, stream, OffsetSpecification.first(), 10);
+ waitUntil(() -> entities(request.call(), client).size() == 2);
+
+ client.unsubscribe((byte) 0);
+ waitUntil(() -> entities(request.call(), client).size() == 1);
+
+ int messageCount = 10_000;
+ assertThat(client.declarePublisher((byte) 0, null, stream).isOk()).isTrue();
+ IntStream.range(0, messageCount)
+ .forEach(
+ i ->
+ client.publish(
+ (byte) 0,
+ Collections.singletonList(
+ client
+ .messageBuilder()
+ .addData("".getBytes(StandardCharsets.UTF_8))
+ .build())));
+
+ waitUntil(
+ () -> {
+ Map<String, Object> c = entities(request.call(), client).get(0);
+ return ((Number) c.get("consumed")).intValue() == messageCount;
+ });
+
+ consumer = entities(request.call(), client).get(0);
+ assertThat(((Number) consumer.get("consumed")).intValue()).isEqualTo(messageCount);
+ assertThat(((Number) consumer.get("offset")).intValue()).isPositive();
+
+ client.unsubscribe((byte) 1);
+ waitUntil(() -> entities(request.call(), client).isEmpty());
+
+ client.subscribe((byte) 0, stream, OffsetSpecification.next(), 10);
+ waitUntil(() -> request.call().size() == initialCount + 1);
+ waitUntil(() -> entities(request.call(), client).size() == 1);
+
+ consumer = entities(request.call(), client).get(0);
+ assertThat(((Number) consumer.get("consumed")).intValue()).isEqualTo(0);
+ assertThat(((Number) consumer.get("offset")).intValue()).isEqualTo(messageCount);
+
+ client.unsubscribe((byte) 0);
+ waitUntil(() -> entities(request.call(), client).isEmpty());
+ }
+
+ @Test
+ void permissions() throws Exception {
+ String[][] vhostsUsers =
+ new String[][] {
+ {"/", "guest"},
+ {"vh1", "user-management"},
+ {"vh1", "user-management"},
+ {"vh2", "guest"},
+ {"vh2", "guest"},
+ };
+ Map<String, Client> vhostClients = new HashMap<>();
+ List<Client> clients =
+ Arrays.stream(vhostsUsers)
+ .map(
+ vhostUser -> {
+ Client c =
+ cf.get(
+ new ClientParameters()
+ .virtualHost(vhostUser[0])
+ .username(vhostUser[1])
+ .password(vhostUser[1]));
+ vhostClients.put(vhostUser[0], c);
+ return c;
+ })
+ .collect(Collectors.toList());
+
+ List<String> nonDefaultVhosts =
+ Arrays.stream(vhostsUsers)
+ .map(vhostUser -> vhostUser[0])
+ .filter(vhost -> !vhost.equals("/"))
+ .distinct()
+ .collect(Collectors.toList());
+ nonDefaultVhosts.forEach(
+ vhost -> {
+ Client c = vhostClients.get(vhost);
+ c.create(stream);
+ });
+
+ try {
+ int entitiesPerConnection = 2;
+
+ IntStream.range(0, entitiesPerConnection)
+ .forEach(
+ i ->
+ clients.forEach(
+ c -> {
+ c.subscribe((byte) i, stream, OffsetSpecification.first(), 10);
+ c.declarePublisher((byte) i, null, stream);
+ }));
+ Callable<List<Map<String, Object>>> allConnectionsRequest =
+ () -> toMaps(get("/stream/connections"));
+ int initialCount = allConnectionsRequest.call().size();
+ waitUntil(() -> allConnectionsRequest.call().size() == initialCount + 5);
+
+ String vhost1ConnectionName =
+ toMaps(get("/stream/connections/vh1")).stream()
+ .filter(c -> "vh1".equals(c.get("vhost")))
+ .map(c -> c.get("name").toString())
+ .findFirst()
+ .get();
+
+ String vhost2ConnectionName =
+ toMaps(get("/stream/connections/vh2")).stream()
+ .filter(c -> "vh2".equals(c.get("vhost")))
+ .map(c -> c.get("name").toString())
+ .findFirst()
+ .get();
+
+ PermissionsTestConfiguration[] testConfigurations =
+ new PermissionsTestConfiguration[] {
+ new PermissionsTestConfiguration(
+ "guest",
+ "/connections",
+ requests(r("", 5), r("/%2f", 1), r("/vh1", 2), r("/vh2", 2)),
+ "vh1/" + vhost1ConnectionName,
+ true,
+ "vh2/" + vhost2ConnectionName,
+ true),
+ new PermissionsTestConfiguration(
+ "user-monitoring",
+ "/connections",
+ requests(r("", 5), r("/%2f", 1), r("/vh1", 2), r("/vh2", 2)),
+ "vh1/" + vhost1ConnectionName,
+ true,
+ "vh2/" + vhost2ConnectionName,
+ true),
+ new PermissionsTestConfiguration(
+ "user-management",
+ "/connections",
+ requests(r("", 2), r("/%2f", -1), r("/vh1", 2), r("/vh2", -1)),
+ "vh1/" + vhost1ConnectionName,
+ true,
+ "vh2/" + vhost2ConnectionName,
+ false),
+ new PermissionsTestConfiguration(
+ "guest",
+ "",
+ requests(
+ r("/consumers", vhostsUsers.length * entitiesPerConnection),
+ r("/publishers", vhostsUsers.length * entitiesPerConnection),
+ r("/consumers/%2f", entitiesPerConnection),
+ r("/publishers/%2f", entitiesPerConnection),
+ r("/consumers/vh1", entitiesPerConnection * 2),
+ r("/publishers/vh1", entitiesPerConnection * 2))),
+ new PermissionsTestConfiguration(
+ "user-management",
+ "",
+ requests(
+ r("/consumers", entitiesPerConnection * 2), // only their connections
+ r("/publishers", entitiesPerConnection * 2), // only their connections
+ r("/consumers/vh1", entitiesPerConnection * 2),
+ r("/publishers/vh1", entitiesPerConnection * 2),
+ r("/consumers/vh2", 0),
+ r("/consumers/vh2", 0)))
+ };
+
+ for (PermissionsTestConfiguration configuration : testConfigurations) {
+ OkHttpClient client = httpClient(configuration.user);
+ for (TestRequest request : configuration.requests) {
+ if (request.expectedCount >= 0) {
+ assertThat(toMaps(get(client, "/stream" + configuration.endpoint + request.endpoint)))
+ .hasSize(request.expectedCount);
+ } else {
+ assertThatThrownBy(
+ () ->
+ toMaps(get(client, "/stream" + configuration.endpoint + request.endpoint)))
+ .hasMessageContaining("401");
+ }
+ }
+ for (Entry<String, Boolean> request : configuration.vhostConnections.entrySet()) {
+ if (request.getValue()) {
+ Condition<Object> connNameCondition =
+ new Condition<>(
+ e -> request.getKey().endsWith(e.toString()), "connection name must match");
+ assertThat(toMap(get(client, "/stream/connections/" + request.getKey())))
+ .hasEntrySatisfying("name", connNameCondition);
+ } else {
+ assertThatThrownBy(() -> toMap(get(client, "/stream/connections/" + request.getKey())))
+ .hasMessageContaining("401");
+ }
+ }
+ }
+
+ clients.forEach(Client::close);
+ waitUntil(() -> allConnectionsRequest.call().size() == initialCount);
+ } finally {
+ nonDefaultVhosts.forEach(
+ vhost -> {
+ Client c = cf.get(new ClientParameters().virtualHost(vhost));
+ c.delete(stream);
+ });
+ }
+ }
+
+ @ParameterizedTest
+ @ValueSource(
+ strings = {
+ "/stream/connections/%2F/foo-connection-name",
+ "/stream/connections/foo-virtual-host",
+ "/stream/connections/foo-virtual-host/foo-connection-name",
+ "/stream/connections/%2F/foo-connection-name/consumers",
+ "/stream/connections/%2F/foo-connection-name/publishers",
+ "/stream/consumers/foo-virtual-host",
+ "/stream/publishers/foo-virtual-host",
+ "/stream/publishers/foo-virtual-host",
+ "/stream/publishers/%2F/foo-stream"
+ })
+ void shouldReturnNotFound(String endpoint) {
+ assertThatThrownBy(() -> get(endpoint)).hasMessageContaining("404");
+ }
+
+ @ParameterizedTest
+ @MethodSource("subscriptionProperties")
+ @SuppressWarnings("unchecked")
+ void streamConsumersShouldShowUpAsRegularConsumers(Map<String, String> subscriptionProperties)
+ throws Exception {
+ Callable<List<Map<String, Object>>> consumersRequest = () -> toMaps(get("/consumers"));
+ int initialCount = consumersRequest.call().size();
+ String connectionProvidedName = UUID.randomUUID().toString();
+ String s = UUID.randomUUID().toString();
+ AtomicBoolean closed = new AtomicBoolean(false);
+ Client client =
+ cf.get(
+ new ClientParameters()
+ .clientProperty("connection_name", connectionProvidedName)
+ .shutdownListener(shutdownContext -> closed.set(true)));
+
+ try {
+
+ client.create(s);
+ client.subscribe((byte) 0, s, OffsetSpecification.first(), 10, subscriptionProperties);
+ waitUntil(() -> consumersRequest.call().size() == initialCount + 1);
+
+ String consumerTagPrefix = "stream.subid-";
+ Map<String, Object> consumersConsumer =
+ entity(
+ consumersRequest.call(),
+ m ->
+ m.get("consumer_tag").toString().startsWith(consumerTagPrefix)
+ && m.get("channel_details") != null
+ && m.get("channel_details") instanceof Map
+ && connectionName(client)
+ .equals(
+ ((Map<String, Object>) m.get("channel_details"))
+ .get("connection_name")));
+
+ Callable<Map<String, Object>> queueRequest = () -> toMap(get("/queues/%2F/" + s));
+ Map<String, Object> queueDetails = queueRequest.call();
+
+ assertThat(queueDetails).containsKey("consumer_details");
+ assertThat(queueDetails.get("consumer_details")).isInstanceOf(List.class).asList().hasSize(1);
+ Map<String, Object> queueConsumer =
+ ((List<Map<String, Object>>) queueDetails.get("consumer_details")).get(0);
+
+ Stream.of(consumersConsumer, queueConsumer)
+ .forEach(
+ consumer -> {
+ assertThat(consumer)
+ .isNotNull()
+ .containsEntry("ack_required", false)
+ .containsEntry("active", true)
+ .containsEntry("activity_status", "up")
+ .containsEntry("consumer_tag", consumerTagPrefix + "0")
+ .containsEntry("exclusive", false)
+ .containsEntry("arguments", subscriptionProperties)
+ .hasEntrySatisfying(
+ "prefetch_count", o -> assertThat(((Number) o).intValue()).isZero());
+
+ Map<String, String> queue = (Map<String, String>) consumer.get("queue");
+ assertThat(queue).isNotNull().containsEntry("name", s);
+
+ Map<String, Object> channel = (Map<String, Object>) consumer.get("channel_details");
+ assertThat(channel)
+ .isNotNull()
+ .containsEntry("connection_name", connectionName(client))
+ .containsEntry("name", "")
+ .hasEntrySatisfying("number", o -> assertThat(((Number) o).intValue()).isZero())
+ .containsEntry("user", "guest")
+ .containsKeys("node", "peer_host", "peer_port");
+ });
+ Client.Response response = client.unsubscribe((byte) 0);
+ assertThat(response.isOk()).isTrue();
+ waitUntil(() -> consumersRequest.call().size() == initialCount);
+ } finally {
+ client.delete(s);
+ }
+ }
+
+ static class PermissionsTestConfiguration {
+ final String user;
+ final String endpoint;
+ final TestRequest[] requests;
+ final Map<String, Boolean> vhostConnections;
+
+ PermissionsTestConfiguration(
+ String user, String endpoint, TestRequest[] requests, Object... vhostConnections) {
+ this.user = user;
+ this.endpoint = endpoint;
+ this.requests = requests;
+ this.vhostConnections = new LinkedHashMap<>();
+ for (int i = 0; i < vhostConnections.length; i = i + 2) {
+ this.vhostConnections.put(
+ vhostConnections[i].toString(), (Boolean) vhostConnections[i + 1]);
+ }
+ }
+ }
+
+ static class TestRequest {
+ final String endpoint;
+ final int expectedCount;
+
+ TestRequest(String endpoint, int expectedCount) {
+ this.endpoint = endpoint;
+ this.expectedCount = expectedCount;
+ }
+ }
+}
diff --git a/deps/rabbitmq_stream_management/test/http_SUITE_data/src/test/java/com/rabbitmq/stream/TestUtils.java b/deps/rabbitmq_stream_management/test/http_SUITE_data/src/test/java/com/rabbitmq/stream/TestUtils.java
new file mode 100644
index 0000000000..51a84f04f4
--- /dev/null
+++ b/deps/rabbitmq_stream_management/test/http_SUITE_data/src/test/java/com/rabbitmq/stream/TestUtils.java
@@ -0,0 +1,260 @@
+// The contents of this file are subject to the Mozilla Public License
+// Version 2.0 (the "License"); you may not use this file except in
+// compliance with the License. You may obtain a copy of the License
+// at https://www.mozilla.org/en-US/MPL/2.0/
+//
+// Software distributed under the License is distributed on an "AS IS"
+// basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+// the License for the specific language governing rights and
+// limitations under the License.
+//
+// The Original Code is RabbitMQ.
+//
+// The Initial Developer of the Original Code is Pivotal Software, Inc.
+// Copyright (c) 2020-2021 VMware, Inc. or its affiliates. All rights reserved.
+//
+
+package com.rabbitmq.stream;
+
+import static java.util.concurrent.TimeUnit.SECONDS;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.junit.jupiter.api.Assertions.fail;
+
+import com.rabbitmq.stream.impl.Client;
+import io.netty.channel.EventLoopGroup;
+import io.netty.channel.nio.NioEventLoopGroup;
+import java.lang.reflect.Field;
+import java.lang.reflect.Method;
+import java.security.cert.X509Certificate;
+import java.time.Duration;
+import java.util.Objects;
+import java.util.Set;
+import java.util.UUID;
+import java.util.concurrent.ConcurrentHashMap;
+import javax.net.ssl.X509TrustManager;
+import okhttp3.Authenticator;
+import okhttp3.Credentials;
+import org.assertj.core.api.Condition;
+import org.junit.jupiter.api.TestInfo;
+import org.junit.jupiter.api.extension.AfterAllCallback;
+import org.junit.jupiter.api.extension.AfterEachCallback;
+import org.junit.jupiter.api.extension.BeforeAllCallback;
+import org.junit.jupiter.api.extension.BeforeEachCallback;
+import org.junit.jupiter.api.extension.ExtensionContext;
+
+public class TestUtils {
+
+ static int streamPort() {
+ String port = System.getProperty("stream.port", "5552");
+ return Integer.valueOf(port);
+ }
+
+ static int streamPortTls() {
+ String port = System.getProperty("stream.port.tls", "5551");
+ return Integer.valueOf(port);
+ }
+
+ static int managementPort() {
+ String port = System.getProperty("management.port", "15672");
+ return Integer.valueOf(port);
+ }
+
+ static void waitUntil(CallableBooleanSupplier condition) throws Exception {
+ waitAtMost(Duration.ofSeconds(10), condition);
+ }
+
+ static void waitAtMost(Duration duration, CallableBooleanSupplier condition) throws Exception {
+ if (condition.getAsBoolean()) {
+ return;
+ }
+ int waitTime = 100;
+ int waitedTime = 0;
+ long timeoutInMs = duration.toMillis();
+ while (waitedTime <= timeoutInMs) {
+ Thread.sleep(waitTime);
+ if (condition.getAsBoolean()) {
+ return;
+ }
+ waitedTime += waitTime;
+ }
+ fail("Waited " + duration.getSeconds() + " second(s), condition never got true");
+ }
+
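+ // The test users are defined with password equal to username, so a single value serves as both.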
+ static Authenticator authenticator(String usernamePassword) {
+ return (route, response) -> {
+ if (response.request().header("Authorization") != null) {
+ return null; // Give up, we've already attempted to authenticate.
+ }
+ return response
+ .request()
+ .newBuilder()
+ .header("Authorization", Credentials.basic(usernamePassword, usernamePassword))
+ .build();
+ };
+ }
+
+ @FunctionalInterface
+ interface CallableBooleanSupplier {
+ boolean getAsBoolean() throws Exception;
+ }
+
+ static class StreamTestInfrastructureExtension
+ implements BeforeAllCallback, AfterAllCallback, BeforeEachCallback, AfterEachCallback {
+
+ private static final ExtensionContext.Namespace NAMESPACE =
+ ExtensionContext.Namespace.create(StreamTestInfrastructureExtension.class);
+
+ private static ExtensionContext.Store store(ExtensionContext extensionContext) {
+ return extensionContext.getRoot().getStore(NAMESPACE);
+ }
+
+ private static EventLoopGroup eventLoopGroup(ExtensionContext context) {
+ return (EventLoopGroup) store(context).get("nettyEventLoopGroup");
+ }
+
+ @Override
+ public void beforeAll(ExtensionContext context) {
+ store(context).put("nettyEventLoopGroup", new NioEventLoopGroup());
+ }
+
+ @Override
+ public void beforeEach(ExtensionContext context) throws Exception {
+ try {
+ Field streamField =
+ context.getTestInstance().get().getClass().getDeclaredField("eventLoopGroup");
+ streamField.setAccessible(true);
+ streamField.set(context.getTestInstance().get(), eventLoopGroup(context));
+ } catch (NoSuchFieldException e) {
+ // the test class declares no eventLoopGroup field; nothing to inject
+ }
+ try {
+ Field streamField = context.getTestInstance().get().getClass().getDeclaredField("stream");
+ streamField.setAccessible(true);
+ String stream = streamName(context);
+ streamField.set(context.getTestInstance().get(), stream);
+ Client client =
+ new Client(
+ new Client.ClientParameters()
+ .eventLoopGroup(eventLoopGroup(context))
+ .port(streamPort()));
+ Client.Response response = client.create(stream);
+ assertThat(response.isOk()).isTrue();
+ client.close();
+ store(context).put("testMethodStream", stream);
+ } catch (NoSuchFieldException e) {
+ // the test class declares no stream field; no stream to create for it
+ }
+
+ for (Field declaredField : context.getTestInstance().get().getClass().getDeclaredFields()) {
+ if (declaredField.getType().equals(ClientFactory.class)) {
+ declaredField.setAccessible(true);
+ ClientFactory clientFactory = new ClientFactory(eventLoopGroup(context));
+ declaredField.set(context.getTestInstance().get(), clientFactory);
+ store(context).put("testClientFactory", clientFactory);
+ break;
+ }
+ }
+ }
+
+ @Override
+ public void afterEach(ExtensionContext context) throws Exception {
+ try {
+ Field streamField = context.getTestInstance().get().getClass().getDeclaredField("stream");
+ streamField.setAccessible(true);
+ String stream = (String) streamField.get(context.getTestInstance().get());
+ Client client =
+ new Client(
+ new Client.ClientParameters()
+ .eventLoopGroup(eventLoopGroup(context))
+ .port(streamPort()));
+ Client.Response response = client.delete(stream);
+ assertThat(response.isOk()).isTrue();
+ client.close();
+ store(context).remove("testMethodStream");
+ } catch (NoSuchFieldException e) {
+
+ }
+
+ ClientFactory clientFactory = (ClientFactory) store(context).get("testClientFactory");
+ if (clientFactory != null) {
+ clientFactory.close();
+ }
+ }
+
+ @Override
+ public void afterAll(ExtensionContext context) throws Exception {
+ EventLoopGroup eventLoopGroup = eventLoopGroup(context);
+ eventLoopGroup.shutdownGracefully(1, 10, SECONDS).get(10, SECONDS);
+ }
+ }
+
+ static String streamName(TestInfo info) {
+ return streamName(info.getTestClass().get(), info.getTestMethod().get());
+ }
+
+ private static String streamName(ExtensionContext context) {
+ return streamName(context.getTestInstance().get().getClass(), context.getTestMethod().get());
+ }
+
+ private static String streamName(Class<?> testClass, Method testMethod) {
+ String uuid = UUID.randomUUID().toString();
+ return String.format(
+ "%s_%s%s",
+ testClass.getSimpleName(), testMethod.getName(), uuid.substring(uuid.length() / 2));
+ }
+
+ static class ClientFactory {
+
+ private final EventLoopGroup eventLoopGroup;
+ private final Set<Client> clients = ConcurrentHashMap.newKeySet();
+
+ public ClientFactory(EventLoopGroup eventLoopGroup) {
+ this.eventLoopGroup = eventLoopGroup;
+ }
+
+ public Client get() {
+ return get(new Client.ClientParameters());
+ }
+
+ public Client get(Client.ClientParameters parameters) {
+ Client client = new Client(parameters.eventLoopGroup(eventLoopGroup).port(streamPort()));
+ clients.add(client);
+ return client;
+ }
+
+ private void close() {
+ for (Client c : clients) {
+ c.close();
+ }
+ }
+ }
+
+ static class TrustEverythingTrustManager implements X509TrustManager {
+ @Override
+ public void checkClientTrusted(X509Certificate[] chain, String authType) {}
+
+ @Override
+ public void checkServerTrusted(X509Certificate[] chain, String authType) {}
+
+ @Override
+ public X509Certificate[] getAcceptedIssuers() {
+ return new X509Certificate[0];
+ }
+ }
+
+ static Condition<Object> booleanTrue() {
+ return new Condition<>(obj -> obj.equals(Boolean.TRUE), "true");
+ }
+
+ static Condition<Object> booleanFalse() {
+ return new Condition<>(obj -> obj.equals(Boolean.FALSE), "false");
+ }
+
+ static Condition<Object> notNull() {
+ return new Condition<>(Objects::nonNull, "not null");
+ }
+
+ static Condition<Object> isNull() {
+ return new Condition<>(Objects::isNull, "null");
+ }
+}
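For context, a minimal usage sketch of the helpers above (the class name and test body are hypothetical, not part of this patch): the extension reflectively fills in fields named eventLoopGroup and stream, plus any field of type ClientFactory, before each test and cleans them up afterwards.

    // Hypothetical sketch; assumes it lives in the same test package as TestUtils and Client,
    // so only external imports are shown.
    import io.netty.channel.EventLoopGroup;
    import org.junit.jupiter.api.Test;
    import org.junit.jupiter.api.extension.ExtendWith;

    @ExtendWith(TestUtils.StreamTestInfrastructureExtension.class)
    public class StreamSmokeTest {

      EventLoopGroup eventLoopGroup; // injected reflectively in beforeEach()
      String stream;                 // a uniquely named stream, created before and deleted after each test
      TestUtils.ClientFactory cf;    // any ClientFactory-typed field is picked up; its clients are closed in afterEach()

      @Test
      void clientCanConnect() throws Exception {
        Client client = cf.get();    // port comes from -Dstream.port (5552 by default), see streamPort() above
        TestUtils.waitUntil(() -> client != null /* e.g. wait for a confirm or a delivery instead */);
      }
    }

Because streamName() appends half a UUID, each test method gets its own stream, so suites can share one broker without interfering with each other.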
diff --git a/deps/rabbitmq_stream_management/test/http_SUITE_data/src/test/resources/logback-test.xml b/deps/rabbitmq_stream_management/test/http_SUITE_data/src/test/resources/logback-test.xml
new file mode 100644
index 0000000000..45d598991d
--- /dev/null
+++ b/deps/rabbitmq_stream_management/test/http_SUITE_data/src/test/resources/logback-test.xml
@@ -0,0 +1,13 @@
+<configuration>
+ <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+ <encoder>
+ <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
+ </encoder>
+ </appender>
+
+ <logger name="com.rabbitmq.stream" level="info" />
+
+ <root level="info">
+ <appender-ref ref="STDOUT" />
+ </root>
+</configuration>
\ No newline at end of file
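The logback-test.xml above only controls log formatting and verbosity for the test run; a hypothetical snippet showing its effect at runtime, assuming the standard slf4j facade that logback implements is on the test classpath:

    // Hypothetical illustration, not part of the patch.
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class LoggingExample {
      private static final Logger LOG = LoggerFactory.getLogger("com.rabbitmq.stream.LoggingExample");

      public static void main(String[] args) {
        LOG.info("printed to stdout as HH:mm:ss.SSS [thread] INFO logger - message");
        LOG.debug("suppressed: both the root logger and com.rabbitmq.stream are set to level info");
      }
    }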
diff --git a/deps/rabbitmq_top/BUILD.bazel b/deps/rabbitmq_top/BUILD.bazel
new file mode 100644
index 0000000000..5364b28373
--- /dev/null
+++ b/deps/rabbitmq_top/BUILD.bazel
@@ -0,0 +1,44 @@
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze")
+load(
+ "//:rabbitmq.bzl",
+ "RABBITMQ_DIALYZER_OPTS",
+ "rabbitmq_lib",
+)
+
+APP_NAME = "rabbitmq_top"
+
+APP_DESCRIPTION = "RabbitMQ Top"
+
+APP_MODULE = "rabbit_top_app"
+
+BUILD_DEPS = [
+ "//deps/rabbitmq_management_agent:bazel_erlang_lib",
+]
+
+DEPS = [
+ "//deps/amqp_client:bazel_erlang_lib",
+ "//deps/rabbit_common:bazel_erlang_lib",
+ "//deps/rabbitmq_management:bazel_erlang_lib",
+]
+
+RUNTIME_DEPS = [
+ "//deps/rabbit:bazel_erlang_lib",
+]
+
+rabbitmq_lib(
+ app_description = APP_DESCRIPTION,
+ app_module = APP_MODULE,
+ app_name = APP_NAME,
+ build_deps = BUILD_DEPS,
+ runtime_deps = RUNTIME_DEPS,
+ deps = DEPS,
+)
+
+xref(tags = ["xref"])
+
+dialyze(
+ dialyzer_opts = RABBITMQ_DIALYZER_OPTS,
+ plt = "//:base_plt",
+ tags = ["dialyze"],
+)
diff --git a/deps/rabbitmq_top/CONTRIBUTING.md b/deps/rabbitmq_top/CONTRIBUTING.md
index 23a92fef9c..9722e973fb 100644
--- a/deps/rabbitmq_top/CONTRIBUTING.md
+++ b/deps/rabbitmq_top/CONTRIBUTING.md
@@ -13,7 +13,7 @@ The process is fairly standard:
* Create a branch with a descriptive name in the relevant repositories
* Make your changes, run tests, commit with a [descriptive message](https://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
* Submit pull requests with an explanation what has been changed and **why**
- * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Submit a filled out and signed [Contributor Agreement](https://cla.pivotal.io/) if needed (see below)
* Be patient. We will get to your pull request eventually
If what you are going to work on is a substantial change, please first ask the core team
@@ -28,7 +28,7 @@ See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
## Contributor Agreement
If you want to contribute a non-trivial change, please submit a signed copy of our
-[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+[Contributor Agreement](https://cla.pivotal.io/) around the time
you submit your pull request. This will make it much easier (in some cases, possible)
for the RabbitMQ team at Pivotal to merge your contribution.
diff --git a/deps/rabbitmq_top/Makefile b/deps/rabbitmq_top/Makefile
index 36914886f7..4e38544d44 100644
--- a/deps/rabbitmq_top/Makefile
+++ b/deps/rabbitmq_top/Makefile
@@ -17,5 +17,5 @@ DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
ERLANG_MK_COMMIT = rabbitmq-tmp
-include rabbitmq-components.mk
-include erlang.mk
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
diff --git a/deps/rabbitmq_top/erlang.mk b/deps/rabbitmq_top/erlang.mk
deleted file mode 100644
index fce4be0b0a..0000000000
--- a/deps/rabbitmq_top/erlang.mk
+++ /dev/null
@@ -1,7808 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
-ERLANG_MK_WITHOUT =
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
-
-# Adding erlang.mk to make Erlang scripts that call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
-
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
-
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another a key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simplify writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating Github-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = erlang library for JSON Web Token
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple non intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = redis erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += esh_mk
-pkg_esh_mk_name = esh_mk
-pkg_esh_mk_description = esh template engine plugin for erlang.mk
-pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
-pkg_esh_mk_fetch = git
-pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
-pkg_esh_mk_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = TOML language erlang parser
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = http://fixprotocol.org/ implementation.
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - an Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = The guards and for Erlang parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
-pkg_gun_homepage = http://ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang irc client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library for efficiently decoding and encoding JSON, written in C.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = erlang driver for rethinkdb
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = library to handle mimetypes
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang OAuth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transform for using new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with a convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between record and proplist
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = automatic Erlang node discovery via redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = pipelined erlang redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang Rest Client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy as nif for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an ID generator for message services.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elastic Search
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX-style parser for broken HTML, written in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = transit format for erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU based collation Erlang module
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = a simple erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler svn repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired by rebar xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based yaml loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = Zab protocol implemented in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
-
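For context, the search target above lowercases the q variable and matches it against each package's name and description, printing every hit through the pkg_print template. A minimal usage sketch, assuming the package index shown earlier:

# Print every package in the index (no filter given).
make search
# Case-insensitive search of package names and descriptions for "pool".
make search q=pool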
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
-
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
-
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# They both use the core_dep_plugin macro.
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
-
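As a hedged illustration of the early-plugin hook above: a bare dependency name pulls in $(DEPS_DIR)/<name>/early-plugins.mk, while an entry containing a slash is included as given, relative to $(DEPS_DIR). The variable is typically set in the project Makefile; the names below are hypothetical:

# Load early plugins from two dependencies (hypothetical names):
DEP_EARLY_PLUGINS = my_plugin_dep other_dep/mk/early-hooks.mk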
-# Query functions.
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
-
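A small sketch of how these query helpers resolve a dependency specification. The dep_mylib line and its values are hypothetical, and dep_fetch_git is assumed to be defined elsewhere in erlang.mk:

# Hypothetical dependency specification in a project Makefile:
dep_mylib = git https://github.com/example/mylib 1.2.3
# With that spec, the helpers above resolve roughly as:
#   $(call query_fetch_method,mylib) -> git
#   $(call query_repo,mylib)         -> https://github.com/example/mylib
#   $(call query_version,mylib)      -> 1.2.3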
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
-
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly we don't want to include it here,
-# otherwise it'll be treated both as an app and as a top-level project.
-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
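To make the effect concrete, and assuming no pre-existing ERL_LIBS in the environment plus the default APPS_DIR and DEPS_DIR values above, the exported value is effectively:

# ERL_LIBS = $(CURDIR)/apps:$(CURDIR)/deps
# so both local applications and fetched dependencies are visible to the
# Erlang code path and to -include_lib resolution.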
-
-export NO_AUTOPATCH
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
-
-# Optimization: don't recompile deps unless truly necessary.
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create an ebin directory for all apps to make sure Erlang recognizes them
-# as proper OTP applications when using -include_lib. This is a temporary
-# fix; a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
-
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies after they have been compiled
-# once. If a developer is working on the top-level project and on some
-# dependencies at the same time, they may want to change this behavior.
-# There are two solutions, illustrated by the example invocations below:
-#     1. Set `FULL=1` so that all dependencies are visited and
-#        recursively recompiled if necessary.
-#     2. Set `FORCE_REBUILD=` to the specific list of dependencies that
-#        should be recompiled (instead of the whole set).
-
-FORCE_REBUILD ?=
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
-
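-# A minimal usage sketch of the two options above, assuming dependencies
-# named cowlib and ranch:
-#
-#     make FULL=1                        # visit all deps and recompile as needed
-#     make FORCE_REBUILD="cowlib ranch"  # force only these two to be rebuilt
-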
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
-
-# Deps related targets.
-
-# @todo rename GNUmakefile and makefile into Makefile first, if they exist
-# While the Makefile could also be named GNUmakefile or makefile,
-# in practice only Makefile has been needed so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
-# if given. Do it for all 3 possible Makefile file names.
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
-
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-# Hex only has a package version. No need to look in the Erlang.mk packages.
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
-
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility with older Erlang.mk dependency configurations.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
-
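-# In the deprecated format handled above, the specification lists only the
-# repository URL and an optional ref, e.g. (hypothetical dependency):
-#
-#     dep_cowlib = https://github.com/ninenines/cowlib 2.12.1
-#
-# When the ref is omitted, master is checked out.
-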
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
-
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- Output0 = [
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ],
- Output = case "é" of
- [233] -> unicode:characters_to_binary(Output0);
- _ -> Output0
- end,
- ok = file:write_file("$(1)", Output),
- halt()
-endef
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
- echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
-
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
- @:
-
-test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
-test_erlc_verbose_2 = set -x;
-test_erlc_verbose = $(test_erlc_verbose_$(V))
-
-define compile_test_erl
- $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(1)
-endef
-
-ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
-$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
- $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want to fail due to
-# warnings when used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
-
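-# As a sketch, assuming a single hypothetical git dependency declared as
-# dep_cowlib = git https://github.com/ninenines/cowlib 2.12.1 and the
-# default ERLC_OPTS, `make rebar.config` would render something like:
-#
-#     {deps, [
-#         {cowlib,".*",{git,"https://github.com/ninenines/cowlib","2.12.1"}}
-#     ]}.
-#     {erl_opts, [debug_info,warn_export_vars,warn_shadow_vars,warn_obsolete_guard]}.
-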
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
- " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
- " list-templates List available templates"
-
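-# For example, `make bootstrap` generates a new OTP application skeleton in
-# the current directory, and `make new t=gen_server n=my_server in=my_app`
-# (hypothetical names) renders the gen_server template below into
-# $(APPS_DIR)/my_app/src/my_server.erl.
-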
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
-
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
-
-list-templates:
- $(verbose) @echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code. The "gcc" MSYS2 package also doesn't.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
- "The CI_OTP variable must be defined with the Erlang versions" \
- "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
-
-# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifdef CONCUERROR_TESTS
-
-.PHONY: concuerror distclean-concuerror
-
-# Configuration
-
-CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
-CONCUERROR_OPTS ?=
-
-# Core targets.
-
-check:: concuerror
-
-ifndef KEEP_LOGS
-distclean:: distclean-concuerror
-endif
-
-# Plugin-specific targets.
-
-$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
- $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
- $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
-
-$(CONCUERROR_LOGS_DIR):
- $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
-
-define concuerror_html_report
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="utf-8">
-<title>Concuerror HTML report</title>
-</head>
-<body>
-<h1>Concuerror HTML report</h1>
-<p>Generated on $(concuerror_date)</p>
-<ul>
-$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
-</ul>
-</body>
-</html>
-endef
-
-concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
- $(eval concuerror_date := $(shell date))
- $(eval concuerror_targets := $^)
- $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
-
-define concuerror_target
-.PHONY: concuerror-$1-$2
-
-concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
- $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
- --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
- -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
- $$(CONCUERROR_OPTS) -m $1 -t $2
-endef
-
-$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
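-# Each CONCUERROR_TESTS entry has the form module:test_function (names are
-# project-specific) and produces a matching concuerror-module-test_function
-# target whose report is written to $(CONCUERROR_LOGS_DIR).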
-
-distclean-concuerror:
- $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
-
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ct apps-ct distclean-ct
-
-# Configuration.
-
-CT_OPTS ?=
-
-ifneq ($(wildcard $(TEST_DIR)),)
-ifndef CT_SUITES
-CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
-endif
-endif
-CT_SUITES ?=
-CT_LOGS_DIR ?= $(CURDIR)/logs
-
-# Core targets.
-
-tests:: ct
-
-ifndef KEEP_LOGS
-distclean:: distclean-ct
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Common_test targets:" \
- " ct Run all the common_test suites for this project" \
- "" \
-		"All your common_test suites have an associated target." \
-		"A suite named http_SUITE can be run using the ct-http target."
-
-# Plugin-specific targets.
-
-CT_RUN = ct_run \
- -no_auto_compile \
- -noinput \
- -pa $(CURDIR)/ebin $(TEST_DIR) \
- -dir $(TEST_DIR) \
- -logdir $(CT_LOGS_DIR)
-
-ifeq ($(CT_SUITES),)
-ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
-else
-# We do not run tests if we are in an apps/* directory with no test directory.
-ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
-ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
-endif
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-define ct_app_target
-apps-ct-$1: test-build
- $$(MAKE) -C $1 ct IS_APP=1
-endef
-
-$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
-
-apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
-endif
-
-ifdef t
-ifeq (,$(findstring :,$t))
-CT_EXTRA = -group $t
-else
-t_words = $(subst :, ,$t)
-CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
-endif
-else
-ifdef c
-CT_EXTRA = -case $c
-else
-CT_EXTRA =
-endif
-endif
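-# For example (suite, group and case names are illustrative):
-#   make ct-http t=admin          # run only the admin group of http_SUITE
-#   make ct-http t=admin:login    # run a single case within that group
-#   make ct-http c=login          # run a single case regardless of group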
-
-define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
-endef
-
-$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
-
-distclean-ct:
- $(gen_verbose) rm -rf $(CT_LOGS_DIR)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: plt distclean-plt dialyze
-
-# Configuration.
-
-DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
-export DIALYZER_PLT
-
-PLT_APPS ?=
-DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-DIALYZER_PLT_OPTS ?=
-
-# Core targets.
-
-check:: dialyze
-
-distclean:: distclean-plt
-
-help::
- $(verbose) printf "%s\n" "" \
- "Dialyzer targets:" \
- " plt Build a PLT file for this project" \
- " dialyze Analyze the project using Dialyzer"
-
-# Plugin-specific targets.
-
-define filter_opts.erl
- Opts = init:get_plain_arguments(),
- {Filtered, _} = lists:foldl(fun
- (O, {Os, true}) -> {[O|Os], false};
- (O = "-D", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-I", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-pa", {Os, _}) -> {[O|Os], true};
- (_, Acc) -> Acc
- end, {[], false}, Opts),
- io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
- halt().
-endef
-
-# DIALYZER_PLT is a variable understood directly by Dialyzer.
-#
-# We append the path to erts at the end of the PLT. This works
-# because the PLT file is in the external term format and the
-# function binary_to_term/1 ignores any trailing data.
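-# The dialyze target below reads that trailing line back and compares it
-# with the current erts path: if the Erlang/OTP installation changed, the
-# PLT is removed and rebuilt before the analysis runs.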
-$(DIALYZER_PLT): deps app
- $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
- while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
- $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
- erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
- $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
-
-plt: $(DIALYZER_PLT)
-
-distclean-plt:
- $(gen_verbose) rm -f $(DIALYZER_PLT)
-
-ifneq ($(wildcard $(DIALYZER_PLT)),)
-dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
- $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
- grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
- rm $(DIALYZER_PLT); \
- $(MAKE) plt; \
- fi
-else
-dialyze: $(DIALYZER_PLT)
-endif
- $(verbose) dialyzer --no_native `$(ERL) \
- -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
- -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
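-# For instance (template name illustrative): with the defaults, a template
-# templates/users/show.dtl compiles to the module show_dtl; with
-# DTL_FULL_PATH=1 it becomes users_show_dtl instead.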
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
- " escript Build an executable escript archive" \
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
- $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: eunit apps-eunit
-
-# Configuration
-
-EUNIT_OPTS ?=
-EUNIT_ERL_OPTS ?=
-
-# Core targets.
-
-tests:: eunit
-
-help::
- $(verbose) printf "%s\n" "" \
- "EUnit targets:" \
- " eunit Run all the EUnit tests for this project"
-
-# Plugin-specific targets.
-
-define eunit.erl
- $(call cover.erl)
- CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
- ok -> ok;
- error -> halt(2)
- end,
- CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
- halt()
-endef
-
-EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
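-# The t variable selects what to run (names below are illustrative):
-#   make eunit t=my_module          # eunit:test(['my_module'], ...)
-#   make eunit t=my_module:my_test  # runs the single test function my_module:my_test/0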
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
-else
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
-endif
-else
-EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
-EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
-
-EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
- $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
-
-eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
-ifneq ($(wildcard src/ $(TEST_DIR)),)
- $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-apps-eunit: test-build
- $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
- [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
- exit $$eunit_retcode
-endif
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper)
-.PHONY: proper
-
-# Targets.
-
-tests:: proper
-
-define proper_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- Module = fun(M) ->
- [true] =:= lists:usort([
- case atom_to_list(F) of
- "prop_" ++ _ ->
- io:format("Testing ~p:~p/0~n", [M, F]),
- proper:quickcheck(M:F(), nocolors);
- _ ->
- true
- end
- || {F, 0} <- M:module_info(exports)])
- end,
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]);
- module -> Module($(2));
- function -> proper:quickcheck($(2), nocolors)
- end,
- CoverExport("$(COVER_DATA_DIR)/proper.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
-	catch error:undef:Stacktrace ->
-		io:format("Undefined property or module?~n~p~n", [Stacktrace]),
-		halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-proper: test-build cover-data-dir
- $(verbose) $(call erlang,$(call proper_check.erl,module,$(t)))
-else
-proper: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)()))
-endif
-else
-proper: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Verbosity.
-
-proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
-proto_verbose = $(proto_verbose_$(V))
-
-# Core targets.
-
-ifneq ($(wildcard src/),)
-ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
-PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
-ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
-
-ifeq ($(PROTO_FILES),)
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
- $(verbose) :
-else
-# Rebuild proto files when the Makefile changes.
-# We exclude $(PROJECT).d to avoid a circular dependency.
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(PROTO_FILES); \
- fi
- $(verbose) touch $@
-
-$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
-endif
-
-ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
-define compile_proto.erl
- [begin
- protobuffs_compile:generate_source(F, [
- {output_include_dir, "./include"},
- {output_src_dir, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-else
-define compile_proto.erl
- [begin
- gpb_compile:file(F, [
- {include_as_lib, true},
- {module_name_suffix, "_pb"},
- {o_hrl, "./include"},
- {o_erl, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-endif
-
-ifneq ($(PROTO_FILES),)
-$(PROJECT).d:: $(PROTO_FILES)
- $(verbose) mkdir -p ebin/ include/
- $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
-endif
-endif
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
-
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
- " shell Run an erlang shell with SHELL_OPTS or reasonable default"
-
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
-
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
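-# For example, SPHINX_FORMATS = html man makes the sphinx target run
-# sphinx-build once per format; each format's output goes to a directory
-# named after the format unless the matching sphinx_<format>_output
-# variable overrides it.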
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
-		"ReST sources and the 'conf.py' file are expected in the directory pointed to" \
-		"by SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists the formats to build" \
-		"(only the 'html' format is generated by default); the target directory can be" \
-		'set via sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
-		"Additional Sphinx options can be set in SPHINX_OPTS."
-
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
-
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
-.PHONY: triq
-
-# Targets.
-
-tests:: triq
-
-define triq_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
- module -> triq:check($(2));
- function -> triq:check($(2))
- end,
- CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
-	catch error:undef:Stacktrace ->
-		io:format("Undefined property or module?~n~p~n", [Stacktrace]),
-		halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-triq: test-build cover-data-dir
- $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
-else
-triq: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
-endif
-else
-triq: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
- ' xref Run Xrefr using $$XREF_CONFIG as config file if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-COVER_REPORT_DIR ?= cover
-COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
-
-ifdef COVER
-COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
-COVER_DEPS ?=
-endif
-
-# Code coverage for Common Test.
-
-ifdef COVER
-ifdef CT_RUN
-ifneq ($(wildcard $(TEST_DIR)),)
-test-build:: $(TEST_DIR)/ct.cover.spec
-
-$(TEST_DIR)/ct.cover.spec: cover-data-dir
- $(gen_verbose) printf "%s\n" \
- "{incl_app, '$(PROJECT)', details}." \
- "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
- $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
- $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
-
-CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
-endif
-endif
-endif
-
-# Code coverage for other tools.
-
-ifdef COVER
-define cover.erl
- CoverSetup = fun() ->
- Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
- $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
- $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
- end
- end || Dir <- Dirs]
- end,
- CoverExport = fun(Filename) -> cover:export(Filename) end,
-endef
-else
-define cover.erl
- CoverSetup = fun() -> ok end,
- CoverExport = fun(_) -> ok end,
-endef
-endif
-
-# Core targets
-
-ifdef COVER
-ifneq ($(COVER_REPORT_DIR),)
-tests::
- $(verbose) $(MAKE) --no-print-directory cover-report
-endif
-
-cover-data-dir: | $(COVER_DATA_DIR)
-
-$(COVER_DATA_DIR):
- $(verbose) mkdir -p $(COVER_DATA_DIR)
-else
-cover-data-dir:
-endif
-
-clean:: coverdata-clean
-
-ifneq ($(COVER_REPORT_DIR),)
-distclean:: cover-report-clean
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Cover targets:" \
-		"  cover-report  Generate an HTML coverage report from previously collected" \
- " cover data." \
- " all.coverdata Merge all coverdata files into all.coverdata." \
- "" \
-		"If COVER=1 is set, coverage data is generated by the eunit and ct targets. The" \
-		"tests target additionally generates an HTML coverage report from the combined" \
- "coverdata files from each of these testing tools. HTML reports can be disabled" \
- "by setting COVER_REPORT_DIR to empty."
-
-# Plugin specific targets
-
-COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
-
-.PHONY: coverdata-clean
-coverdata-clean:
- $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
-
-# Merge all coverdata files into one.
-define cover_export.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
-endef
-
-all.coverdata: $(COVERDATA) cover-data-dir
- $(gen_verbose) $(call erlang,$(cover_export.erl))
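-# Typical flow: run the test targets with COVER=1 (producing e.g.
-# eunit.coverdata and ct.coverdata in $(COVER_DATA_DIR)), then run
-# `make all.coverdata` to merge them into a single file.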
-
-# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
-# empty if you want the coverdata files but not the HTML report.
-ifneq ($(COVER_REPORT_DIR),)
-
-.PHONY: cover-report-clean cover-report
-
-cover-report-clean:
- $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
-ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
- $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
-endif
-
-ifeq ($(COVERDATA),)
-cover-report:
-else
-
-# Modules which include eunit.hrl always contain one line without coverage
-# because eunit defines test/0 which is never called. We compensate for this.
-EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
- grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
- | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
-
-define cover_report.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- Ms = cover:imported_modules(),
- [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
- ++ ".COVER.html", [html]) || M <- Ms],
- Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
- EunitHrlMods = [$(EUNIT_HRL_MODS)],
- Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
- true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
- TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
- TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
- Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
- TotalPerc = Perc(TotalY, TotalN),
- {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
- io:format(F, "<!DOCTYPE html><html>~n"
- "<head><meta charset=\"UTF-8\">~n"
- "<title>Coverage report</title></head>~n"
- "<body>~n", []),
- io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
- io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
- [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
- "<td>~p%</td></tr>~n",
- [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
- How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
- Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
- io:format(F, "</table>~n"
- "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
- "</body></html>", [How, Date]),
- halt().
-endef
-
-cover-report:
- $(verbose) mkdir -p $(COVER_REPORT_DIR)
- $(gen_verbose) $(call erlang,$(cover_report.erl))
-
-endif
-endif # ifneq ($(COVER_REPORT_DIR),)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
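-# The resulting .run file is the sfx_stub script above followed by the
-# release tarball; executing it unpacks the archive into a temporary
-# directory and starts the release console from there.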
-
-endif
-endif
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
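-# A DEP_PLUGINS entry that is just a dependency name loads that dependency's
-# plugins.mk; an entry containing a slash, e.g. some_dep/mk/extra.mk
-# (illustrative path), loads that specific file from the some_dep dependency.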
-
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
-
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are also included
-# regardless of the type of dependencies requested.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
-
-# Allow the use of fetch-deps and $(DEP_TYPES) to fetch multiple types of
-# dependencies with a single target.
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
-
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
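-# For example, `make query-deps QUERY="name repo"` appends one line per
-# dependency of the form `<project>: <name> <repo>` to the query file,
-# recurses into each dependency, and prints the collected result at the end.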
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
diff --git a/deps/rabbitmq_top/rabbitmq-components.mk b/deps/rabbitmq_top/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/rabbitmq_top/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Set the default goal to `all` because this file defines some targets
-# before erlang.mk is included, which would otherwise cause the wrong
-# target to become the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
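-# As an illustration of the sed pipeline above: a git-describe output such
-# as "v3.9.0-12-gabcdef1" (hypothetical) becomes "3.9.0+12.gabcdef1".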
-
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to check out branches which match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch, or fall back to `stable` or `master`, whichever was the
-# base of the topic branch.
-
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
-
-# Third-party dependency version pinning.
-#
-# We do that in this file, which is copied into all projects, to ensure
-# they all use the same versions. It avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# Erlang.mk does not rebuild dependencies by default once they have been
-# compiled, except for those listed in the `$(FORCE_REBUILD)` variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this makes it
-# easier to work on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project's
-# repository URL, if it is a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name are replaced by the
-#   target's properties:
-#   e.g. rabbitmq-common is replaced by rabbitmq-codegen
-#   e.g. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fall back to the
-# RabbitMQ upstream on GitHub.
-
-# Macro to transform a name like "rabbit_common" into "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following pattern:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
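-# For instance (URL hypothetical):
-#   $(call subst_repo_name,rabbitmq-top,rabbitmq-codegen,https://example.com/org/rabbitmq-top.git)
-# yields https://example.com/org/rabbitmq-codegen.git.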
-
-# Macro to replace both the project's name (e.g. "rabbit_common") and
-# repository name (e.g. "rabbitmq-common") with the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespace in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level's one.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is a coincidence that we found a
-# `rabbitmq-components.mk` in an upper directory.
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
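To illustrate the guess being made above, here is a small standalone sketch (the path is invented); the real check additionally requires a rabbitmq-components.mk one level above the candidate directory before overriding DEPS_DIR:

# Hypothetical candidate, e.g. a component checked out at
# <umbrella>/deps/<project> looking one level up.
CANDIDATE := /home/dev/rabbitmq-server/deps

ifeq ($(notdir $(CANDIDATE)),deps)
$(info $(CANDIDATE) is named "deps", so it may be an umbrella-wide DEPS_DIR)
endif

noop: ;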
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
diff --git a/deps/rabbitmq_top/src/rabbit_top_app.erl b/deps/rabbitmq_top/src/rabbit_top_app.erl
index 6ad279a586..8cb2fc0e57 100644
--- a/deps/rabbitmq_top/src/rabbit_top_app.erl
+++ b/deps/rabbitmq_top/src/rabbit_top_app.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_top_app).
diff --git a/deps/rabbitmq_top/src/rabbit_top_extension.erl b/deps/rabbitmq_top/src/rabbit_top_extension.erl
index 26961b85bf..dadc423a92 100644
--- a/deps/rabbitmq_top/src/rabbit_top_extension.erl
+++ b/deps/rabbitmq_top/src/rabbit_top_extension.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_top_extension).
diff --git a/deps/rabbitmq_top/src/rabbit_top_sup.erl b/deps/rabbitmq_top/src/rabbit_top_sup.erl
index 63e9bef08e..f5d9d2847d 100644
--- a/deps/rabbitmq_top/src/rabbit_top_sup.erl
+++ b/deps/rabbitmq_top/src/rabbit_top_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_top_sup).
diff --git a/deps/rabbitmq_top/src/rabbit_top_util.erl b/deps/rabbitmq_top/src/rabbit_top_util.erl
index 985e5344d5..f5517cf70b 100644
--- a/deps/rabbitmq_top/src/rabbit_top_util.erl
+++ b/deps/rabbitmq_top/src/rabbit_top_util.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_top_util).
diff --git a/deps/rabbitmq_top/src/rabbit_top_wm_ets_tables.erl b/deps/rabbitmq_top/src/rabbit_top_wm_ets_tables.erl
index 5ae17b4f97..7ea279f310 100644
--- a/deps/rabbitmq_top/src/rabbit_top_wm_ets_tables.erl
+++ b/deps/rabbitmq_top/src/rabbit_top_wm_ets_tables.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_top_wm_ets_tables).
diff --git a/deps/rabbitmq_top/src/rabbit_top_wm_process.erl b/deps/rabbitmq_top/src/rabbit_top_wm_process.erl
index 0a80105a65..c69dacfee8 100644
--- a/deps/rabbitmq_top/src/rabbit_top_wm_process.erl
+++ b/deps/rabbitmq_top/src/rabbit_top_wm_process.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_top_wm_process).
diff --git a/deps/rabbitmq_top/src/rabbit_top_wm_processes.erl b/deps/rabbitmq_top/src/rabbit_top_wm_processes.erl
index a5f14d429f..67638a8c18 100644
--- a/deps/rabbitmq_top/src/rabbit_top_wm_processes.erl
+++ b/deps/rabbitmq_top/src/rabbit_top_wm_processes.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_top_wm_processes).
diff --git a/deps/rabbitmq_top/src/rabbit_top_worker.erl b/deps/rabbitmq_top/src/rabbit_top_worker.erl
index ef6b0c984f..e2ed3ef56b 100644
--- a/deps/rabbitmq_top/src/rabbit_top_worker.erl
+++ b/deps/rabbitmq_top/src/rabbit_top_worker.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_top_worker).
diff --git a/deps/rabbitmq_tracing/BUILD.bazel b/deps/rabbitmq_tracing/BUILD.bazel
new file mode 100644
index 0000000000..8b3e903a6c
--- /dev/null
+++ b/deps/rabbitmq_tracing/BUILD.bazel
@@ -0,0 +1,70 @@
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze")
+load(
+ "//:rabbitmq.bzl",
+ "RABBITMQ_DIALYZER_OPTS",
+ "assert_suites",
+ "broker_for_integration_suites",
+ "rabbitmq_integration_suite",
+ "rabbitmq_lib",
+)
+
+APP_NAME = "rabbitmq_tracing"
+
+APP_DESCRIPTION = "RabbitMQ message logging / tracing"
+
+APP_MODULE = "rabbit_tracing_app"
+
+APP_ENV = """[
+ {directory, "/var/tmp/rabbitmq-tracing"},
+ {username, <<"guest">>},
+ {password, <<"guest">>}
+ ]"""
+
+BUILD_DEPS = [
+ "//deps/amqp_client:bazel_erlang_lib",
+ "//deps/rabbitmq_management_agent:bazel_erlang_lib",
+]
+
+DEPS = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+ "//deps/rabbitmq_management:bazel_erlang_lib",
+]
+
+RUNTIME_DEPS = [
+ "//deps/rabbit:bazel_erlang_lib",
+]
+
+rabbitmq_lib(
+ app_description = APP_DESCRIPTION,
+ app_env = APP_ENV,
+ app_module = APP_MODULE,
+ app_name = APP_NAME,
+ build_deps = BUILD_DEPS,
+ runtime_deps = RUNTIME_DEPS,
+ deps = DEPS,
+)
+
+xref(tags = ["xref"])
+
+dialyze(
+ dialyzer_opts = RABBITMQ_DIALYZER_OPTS,
+ plt = "//:base_plt",
+ tags = ["dialyze"],
+)
+
+broker_for_integration_suites()
+
+PACKAGE = "deps/rabbitmq_tracing"
+
+suites = [
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "rabbit_tracing_SUITE",
+ ),
+]
+
+assert_suites(
+ suites,
+ glob(["test/**/*_SUITE.erl"]),
+)
diff --git a/deps/rabbitmq_tracing/Makefile b/deps/rabbitmq_tracing/Makefile
index 8ff32ddd79..5c63c75693 100644
--- a/deps/rabbitmq_tracing/Makefile
+++ b/deps/rabbitmq_tracing/Makefile
@@ -26,5 +26,5 @@ DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
ERLANG_MK_COMMIT = rabbitmq-tmp
-include rabbitmq-components.mk
-include erlang.mk
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
diff --git a/deps/rabbitmq_tracing/erlang.mk b/deps/rabbitmq_tracing/erlang.mk
deleted file mode 100644
index fce4be0b0a..0000000000
--- a/deps/rabbitmq_tracing/erlang.mk
+++ /dev/null
@@ -1,7808 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
-ERLANG_MK_WITHOUT =
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
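For readers unfamiliar with this pattern, here is a minimal standalone sketch (the target and command are invented) restating the relevant definitions and showing how they behave at the different V levels:

V ?= 0
verbose_0 = @
verbose = $(verbose_$(V))
gen_verbose_0 = @echo " GEN " $@;
gen_verbose = $(gen_verbose_$(V))

# V=0 prints a terse " GEN " line, V=1 lets make echo the full command,
# and V=2 would additionally run the shell with `set -x` (not redefined here).
example.txt: ; $(gen_verbose) echo generated > $@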
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
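The exported PLATFORM value is typically consumed by conditionals further down in this file or in project Makefiles; a hedged sketch of such a consumer (the variable name and flags are invented, and it assumes PLATFORM has already been detected as above):

# Hypothetical per-platform tweak driven by the detection above.
ifeq ($(PLATFORM),linux)
EXAMPLE_LDFLAGS += -lrt
else ifeq ($(PLATFORM),darwin)
EXAMPLE_LDFLAGS += -framework CoreFoundation
endif

show-platform: ; @echo "building on $(PLATFORM) with '$(EXAMPLE_LDFLAGS)'"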
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
-
-# Adding erlang.mk to make Erlang scripts that call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
-
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_HIPE)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
-
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
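Every entry in this index follows the same pkg_&lt;name&gt;_* pattern; a project Makefile normally only names the package and lets erlang.mk resolve the fetch method, repository and commit from these variables. A minimal, hypothetical project Makefile (the project name is invented, and it assumes erlang.mk sits next to it):

# Hypothetical consumer of the package index: listing "cowboy" in DEPS makes
# erlang.mk fetch pkg_cowboy_repo at pkg_cowboy_commit.
PROJECT = my_app
DEPS = cowboy

include erlang.mk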
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simplify writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating Github-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = erlang library for JSON Web Token
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple, non-intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is a pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = redis erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += esh_mk
-pkg_esh_mk_name = esh_mk
-pkg_esh_mk_description = esh template engine plugin for erlang.mk
-pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
-pkg_esh_mk_fetch = git
-pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
-pkg_esh_mk_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = TOML language erlang parser
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = http://fixprotocol.org/ implementation.
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - an Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = The guards and for Erlang parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
-pkg_gun_homepage = http://ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang irc client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library, written in C, for efficiently decoding and encoding JSON.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-Erlang, open-source implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = erlang driver for rethinkdb
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = library to handle mimetypes
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang Oauth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transform for using new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid-simple, internal pub/sub event bus written in and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between record and proplist
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = automatic Erlang node discovery via redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = pipelined erlang redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang Rest Client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy as nif for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an ID generator for message services.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elastic Search
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX style broken HTML parser in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = transit format for erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU based collation Erlang module
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtension for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = a simple erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler svn repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired by rebar xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based yaml loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = ZAB protocol implementation in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
-
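-# A hedged usage sketch for the search target above. The query string
-# "pool" is illustrative only and not part of erlang.mk itself.
-#
-#   make search q=pool   # case-insensitive match against package names and descriptions
-#   make search          # with no q=..., prints every package entry
-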
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
-
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
-
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# They both use the core_dep_plugin macro.
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
-
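-# A hypothetical sketch of how DEP_EARLY_PLUGINS expands through the
-# core_dep_plugin macro above; the plugin names are illustrative, not part
-# of this file. An entry without a slash is treated as a dependency name
-# and its early-plugins.mk is included; an entry with a slash is included
-# as a path relative to DEPS_DIR (unless the dependency is the project
-# itself, in which case the path is resolved inside the project).
-#
-#   DEP_EARLY_PLUGINS = ci.erlang.mk
-#     -> includes $(DEPS_DIR)/ci.erlang.mk/early-plugins.mk
-#   DEP_EARLY_PLUGINS = mydep/mk/early.mk
-#     -> includes $(DEPS_DIR)/mydep/mk/early.mk
-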
-# Query functions.
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
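For illustration, the word positions read by the query functions above follow the usual `dep_*` declaration format; the second declaration below uses hypothetical names:

    dep_cowboy = git https://github.com/ninenines/cowboy 2.9.0
    dep_my_dep = git-subfolder https://github.com/user/repo main apps/my_dep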
-
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
-
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly we don't want to include it here,
-# otherwise it'll be treated both as an app and as a top-level project.
-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
-
-export NO_AUTOPATCH
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
-
-# Optimization: don't recompile deps unless truly necessary.
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create the ebin directory for all apps to make sure Erlang recognizes them
-# as proper OTP applications when using -include_lib. This is a temporary
-# fix; a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
-
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies once they have been compiled.
-# A developer working on the top-level project and on some dependencies
-# at the same time may want to change this behavior. There are two
-# solutions (example invocations are shown below):
-# 1. Set `FULL=1` so that all dependencies are visited and
-#    recursively recompiled if necessary.
-# 2. Set `FORCE_REBUILD=` to the specific list of dependencies that
-#    should be recompiled (instead of the whole set).
-
-FORCE_REBUILD ?=
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
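For illustration, example invocations for the two options described in the comment block above, assuming dependencies named cowlib and ranch:

    make FULL=1                        # visit all deps and recompile when necessary
    make FORCE_REBUILD='cowlib ranch'  # force a rebuild of these two deps only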
-
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
-
-# Deps related targets.
-
-# @todo rename GNUmakefile and makefile to Makefile first, if they exist
-# While the Makefile could also be named GNUmakefile or makefile,
-# in practice only Makefile has been needed so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
-# if given. Do it for all 3 possible Makefile file names.
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
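For illustration, the effect of the sed substitution above on a dependency's Makefile:

    # before
    include erlang.mk
    # after
    include $(if $(ERLANG_MK_FILENAME),$(ERLANG_MK_FILENAME),erlang.mk)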
-
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-# Hex only has a package version. No need to look in the Erlang.mk packages.
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
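For illustration, a Hex dependency is declared with the version as word 2 and an optional hex.pm package name as word 3, matching the tarball URL built above; `my_dep` and `actual_pkg_name` are hypothetical, and the versions shown are only examples:

    DEPS += cowlib
    dep_cowlib = hex 2.12.1
    dep_my_dep = hex 1.0.0 actual_pkg_name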
-
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility with older Erlang.mk configurations.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
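For illustration, the deprecated format handled by this fallback is `dep_NAME = REPO_URL [REF]`, with REF defaulting to master; the tag shown is only an example:

    dep_cowboy = https://github.com/ninenines/cowboy 2.9.0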
-
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
-
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- Output0 = [
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ],
- Output = case "é" of
- [233] -> unicode:characters_to_binary(Output0);
- _ -> Output0
- end,
- ok = file:write_file("$(1)", Output),
- halt()
-endef
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
- echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
-
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
- @:
-
-test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
-test_erlc_verbose_2 = set -x;
-test_erlc_verbose = $(test_erlc_verbose_$(V))
-
-define compile_test_erl
- $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(1)
-endef
-
-ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
-$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
- $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want to fail due to
-# warnings when used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
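For illustration, a rough sketch of what `make rebar.config` renders for one git dependency together with the default ERLC_OPTS defined earlier, with -Werror stripped by compat_convert_erlc_opts (the dependency and its tag are only examples):

    {deps, [
    {cowlib,".*",{git,"https://github.com/ninenines/cowlib","2.12.1"}}
    ]}.
    {erl_opts, [debug_info,warn_export_vars,warn_shadow_vars,warn_obsolete_guard]}.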
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
- " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
- " list-templates List available templates"
-
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
-
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
-
-list-templates:
- $(verbose) @echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code. The "gcc" MSYS2 package also doesn't.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
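-# For example (the NIF name below is illustrative):
-#
-#   $ make new-nif n=my_nif
-#
-# This renders c_src/my_nif.c and src/my_nif.erl from the bs_c_nif and
-# bs_erl_nif templates above; a subsequent `make` builds the C source
-# through the c_src rules.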
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
-		"The CI_OTP variable must list the Erlang versions to be tested." \
-		"For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
-
-# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifdef CONCUERROR_TESTS
-
-.PHONY: concuerror distclean-concuerror
-
-# Configuration
-
-CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
-CONCUERROR_OPTS ?=
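-# CONCUERROR_TESTS is a list of module:test pairs, for example
-# (module and test names are illustrative):
-#
-#   CONCUERROR_TESTS = my_tests:test_race my_tests:test_deadlock
-#
-# `make concuerror` then runs each pair through Concuerror and renders
-# an HTML index into $(CONCUERROR_LOGS_DIR).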
-
-# Core targets.
-
-check:: concuerror
-
-ifndef KEEP_LOGS
-distclean:: distclean-concuerror
-endif
-
-# Plugin-specific targets.
-
-$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
- $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
- $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
-
-$(CONCUERROR_LOGS_DIR):
- $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
-
-define concuerror_html_report
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="utf-8">
-<title>Concuerror HTML report</title>
-</head>
-<body>
-<h1>Concuerror HTML report</h1>
-<p>Generated on $(concuerror_date)</p>
-<ul>
-$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
-</ul>
-</body>
-</html>
-endef
-
-concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
- $(eval concuerror_date := $(shell date))
- $(eval concuerror_targets := $^)
- $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
-
-define concuerror_target
-.PHONY: concuerror-$1-$2
-
-concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
- $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
- --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
- -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
- $$(CONCUERROR_OPTS) -m $1 -t $2
-endef
-
-$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
-
-distclean-concuerror:
- $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
-
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ct apps-ct distclean-ct
-
-# Configuration.
-
-CT_OPTS ?=
-
-ifneq ($(wildcard $(TEST_DIR)),)
-ifndef CT_SUITES
-CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
-endif
-endif
-CT_SUITES ?=
-CT_LOGS_DIR ?= $(CURDIR)/logs
-
-# Core targets.
-
-tests:: ct
-
-ifndef KEEP_LOGS
-distclean:: distclean-ct
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Common_test targets:" \
- " ct Run all the common_test suites for this project" \
- "" \
-		"Each of your common_test suites has an associated target." \
-		"A suite named http_SUITE can be run using the ct-http target."
-
-# Plugin-specific targets.
-
-CT_RUN = ct_run \
- -no_auto_compile \
- -noinput \
- -pa $(CURDIR)/ebin $(TEST_DIR) \
- -dir $(TEST_DIR) \
- -logdir $(CT_LOGS_DIR)
-
-ifeq ($(CT_SUITES),)
-ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
-else
-# We do not run tests if we are in an apps/* directory with no test directory.
-ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
-ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
-endif
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-define ct_app_target
-apps-ct-$1: test-build
- $$(MAKE) -C $1 ct IS_APP=1
-endef
-
-$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
-
-apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
-endif
-
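-# A single group and/or test case can be selected with the t and c
-# variables, for example (group/case names are illustrative):
-#
-#   $ make ct-http t=chunked              # one group
-#   $ make ct-http t=chunked:read_body    # one case within a group
-#   $ make ct-http c=read_body            # one case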
-ifdef t
-ifeq (,$(findstring :,$t))
-CT_EXTRA = -group $t
-else
-t_words = $(subst :, ,$t)
-CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
-endif
-else
-ifdef c
-CT_EXTRA = -case $c
-else
-CT_EXTRA =
-endif
-endif
-
-define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
-endef
-
-$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
-
-distclean-ct:
- $(gen_verbose) rm -rf $(CT_LOGS_DIR)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: plt distclean-plt dialyze
-
-# Configuration.
-
-DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
-export DIALYZER_PLT
-
-PLT_APPS ?=
-DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-DIALYZER_PLT_OPTS ?=
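-# For example, a project whose code uses TLS might add the relevant OTP
-# applications to the PLT and adjust the warnings (values are
-# illustrative):
-#
-#   PLT_APPS = crypto public_key ssl
-#   DIALYZER_OPTS = -Werror_handling -Wunmatched_returns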
-
-# Core targets.
-
-check:: dialyze
-
-distclean:: distclean-plt
-
-help::
- $(verbose) printf "%s\n" "" \
- "Dialyzer targets:" \
- " plt Build a PLT file for this project" \
- " dialyze Analyze the project using Dialyzer"
-
-# Plugin-specific targets.
-
-define filter_opts.erl
- Opts = init:get_plain_arguments(),
- {Filtered, _} = lists:foldl(fun
- (O, {Os, true}) -> {[O|Os], false};
- (O = "-D", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-I", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-pa", {Os, _}) -> {[O|Os], true};
- (_, Acc) -> Acc
- end, {[], false}, Opts),
- io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
- halt().
-endef
-
-# DIALYZER_PLT is a variable understood directly by Dialyzer.
-#
-# We append the path to erts at the end of the PLT. This works
-# because the PLT file is in the external term format and the
-# function binary_to_term/1 ignores any trailing data.
-$(DIALYZER_PLT): deps app
- $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
- while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
- $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
- erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
- $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
-
-plt: $(DIALYZER_PLT)
-
-distclean-plt:
- $(gen_verbose) rm -f $(DIALYZER_PLT)
-
-ifneq ($(wildcard $(DIALYZER_PLT)),)
-dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
- $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
- grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
- rm $(DIALYZER_PLT); \
- $(MAKE) plt; \
- fi
-else
-dialyze: $(DIALYZER_PLT)
-endif
- $(verbose) dialyzer --no_native `$(ERL) \
- -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
- -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
- " escript Build an executable escript archive" \
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
- $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: eunit apps-eunit
-
-# Configuration
-
-EUNIT_OPTS ?=
-EUNIT_ERL_OPTS ?=
-
-# Core targets.
-
-tests:: eunit
-
-help::
- $(verbose) printf "%s\n" "" \
- "EUnit targets:" \
- " eunit Run all the EUnit tests for this project"
-
-# Plugin-specific targets.
-
-define eunit.erl
- $(call cover.erl)
- CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
- ok -> ok;
- error -> halt(2)
- end,
- CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
- halt()
-endef
-
-EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
-
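-# A subset of the EUnit tests can be selected with the t variable, for
-# example (module/function names are illustrative):
-#
-#   $ make eunit t=my_module          # one module's tests
-#   $ make eunit t=my_module:my_test  # a single test function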
-ifdef t
-ifeq (,$(findstring :,$(t)))
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
-else
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
-endif
-else
-EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
-EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
-
-EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
- $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
-
-eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
-ifneq ($(wildcard src/ $(TEST_DIR)),)
- $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-apps-eunit: test-build
- $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
- [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
- exit $$eunit_retcode
-endif
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper)
-.PHONY: proper
-
-# Targets.
-
-tests:: proper
-
-define proper_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- Module = fun(M) ->
- [true] =:= lists:usort([
- case atom_to_list(F) of
- "prop_" ++ _ ->
- io:format("Testing ~p:~p/0~n", [M, F]),
- proper:quickcheck(M:F(), nocolors);
- _ ->
- true
- end
- || {F, 0} <- M:module_info(exports)])
- end,
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]);
- module -> Module($(2));
- function -> proper:quickcheck($(2), nocolors)
- end,
- CoverExport("$(COVER_DATA_DIR)/proper.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
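-# A subset of the properties can be selected with the t variable, for
-# example (module/property names are illustrative):
-#
-#   $ make proper                               # all prop_* properties
-#   $ make proper t=prop_parser                 # one module
-#   $ make proper t=prop_parser:prop_roundtrip  # a single property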
-ifdef t
-ifeq (,$(findstring :,$(t)))
-proper: test-build cover-data-dir
- $(verbose) $(call erlang,$(call proper_check.erl,module,$(t)))
-else
-proper: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)()))
-endif
-else
-proper: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Verbosity.
-
-proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
-proto_verbose = $(proto_verbose_$(V))
-
-# Core targets.
-
-ifneq ($(wildcard src/),)
-ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
-PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
-ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
-
-ifeq ($(PROTO_FILES),)
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
- $(verbose) :
-else
-# Rebuild proto files when the Makefile changes.
-# We exclude $(PROJECT).d to avoid a circular dependency.
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(PROTO_FILES); \
- fi
- $(verbose) touch $@
-
-$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
-endif
-
-ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
-define compile_proto.erl
- [begin
- protobuffs_compile:generate_source(F, [
- {output_include_dir, "./include"},
- {output_src_dir, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-else
-define compile_proto.erl
- [begin
- gpb_compile:file(F, [
- {include_as_lib, true},
- {module_name_suffix, "_pb"},
- {o_hrl, "./include"},
- {o_erl, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-endif
-
-ifneq ($(PROTO_FILES),)
-$(PROJECT).d:: $(PROTO_FILES)
- $(verbose) mkdir -p ebin/ include/
- $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
-endif
-endif
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
-
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
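-# When RELOAD is set, `rel` additionally pings the running release and
-# hot-loads any recompiled modules into it, for example:
-#
-#   $ make rel RELOAD=1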
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
-		"  shell      Run an Erlang shell with SHELL_OPTS or a reasonable default"
-
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
-
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
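-# For example, to build both HTML and man pages and send the HTML output
-# to a custom directory (paths are illustrative):
-#
-#   SPHINX_FORMATS = html man
-#   sphinx_html_output = priv/www/docs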
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
-		"ReST sources and the 'conf.py' file are expected in the directory pointed to" \
-		"by SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists the formats to build" \
-		"(only 'html' is generated by default); the target directory can be specified by" \
-		'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
- "Additional Sphinx options can be set in SPHINX_OPTS."
-
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
-
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
-.PHONY: triq
-
-# Targets.
-
-tests:: triq
-
-define triq_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
- module -> triq:check($(2));
- function -> triq:check($(2))
- end,
- CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-triq: test-build cover-data-dir
- $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
-else
-triq: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
-endif
-else
-triq: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
- ' xref Run Xrefr using $$XREF_CONFIG as config file if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-COVER_REPORT_DIR ?= cover
-COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
-
-ifdef COVER
-COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
-COVER_DEPS ?=
-endif
-
-# Code coverage for Common Test.
-
-ifdef COVER
-ifdef CT_RUN
-ifneq ($(wildcard $(TEST_DIR)),)
-test-build:: $(TEST_DIR)/ct.cover.spec
-
-$(TEST_DIR)/ct.cover.spec: cover-data-dir
- $(gen_verbose) printf "%s\n" \
- "{incl_app, '$(PROJECT)', details}." \
- "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
- $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
- $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
-
-CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
-endif
-endif
-endif
-
-# Code coverage for other tools.
-
-ifdef COVER
-define cover.erl
- CoverSetup = fun() ->
- Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
- $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
- $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
- end
- end || Dir <- Dirs]
- end,
- CoverExport = fun(Filename) -> cover:export(Filename) end,
-endef
-else
-define cover.erl
- CoverSetup = fun() -> ok end,
- CoverExport = fun(_) -> ok end,
-endef
-endif
-
-# Core targets
-
-ifdef COVER
-ifneq ($(COVER_REPORT_DIR),)
-tests::
- $(verbose) $(MAKE) --no-print-directory cover-report
-endif
-
-cover-data-dir: | $(COVER_DATA_DIR)
-
-$(COVER_DATA_DIR):
- $(verbose) mkdir -p $(COVER_DATA_DIR)
-else
-cover-data-dir:
-endif
-
-clean:: coverdata-clean
-
-ifneq ($(COVER_REPORT_DIR),)
-distclean:: cover-report-clean
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Cover targets:" \
-		" cover-report Generate an HTML coverage report from previously collected" \
- " cover data." \
- " all.coverdata Merge all coverdata files into all.coverdata." \
- "" \
- "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
-		"target tests additionally generates an HTML coverage report from the combined" \
- "coverdata files from each of these testing tools. HTML reports can be disabled" \
- "by setting COVER_REPORT_DIR to empty."
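-# For example:
-#
-#   $ make tests COVER=1                    # coverdata + HTML report
-#   $ make tests COVER=1 COVER_REPORT_DIR=  # coverdata only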
-
-# Plugin specific targets
-
-COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
-
-.PHONY: coverdata-clean
-coverdata-clean:
- $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
-
-# Merge all coverdata files into one.
-define cover_export.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
-endef
-
-all.coverdata: $(COVERDATA) cover-data-dir
- $(gen_verbose) $(call erlang,$(cover_export.erl))
-
-# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
-# empty if you want the coverdata files but not the HTML report.
-ifneq ($(COVER_REPORT_DIR),)
-
-.PHONY: cover-report-clean cover-report
-
-cover-report-clean:
- $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
-ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
- $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
-endif
-
-ifeq ($(COVERDATA),)
-cover-report:
-else
-
-# Modules which include eunit.hrl always contain one line without coverage
-# because eunit defines test/0 which is never called. We compensate for this.
-EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
- grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
- | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
-
-define cover_report.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- Ms = cover:imported_modules(),
- [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
- ++ ".COVER.html", [html]) || M <- Ms],
- Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
- EunitHrlMods = [$(EUNIT_HRL_MODS)],
- Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
- true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
- TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
- TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
- Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
- TotalPerc = Perc(TotalY, TotalN),
- {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
- io:format(F, "<!DOCTYPE html><html>~n"
- "<head><meta charset=\"UTF-8\">~n"
- "<title>Coverage report</title></head>~n"
- "<body>~n", []),
- io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
- io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
- [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
- "<td>~p%</td></tr>~n",
- [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
- How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
- Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
- io:format(F, "</table>~n"
- "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
- "</body></html>", [How, Date]),
- halt().
-endef
-
-cover-report:
- $(verbose) mkdir -p $(COVER_REPORT_DIR)
- $(gen_verbose) $(call erlang,$(cover_report.erl))
-
-endif
-endif # ifneq ($(COVER_REPORT_DIR),)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
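-# For example, to build the release together with a self-extracting
-# archive and run it (the release name is illustrative):
-#
-#   $ make rel SFX=1
-#   $ ./_rel/my_release.run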
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
-
-endif
-endif
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
-
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
-
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are also included
-# regardless of the type of dependencies requested.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
-
-# Allow fetch-deps to be used together with $(DEP_TYPES) to fetch multiple
-# types of dependencies with a single target.
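-# For example:
-#
-#   $ make fetch-deps DEP_TYPES='doc test'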
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
-
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
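-# For example, to only report each dependency's name and version:
-#
-#   $ make query-deps QUERY='name version'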
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
diff --git a/deps/rabbitmq_tracing/rabbitmq-components.mk b/deps/rabbitmq_tracing/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/rabbitmq_tracing/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Set the default goal to `all` because this file defines some targets
-# before the inclusion of erlang.mk, which would otherwise cause the
-# wrong target to become the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
-
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to check out branches which match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch, or fall back to `stable` or `master`, whichever was the
-# base of the topic branch.
-
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
-
-# Third-party dependencies version pinning.
-#
-# We do that in this file, which is copied in all projects, to ensure
-# all projects use the same versions. It avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# Erlang.mk does not rebuild dependencies by default once they have been
-# compiled, except for those listed in the `$(FORCE_REBUILD)` variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this eases
-# working on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project
-# repository URL, if it's a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name are replaced by the
-# target's properties:
-# eg. rabbitmq-common is replaced by rabbitmq-codegen
-# eg. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fall back to the
-# RabbitMQ upstream on GitHub.
-
-# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following pattern:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
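To make the pattern list above concrete, a small sketch using a made-up URL; it exercises rule 1 (`/foo.git` -> `/bar.git`) only:

# Illustrative expansion (URL is hypothetical):
$(info $(call subst_repo_name,rabbit_common,rabbitmq_codegen,https://example.com/team/rabbit_common.git))
# Prints: https://example.com/team/rabbitmq_codegen.git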
-
-# Macro to replace both the project's name (eg. "rabbit_common") and
-# repository name (eg. "rabbitmq-common") by the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespaces in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
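Putting the two substitutions together, a hedged trace of the URL inference, assuming PROJECT is rabbit_common, the origin remote is a made-up https://example.com/me/rabbitmq-common.git, and dep_rabbitmq_codegen is declared in the usual `git_rmq rabbitmq-codegen ...` form:

# Illustrative trace only (names and URLs are assumptions):
#   RABBITMQ_COMPONENT_REPO_NAME = rabbitmq-common   (word 2 of dep_rabbit_common)
#   $(call dep_rmq_repo,https://example.com/me/rabbitmq-common.git,rabbitmq_codegen)
#   => https://example.com/me/rabbitmq-codegen.git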
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
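For context, a sketch of how a component entry typically opts into this fetch method. The exact entry is illustrative, but the word positions match the macros above: word 1 selects dep_fetch_git_rmq, word 2 is the repository name read by rmq_cmp_repo_name, and words 3 onwards are the refs that dep_rmq_commits feeds to the checkout loop:

# Hypothetical declaration; the refs are tried left to right.
dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master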
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level's one.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is a coincidence that we found a
-# `rabbitmq-components.mk` in upper directories.
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
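To make the two directory guesses concrete, a hedged walk-through with a made-up checkout path:

# Illustrative only; the path is hypothetical.
#   CURDIR              = /src/rabbitmq-server/deps/rabbitmq_shovel
#   possible_deps_dir_1 = /src/rabbitmq-server/deps        (its basename is "deps")
#   the wildcard finds /src/rabbitmq-server/rabbitmq-components.mk
#   => DEPS_DIR = /src/rabbitmq-server/deps, DISABLE_DISTCLEAN = 1, and
#      `make distclean` sets SKIP_DEPS = 1 instead of removing the shared deps.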
diff --git a/deps/rabbitmq_tracing/src/rabbit_tracing_app.erl b/deps/rabbitmq_tracing/src/rabbit_tracing_app.erl
index 394f44a9d0..47517e5e0c 100644
--- a/deps/rabbitmq_tracing/src/rabbit_tracing_app.erl
+++ b/deps/rabbitmq_tracing/src/rabbit_tracing_app.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_tracing_app).
diff --git a/deps/rabbitmq_tracing/src/rabbit_tracing_consumer.erl b/deps/rabbitmq_tracing/src/rabbit_tracing_consumer.erl
index c610790086..7ebe4d0a7a 100644
--- a/deps/rabbitmq_tracing/src/rabbit_tracing_consumer.erl
+++ b/deps/rabbitmq_tracing/src/rabbit_tracing_consumer.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_tracing_consumer).
@@ -76,7 +76,7 @@ init(Args0) ->
rabbit_tracing_traces:announce(VHost, Name, self()),
Format = list_to_atom(binary_to_list(pget(format, Args))),
rabbit_log:info("Tracer opened log file ~p with "
- "format ~p~n", [Filename, Format]),
+ "format ~p", [Filename, Format]),
{ok, #state{conn = Conn, ch = Ch, vhost = VHost, queue = Q,
file = F, filename = Filename,
format = Format, buf = [], buf_cnt = 0,
@@ -119,7 +119,7 @@ terminate(shutdown, State = #state{conn = Conn, ch = Ch,
catch amqp_channel:close(Ch),
catch amqp_connection:close(Conn),
catch prim_file:close(F),
- rabbit_log:info("Tracer closed log file ~p~n", [Filename]),
+ rabbit_log:info("Tracer closed log file ~p", [Filename]),
ok;
terminate(_Reason, _State) ->
diff --git a/deps/rabbitmq_tracing/src/rabbit_tracing_consumer_sup.erl b/deps/rabbitmq_tracing/src/rabbit_tracing_consumer_sup.erl
index b651a75a99..984bff0f14 100644
--- a/deps/rabbitmq_tracing/src/rabbit_tracing_consumer_sup.erl
+++ b/deps/rabbitmq_tracing/src/rabbit_tracing_consumer_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_tracing_consumer_sup).
diff --git a/deps/rabbitmq_tracing/src/rabbit_tracing_files.erl b/deps/rabbitmq_tracing/src/rabbit_tracing_files.erl
index dc3a70b7cc..fa260a4077 100644
--- a/deps/rabbitmq_tracing/src/rabbit_tracing_files.erl
+++ b/deps/rabbitmq_tracing/src/rabbit_tracing_files.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_tracing_files).
diff --git a/deps/rabbitmq_tracing/src/rabbit_tracing_mgmt.erl b/deps/rabbitmq_tracing/src/rabbit_tracing_mgmt.erl
index c408150939..b16456213f 100644
--- a/deps/rabbitmq_tracing/src/rabbit_tracing_mgmt.erl
+++ b/deps/rabbitmq_tracing/src/rabbit_tracing_mgmt.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_tracing_mgmt).
diff --git a/deps/rabbitmq_tracing/src/rabbit_tracing_sup.erl b/deps/rabbitmq_tracing/src/rabbit_tracing_sup.erl
index d8fdd94d81..df764c4257 100644
--- a/deps/rabbitmq_tracing/src/rabbit_tracing_sup.erl
+++ b/deps/rabbitmq_tracing/src/rabbit_tracing_sup.erl
@@ -1,17 +1,8 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at https://www.mozilla.org/MPL/
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_tracing_sup).
diff --git a/deps/rabbitmq_tracing/src/rabbit_tracing_traces.erl b/deps/rabbitmq_tracing/src/rabbit_tracing_traces.erl
index fc2d2b1520..deac0eeb06 100644
--- a/deps/rabbitmq_tracing/src/rabbit_tracing_traces.erl
+++ b/deps/rabbitmq_tracing/src/rabbit_tracing_traces.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_tracing_traces).
diff --git a/deps/rabbitmq_tracing/src/rabbit_tracing_util.erl b/deps/rabbitmq_tracing/src/rabbit_tracing_util.erl
index 93cb1dcb20..0f80019de9 100644
--- a/deps/rabbitmq_tracing/src/rabbit_tracing_util.erl
+++ b/deps/rabbitmq_tracing/src/rabbit_tracing_util.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_tracing_util).
diff --git a/deps/rabbitmq_tracing/src/rabbit_tracing_wm_file.erl b/deps/rabbitmq_tracing/src/rabbit_tracing_wm_file.erl
index 17c225cba6..acea4077b8 100644
--- a/deps/rabbitmq_tracing/src/rabbit_tracing_wm_file.erl
+++ b/deps/rabbitmq_tracing/src/rabbit_tracing_wm_file.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_tracing_wm_file).
diff --git a/deps/rabbitmq_tracing/src/rabbit_tracing_wm_files.erl b/deps/rabbitmq_tracing/src/rabbit_tracing_wm_files.erl
index 74dc527524..6d87360934 100644
--- a/deps/rabbitmq_tracing/src/rabbit_tracing_wm_files.erl
+++ b/deps/rabbitmq_tracing/src/rabbit_tracing_wm_files.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_tracing_wm_files).
diff --git a/deps/rabbitmq_tracing/src/rabbit_tracing_wm_trace.erl b/deps/rabbitmq_tracing/src/rabbit_tracing_wm_trace.erl
index 92474be77e..f1af706ce4 100644
--- a/deps/rabbitmq_tracing/src/rabbit_tracing_wm_trace.erl
+++ b/deps/rabbitmq_tracing/src/rabbit_tracing_wm_trace.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_tracing_wm_trace).
diff --git a/deps/rabbitmq_tracing/src/rabbit_tracing_wm_traces.erl b/deps/rabbitmq_tracing/src/rabbit_tracing_wm_traces.erl
index f41dc76336..98a3c7698d 100644
--- a/deps/rabbitmq_tracing/src/rabbit_tracing_wm_traces.erl
+++ b/deps/rabbitmq_tracing/src/rabbit_tracing_wm_traces.erl
@@ -1,17 +1,8 @@
-%% The contents of this file are subject to the Mozilla Public License
-%% Version 1.1 (the "License"); you may not use this file except in
-%% compliance with the License. You may obtain a copy of the License
-%% at https://www.mozilla.org/MPL/
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and
-%% limitations under the License.
-%%
-%% The Original Code is RabbitMQ.
-%%
-%% The Initial Developer of the Original Code is GoPivotal, Inc.
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_tracing_wm_traces).
diff --git a/deps/rabbitmq_tracing/test/rabbit_tracing_SUITE.erl b/deps/rabbitmq_tracing/test/rabbit_tracing_SUITE.erl
index 9c34ca6605..1696bd9ae5 100644
--- a/deps/rabbitmq_tracing/test/rabbit_tracing_SUITE.erl
+++ b/deps/rabbitmq_tracing/test/rabbit_tracing_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_tracing_SUITE).
diff --git a/deps/rabbitmq_trust_store/BUILD.bazel b/deps/rabbitmq_trust_store/BUILD.bazel
new file mode 100644
index 0000000000..7bbf6fd23e
--- /dev/null
+++ b/deps/rabbitmq_trust_store/BUILD.bazel
@@ -0,0 +1,101 @@
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze", "plt")
+load("//:rabbitmq_home.bzl", "rabbitmq_home")
+load("//:rabbitmq_run.bzl", "rabbitmq_run")
+load(
+ "//:rabbitmq.bzl",
+ "RABBITMQ_DIALYZER_OPTS",
+ "assert_suites",
+ "rabbitmq_integration_suite",
+ "rabbitmq_lib",
+)
+
+APP_NAME = "rabbitmq_trust_store"
+
+APP_DESCRIPTION = "Client X.509 certificates trust store"
+
+APP_MODULE = "rabbit_trust_store_app"
+
+APP_ENV = """[
+ {default_refresh_interval, 30},
+ {providers, [rabbit_trust_store_file_provider]}
+ ]"""
+
+EXTRA_APPS = [
+ "ssl",
+ "crypto",
+ "public_key",
+]
+
+DEPS = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+]
+
+RUNTIME_DEPS = [
+ "//deps/rabbit:bazel_erlang_lib",
+]
+
+rabbitmq_lib(
+ app_description = APP_DESCRIPTION,
+ app_env = APP_ENV,
+ app_module = APP_MODULE,
+ app_name = APP_NAME,
+ extra_apps = EXTRA_APPS,
+ first_srcs = [
+ "src/rabbit_trust_store_certificate_provider.erl",
+ ],
+ runtime_deps = RUNTIME_DEPS,
+ deps = DEPS,
+)
+
+xref(tags = ["xref"])
+
+plt(
+ name = "base_plt",
+ apps = EXTRA_APPS,
+ plt = "//:base_plt",
+)
+
+dialyze(
+ dialyzer_opts = RABBITMQ_DIALYZER_OPTS,
+ plt = ":base_plt",
+ tags = ["dialyze"],
+)
+
+rabbitmq_home(
+ name = "broker-for-tests-home",
+ plugins = [
+ "//deps/rabbit:bazel_erlang_lib",
+ ":bazel_erlang_lib",
+ "//deps/amqp_client:bazel_erlang_lib",
+ "@ct_helper//:bazel_erlang_lib",
+ ],
+)
+
+rabbitmq_run(
+ name = "rabbitmq-for-tests-run",
+ home = ":broker-for-tests-home",
+)
+
+PACKAGE = "deps/rabbitmq_trust_store"
+
+suites = [
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "config_schema_SUITE",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "system_SUITE",
+ flaky = True,
+ runtime_deps = [
+ "@ct_helper//:bazel_erlang_lib",
+ "@trust_store_http//:bazel_erlang_lib",
+ ],
+ ),
+]
+
+assert_suites(
+ suites,
+ glob(["test/**/*_SUITE.erl"]),
+)
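For orientation only, a hedged note on how these suite targets might be invoked; the exact labels generated by rabbitmq_integration_suite are an assumption, not something this diff states:

# Hypothetical invocations (target labels assumed):
#   bazel test //deps/rabbitmq_trust_store:config_schema_SUITE
#   bazel test //deps/rabbitmq_trust_store:system_SUITE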
diff --git a/deps/rabbitmq_trust_store/CONTRIBUTING.md b/deps/rabbitmq_trust_store/CONTRIBUTING.md
index 23a92fef9c..9722e973fb 100644
--- a/deps/rabbitmq_trust_store/CONTRIBUTING.md
+++ b/deps/rabbitmq_trust_store/CONTRIBUTING.md
@@ -13,7 +13,7 @@ The process is fairly standard:
* Create a branch with a descriptive name in the relevant repositories
* Make your changes, run tests, commit with a [descriptive message](https://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
* Submit pull requests with an explanation what has been changed and **why**
- * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Submit a filled out and signed [Contributor Agreement](https://cla.pivotal.io/) if needed (see below)
* Be patient. We will get to your pull request eventually
If what you are going to work on is a substantial change, please first ask the core team
@@ -28,7 +28,7 @@ See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
## Contributor Agreement
If you want to contribute a non-trivial change, please submit a signed copy of our
-[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+[Contributor Agreement](https://cla.pivotal.io/) around the time
you submit your pull request. This will make it much easier (in some cases, possible)
for the RabbitMQ team at Pivotal to merge your contribution.
diff --git a/deps/rabbitmq_trust_store/Makefile b/deps/rabbitmq_trust_store/Makefile
index d9629187de..ac001d0eec 100644
--- a/deps/rabbitmq_trust_store/Makefile
+++ b/deps/rabbitmq_trust_store/Makefile
@@ -25,5 +25,5 @@ DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
ERLANG_MK_COMMIT = rabbitmq-tmp
-include rabbitmq-components.mk
-include erlang.mk
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
diff --git a/deps/rabbitmq_trust_store/erlang.mk b/deps/rabbitmq_trust_store/erlang.mk
deleted file mode 100644
index fce4be0b0a..0000000000
--- a/deps/rabbitmq_trust_store/erlang.mk
+++ /dev/null
@@ -1,7808 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
-ERLANG_MK_WITHOUT =
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
-
-# Adding erlang.mk to make Erlang scripts that call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
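A minimal sketch of how this macro is typically used from a recipe; the target name and the one-liner are made up, and $(comma) is needed so the Erlang commas do not split the $(call ...) arguments:

# Illustrative recipe, not part of the original file.
print-otp-release:
	$(verbose) $(call erlang,io:format("~s~n"$(comma) [erlang:system_info(otp_release)])$(comma) halt(0).)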
-
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
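A minimal sketch of how a project pins the copy that `make erlang-mk` regenerates; the values mirror the ones the rabbitmq_trust_store Makefile elsewhere in this diff already sets:

# Set before erlang.mk is included so the update target clones the pinned fork.
ERLANG_MK_REPO   = https://github.com/rabbitmq/erlang.mk.git
ERLANG_MK_COMMIT = rabbitmq-tmp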
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
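A hedged sketch of how a project would opt into this mechanism; the version string is made up and must be set before erlang.mk is included:

# Hypothetical pin; kerl builds and installs it under $(KERL_INSTALL_DIR) on first use.
ERLANG_OTP = 24.3.4
# Alternatively, running `make LATEST_ERLANG_OTP=1` picks the newest non-rc,
# non-master build already present under $(KERL_INSTALL_DIR).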
-
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another a key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simplify writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating Github-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = erlang library for JSON Web Token
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple non-intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is a pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elasticsearch's REST interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = redis erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += esh_mk
-pkg_esh_mk_name = esh_mk
-pkg_esh_mk_description = esh template engine plugin for erlang.mk
-pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
-pkg_esh_mk_fetch = git
-pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
-pkg_esh_mk_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = TOML language erlang parser
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = http://fixprotocol.org/ implementation.
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - an Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = The guards and for Erlang parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
-pkg_gun_homepage = http://ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang irc client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding and decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library for efficiently decoding and encoding JSON, written in C.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-Erlang, open-source implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = erlang driver for rethinkdb
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = library to handle mimetypes
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang OAuth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transform for using new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between record and proplist
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = automatic Erlang node discovery via redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = pipelined erlang redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang Rest Client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy as nif for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an ID generator for message services.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elastic Search
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX style broken HTML parser in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = transit format for erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU based collation Erlang module
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtension for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = a simple erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler svn repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired by rebar xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based yaml loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = Zab protocol implemented in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
-
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
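
For illustration, the variables above are typically overridden in a top-level project Makefile before erlang.mk is included; a minimal sketch (the application and directory names are hypothetical):

    LOCAL_DEPS = ssl crypto        # OTP applications; replaces the deprecated OTP_DEPS
    IGNORE_DEPS = some_doc_tool    # pulled in transitively but never built here
    DEPS_DIR = $(CURDIR)/vendor    # fetch dependencies into ./vendor instead of ./deps

    include erlang.mk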
-
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# They both use the core_dep_plugin macro.
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
-
-# Query functions.
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
-
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
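
To make the query helpers above concrete: a dependency is declared as whitespace-separated words (fetch method, repository, commit), which these functions then pick apart. A sketch using cowboy purely as an example:

    DEPS = cowboy
    dep_cowboy = git https://github.com/ninenines/cowboy master

    # With that declaration:
    #   $(call query_fetch_method,cowboy) -> git
    #   $(call query_repo,cowboy)         -> https://github.com/ninenines/cowboy
    #   $(call query_version,cowboy)      -> master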
-
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly we don't want to include it here,
-# otherwise it'll be treated both as an app and a top-level project.
-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
-
-export NO_AUTOPATCH
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
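
The `_0`/`_2` verbosity pairs above are selected through the V variable (assumed here to default to 0, as elsewhere in erlang.mk); for example:

    make deps          # V=0: terse "DEP <name> (<commit>)" lines
    make deps V=2      # run the same commands under `set -x` shell tracing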
-
-# Optimization: don't recompile deps unless truly necessary.
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create an ebin directory for all apps to make sure Erlang recognizes them
-# as proper OTP applications when using -include_lib. This is a temporary
-# fix; a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
-
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies after they were compiled
-# once. If a developer is working on the top-level project and some
-# dependencies at the same time, he may want to change this behavior.
-# There are two solutions:
-# 1. Set `FULL=1` so that all dependencies are visited and
-# recursively recompiled if necessary.
-# 2. Set `FORCE_REBUILD=` to the specific list of dependencies that
-# should be recompiled (instead of the whole set).
-
-FORCE_REBUILD ?=
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
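
A sketch of the two options described in the comment above (the dependency names are hypothetical):

    # Option 1: visit every dependency and recompile it if necessary.
    make FULL=1

    # Option 2: force a rebuild of specific dependencies only, e.g. in the
    # project Makefile before including erlang.mk:
    FORCE_REBUILD = cowlib ranch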
-
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
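
As the conditional above shows, setting SKIP_DEPS makes `deps` a no-op, which can save time once dependencies are already fetched and built; an illustrative invocation:

    make app SKIP_DEPS=1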
-
-# Deps related targets.
-
-# @todo Rename GNUmakefile and makefile into Makefile first, if they exist.
-# While the Makefile could also be named GNUmakefile or makefile,
-# in practice only Makefile has been needed so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
-# if given. Do it for all 3 possible Makefile file names.
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
-
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-# Hex only has a package version. No need to look in the Erlang.mk packages.
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
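
Matching the fetch rule above, a Hex dependency takes the version as its second word and, optionally, the Hex package name as its third word when it differs from the Erlang.mk dependency name. A sketch with hypothetical names and versions:

    DEPS = mylib
    dep_mylib = hex 1.0.0              # downloads mylib-1.0.0.tar from repo.hex.pm
    # or, if the package is published under a different name on Hex:
    dep_mylib = hex 1.0.0 my_lib_pkg   # downloads my_lib_pkg-1.0.0.tar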
-
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility purposes with older Erlang.mk configuration.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
-
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
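
The autopatch step wired in above can be tuned per project; an illustrative fragment (the dependency name is hypothetical):

    NO_AUTOPATCH = legacy_dep      # skip autopatching this dependency entirely
    NO_AUTOPATCH_ERLANG_MK = 1     # keep autopatching, but do not rewrite `include erlang.mk` lines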
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
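
As an example of tweaking the compiler configuration above in a project Makefile, before erlang.mk is included (module names are hypothetical):

    ERLC_OPTS = -Werror +debug_info +warn_export_vars +warn_shadow_vars \
        +warn_obsolete_guard +warn_missing_spec
    COMPILE_FIRST = my_behaviour     # modules that must be built before the rest
    ERLC_EXCLUDE = my_generated_mod  # modules compiled by other means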
-
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
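
The variables consumed by the two app_file templates above usually come from the project Makefile; a minimal, hypothetical example:

    PROJECT = my_app
    PROJECT_DESCRIPTION = Example application
    PROJECT_VERSION = 1.0.0
    PROJECT_MOD = my_app_app            # an existing src/my_app_app.erl selects the {mod, ...} template
    PROJECT_REGISTERED = my_worker
    PROJECT_ENV = [{listen_port, 8080}]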
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- Output0 = [
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ],
- Output = case "é" of
- [233] -> unicode:characters_to_binary(Output0);
- _ -> Output0
- end,
- ok = file:write_file("$(1)", Output),
- halt()
-endef
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
- echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
-
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
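
Test-only dependencies and compiler options plug into the variables above; a sketch (meck is just one possible choice of test dependency):

    TEST_DEPS = meck
    dep_meck = git https://github.com/eproxus/meck master
    TEST_ERLC_OPTS += +nowarn_export_all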
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
- @:
-
-test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
-test_erlc_verbose_2 = set -x;
-test_erlc_verbose = $(test_erlc_verbose_$(V))
-
-define compile_test_erl
- $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(1)
-endef
-
-ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
-$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
- $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want to fail due to
-# warnings when used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
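
In practice this target is invoked once to emit a rebar-compatible configuration for consumers who build the project with rebar or rebar3 instead of Erlang.mk:

    make rebar.config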
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
- " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
- " list-templates List available templates"
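
A few typical invocations of the targets listed in this help text (names are hypothetical, and erlang.mk is assumed to be present in the project directory):

    make -f erlang.mk bootstrap bootstrap-rel    # skeleton OTP application plus release files
    make list-templates                          # list the tpl_* templates defined below
    make new t=gen_server n=my_server in=my_app  # generate a gen_server module inside my_app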
-
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
-
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
-
-list-templates:
- $(verbose) @echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code. The "gcc" MSYS2 package also doesn't.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
- "The CI_OTP variable must be defined with the Erlang versions" \
- "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
-
-# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifdef CONCUERROR_TESTS
-
-.PHONY: concuerror distclean-concuerror
-
-# Configuration
-
-CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
-CONCUERROR_OPTS ?=
-
-# Core targets.
-
-check:: concuerror
-
-ifndef KEEP_LOGS
-distclean:: distclean-concuerror
-endif
-
-# Plugin-specific targets.
-
-$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
- $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
- $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
-
-$(CONCUERROR_LOGS_DIR):
- $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
-
-define concuerror_html_report
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="utf-8">
-<title>Concuerror HTML report</title>
-</head>
-<body>
-<h1>Concuerror HTML report</h1>
-<p>Generated on $(concuerror_date)</p>
-<ul>
-$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
-</ul>
-</body>
-</html>
-endef
-
-concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
- $(eval concuerror_date := $(shell date))
- $(eval concuerror_targets := $^)
- $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
-
-define concuerror_target
-.PHONY: concuerror-$1-$2
-
-concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
- $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
- --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
- -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
- $$(CONCUERROR_OPTS) -m $1 -t $2
-endef
-
-$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
-
-distclean-concuerror:
- $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
-
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ct apps-ct distclean-ct
-
-# Configuration.
-
-CT_OPTS ?=
-
-ifneq ($(wildcard $(TEST_DIR)),)
-ifndef CT_SUITES
-CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
-endif
-endif
-CT_SUITES ?=
-CT_LOGS_DIR ?= $(CURDIR)/logs
-
-# Core targets.
-
-tests:: ct
-
-ifndef KEEP_LOGS
-distclean:: distclean-ct
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Common_test targets:" \
- " ct Run all the common_test suites for this project" \
- "" \
- "All your common_test suites have their associated targets." \
- "A suite named http_SUITE can be run using the ct-http target."
-
-# Plugin-specific targets.
-
-CT_RUN = ct_run \
- -no_auto_compile \
- -noinput \
- -pa $(CURDIR)/ebin $(TEST_DIR) \
- -dir $(TEST_DIR) \
- -logdir $(CT_LOGS_DIR)
-
-ifeq ($(CT_SUITES),)
-ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
-else
-# We do not run tests if we are in an apps/* application with no test directory.
-ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
-ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
-endif
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-define ct_app_target
-apps-ct-$1: test-build
- $$(MAKE) -C $1 ct IS_APP=1
-endef
-
-$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
-
-apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
-endif
-
-ifdef t
-ifeq (,$(findstring :,$t))
-CT_EXTRA = -group $t
-else
-t_words = $(subst :, ,$t)
-CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
-endif
-else
-ifdef c
-CT_EXTRA = -case $c
-else
-CT_EXTRA =
-endif
-endif
-
-define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
-endef
-
-$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
-
-distclean-ct:
- $(gen_verbose) rm -rf $(CT_LOGS_DIR)
-
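For illustration, with the rules above every suite gets its own target (http_SUITE becomes ct-http), and the t and c variables narrow the run; the group and case names used here are hypothetical:

    make ct                             # run every suite in CT_SUITES
    make ct-http                        # run http_SUITE only
    make ct-http t=chunked              # run one group of the suite
    make ct-http t=chunked:te_chunked   # run one case within that group
    make ct-http c=te_chunked           # run one case directly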
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: plt distclean-plt dialyze
-
-# Configuration.
-
-DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
-export DIALYZER_PLT
-
-PLT_APPS ?=
-DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-DIALYZER_PLT_OPTS ?=
-
-# Core targets.
-
-check:: dialyze
-
-distclean:: distclean-plt
-
-help::
- $(verbose) printf "%s\n" "" \
- "Dialyzer targets:" \
- " plt Build a PLT file for this project" \
- " dialyze Analyze the project using Dialyzer"
-
-# Plugin-specific targets.
-
-define filter_opts.erl
- Opts = init:get_plain_arguments(),
- {Filtered, _} = lists:foldl(fun
- (O, {Os, true}) -> {[O|Os], false};
- (O = "-D", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-I", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-pa", {Os, _}) -> {[O|Os], true};
- (_, Acc) -> Acc
- end, {[], false}, Opts),
- io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
- halt().
-endef
-
-# DIALYZER_PLT is a variable understood directly by Dialyzer.
-#
-# We append the path to erts at the end of the PLT. This works
-# because the PLT file is in the external term format and the
-# function binary_to_term/1 ignores any trailing data.
-$(DIALYZER_PLT): deps app
- $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
- while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
- $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
- erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
- $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
-
-plt: $(DIALYZER_PLT)
-
-distclean-plt:
- $(gen_verbose) rm -f $(DIALYZER_PLT)
-
-ifneq ($(wildcard $(DIALYZER_PLT)),)
-dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
- $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
- grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
- rm $(DIALYZER_PLT); \
- $(MAKE) plt; \
- fi
-else
-dialyze: $(DIALYZER_PLT)
-endif
- $(verbose) dialyzer --no_native `$(ERL) \
- -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
- -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
-
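For illustration, the erts path appended to the PLT (see the comment above the $(DIALYZER_PLT) rule) can be checked by hand; the project name myapp and the printed path are hypothetical:

    make plt            # build .myapp.plt for erts, kernel, stdlib, PLT_APPS and the built deps
    make dialyze        # run Dialyzer; the PLT is rebuilt first if its trailing erts path is stale
    tail -n1 .myapp.plt # e.g. /usr/lib/erlang/lib/erts-12.2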
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
- " escript Build an executable escript archive" \
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
- $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: eunit apps-eunit
-
-# Configuration
-
-EUNIT_OPTS ?=
-EUNIT_ERL_OPTS ?=
-
-# Core targets.
-
-tests:: eunit
-
-help::
- $(verbose) printf "%s\n" "" \
- "EUnit targets:" \
- " eunit Run all the EUnit tests for this project"
-
-# Plugin-specific targets.
-
-define eunit.erl
- $(call cover.erl)
- CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
- ok -> ok;
- error -> halt(2)
- end,
- CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
- halt()
-endef
-
-EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
-else
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
-endif
-else
-EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
-EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
-
-EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
- $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
-
-eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
-ifneq ($(wildcard src/ $(TEST_DIR)),)
- $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-apps-eunit: test-build
- $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
- [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
- exit $$eunit_retcode
-endif
-endif
-
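For illustration, the t variable handled above selects what EUnit runs; my_module and its test function are hypothetical names:

    make eunit                          # run EUnit for all modules in ebin/ and TEST_DIR
    make eunit t=my_module              # run the tests of a single module
    make eunit t=my_module:hello_test   # run a single zero-arity test function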
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper)
-.PHONY: proper
-
-# Targets.
-
-tests:: proper
-
-define proper_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- Module = fun(M) ->
- [true] =:= lists:usort([
- case atom_to_list(F) of
- "prop_" ++ _ ->
- io:format("Testing ~p:~p/0~n", [M, F]),
- proper:quickcheck(M:F(), nocolors);
- _ ->
- true
- end
- || {F, 0} <- M:module_info(exports)])
- end,
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]);
- module -> Module($(2));
- function -> proper:quickcheck($(2), nocolors)
- end,
- CoverExport("$(COVER_DATA_DIR)/proper.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-proper: test-build cover-data-dir
- $(verbose) $(call erlang,$(call proper_check.erl,module,$(t)))
-else
-proper: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)()))
-endif
-else
-proper: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
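For illustration, the proper plugin above follows the same t convention; the module and property names are hypothetical:

    make proper                            # run every exported prop_* property found in the compiled modules
    make proper t=my_prop_module           # run all properties of one module
    make proper t=my_prop_module:prop_foo  # run a single property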
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Verbosity.
-
-proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
-proto_verbose = $(proto_verbose_$(V))
-
-# Core targets.
-
-ifneq ($(wildcard src/),)
-ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
-PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
-ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
-
-ifeq ($(PROTO_FILES),)
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
- $(verbose) :
-else
-# Rebuild proto files when the Makefile changes.
-# We exclude $(PROJECT).d to avoid a circular dependency.
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(PROTO_FILES); \
- fi
- $(verbose) touch $@
-
-$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
-endif
-
-ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
-define compile_proto.erl
- [begin
- protobuffs_compile:generate_source(F, [
- {output_include_dir, "./include"},
- {output_src_dir, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-else
-define compile_proto.erl
- [begin
- gpb_compile:file(F, [
- {include_as_lib, true},
- {module_name_suffix, "_pb"},
- {o_hrl, "./include"},
- {o_erl, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-endif
-
-ifneq ($(PROTO_FILES),)
-$(PROJECT).d:: $(PROTO_FILES)
- $(verbose) mkdir -p ebin/ include/
- $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
-endif
-endif
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
-
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
- " shell Run an Erlang shell with SHELL_OPTS or a reasonable default"
-
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
-
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
- "ReST sources and the 'conf.py' file are expected in the directory pointed to by" \
- "SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists formats to build (only" \
- "'html' format is generated by default); the target directory can be specified by" \
- 'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
- "Additional Sphinx options can be set in SPHINX_OPTS."
-
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
-
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
-.PHONY: triq
-
-# Targets.
-
-tests:: triq
-
-define triq_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
- module -> triq:check($(2));
- function -> triq:check($(2))
- end,
- CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-triq: test-build cover-data-dir
- $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
-else
-triq: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
-endif
-else
-triq: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
- ' xref Run Xrefr using $$XREF_CONFIG as the config file, if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-COVER_REPORT_DIR ?= cover
-COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
-
-ifdef COVER
-COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
-COVER_DEPS ?=
-endif
-
-# Code coverage for Common Test.
-
-ifdef COVER
-ifdef CT_RUN
-ifneq ($(wildcard $(TEST_DIR)),)
-test-build:: $(TEST_DIR)/ct.cover.spec
-
-$(TEST_DIR)/ct.cover.spec: cover-data-dir
- $(gen_verbose) printf "%s\n" \
- "{incl_app, '$(PROJECT)', details}." \
- "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
- $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
- $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
-
-CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
-endif
-endif
-endif
-
-# Code coverage for other tools.
-
-ifdef COVER
-define cover.erl
- CoverSetup = fun() ->
- Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
- $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
- $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
- end
- end || Dir <- Dirs]
- end,
- CoverExport = fun(Filename) -> cover:export(Filename) end,
-endef
-else
-define cover.erl
- CoverSetup = fun() -> ok end,
- CoverExport = fun(_) -> ok end,
-endef
-endif
-
-# Core targets
-
-ifdef COVER
-ifneq ($(COVER_REPORT_DIR),)
-tests::
- $(verbose) $(MAKE) --no-print-directory cover-report
-endif
-
-cover-data-dir: | $(COVER_DATA_DIR)
-
-$(COVER_DATA_DIR):
- $(verbose) mkdir -p $(COVER_DATA_DIR)
-else
-cover-data-dir:
-endif
-
-clean:: coverdata-clean
-
-ifneq ($(COVER_REPORT_DIR),)
-distclean:: cover-report-clean
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Cover targets:" \
- " cover-report Generate an HTML coverage report from previously collected" \
- " cover data." \
- " all.coverdata Merge all coverdata files into all.coverdata." \
- "" \
- "If COVER=1 is set, coverage data is generated by the eunit and ct targets. The" \
- "tests target additionally generates an HTML coverage report from the combined" \
- "coverdata files from each of these testing tools. HTML reports can be disabled" \
- "by setting COVER_REPORT_DIR to empty."
-
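For illustration, coverage is driven by variables on the make command line, as the help text above describes (disabling the HTML report by emptying COVER_REPORT_DIR is per that text):

    make tests COVER=1    # run eunit and ct under cover, then generate the HTML report
    make all.coverdata    # merge previously collected coverdata files into all.coverdata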
-# Plugin specific targets
-
-COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
-
-.PHONY: coverdata-clean
-coverdata-clean:
- $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
-
-# Merge all coverdata files into one.
-define cover_export.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
-endef
-
-all.coverdata: $(COVERDATA) cover-data-dir
- $(gen_verbose) $(call erlang,$(cover_export.erl))
-
-# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
-# empty if you want the coverdata files but not the HTML report.
-ifneq ($(COVER_REPORT_DIR),)
-
-.PHONY: cover-report-clean cover-report
-
-cover-report-clean:
- $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
-ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
- $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
-endif
-
-ifeq ($(COVERDATA),)
-cover-report:
-else
-
-# Modules which include eunit.hrl always contain one line without coverage
-# because eunit defines test/0 which is never called. We compensate for this.
-EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
- grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
- | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
-
-define cover_report.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- Ms = cover:imported_modules(),
- [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
- ++ ".COVER.html", [html]) || M <- Ms],
- Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
- EunitHrlMods = [$(EUNIT_HRL_MODS)],
- Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
- true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
- TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
- TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
- Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
- TotalPerc = Perc(TotalY, TotalN),
- {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
- io:format(F, "<!DOCTYPE html><html>~n"
- "<head><meta charset=\"UTF-8\">~n"
- "<title>Coverage report</title></head>~n"
- "<body>~n", []),
- io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
- io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
- [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
- "<td>~p%</td></tr>~n",
- [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
- How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
- Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
- io:format(F, "</table>~n"
- "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
- "</body></html>", [How, Date]),
- halt().
-endef
-
-cover-report:
- $(verbose) mkdir -p $(COVER_REPORT_DIR)
- $(gen_verbose) $(call erlang,$(cover_report.erl))
-
-endif
-endif # ifneq ($(COVER_REPORT_DIR),)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
-
-endif
-endif
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
-
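For illustration, both plugin forms handled by the foreach above can appear in a project Makefile; cowboy and the mk/dist.mk path are only examples:

    DEP_PLUGINS = cowboy             # no slash: resolves to cowboy/plugins.mk
    DEP_PLUGINS = cowboy/mk/dist.mk  # with a slash: that exact path, owned by the cowboy dependency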
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
-
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are always included,
-# no matter which type of dependencies is requested.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
-
-# Allow using fetch-deps together with $(DEP_TYPES) to fetch multiple types of
-# dependencies with a single target.
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
-
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
diff --git a/deps/rabbitmq_trust_store/rabbitmq-components.mk b/deps/rabbitmq_trust_store/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/rabbitmq_trust_store/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Define the default goal as `all` because this file defines some targets
-# before the inclusion of erlang.mk, which would otherwise cause the wrong
-# target to become the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
-
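For illustration, the sed pipeline above would map a hypothetical git-describe output onto a version string like this:

    v3.9.0-12-gabc1234  ->  3.9.0+12.gabc1234

(the leading 'v' is stripped, underscores become dots, the first '-' becomes '+' and any remaining '-' become '.').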
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to check out branches which match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch or fall back to `stable` or `master`, whichever was the
-# base of the topic branch.
-
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
-
-# Third-party dependencies version pinning.
-#
-# We do that in this file, which is copied in all projects, to ensure
-# all projects use the same versions. It avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# Erlang.mk does not rebuild dependencies by default, once they were
-# compiled once, except for those listed in the `$(FORCE_REBUILD)`
-# variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this eases
-# the work on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project
-# repository URL, if it's a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name are replaced by the
-# target's properties:
-# eg. rabbitmq-common is replaced by rabbitmq-codegen
-# eg. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fall back to the
-# RabbitMQ upstream on GitHub.
-
-# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following pattern:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
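As a worked example (the fork URL is hypothetical and shown only to illustrate the substitution, assuming subst_repo_name is in scope):

URL := https://github.com/myfork/rabbitmq-server.git
$(info $(call subst_repo_name,rabbitmq-server,rabbitmq-common,$(URL)))
# prints: https://github.com/myfork/rabbitmq-common.git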
-
-# Macro to replace both the project's name (eg. "rabbit_common") and
-# repository name (eg. "rabbitmq-common") by the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespaces in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
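With a dependency such as rabbit_common declared against a hypothetical topic branch `my-topic` (and `master` as the base ref), the $(foreach ...) above expands, roughly, to this shell fallback chain:

cd deps/rabbit_common && ( \
  git checkout -q my-topic >/dev/null 2>&1 || \
  git checkout -q master >/dev/null 2>&1 || \
  (echo "error: no valid pathspec among: my-topic master" 1>&2 && false) )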
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level's one.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is a coincidence that we found a
-# `rabbitmq-components.mk` in upper directories.
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
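Concretely, for a plugin checked out inside the monorepo (the paths below are hypothetical), the first check already fires:

# $(CURDIR)      = /src/rabbitmq-server/deps/rabbitmq_trust_store
# $(abspath ..)  = /src/rabbitmq-server/deps            (basename is `deps`)
# $(abspath ..)/../rabbitmq-components.mk exists at the repository root
# => DEPS_DIR = /src/rabbitmq-server/deps and DISABLE_DISTCLEAN = 1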
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
diff --git a/deps/rabbitmq_trust_store/src/rabbit_trust_store.erl b/deps/rabbitmq_trust_store/src/rabbit_trust_store.erl
index 6ebe2dc2ac..26806af821 100644
--- a/deps/rabbitmq_trust_store/src/rabbit_trust_store.erl
+++ b/deps/rabbitmq_trust_store/src/rabbit_trust_store.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_trust_store).
@@ -238,7 +238,7 @@ refresh_provider_certs(Provider, Config, ProviderState) ->
NewProviderState;
{error, Reason} ->
rabbit_log:error("Unable to load certificate list for provider ~p,"
- " reason: ~p~n",
+ " reason: ~p",
[Provider, Reason]),
ProviderState
end.
@@ -317,7 +317,7 @@ providers(Config) ->
{module, Provider} -> true;
{error, Error} ->
rabbit_log:warning("Unable to load trust store certificates"
- " with provider module ~p. Reason: ~p~n",
+ " with provider module ~p. Reason: ~p",
[Provider, Error]),
false
end
diff --git a/deps/rabbitmq_trust_store/src/rabbit_trust_store_app.erl b/deps/rabbitmq_trust_store/src/rabbit_trust_store_app.erl
index 5048eb60e5..a5ca630f07 100644
--- a/deps/rabbitmq_trust_store/src/rabbit_trust_store_app.erl
+++ b/deps/rabbitmq_trust_store/src/rabbit_trust_store_app.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_trust_store_app).
@@ -50,7 +50,7 @@ edit(Options) ->
Val ->
rabbit_log:warning("RabbitMQ trust store plugin is used "
"and the verify_fun TLS option is set: ~p. "
- "It will be overwritten by the plugin.~n", [Val]),
+ "It will be overwritten by the plugin.", [Val]),
ok
end,
    %% Only enter those options necessary for this application.
diff --git a/deps/rabbitmq_trust_store/src/rabbit_trust_store_certificate_provider.erl b/deps/rabbitmq_trust_store/src/rabbit_trust_store_certificate_provider.erl
index 10e3367ec5..cb45f21a74 100644
--- a/deps/rabbitmq_trust_store/src/rabbit_trust_store_certificate_provider.erl
+++ b/deps/rabbitmq_trust_store/src/rabbit_trust_store_certificate_provider.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_trust_store_certificate_provider).
@@ -28,4 +28,4 @@
when CertId :: term(),
Attributes :: list(),
Config :: list(),
- Cert :: public_key:der_encoded(). \ No newline at end of file
+ Cert :: public_key:der_encoded().
diff --git a/deps/rabbitmq_trust_store/src/rabbit_trust_store_file_provider.erl b/deps/rabbitmq_trust_store/src/rabbit_trust_store_file_provider.erl
index 820ad550dd..88cd55a535 100644
--- a/deps/rabbitmq_trust_store/src/rabbit_trust_store_file_provider.erl
+++ b/deps/rabbitmq_trust_store/src/rabbit_trust_store_file_provider.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_trust_store_file_provider).
diff --git a/deps/rabbitmq_trust_store/src/rabbit_trust_store_http_provider.erl b/deps/rabbitmq_trust_store/src/rabbit_trust_store_http_provider.erl
index 5b05e5a986..6c0838b8b7 100644
--- a/deps/rabbitmq_trust_store/src/rabbit_trust_store_http_provider.erl
+++ b/deps/rabbitmq_trust_store/src/rabbit_trust_store_http_provider.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_trust_store_http_provider).
diff --git a/deps/rabbitmq_trust_store/src/rabbit_trust_store_sup.erl b/deps/rabbitmq_trust_store/src/rabbit_trust_store_sup.erl
index f587d1e000..2b8a975be3 100644
--- a/deps/rabbitmq_trust_store/src/rabbit_trust_store_sup.erl
+++ b/deps/rabbitmq_trust_store/src/rabbit_trust_store_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_trust_store_sup).
diff --git a/deps/rabbitmq_trust_store/test/config_schema_SUITE.erl b/deps/rabbitmq_trust_store/test/config_schema_SUITE.erl
index a2f0e01a81..27271d43e3 100644
--- a/deps/rabbitmq_trust_store/test/config_schema_SUITE.erl
+++ b/deps/rabbitmq_trust_store/test/config_schema_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(config_schema_SUITE).
diff --git a/deps/rabbitmq_trust_store/test/system_SUITE.erl b/deps/rabbitmq_trust_store/test/system_SUITE.erl
index 78e74f3b57..e1940b7859 100644
--- a/deps/rabbitmq_trust_store/test/system_SUITE.erl
+++ b/deps/rabbitmq_trust_store/test/system_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(system_SUITE).
@@ -212,7 +212,7 @@ validation_success_for_AMQP_client1(Config) ->
catch rabbit_networking:stop_tcp_listener(Port),
ok = rabbit_networking:start_ssl_listener(Port, [{cacerts, [Root]},
{cert, Certificate2},
- {key, Key2} | cfg()], 1),
+ {key, Key2} | cfg()], 1, 1),
    %% Then: a client presenting a certificate rooted at the same
%% authority connects successfully.
@@ -245,7 +245,7 @@ validation_failure_for_AMQP_client1(Config) ->
catch rabbit_networking:stop_tcp_listener(Port),
ok = rabbit_networking:start_ssl_listener(Port, [{cacerts, [Root]},
{cert, Cert},
- {key, Key} | cfg()], 1),
+ {key, Key} | cfg()], 1, 1),
%% Then: a client presenting a certificate rooted with another
%% authority is REJECTED.
@@ -297,7 +297,7 @@ validate_chain1(Config) ->
catch rabbit_networking:stop_tcp_listener(Port),
ok = rabbit_networking:start_ssl_listener(Port, [{cacerts, [Root]},
{cert, Cert},
- {key, Key} | cfg()], 1),
+ {key, Key} | cfg()], 1, 1),
    %% When: a client connects and presents `RootTrusted` as well as the `CertTrusted`
%% Then: the connection is successful.
@@ -341,7 +341,7 @@ validate_longer_chain1(Config) ->
catch rabbit_networking:stop_tcp_listener(Port),
ok = rabbit_networking:start_ssl_listener(Port, [{cacerts, [Root]},
{cert, Cert},
- {key, Key} | cfg()], 1),
+ {key, Key} | cfg()], 1, 1),
    %% When: a client connects and presents `CertInter` as well as the `CertTrusted`
%% Then: the connection is successful.
@@ -439,7 +439,7 @@ validate_chain_without_whitelisted1(Config) ->
catch rabbit_networking:stop_tcp_listener(Port),
ok = rabbit_networking:start_ssl_listener(Port, [{cacerts, [Root]},
{cert, Cert},
- {key, Key} | cfg()], 1),
+ {key, Key} | cfg()], 1, 1),
%% When: Rabbit validates paths
%% Then: a client presenting the non-whitelisted certificate `CertUntrusted` and `RootUntrusted`
@@ -487,7 +487,7 @@ whitelisted_certificate_accepted_from_AMQP_client_regardless_of_validation_to_ro
catch rabbit_networking:stop_tcp_listener(Port),
ok = rabbit_networking:start_ssl_listener(Port, [{cacerts, [Root]},
{cert, Cert},
- {key, Key} | cfg()], 1),
+ {key, Key} | cfg()], 1, 1),
%% Then: a client presenting the whitelisted certificate `C`
%% is allowed.
@@ -521,7 +521,7 @@ removed_certificate_denied_from_AMQP_client1(Config) ->
catch rabbit_networking:stop_tcp_listener(Port),
ok = rabbit_networking:start_ssl_listener(Port, [{cacerts, [Root]},
{cert, Cert},
- {key, Key} | cfg()], 1),
+ {key, Key} | cfg()], 1, 1),
wait_for_file_system_time(),
ok = delete("bob.pem", Config),
@@ -572,7 +572,7 @@ installed_certificate_accepted_from_AMQP_client1(Config) ->
catch rabbit_networking:stop_tcp_listener(Port),
ok = rabbit_networking:start_ssl_listener(Port, [{cacerts, [Root]},
{cert, Cert},
- {key, Key} | cfg()], 1),
+ {key, Key} | cfg()], 1, 1),
wait_for_file_system_time(),
ok = whitelist(Config, "charlie", CertOther, KeyOther),
@@ -619,7 +619,7 @@ whitelist_directory_DELTA1(Config) ->
catch rabbit_networking:stop_tcp_listener(Port),
ok = rabbit_networking:start_ssl_listener(Port, [{cacerts, [Root]},
{cert, Cert},
- {key, Key} | cfg()], 1),
+ {key, Key} | cfg()], 1, 1),
wait_for_file_system_time(),
ok = delete("bar.pem", Config),
@@ -680,7 +680,7 @@ replaced_whitelisted_certificate_should_be_accepted1(Config) ->
catch rabbit_networking:stop_tcp_listener(Port),
ok = rabbit_networking:start_ssl_listener(Port, [{cacerts, [Root]},
{cert, Cert},
- {key, Key} | cfg()], 1),
+ {key, Key} | cfg()], 1, 1),
%% And: the first certificate has been whitelisted
ok = whitelist(Config, "bart", CertFirst, KeyFirst),
rabbit_trust_store:refresh(),
@@ -789,7 +789,7 @@ ignore_corrupt_cert1(Config) ->
catch rabbit_networking:stop_tcp_listener(Port),
ok = rabbit_networking:start_ssl_listener(Port, [{cacerts, [Root]},
{cert, Cert},
- {key, Key} | cfg()], 1),
+ {key, Key} | cfg()], 1, 1),
%% Then: the trust store should keep functioning
%% And: a client presenting the whitelisted certificate `CertTrusted`
@@ -825,7 +825,7 @@ ignore_same_cert_with_different_name1(Config) ->
catch rabbit_networking:stop_tcp_listener(Port),
ok = rabbit_networking:start_ssl_listener(Port, [{cacerts, [Root]},
{cert, Cert},
- {key, Key} | cfg()], 1),
+ {key, Key} | cfg()], 1, 1),
%% Then: the trust store should keep functioning.
%% And: a client presenting the whitelisted certificate `CertTrusted`
diff --git a/deps/rabbitmq_web_dispatch/BUILD.bazel b/deps/rabbitmq_web_dispatch/BUILD.bazel
new file mode 100644
index 0000000000..096d91d0cd
--- /dev/null
+++ b/deps/rabbitmq_web_dispatch/BUILD.bazel
@@ -0,0 +1,88 @@
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze", "plt")
+load("//:rabbitmq_home.bzl", "rabbitmq_home")
+load("//:rabbitmq_run.bzl", "rabbitmq_run")
+load(
+ "//:rabbitmq.bzl",
+ "APP_VERSION",
+ "RABBITMQ_DIALYZER_OPTS",
+ "assert_suites",
+ "rabbitmq_integration_suite",
+ "rabbitmq_lib",
+ "rabbitmq_suite",
+)
+
+APP_NAME = "rabbitmq_web_dispatch"
+
+APP_DESCRIPTION = "RabbitMQ Web Dispatcher"
+
+APP_MODULE = "rabbit_web_dispatch_app"
+
+EXTRA_APPS = [
+ "inets",
+]
+
+DEPS = [
+ "//deps/rabbit:bazel_erlang_lib",
+ "//deps/rabbit_common:bazel_erlang_lib",
+ "@cowboy//:bazel_erlang_lib",
+]
+
+rabbitmq_lib(
+ app_description = APP_DESCRIPTION,
+ app_module = APP_MODULE,
+ app_name = APP_NAME,
+ app_version = APP_VERSION,
+ extra_apps = EXTRA_APPS,
+ deps = DEPS,
+)
+
+xref(tags = ["xref"])
+
+plt(
+ name = "base_plt",
+ apps = EXTRA_APPS,
+ plt = "//:base_plt",
+)
+
+dialyze(
+ dialyzer_opts = RABBITMQ_DIALYZER_OPTS,
+ plt = ":base_plt",
+ tags = ["dialyze"],
+)
+
+rabbitmq_home(
+ name = "broker-for-tests-home",
+ testonly = True,
+ plugins = [
+ "//deps/rabbit:bazel_erlang_lib",
+ ":test_bazel_erlang_lib",
+ ],
+)
+
+rabbitmq_run(
+ name = "rabbitmq-for-tests-run",
+ testonly = True,
+ home = ":broker-for-tests-home",
+)
+
+PACKAGE = "deps/rabbitmq_web_dispatch"
+
+suites = [
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "rabbit_web_dispatch_SUITE",
+ data = [
+ "test/priv/www/index.html",
+ ],
+ ),
+ rabbitmq_suite(
+ name = "rabbit_web_dispatch_unit_SUITE",
+ size = "medium",
+ ),
+]
+
+assert_suites(
+ suites,
+ glob(["test/**/*_SUITE.erl"]),
+)
diff --git a/deps/rabbitmq_web_dispatch/CONTRIBUTING.md b/deps/rabbitmq_web_dispatch/CONTRIBUTING.md
index 23a92fef9c..9722e973fb 100644
--- a/deps/rabbitmq_web_dispatch/CONTRIBUTING.md
+++ b/deps/rabbitmq_web_dispatch/CONTRIBUTING.md
@@ -13,7 +13,7 @@ The process is fairly standard:
* Create a branch with a descriptive name in the relevant repositories
* Make your changes, run tests, commit with a [descriptive message](https://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
* Submit pull requests with an explanation what has been changed and **why**
- * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Submit a filled out and signed [Contributor Agreement](https://cla.pivotal.io/) if needed (see below)
* Be patient. We will get to your pull request eventually
If what you are going to work on is a substantial change, please first ask the core team
@@ -28,7 +28,7 @@ See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
## Contributor Agreement
If you want to contribute a non-trivial change, please submit a signed copy of our
-[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+[Contributor Agreement](https://cla.pivotal.io/) around the time
you submit your pull request. This will make it much easier (in some cases, possible)
for the RabbitMQ team at Pivotal to merge your contribution.
diff --git a/deps/rabbitmq_web_dispatch/Makefile b/deps/rabbitmq_web_dispatch/Makefile
index a451bf2979..fd377840ab 100644
--- a/deps/rabbitmq_web_dispatch/Makefile
+++ b/deps/rabbitmq_web_dispatch/Makefile
@@ -21,6 +21,6 @@ DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
ERLANG_MK_COMMIT = rabbitmq-tmp
-include rabbitmq-components.mk
+include ../../rabbitmq-components.mk
TEST_DEPS := $(filter-out rabbitmq_test,$(TEST_DEPS))
-include erlang.mk
+include ../../erlang.mk
diff --git a/deps/rabbitmq_web_dispatch/erlang.mk b/deps/rabbitmq_web_dispatch/erlang.mk
deleted file mode 100644
index fce4be0b0a..0000000000
--- a/deps/rabbitmq_web_dispatch/erlang.mk
+++ /dev/null
@@ -1,7808 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
-ERLANG_MK_WITHOUT =
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
-
-# Adding erlang.mk to make Erlang scripts that call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
-
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_HIPE)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
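A minimal sketch of pinning a project to one OTP build via this mechanism; the version name below is an assumption and must match a ref in $(OTP_GIT):

# in the top-level Makefile, before erlang.mk is included
ERLANG_OTP = OTP-23.3.4
include erlang.mk
# on first use, kerl builds and installs $(KERL_INSTALL_DIR)/OTP-23.3.4,
# whose bin/ directory is then prepended to PATH for every recipe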
-
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
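For context, an entry such as the cowboy block above is consumed by erlang.mk's dependency machinery: a project lists the package key in DEPS, and the build fetches pkg_cowboy_repo at pkg_cowboy_commit using the pkg_cowboy_fetch method. A minimal sketch of a consuming Makefile follows; the project name is hypothetical and not part of this index.

# Minimal erlang.mk-based project Makefile (sketch; "my_app" is a hypothetical name).
PROJECT = my_app

# "cowboy" is the package key from the index above; erlang.mk resolves it
# through pkg_cowboy_fetch, pkg_cowboy_repo and pkg_cowboy_commit.
DEPS = cowboy

include erlang.mk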
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simplify writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating Github-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
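Note that the fetch method is not uniform across the index: the ehsa entry above is fetched with Mercurial and pinned to the default branch, while most entries use git and master. A consuming project is also not forced to take the index defaults; erlang.mk lets it override a package's ref or repository with dep_* variables, which take precedence over the pkg_* values. A small sketch, with a purely illustrative ref:

# Sketch: overriding an index entry from a project Makefile.
DEPS = cowboy

# Pin a different ref than the index default; "2.0.0" is only an example.
dep_cowboy_commit = 2.0.0
# Or give fetch method, repository and ref in one line instead:
# dep_cowboy = git https://github.com/ninenines/cowboy 2.0.0

include erlang.mk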
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = erlang library for JSON Web Token
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple non intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is a pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's REST interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = redis erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += esh_mk
-pkg_esh_mk_name = esh_mk
-pkg_esh_mk_description = esh template engine plugin for erlang.mk
-pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
-pkg_esh_mk_fetch = git
-pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
-pkg_esh_mk_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = TOML language erlang parser
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = http://fixprotocol.org/ implementation.
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - an Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = The guards and for Erlang parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
-pkg_gun_homepage = http://ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang irc client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding and decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library, written in C, for efficiently decoding and encoding JSON.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-Erlang, open-source implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = erlang driver for rethinkdb
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = library to handle mimetypes
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang OAuth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transform for using new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between record and proplist
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = automatic Erlang node discovery via redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = pipelined erlang redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang Rest Client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy as nif for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an ID generator for message services.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elastic Search
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX style broken HTML parser in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = Transit format for Erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang Twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU based collation Erlang module
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtension for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = A simple Erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler SVN repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired by rebar xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based yaml loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = Zab protocol implementation in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
-
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
-
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# They both use the core_dep_plugin macro.
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
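# A minimal illustration of the early plugin mechanism above: a project
# Makefile might either name a dependency, which includes that dependency's
# early-plugins.mk, or give a path relative to DEPS_DIR. The dependency
# names below are hypothetical.
#
#   DEP_EARLY_PLUGINS = some_dep
#   DEP_EARLY_PLUGINS += other_dep/mk/early.mk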
-
-# Query functions.
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
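# A minimal illustration of the query functions above, as they might be used
# from a project Makefile or plugin to print where a dependency would come
# from; the target name is hypothetical and "cowboy" is only an example name
# resolvable through dep_cowboy or the package index.
#
#   print-cowboy-origin:
#   	@echo "cowboy: $(call query_repo,cowboy) @ $(call query_version,cowboy)"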
-
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
-
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly we don't want to include it here,
-# otherwise it'll be treated both as an app and as the top-level project.
-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
-
-export NO_AUTOPATCH
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
-
-# Optimization: don't recompile deps unless truly necessary.
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create an ebin directory for all apps to make sure Erlang recognizes them
-# as proper OTP applications when using -include_lib. This is a temporary
-# fix; a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
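# A minimal illustration of the LOCAL_DEPS behavior described above: with the
# following line in the top-level Makefile, the apps target only builds
# apps/my_app rather than every application found under APPS_DIR ("my_app"
# is a hypothetical application name).
#
#   LOCAL_DEPS = my_app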
-
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies once they have been compiled.
-# If a developer is working on the top-level project and on some of its
-# dependencies at the same time, they may want to change this behavior.
-# There are two solutions:
-# 1. Set `FULL=1` so that all dependencies are visited and
-# recursively recompiled if necessary.
-# 2. Set `FORCE_REBUILD=` to the specific list of dependencies that
-# should be recompiled (instead of the whole set).
-
-FORCE_REBUILD ?=
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
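# For illustration, the two solutions described above might be used from the
# command line as follows ("ranch" is only an example dependency name):
#
#   $ make FULL=1              # revisit and rebuild all dependencies
#   $ make FORCE_REBUILD=ranch # force only deps/ranch to be rebuilt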
-
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
-
-# Deps related targets.
-
-# @todo Rename GNUmakefile and makefile to Makefile first, if they exist.
-# While the Makefile could also be named GNUmakefile or makefile,
-# in practice only Makefile has been needed so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
-# if given. Do it for all 3 possible Makefile file names.
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
-
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
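# A minimal illustration of a git-subfolder dependency: the fourth word names
# the subfolder that is linked into DEPS_DIR ("my_app" and the repository URL
# are hypothetical).
#
#   DEPS += my_app
#   dep_my_app = git-subfolder https://github.com/example/monorepo master apps/my_app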
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-# Hex dependencies only carry a package version, so there is no need to look
-# them up in the Erlang.mk package index.
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
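# A minimal illustration of a Hex dependency: the second word is the version
# to download, and an optional third word names the Hex package when it
# differs from the application name ("jsx" and the version are examples only).
#
#   DEPS += jsx
#   dep_jsx = hex 3.1.0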
-
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility with older Erlang.mk configurations.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
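# For illustration, the deprecated format the warning above refers to puts the
# repository URL first, optionally followed by a commit, branch or tag
# ("my_dep" and the URL are hypothetical):
#
#   dep_my_dep = https://github.com/example/my_dep.git some-tag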
-
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
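# For illustration, a project would normally append to these options from its
# own Makefile rather than replace them wholesale, e.g. to enable one of the
# warnings commented out above:
#
#   ERLC_OPTS += +warn_missing_spec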
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
-
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- Output0 = [
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ],
- Output = case "é" of
- [233] -> unicode:characters_to_binary(Output0);
- _ -> Output0
- end,
- ok = file:write_file("$(1)", Output),
- halt()
-endef
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
- echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
-
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
- @:
-
-test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
-test_erlc_verbose_2 = set -x;
-test_erlc_verbose = $(test_erlc_verbose_$(V))
-
-define compile_test_erl
- $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(1)
-endef
-
-ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
-$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
- $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want to fail due to
-# warnings when used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
-
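# A sketch of the output, assuming DEPS = cowlib with the usual
# dep_cowlib = git https://github.com/ninenines/cowlib 2.12.1 spec and the
# default ERLC_OPTS (URL and commit are illustrative):
#
#   {deps, [
#   {cowlib,".*",{git,"https://github.com/ninenines/cowlib","2.12.1"}}
#   ]}.
#   {erl_opts, [debug_info,warn_export_vars,warn_shadow_vars,warn_obsolete_guard]}.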
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
- " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
- " list-templates List available templates"
-
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
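# For example, bootstrapping with SP=4 makes WS four spaces, so the generated
# files are indented with spaces; without SP, WS stays a single tab.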
-
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
-
-list-templates:
- $(verbose) @echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
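# Usage sketch (template, module and application names are hypothetical):
#
#   $ make new t=gen_server n=my_worker              # creates src/my_worker.erl
#   $ make new t=cowboy_http n=my_handler in=my_app  # creates $(APPS_DIR)/my_app/src/my_handler.erl
#   $ make list-templates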
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code. The "gcc" MSYS2 package also doesn't.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
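# The generated env.mk simply records absolute paths into the local OTP
# install, for instance (paths are illustrative):
#
#   # Generated by Erlang.mk. Edit at your own risk!
#
#   ERTS_INCLUDE_DIR ?= /usr/lib/erlang/erts-12.2/include/
#   ERL_INTERFACE_INCLUDE_DIR ?= /usr/lib/erlang/lib/erl_interface-5.2.2/include
#   ERL_INTERFACE_LIB_DIR ?= /usr/lib/erlang/lib/erl_interface-5.2.2/lib
#   ERTS_DIR ?= /usr/lib/erlang/erts-12.2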
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
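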
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
- "The CI_OTP variable must be defined with the Erlang versions" \
- "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
-
-# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifdef CONCUERROR_TESTS
-
-.PHONY: concuerror distclean-concuerror
-
-# Configuration
-
-CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
-CONCUERROR_OPTS ?=
-
-# Core targets.
-
-check:: concuerror
-
-ifndef KEEP_LOGS
-distclean:: distclean-concuerror
-endif
-
-# Plugin-specific targets.
-
-$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
- $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
- $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
-
-$(CONCUERROR_LOGS_DIR):
- $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
-
-define concuerror_html_report
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="utf-8">
-<title>Concuerror HTML report</title>
-</head>
-<body>
-<h1>Concuerror HTML report</h1>
-<p>Generated on $(concuerror_date)</p>
-<ul>
-$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
-</ul>
-</body>
-</html>
-endef
-
-concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
- $(eval concuerror_date := $(shell date))
- $(eval concuerror_targets := $^)
- $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
-
-define concuerror_target
-.PHONY: concuerror-$1-$2
-
-concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
- $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
- --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
- -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
- $$(CONCUERROR_OPTS) -m $1 -t $2
-endef
-
-$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
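# For example, CONCUERROR_TESTS = my_mod:my_test (module and test names are
# hypothetical) generates a concuerror-my_mod-my_test target whose report is
# written to $(CONCUERROR_LOGS_DIR)/concuerror-my_mod-my_test.txt and linked
# from concuerror.html.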
-
-distclean-concuerror:
- $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
-
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ct apps-ct distclean-ct
-
-# Configuration.
-
-CT_OPTS ?=
-
-ifneq ($(wildcard $(TEST_DIR)),)
-ifndef CT_SUITES
-CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
-endif
-endif
-CT_SUITES ?=
-CT_LOGS_DIR ?= $(CURDIR)/logs
-
-# Core targets.
-
-tests:: ct
-
-ifndef KEEP_LOGS
-distclean:: distclean-ct
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Common_test targets:" \
- " ct Run all the common_test suites for this project" \
- "" \
- "All your common_test suites have their associated targets." \
-		"A suite named http_SUITE can be run using the ct-http target."
-
-# Plugin-specific targets.
-
-CT_RUN = ct_run \
- -no_auto_compile \
- -noinput \
- -pa $(CURDIR)/ebin $(TEST_DIR) \
- -dir $(TEST_DIR) \
- -logdir $(CT_LOGS_DIR)
-
-ifeq ($(CT_SUITES),)
-ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
-else
-# We do not run tests if we are in an apps/* with no test directory.
-ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
-ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
-endif
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-define ct_app_target
-apps-ct-$1: test-build
- $$(MAKE) -C $1 ct IS_APP=1
-endef
-
-$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
-
-apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
-endif
-
-ifdef t
-ifeq (,$(findstring :,$t))
-CT_EXTRA = -group $t
-else
-t_words = $(subst :, ,$t)
-CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
-endif
-else
-ifdef c
-CT_EXTRA = -case $c
-else
-CT_EXTRA =
-endif
-endif
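# Usage sketch for the per-suite targets (group and case names are hypothetical):
#
#   $ make ct-http                      # run http_SUITE
#   $ make ct-http t=my_group           # only the my_group group
#   $ make ct-http t=my_group:my_case   # a single case within that group
#   $ make ct-http c=my_case            # a single case, without a group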
-
-define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
-endef
-
-$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
-
-distclean-ct:
- $(gen_verbose) rm -rf $(CT_LOGS_DIR)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: plt distclean-plt dialyze
-
-# Configuration.
-
-DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
-export DIALYZER_PLT
-
-PLT_APPS ?=
-DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-DIALYZER_PLT_OPTS ?=
-
-# Core targets.
-
-check:: dialyze
-
-distclean:: distclean-plt
-
-help::
- $(verbose) printf "%s\n" "" \
- "Dialyzer targets:" \
- " plt Build a PLT file for this project" \
- " dialyze Analyze the project using Dialyzer"
-
-# Plugin-specific targets.
-
-define filter_opts.erl
- Opts = init:get_plain_arguments(),
- {Filtered, _} = lists:foldl(fun
- (O, {Os, true}) -> {[O|Os], false};
- (O = "-D", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-I", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-pa", {Os, _}) -> {[O|Os], true};
- (_, Acc) -> Acc
- end, {[], false}, Opts),
- io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
- halt().
-endef
-
-# DIALYZER_PLT is a variable understood directly by Dialyzer.
-#
-# We append the path to erts at the end of the PLT. This works
-# because the PLT file is in the external term format and the
-# function binary_to_term/1 ignores any trailing data.
-$(DIALYZER_PLT): deps app
- $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
- while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
- $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
- erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
- $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
-
-plt: $(DIALYZER_PLT)
-
-distclean-plt:
- $(gen_verbose) rm -f $(DIALYZER_PLT)
-
-ifneq ($(wildcard $(DIALYZER_PLT)),)
-dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
- $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
- grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
- rm $(DIALYZER_PLT); \
- $(MAKE) plt; \
- fi
-else
-dialyze: $(DIALYZER_PLT)
-endif
- $(verbose) dialyzer --no_native `$(ERL) \
- -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
- -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
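# As an illustration (project name and path are hypothetical), after `make plt`
# the PLT ends with the erts path as trailing data:
#
#   $ tail -n1 .my_app.plt
#   /usr/lib/erlang/lib/erts-12.2
#
# The dialyze target compares that line with the current code:lib_dir(erts) and
# rebuilds the PLT when it no longer matches, e.g. after an OTP upgrade.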
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
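# Naming sketch (template path is hypothetical): with the default settings,
# templates/users/profile.dtl compiles to ebin/profile_dtl.beam; when
# DTL_FULL_PATH is set, the module becomes users_profile_dtl instead.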
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
- " escript Build an executable escript archive" \
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
- $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
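# The default emulator arguments select main/1 in the module named after
# ESCRIPT_NAME. A minimal entry point sketch, assuming the default
# ESCRIPT_NAME (the project name, here my_app):
#
#   -module(my_app).
#   -export([main/1]).
#
#   main(Args) ->
#       io:format("~p~n", [Args]).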
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: eunit apps-eunit
-
-# Configuration
-
-EUNIT_OPTS ?=
-EUNIT_ERL_OPTS ?=
-
-# Core targets.
-
-tests:: eunit
-
-help::
- $(verbose) printf "%s\n" "" \
- "EUnit targets:" \
- " eunit Run all the EUnit tests for this project"
-
-# Plugin-specific targets.
-
-define eunit.erl
- $(call cover.erl)
- CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
- ok -> ok;
- error -> halt(2)
- end,
- CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
- halt()
-endef
-
-EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
-else
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
-endif
-else
-EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
-EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
-
-EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
- $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
-
-eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
-ifneq ($(wildcard src/ $(TEST_DIR)),)
- $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-apps-eunit: test-build
- $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
- [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
- exit $$eunit_retcode
-endif
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper)
-.PHONY: proper
-
-# Targets.
-
-tests:: proper
-
-define proper_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- Module = fun(M) ->
- [true] =:= lists:usort([
- case atom_to_list(F) of
- "prop_" ++ _ ->
- io:format("Testing ~p:~p/0~n", [M, F]),
- proper:quickcheck(M:F(), nocolors);
- _ ->
- true
- end
- || {F, 0} <- M:module_info(exports)])
- end,
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]);
- module -> Module($(2));
- function -> proper:quickcheck($(2), nocolors)
- end,
- CoverExport("$(COVER_DATA_DIR)/proper.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-proper: test-build cover-data-dir
- $(verbose) $(call erlang,$(call proper_check.erl,module,$(t)))
-else
-proper: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)()))
-endif
-else
-proper: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES)))
-endif
-endif
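# Usage sketch (module and property names are hypothetical):
#
#   $ make proper                               # all prop_* properties in all modules
#   $ make proper t=prop_my_mod                 # every property in one module
#   $ make proper t=prop_my_mod:prop_reverse    # a single property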
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Verbosity.
-
-proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
-proto_verbose = $(proto_verbose_$(V))
-
-# Core targets.
-
-ifneq ($(wildcard src/),)
-ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
-PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
-ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
-
-ifeq ($(PROTO_FILES),)
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
- $(verbose) :
-else
-# Rebuild proto files when the Makefile changes.
-# We exclude $(PROJECT).d to avoid a circular dependency.
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(PROTO_FILES); \
- fi
- $(verbose) touch $@
-
-$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
-endif
-
-ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
-define compile_proto.erl
- [begin
- protobuffs_compile:generate_source(F, [
- {output_include_dir, "./include"},
- {output_src_dir, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-else
-define compile_proto.erl
- [begin
- gpb_compile:file(F, [
- {include_as_lib, true},
- {module_name_suffix, "_pb"},
- {o_hrl, "./include"},
- {o_erl, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-endif
-
-ifneq ($(PROTO_FILES),)
-$(PROJECT).d:: $(PROTO_FILES)
- $(verbose) mkdir -p ebin/ include/
- $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
-endif
-endif
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
-
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
-		"  shell              Run an Erlang shell with SHELL_OPTS or a reasonable default"
-
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
-
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
-		"ReST sources and the 'conf.py' file are expected in the directory pointed to by" \
-		"SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists the formats to build (only" \
-		"the 'html' format is generated by default); the target directory can be specified by" \
- 'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
- "Additional Sphinx options can be set in SPHINX_OPTS."
-
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
-
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
-.PHONY: triq
-
-# Targets.
-
-tests:: triq
-
-define triq_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
- module -> triq:check($(2));
- function -> triq:check($(2))
- end,
- CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-triq: test-build cover-data-dir
- $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
-else
-triq: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
-endif
-else
-triq: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
-		'  xref        Run Xrefr using $$XREF_CONFIG as the config file if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-COVER_REPORT_DIR ?= cover
-COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
-
-ifdef COVER
-COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
-COVER_DEPS ?=
-endif
-
-# Code coverage for Common Test.
-
-ifdef COVER
-ifdef CT_RUN
-ifneq ($(wildcard $(TEST_DIR)),)
-test-build:: $(TEST_DIR)/ct.cover.spec
-
-$(TEST_DIR)/ct.cover.spec: cover-data-dir
- $(gen_verbose) printf "%s\n" \
- "{incl_app, '$(PROJECT)', details}." \
- "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
- $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
- $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
-
-CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
-endif
-endif
-endif
-
-# Code coverage for other tools.
-
-ifdef COVER
-define cover.erl
- CoverSetup = fun() ->
- Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
- $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
- $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
- end
- end || Dir <- Dirs]
- end,
- CoverExport = fun(Filename) -> cover:export(Filename) end,
-endef
-else
-define cover.erl
- CoverSetup = fun() -> ok end,
- CoverExport = fun(_) -> ok end,
-endef
-endif
-
-# Core targets
-
-ifdef COVER
-ifneq ($(COVER_REPORT_DIR),)
-tests::
- $(verbose) $(MAKE) --no-print-directory cover-report
-endif
-
-cover-data-dir: | $(COVER_DATA_DIR)
-
-$(COVER_DATA_DIR):
- $(verbose) mkdir -p $(COVER_DATA_DIR)
-else
-cover-data-dir:
-endif
-
-clean:: coverdata-clean
-
-ifneq ($(COVER_REPORT_DIR),)
-distclean:: cover-report-clean
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Cover targets:" \
-		"  cover-report  Generate an HTML coverage report from previously collected" \
- " cover data." \
- " all.coverdata Merge all coverdata files into all.coverdata." \
- "" \
- "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
-		"target tests additionally generates an HTML coverage report from the combined" \
- "coverdata files from each of these testing tools. HTML reports can be disabled" \
- "by setting COVER_REPORT_DIR to empty."
-
-# Plugin specific targets
-
-COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
-
-.PHONY: coverdata-clean
-coverdata-clean:
- $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
-
-# Merge all coverdata files into one.
-define cover_export.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
-endef
-
-all.coverdata: $(COVERDATA) cover-data-dir
- $(gen_verbose) $(call erlang,$(cover_export.erl))
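# Usage sketch: collect coverage from several test runs, then merge it:
#
#   $ make ct COVER=1
#   $ make eunit COVER=1
#   $ make all.coverdata      # writes $(COVER_DATA_DIR)/all.coverdata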
-
-# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
-# empty if you want the coverdata files but not the HTML report.
-ifneq ($(COVER_REPORT_DIR),)
-
-.PHONY: cover-report-clean cover-report
-
-cover-report-clean:
- $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
-ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
- $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
-endif
-
-ifeq ($(COVERDATA),)
-cover-report:
-else
-
-# Modules which include eunit.hrl always contain one line without coverage
-# because eunit defines test/0 which is never called. We compensate for this.
-EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
- grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
- | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
-
-define cover_report.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- Ms = cover:imported_modules(),
- [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
- ++ ".COVER.html", [html]) || M <- Ms],
- Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
- EunitHrlMods = [$(EUNIT_HRL_MODS)],
- Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
- true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
- TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
- TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
- Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
- TotalPerc = Perc(TotalY, TotalN),
- {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
- io:format(F, "<!DOCTYPE html><html>~n"
- "<head><meta charset=\"UTF-8\">~n"
- "<title>Coverage report</title></head>~n"
- "<body>~n", []),
- io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
- io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
- [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
- "<td>~p%</td></tr>~n",
- [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
- How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
- Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
- io:format(F, "</table>~n"
- "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
- "</body></html>", [How, Date]),
- halt().
-endef
-
-cover-report:
- $(verbose) mkdir -p $(COVER_REPORT_DIR)
- $(gen_verbose) $(call erlang,$(cover_report.erl))
-
-endif
-endif # ifneq ($(COVER_REPORT_DIR),)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
-
-endif
-endif
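A brief sketch of how the self-extracting release might be produced and run, assuming a relx-based project, the default `_rel` output directory and a release named `my_release` (both illustrative):

    # Build the release; the sfx hook appends the archive to the generated stub
    make rel SFX=1
    # The resulting file extracts itself to a temp dir and starts the node in console mode
    ./_rel/my_release.run
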
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
-
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
-
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are also included no
-# matter the type of requested dependencies.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
-
-# Allow fetch-deps and $(DEP_TYPES) to be used to fetch multiple types of
-# dependencies with a single target.
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
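One possible invocation of the mechanism above, fetching normal, test and doc dependencies in a single pass without compiling them:

    make fetch-deps DEP_TYPES='test doc'
    # Print the resulting list of dependency directories
    make list-deps
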
-
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
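A sketch of how the query targets above might be invoked; the QUERY words are those defined above, and the output is written to the per-type query files and echoed:

    # Report the name and repository of every dependency, recursively
    make query-deps QUERY="name repo"
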
diff --git a/deps/rabbitmq_web_dispatch/rabbitmq-components.mk b/deps/rabbitmq_web_dispatch/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/rabbitmq_web_dispatch/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Set the default goal to `all` because this file defines some targets
-# before the inclusion of erlang.mk, leading to the wrong target becoming
-# the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
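For illustration, step 3 above boils down to rewriting the git-describe(1) output; with a made-up tag the transformation looks like this:

    git describe --dirty --abbrev=7 --tags --always --first-parent
    # => v3.9.11-12-gabcdef0
    # after the sed rewrites above: 3.9.11+12.gabcdef0
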
-
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to check out branches which match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch or fall back to `stable` or `master`, whichever was the
-# base of the topic branch.
-
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
-
-# Third-party dependencies version pinning.
-#
-# We do that in this file, which is copied in all projects, to ensure
-# all projects use the same versions. It avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# Erlang.mk does not rebuild dependencies by default once they have been
-# compiled, except for those listed in the `$(FORCE_REBUILD)` variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this eases
-# working on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project
-# repository URL, if it's a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name are replaced by the
-# target's properties:
-# eg. rabbitmq-common is replaced by rabbitmq-codegen
-# eg. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fall back to the
-# RabbitMQ upstream, which is GitHub.
-
-# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following pattern:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
-
-# Macro to replace both the project's name (eg. "rabbit_common") and
-# repository name (eg. "rabbitmq-common") by the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespace in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
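This is not the macro itself, but an equivalent one-off rewrite in plain shell, shown only to illustrate the ".git" case; the URL and names are made up:

    echo 'https://example.com/team/rabbitmq-common.git' \
      | sed 's|/rabbitmq-common\.git$|/rabbitmq-codegen.git|'
    # => https://example.com/team/rabbitmq-codegen.git
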
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level's one.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is a coincidence that we found a
-# `rabbitmq-components.mk` in upper directories.
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
diff --git a/deps/rabbitmq_web_dispatch/src/rabbit_cowboy_middleware.erl b/deps/rabbitmq_web_dispatch/src/rabbit_cowboy_middleware.erl
index c8186619f6..836df9b03f 100644
--- a/deps/rabbitmq_web_dispatch/src/rabbit_cowboy_middleware.erl
+++ b/deps/rabbitmq_web_dispatch/src/rabbit_cowboy_middleware.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_cowboy_middleware).
diff --git a/deps/rabbitmq_web_dispatch/src/rabbit_cowboy_redirect.erl b/deps/rabbitmq_web_dispatch/src/rabbit_cowboy_redirect.erl
index 9271567d91..3844aad072 100644
--- a/deps/rabbitmq_web_dispatch/src/rabbit_cowboy_redirect.erl
+++ b/deps/rabbitmq_web_dispatch/src/rabbit_cowboy_redirect.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_cowboy_redirect).
diff --git a/deps/rabbitmq_web_dispatch/src/rabbit_cowboy_stream_h.erl b/deps/rabbitmq_web_dispatch/src/rabbit_cowboy_stream_h.erl
index d51086a69d..b71baca1de 100644
--- a/deps/rabbitmq_web_dispatch/src/rabbit_cowboy_stream_h.erl
+++ b/deps/rabbitmq_web_dispatch/src/rabbit_cowboy_stream_h.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_cowboy_stream_h).
diff --git a/deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch.erl b/deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch.erl
index 24a81a986a..0f86f4566d 100644
--- a/deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch.erl
+++ b/deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_web_dispatch).
diff --git a/deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch_app.erl b/deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch_app.erl
index 1de05c77d7..ff0eff8686 100644
--- a/deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch_app.erl
+++ b/deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch_app.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_web_dispatch_app).
diff --git a/deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch_listing_handler.erl b/deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch_listing_handler.erl
index 93ee349af1..10cef26004 100644
--- a/deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch_listing_handler.erl
+++ b/deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch_listing_handler.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_web_dispatch_listing_handler).
diff --git a/deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch_registry.erl b/deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch_registry.erl
index 8d933ac668..2782369528 100644
--- a/deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch_registry.erl
+++ b/deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch_registry.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_web_dispatch_registry).
@@ -116,7 +116,7 @@ handle_call(list_all, _From, undefined) ->
{reply, list(), undefined};
handle_call(Req, _From, State) ->
- rabbit_log:error("Unexpected call to ~p: ~p~n", [?MODULE, Req]),
+ rabbit_log:error("Unexpected call to ~p: ~p", [?MODULE, Req]),
{stop, unknown_request, State}.
handle_cast(_, State) ->
@@ -157,9 +157,9 @@ listener_info(Listener) ->
P
end,
Port = pget(port, Listener),
- [{Protocol, IPAddress, Port}
- || {IPAddress, _Port, _Family}
- <- rabbit_networking:tcp_listener_addresses(Port)].
+ [{IPAddress, _Port, _Family} | _]
+ = rabbit_networking:tcp_listener_addresses(Port),
+ [{Protocol, IPAddress, Port}].
lookup_dispatch(Lsnr) ->
case ets:lookup(?ETS, pget(port, Lsnr)) of
diff --git a/deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch_sup.erl b/deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch_sup.erl
index 1d270550c0..ac649c87ea 100644
--- a/deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch_sup.erl
+++ b/deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_web_dispatch_sup).
@@ -39,7 +39,7 @@ ensure_listener(Listener) ->
[rabbit_cowboy_middleware, cowboy_router, cowboy_handler],
stream_handlers => StreamHandlers},
ProtoOptsMap),
- Child = ranch:child_spec(rabbit_networking:ranch_ref(Listener), 100,
+ Child = ranch:child_spec(rabbit_networking:ranch_ref(Listener),
Transport, TransportOpts,
cowboy_clear, CowboyOptsMap),
case supervisor:start_child(?SUP, Child) of
@@ -51,18 +51,18 @@ ensure_listener(Listener) ->
stop_listener(Listener) ->
Name = rabbit_networking:ranch_ref(Listener),
- ok = supervisor:terminate_child(?SUP, {ranch_listener_sup, Name}),
- ok = supervisor:delete_child(?SUP, {ranch_listener_sup, Name}).
+ ok = supervisor:terminate_child(?SUP, {ranch_embedded_sup, Name}),
+ ok = supervisor:delete_child(?SUP, {ranch_embedded_sup, Name}).
%% @spec init([[instance()]]) -> SupervisorTree
%% @doc supervisor callback.
init([]) ->
Registry = {rabbit_web_dispatch_registry,
{rabbit_web_dispatch_registry, start_link, []},
- transient, 5000, worker, dynamic},
+ transient, 5000, worker, [rabbit_web_dispatch_registry]},
Log = {rabbit_mgmt_access_logger, {gen_event, start_link,
[{local, webmachine_log_event}]},
- permanent, 5000, worker, [dynamic]},
+ permanent, 5000, worker, dynamic},
{ok, {{one_for_one, 10, 10}, [Registry, Log]}}.
%%
diff --git a/deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch_util.erl b/deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch_util.erl
index 8aca673a48..d15d3f6000 100644
--- a/deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch_util.erl
+++ b/deps/rabbitmq_web_dispatch/src/rabbit_web_dispatch_util.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_web_dispatch_util).
diff --git a/deps/rabbitmq_web_dispatch/src/webmachine_log.erl b/deps/rabbitmq_web_dispatch/src/webmachine_log.erl
index dd4da91924..73f67b9c2f 100644
--- a/deps/rabbitmq_web_dispatch/src/webmachine_log.erl
+++ b/deps/rabbitmq_web_dispatch/src/webmachine_log.erl
@@ -131,7 +131,7 @@ log_access({_, _, _}=LogData) ->
%% @doc Close a log file.
-spec log_close(atom(), string(), file:io_device()) -> ok | {error, term()}.
log_close(Mod, Name, FD) ->
- error_logger:info_msg("~p: closing log file: ~p~n", [Mod, Name]),
+ logger:info("~p: closing log file: ~p", [Mod, Name]),
file:close(FD).
%% @doc Open a new log file for writing
@@ -144,7 +144,7 @@ log_open(FileName) ->
-spec log_open(string(), non_neg_integer()) -> file:io_device().
log_open(FileName, DateHour) ->
LogName = FileName ++ suffix(DateHour),
- error_logger:info_msg("opening log file: ~p~n", [LogName]),
+ logger:info("opening log file: ~p", [LogName]),
filelib:ensure_dir(LogName),
{ok, FD} = file:open(LogName, [read, write, raw]),
{ok, Location} = file:position(FD, eof),
diff --git a/deps/rabbitmq_web_dispatch/test/rabbit_web_dispatch_SUITE.erl b/deps/rabbitmq_web_dispatch/test/rabbit_web_dispatch_SUITE.erl
index 5975272e1a..115c0cfbac 100644
--- a/deps/rabbitmq_web_dispatch/test/rabbit_web_dispatch_SUITE.erl
+++ b/deps/rabbitmq_web_dispatch/test/rabbit_web_dispatch_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_web_dispatch_SUITE).
@@ -71,12 +71,9 @@ query_static_resource_test1(Host, Port) ->
rabbit_web_dispatch:register_static_context(test, [{port, Port}],
"rabbit_web_dispatch_test",
?MODULE, "test/priv/www", "Test"),
- inets:start(),
{ok, {_Status, _Headers, Body}} =
httpc:request(format("http://~s:~w/rabbit_web_dispatch_test/index.html", [Host, Port])),
- ?assert(string:str(Body, "RabbitMQ HTTP Server Test Page") /= 0),
-
- passed.
+ ?assertMatch(I when I > 0, string:str(Body, "RabbitMQ HTTP Server Test Page")).
add_idempotence_test(Config) ->
Port = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_http_extra),
diff --git a/deps/rabbitmq_web_dispatch/test/rabbit_web_dispatch_unit_SUITE.erl b/deps/rabbitmq_web_dispatch/test/rabbit_web_dispatch_unit_SUITE.erl
index a71dabe014..ca0043acc9 100644
--- a/deps/rabbitmq_web_dispatch/test/rabbit_web_dispatch_unit_SUITE.erl
+++ b/deps/rabbitmq_web_dispatch/test/rabbit_web_dispatch_unit_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_web_dispatch_unit_SUITE).
diff --git a/deps/rabbitmq_web_mqtt/BUILD.bazel b/deps/rabbitmq_web_mqtt/BUILD.bazel
new file mode 100644
index 0000000000..a2322a10c0
--- /dev/null
+++ b/deps/rabbitmq_web_mqtt/BUILD.bazel
@@ -0,0 +1,99 @@
+load("@bazel-erlang//:bazel_erlang_lib.bzl", "erlc")
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze")
+load(
+ "//:rabbitmq.bzl",
+ "RABBITMQ_DIALYZER_OPTS",
+ "RABBITMQ_TEST_ERLC_OPTS",
+ "assert_suites",
+ "broker_for_integration_suites",
+ "rabbitmq_integration_suite",
+ "rabbitmq_lib",
+)
+
+APP_NAME = "rabbitmq_web_mqtt"
+
+APP_DESCRIPTION = "RabbitMQ MQTT-over-WebSockets adapter"
+
+APP_MODULE = "rabbit_web_mqtt_app"
+
+APP_ENV = """[
+ {tcp_config, [{port, 15675}]},
+ {ssl_config, []},
+ {num_tcp_acceptors, 10},
+ {num_ssl_acceptors, 10},
+ {cowboy_opts, []},
+ {proxy_protocol, false}
+ ]"""
+
+BUILD_DEPS = [
+ "//deps/amqp_client:bazel_erlang_lib",
+ "@ranch//:bazel_erlang_lib",
+]
+
+DEPS = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+ "@cowboy//:bazel_erlang_lib",
+]
+
+RUNTIME_DEPS = [
+ "//deps/rabbit:bazel_erlang_lib",
+ "//deps/rabbitmq_mqtt:bazel_erlang_lib",
+]
+
+rabbitmq_lib(
+ app_description = APP_DESCRIPTION,
+ app_env = APP_ENV,
+ app_module = APP_MODULE,
+ app_name = APP_NAME,
+ build_deps = BUILD_DEPS,
+ runtime_deps = RUNTIME_DEPS,
+ deps = DEPS,
+)
+
+xref(tags = ["xref"])
+
+dialyze(
+ dialyzer_opts = RABBITMQ_DIALYZER_OPTS,
+ plt = "//:base_plt",
+ tags = ["dialyze"],
+)
+
+broker_for_integration_suites()
+
+PACKAGE = "deps/rabbitmq_web_mqtt"
+
+erlc(
+ name = "test_utils",
+ srcs = glob(["test/src/*.erl"]),
+ hdrs = glob(["test/src/*.hrl"]),
+ dest = "test",
+ erlc_opts = RABBITMQ_TEST_ERLC_OPTS,
+)
+
+suites = [
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "config_schema_SUITE",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "proxy_protocol_SUITE",
+ additional_beam = [":test_utils"],
+ additional_hdrs = glob(["test/src/*.hrl"]),
+ erlc_opts = [
+ "-I deps/rabbitmq_web_mqtt/test",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "system_SUITE",
+ additional_beam = [":test_utils"],
+ additional_hdrs = glob(["test/src/*.hrl"]),
+ ),
+]
+
+assert_suites(
+ suites,
+ glob(["test/**/*_SUITE.erl"]),
+)
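Possible ways to exercise the new package; the test target names are assumed to follow the suite names declared above, and exact flags depend on the workspace configuration:

    bazel test //deps/rabbitmq_web_mqtt:system_SUITE
    bazel build //deps/rabbitmq_web_mqtt:all
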
diff --git a/deps/rabbitmq_web_mqtt/Makefile b/deps/rabbitmq_web_mqtt/Makefile
index 6d38319759..7084d2f15e 100644
--- a/deps/rabbitmq_web_mqtt/Makefile
+++ b/deps/rabbitmq_web_mqtt/Makefile
@@ -29,5 +29,5 @@ DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
ERLANG_MK_COMMIT = rabbitmq-tmp
-include rabbitmq-components.mk
-include erlang.mk
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
diff --git a/deps/rabbitmq_web_mqtt/erlang.mk b/deps/rabbitmq_web_mqtt/erlang.mk
deleted file mode 100644
index fce4be0b0a..0000000000
--- a/deps/rabbitmq_web_mqtt/erlang.mk
+++ /dev/null
@@ -1,7808 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
-ERLANG_MK_WITHOUT =
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
-
-# Adding erlang.mk to make Erlang scripts that call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
-
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
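In practice the automated update above might be driven like this; the repository and commit values come from the variables above, and WITHOUT can strip optional plugin categories:

    # Re-fetch erlang.mk and rebuild the bundled copy in place
    make erlang-mk
    # Or pin to a specific repository and commit
    make erlang-mk ERLANG_MK_REPO=https://github.com/rabbitmq/erlang.mk ERLANG_MK_COMMIT=rabbitmq-tmp
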
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_HIPE)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
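A minimal sketch of pinning a project to a specific Erlang/OTP version through kerl; the tag name is illustrative, and the first run clones and builds OTP, which takes a while:

    make ERLANG_OTP=OTP-23.3
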
-
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / OpenStack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simplify writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating Github-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = erlang library for JSON Web Token
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple non-intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is a pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = redis erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += esh_mk
-pkg_esh_mk_name = esh_mk
-pkg_esh_mk_description = esh template engine plugin for erlang.mk
-pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
-pkg_esh_mk_fetch = git
-pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
-pkg_esh_mk_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = TOML language erlang parser
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = http://fixprotocol.org/ implementation.
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - an Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = The guards and for Erlang parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
-pkg_gun_homepage = http://ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang irc client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding and decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library for efficiently decoding and encoding JSON, written in C.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-Erlang, open-source implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = erlang driver for rethinkdb
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = library to handle mimetypes
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang Oauth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transformer for use new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between record and proplist
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = automatic Erlang node discovery via redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = pipelined erlang redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang Rest Client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy as nif for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an id generator for message service.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elastic Search
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX style broken HTML parser in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = transit format for erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU based collation Erlang module
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtension for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = a simple erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler svn repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired by rebar xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based yaml loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = Zab protocol implemented in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
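# Usage sketch: the search target above matches the lower-cased query against
# each package's name and description; "mqtt" below is only an illustrative
# query string.
#
#   make search q=mqtt
#
# Without q, every entry in PACKAGES is printed through pkg_print.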
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
-
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
-
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# They both use the core_dep_plugin macro.
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
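# Illustrative configuration (the dependency name "my_dep" is hypothetical):
#
#   DEP_EARLY_PLUGINS = my_dep                # includes $(DEPS_DIR)/my_dep/early-plugins.mk
#   DEP_EARLY_PLUGINS = my_dep/mk/early.mk    # includes $(DEPS_DIR)/my_dep/mk/early.mk
#
# Either form is expanded through the core_dep_plugin macro above.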
-
-# Query functions.
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
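# Example resolution, assuming a hypothetical dependency declared as
# "dep_cowboy = git https://github.com/ninenines/cowboy 2.9.0":
#
#   $(call query_fetch_method,cowboy) -> git
#   $(call query_repo,cowboy)         -> https://github.com/ninenines/cowboy
#   $(call query_version,cowboy)      -> 2.9.0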
-
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
-
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly, we don't want to include it here;
-# otherwise it'll be treated both as an app and as a top-level project.
-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
-
-export NO_AUTOPATCH
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
-
-# Optimization: don't recompile deps unless truly necessary.
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create the ebin directory for all apps to make sure Erlang recognizes them
-# as proper OTP applications when using -include_lib. This is a temporary
-# fix; a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
-
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies after they have been compiled
-# once. A developer working on the top-level project and some of its
-# dependencies at the same time may want to change this behavior.
-# There are two solutions (see the example invocations after the
-# conditional block below):
-# 1. Set `FULL=1` so that all dependencies are visited and
-#    recursively recompiled if necessary.
-# 2. Set `FORCE_REBUILD=` to the specific list of dependencies that
-#    should be recompiled (instead of the whole set).
-
-FORCE_REBUILD ?=
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
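# Example invocations for the two solutions described above ("cowlib" and
# "ranch" stand in for whatever dependencies the project actually uses):
#
#   make deps FULL=1
#   make deps FORCE_REBUILD="cowlib ranch"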
-
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
-
-# Deps related targets.
-
-# @todo rename GNUmakefile and makefile to Makefile first, if they exist
-# While the Makefile could be named GNUmakefile or makefile,
-# in practice only Makefile has been needed so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
-# if given. Do it for all 3 possible Makefile file names.
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
-
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
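# Illustrative git-subfolder dependency (repository layout is hypothetical):
#
#   DEPS += my_app
#   dep_my_app = git-subfolder https://github.com/user/monorepo main apps/my_app
#
# The clone lives under $(ERLANG_MK_TMP)/git-subfolder and $(DEPS_DIR)/my_app
# becomes a symlink to the apps/my_app subfolder of that clone.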
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-# Hex only has a package version. No need to look in the Erlang.mk packages.
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
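# Illustrative hex dependency (package and version are only examples):
#
#   DEPS += jsx
#   dep_jsx = hex 3.1.0
#
# This downloads https://repo.hex.pm/tarballs/jsx-3.1.0.tar and unpacks its
# contents.tar.gz into $(DEPS_DIR)/jsx.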
-
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility purposes with older Erlang.mk configuration.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
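# A dependency declared in the older two-field format (repository and an
# optional commit, no fetch method) falls back to this rule, e.g. the
# hypothetical:
#
#   dep_my_dep = https://github.com/user/my_dep 1.0.0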
-
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
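# Typical project-level overrides for this configuration block (values are
# illustrative; module names refer to files that would live under src/):
#
#   ERLC_OPTS = +debug_info +warn_export_vars
#   COMPILE_FIRST = my_behaviour
#   ERLC_EXCLUDE = my_generated_module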
-
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- Output0 = [
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ],
- Output = case "é" of
- [233] -> unicode:characters_to_binary(Output0);
- _ -> Output0
- end,
- ok = file:write_file("$(1)", Output),
- halt()
-endef
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
- echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
-
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
- @:
-
-test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
-test_erlc_verbose_2 = set -x;
-test_erlc_verbose = $(test_erlc_verbose_$(V))
-
-define compile_test_erl
- $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(1)
-endef
-
-ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
-$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
- $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want to fail due to
-# warnings when used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
- " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
- " list-templates List available templates"
-
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
-
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
-
-list-templates:
- $(verbose) @echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code. The "gcc" MSYS2 package also doesn't.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
- "The CI_OTP variable must be defined with the Erlang versions" \
- "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
-
-# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifdef CONCUERROR_TESTS
-
-.PHONY: concuerror distclean-concuerror
-
-# Configuration
-
-CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
-CONCUERROR_OPTS ?=
-
-# Core targets.
-
-check:: concuerror
-
-ifndef KEEP_LOGS
-distclean:: distclean-concuerror
-endif
-
-# Plugin-specific targets.
-
-$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
- $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
- $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
-
-$(CONCUERROR_LOGS_DIR):
- $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
-
-define concuerror_html_report
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="utf-8">
-<title>Concuerror HTML report</title>
-</head>
-<body>
-<h1>Concuerror HTML report</h1>
-<p>Generated on $(concuerror_date)</p>
-<ul>
-$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
-</ul>
-</body>
-</html>
-endef
-
-concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
- $(eval concuerror_date := $(shell date))
- $(eval concuerror_targets := $^)
- $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
-
-define concuerror_target
-.PHONY: concuerror-$1-$2
-
-concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
- $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
- --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
- -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
- $$(CONCUERROR_OPTS) -m $1 -t $2
-endef
-
-$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
-
-distclean-concuerror:
- $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
-
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ct apps-ct distclean-ct
-
-# Configuration.
-
-CT_OPTS ?=
-
-ifneq ($(wildcard $(TEST_DIR)),)
-ifndef CT_SUITES
-CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
-endif
-endif
-CT_SUITES ?=
-CT_LOGS_DIR ?= $(CURDIR)/logs
-
-# Core targets.
-
-tests:: ct
-
-ifndef KEEP_LOGS
-distclean:: distclean-ct
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Common_test targets:" \
- " ct Run all the common_test suites for this project" \
- "" \
- "All your common_test suites have their associated targets." \
-	"A suite named http_SUITE can be run using the ct-http target."
-
-# Plugin-specific targets.
-
-CT_RUN = ct_run \
- -no_auto_compile \
- -noinput \
- -pa $(CURDIR)/ebin $(TEST_DIR) \
- -dir $(TEST_DIR) \
- -logdir $(CT_LOGS_DIR)
-
-ifeq ($(CT_SUITES),)
-ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
-else
-# We do not run tests if we are in an apps/* with no test directory.
-ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
-ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
-endif
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-define ct_app_target
-apps-ct-$1: test-build
- $$(MAKE) -C $1 ct IS_APP=1
-endef
-
-$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
-
-apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
-endif
-
-ifdef t
-ifeq (,$(findstring :,$t))
-CT_EXTRA = -group $t
-else
-t_words = $(subst :, ,$t)
-CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
-endif
-else
-ifdef c
-CT_EXTRA = -case $c
-else
-CT_EXTRA =
-endif
-endif
-
-define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
-endef
-
-$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
-
-distclean-ct:
- $(gen_verbose) rm -rf $(CT_LOGS_DIR)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: plt distclean-plt dialyze
-
-# Configuration.
-
-DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
-export DIALYZER_PLT
-
-PLT_APPS ?=
-DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-DIALYZER_PLT_OPTS ?=
-
-# Core targets.
-
-check:: dialyze
-
-distclean:: distclean-plt
-
-help::
- $(verbose) printf "%s\n" "" \
- "Dialyzer targets:" \
- " plt Build a PLT file for this project" \
- " dialyze Analyze the project using Dialyzer"
-
-# Plugin-specific targets.
-
-define filter_opts.erl
- Opts = init:get_plain_arguments(),
- {Filtered, _} = lists:foldl(fun
- (O, {Os, true}) -> {[O|Os], false};
- (O = "-D", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-I", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-pa", {Os, _}) -> {[O|Os], true};
- (_, Acc) -> Acc
- end, {[], false}, Opts),
- io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
- halt().
-endef
-
-# DIALYZER_PLT is a variable understood directly by Dialyzer.
-#
-# We append the path to erts at the end of the PLT. This works
-# because the PLT file is in the external term format and the
-# function binary_to_term/1 ignores any trailing data.
-$(DIALYZER_PLT): deps app
- $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
- while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
- $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
- erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
- $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
-
-plt: $(DIALYZER_PLT)
-
-distclean-plt:
- $(gen_verbose) rm -f $(DIALYZER_PLT)
-
-ifneq ($(wildcard $(DIALYZER_PLT)),)
-dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
- $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
- grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
- rm $(DIALYZER_PLT); \
- $(MAKE) plt; \
- fi
-else
-dialyze: $(DIALYZER_PLT)
-endif
- $(verbose) dialyzer --no_native `$(ERL) \
- -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
- -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
- " escript Build an executable escript archive" \
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
- $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: eunit apps-eunit
-
-# Configuration
-
-EUNIT_OPTS ?=
-EUNIT_ERL_OPTS ?=
-
-# Core targets.
-
-tests:: eunit
-
-help::
- $(verbose) printf "%s\n" "" \
- "EUnit targets:" \
- " eunit Run all the EUnit tests for this project"
-
-# Plugin-specific targets.
-
-define eunit.erl
- $(call cover.erl)
- CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
- ok -> ok;
- error -> halt(2)
- end,
- CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
- halt()
-endef
-
-EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
-else
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
-endif
-else
-EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
-EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
-
-EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
- $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
-
-eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
-ifneq ($(wildcard src/ $(TEST_DIR)),)
- $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-apps-eunit: test-build
- $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
- [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
- exit $$eunit_retcode
-endif
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper)
-.PHONY: proper
-
-# Targets.
-
-tests:: proper
-
-define proper_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- Module = fun(M) ->
- [true] =:= lists:usort([
- case atom_to_list(F) of
- "prop_" ++ _ ->
- io:format("Testing ~p:~p/0~n", [M, F]),
- proper:quickcheck(M:F(), nocolors);
- _ ->
- true
- end
- || {F, 0} <- M:module_info(exports)])
- end,
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]);
- module -> Module($(2));
- function -> proper:quickcheck($(2), nocolors)
- end,
- CoverExport("$(COVER_DATA_DIR)/proper.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-proper: test-build cover-data-dir
- $(verbose) $(call erlang,$(call proper_check.erl,module,$(t)))
-else
-proper: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)()))
-endif
-else
-proper: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Verbosity.
-
-proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
-proto_verbose = $(proto_verbose_$(V))
-
-# Core targets.
-
-ifneq ($(wildcard src/),)
-ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
-PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
-ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
-
-ifeq ($(PROTO_FILES),)
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
- $(verbose) :
-else
-# Rebuild proto files when the Makefile changes.
-# We exclude $(PROJECT).d to avoid a circular dependency.
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(PROTO_FILES); \
- fi
- $(verbose) touch $@
-
-$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
-endif
-
-ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
-define compile_proto.erl
- [begin
- protobuffs_compile:generate_source(F, [
- {output_include_dir, "./include"},
- {output_src_dir, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-else
-define compile_proto.erl
- [begin
- gpb_compile:file(F, [
- {include_as_lib, true},
- {module_name_suffix, "_pb"},
- {o_hrl, "./include"},
- {o_erl, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-endif
-
-ifneq ($(PROTO_FILES),)
-$(PROJECT).d:: $(PROTO_FILES)
- $(verbose) mkdir -p ebin/ include/
- $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
-endif
-endif
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
-
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
- " shell Run an erlang shell with SHELL_OPTS or reasonable default"
-
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
-
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
- "ReST sources and 'conf.py' file are expected in directory pointed by" \
- "SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists formats to build (only" \
- "'html' format is generated by default); target directory can be specified by" \
- 'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
- "Additional Sphinx options can be set in SPHINX_OPTS."
-
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
-
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
-.PHONY: triq
-
-# Targets.
-
-tests:: triq
-
-define triq_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
- module -> triq:check($(2));
- function -> triq:check($(2))
- end,
- CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-triq: test-build cover-data-dir
- $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
-else
-triq: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
-endif
-else
-triq: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
- ' xref Run Xrefr using $$XREF_CONFIG as config file if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-COVER_REPORT_DIR ?= cover
-COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
-
-ifdef COVER
-COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
-COVER_DEPS ?=
-endif
-
-# Code coverage for Common Test.
-
-ifdef COVER
-ifdef CT_RUN
-ifneq ($(wildcard $(TEST_DIR)),)
-test-build:: $(TEST_DIR)/ct.cover.spec
-
-$(TEST_DIR)/ct.cover.spec: cover-data-dir
- $(gen_verbose) printf "%s\n" \
- "{incl_app, '$(PROJECT)', details}." \
- "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
- $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
- $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
-
-CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
-endif
-endif
-endif
-
-# Code coverage for other tools.
-
-ifdef COVER
-define cover.erl
- CoverSetup = fun() ->
- Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
- $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
- $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
- end
- end || Dir <- Dirs]
- end,
- CoverExport = fun(Filename) -> cover:export(Filename) end,
-endef
-else
-define cover.erl
- CoverSetup = fun() -> ok end,
- CoverExport = fun(_) -> ok end,
-endef
-endif
-
-# Core targets
-
-ifdef COVER
-ifneq ($(COVER_REPORT_DIR),)
-tests::
- $(verbose) $(MAKE) --no-print-directory cover-report
-endif
-
-cover-data-dir: | $(COVER_DATA_DIR)
-
-$(COVER_DATA_DIR):
- $(verbose) mkdir -p $(COVER_DATA_DIR)
-else
-cover-data-dir:
-endif
-
-clean:: coverdata-clean
-
-ifneq ($(COVER_REPORT_DIR),)
-distclean:: cover-report-clean
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Cover targets:" \
- " cover-report Generate a HTML coverage report from previously collected" \
- " cover data." \
- " all.coverdata Merge all coverdata files into all.coverdata." \
- "" \
- "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
- "target tests additionally generates a HTML coverage report from the combined" \
- "coverdata files from each of these testing tools. HTML reports can be disabled" \
- "by setting COVER_REPORT_DIR to empty."
-
-# Plugin specific targets
-
-COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
-
-.PHONY: coverdata-clean
-coverdata-clean:
- $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
-
-# Merge all coverdata files into one.
-define cover_export.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
-endef
-
-all.coverdata: $(COVERDATA) cover-data-dir
- $(gen_verbose) $(call erlang,$(cover_export.erl))
-
-# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
-# empty if you want the coverdata files but not the HTML report.
-ifneq ($(COVER_REPORT_DIR),)
-
-.PHONY: cover-report-clean cover-report
-
-cover-report-clean:
- $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
-ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
- $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
-endif
-
-ifeq ($(COVERDATA),)
-cover-report:
-else
-
-# Modules which include eunit.hrl always contain one line without coverage
-# because eunit defines test/0 which is never called. We compensate for this.
-EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
- grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
- | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
-
-define cover_report.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- Ms = cover:imported_modules(),
- [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
- ++ ".COVER.html", [html]) || M <- Ms],
- Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
- EunitHrlMods = [$(EUNIT_HRL_MODS)],
- Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
- true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
- TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
- TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
- Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
- TotalPerc = Perc(TotalY, TotalN),
- {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
- io:format(F, "<!DOCTYPE html><html>~n"
- "<head><meta charset=\"UTF-8\">~n"
- "<title>Coverage report</title></head>~n"
- "<body>~n", []),
- io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
- io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
- [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
- "<td>~p%</td></tr>~n",
- [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
- How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
- Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
- io:format(F, "</table>~n"
- "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
- "</body></html>", [How, Date]),
- halt().
-endef
-
-cover-report:
- $(verbose) mkdir -p $(COVER_REPORT_DIR)
- $(gen_verbose) $(call erlang,$(cover_report.erl))
-
-endif
-endif # ifneq ($(COVER_REPORT_DIR),)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
-
-endif
-endif
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
-
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
-
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are also included no
-# matter the type of requested dependencies.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
-
-# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of
-# dependencies with a single target.
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
-
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
diff --git a/deps/rabbitmq_web_mqtt/rabbitmq-components.mk b/deps/rabbitmq_web_mqtt/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/rabbitmq_web_mqtt/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Define default goal to `all` because this file defines some targets
-# before the inclusion of erlang.mk leading to the wrong target becoming
-# the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
-
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to checkout branches which match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch or fallback to `stable` or `master` whichever was the
-# base of the topic branch.
-
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
-
-# Third-party dependencies version pinning.
-#
-# We do that in this file, which is copied in all projects, to ensure
-# all projects use the same versions. It avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# Erlang.mk does not rebuild dependencies by default, once they were
-# compiled once, except for those listed in the `$(FORCE_REBUILD)`
-# variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this eases
-# the work on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project
-# repository URL, if it's a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name is replaced by the
-# target's properties:
-# eg. rabbitmq-common is replaced by rabbitmq-codegen
-# eg. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fallback to RabbitMQ
-# upstream which is GitHub.
-
-# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following pattern:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
-
-# Macro to replace both the project's name (eg. "rabbit_common") and
-# repository name (eg. "rabbitmq-common") by the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespaces in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level's one.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is a coincidence that we found a
-# `rabbitmq-components.mk` up upper directories.
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
diff --git a/deps/rabbitmq_web_mqtt/src/rabbit_web_mqtt_app.erl b/deps/rabbitmq_web_mqtt/src/rabbit_web_mqtt_app.erl
index 00b80bf562..ba6c8919b6 100644
--- a/deps/rabbitmq_web_mqtt/src/rabbit_web_mqtt_app.erl
+++ b/deps/rabbitmq_web_mqtt/src/rabbit_web_mqtt_app.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_web_mqtt_app).
@@ -17,7 +17,7 @@
]).
%% Dummy supervisor - see Ulf Wiger's comment at
-%% http://erlang.2086793.n4.nabble.com/initializing-library-applications-without-processes-td2094473.html
+%% http://erlang.org/pipermail/erlang-questions/2010-April/050508.html
-behaviour(supervisor).
-export([init/1]).
@@ -86,7 +86,8 @@ mqtt_init() ->
]}]),
CowboyOpts = CowboyOpts0#{env => #{dispatch => Routes},
middlewares => [cowboy_router, rabbit_web_mqtt_middleware, cowboy_handler],
- proxy_header => get_env(proxy_protocol, false)},
+ proxy_header => get_env(proxy_protocol, false),
+ stream_handlers => [rabbit_web_mqtt_stream_handler, cowboy_stream_h]},
case get_env(tcp_config, []) of
[] -> ok;
TCPConf0 -> start_tcp_listener(TCPConf0, CowboyOpts)
@@ -103,7 +104,8 @@ start_tcp_listener(TCPConf0, CowboyOpts) ->
socket_opts => TCPConf,
connection_type => supervisor,
max_connections => get_max_connections(),
- num_acceptors => get_env(num_tcp_acceptors, 10)
+ num_acceptors => get_env(num_tcp_acceptors, 10),
+ num_conns_sups => get_env(num_conns_sup, 1)
},
case ranch:start_listener(rabbit_networking:ranch_ref(TCPConf),
ranch_tcp,
@@ -115,12 +117,12 @@ start_tcp_listener(TCPConf0, CowboyOpts) ->
{error, ErrTCP} ->
rabbit_log_connection:error(
"Failed to start a WebSocket (HTTP) listener. Error: ~p,"
- " listener settings: ~p~n",
+ " listener settings: ~p",
[ErrTCP, TCPConf]),
throw(ErrTCP)
end,
listener_started(?TCP_PROTOCOL, TCPConf),
- rabbit_log:info("rabbit_web_mqtt: listening for HTTP connections on ~s:~w~n",
+ rabbit_log:info("rabbit_web_mqtt: listening for HTTP connections on ~s:~w",
[IpStr, Port]).
start_tls_listener(TLSConf0, CowboyOpts) ->
@@ -130,7 +132,8 @@ start_tls_listener(TLSConf0, CowboyOpts) ->
socket_opts => TLSConf,
connection_type => supervisor,
max_connections => get_max_connections(),
- num_acceptors => get_env(num_ssl_acceptors, 10)
+ num_acceptors => get_env(num_ssl_acceptors, 10),
+ num_conns_sups => get_env(num_conns_sup, 1)
},
case ranch:start_listener(rabbit_networking:ranch_ref(TLSConf),
ranch_ssl,
@@ -142,12 +145,12 @@ start_tls_listener(TLSConf0, CowboyOpts) ->
{error, ErrTLS} ->
rabbit_log_connection:error(
"Failed to start a TLS WebSocket (HTTPS) listener. Error: ~p,"
- " listener settings: ~p~n",
+ " listener settings: ~p",
[ErrTLS, TLSConf]),
throw(ErrTLS)
end,
listener_started(?TLS_PROTOCOL, TLSConf),
- rabbit_log:info("rabbit_web_mqtt: listening for HTTPS connections on ~s:~w~n",
+ rabbit_log:info("rabbit_web_mqtt: listening for HTTPS connections on ~s:~w",
[TLSIpStr, TLSPort]).
listener_started(Protocol, Listener) ->
diff --git a/deps/rabbitmq_web_mqtt/src/rabbit_web_mqtt_connection_info.erl b/deps/rabbitmq_web_mqtt/src/rabbit_web_mqtt_connection_info.erl
index cb71ac6b76..7a8dccac57 100644
--- a/deps/rabbitmq_web_mqtt/src/rabbit_web_mqtt_connection_info.erl
+++ b/deps/rabbitmq_web_mqtt/src/rabbit_web_mqtt_connection_info.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_web_mqtt_connection_info).
@@ -23,4 +23,4 @@ additional_authn_params(_Creds, _VHost, _Pid, Infos) ->
end;
_ ->
[]
-    end.
\ No newline at end of file
+ end.
diff --git a/deps/rabbitmq_web_mqtt/src/rabbit_web_mqtt_connection_sup.erl b/deps/rabbitmq_web_mqtt/src/rabbit_web_mqtt_connection_sup.erl
index 9ac87a4260..53ed3b9a09 100644
--- a/deps/rabbitmq_web_mqtt/src/rabbit_web_mqtt_connection_sup.erl
+++ b/deps/rabbitmq_web_mqtt/src/rabbit_web_mqtt_connection_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_web_mqtt_connection_sup).
@@ -12,13 +12,13 @@
-include_lib("rabbit_common/include/rabbit.hrl").
--export([start_link/4, start_keepalive_link/0]).
+-export([start_link/3, start_keepalive_link/0]).
-export([init/1]).
%%----------------------------------------------------------------------------
-start_link(Ref, Sock, Transport, CowboyOpts0) ->
+start_link(Ref, Transport, CowboyOpts0) ->
{ok, SupPid} = supervisor2:start_link(?MODULE, []),
{ok, KeepaliveSup} = supervisor2:start_child(
SupPid,
@@ -31,8 +31,7 @@ start_link(Ref, Sock, Transport, CowboyOpts0) ->
%% then have the middleware rabbit_web_mqtt_middleware place it
%% in the initial handler state.
Env = maps:get(env, CowboyOpts0),
- CowboyOpts = CowboyOpts0#{env => Env#{keepalive_sup => KeepaliveSup,
- socket => Sock}},
+ CowboyOpts = CowboyOpts0#{env => Env#{keepalive_sup => KeepaliveSup}},
Protocol = case Transport of
ranch_tcp -> cowboy_clear;
ranch_ssl -> cowboy_tls
@@ -40,7 +39,7 @@ start_link(Ref, Sock, Transport, CowboyOpts0) ->
{ok, ReaderPid} = supervisor2:start_child(
SupPid,
{Protocol,
- {Protocol, start_link, [Ref, Sock, Transport, CowboyOpts]},
+ {Protocol, start_link, [Ref, Transport, CowboyOpts]},
intrinsic, ?WORKER_WAIT, worker, [Protocol]}),
{ok, SupPid, ReaderPid}.
diff --git a/deps/rabbitmq_web_mqtt/src/rabbit_web_mqtt_handler.erl b/deps/rabbitmq_web_mqtt/src/rabbit_web_mqtt_handler.erl
index d7d1a3b499..af568777ec 100644
--- a/deps/rabbitmq_web_mqtt/src/rabbit_web_mqtt_handler.erl
+++ b/deps/rabbitmq_web_mqtt/src/rabbit_web_mqtt_handler.erl
@@ -2,11 +2,12 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_web_mqtt_handler).
-behaviour(cowboy_websocket).
+-behaviour(cowboy_sub_protocol).
-export([
init/2,
@@ -17,6 +18,11 @@
]).
-export([close_connection/2]).
+%% cowboy_sub_protocol
+-export([upgrade/4,
+ upgrade/5,
+ takeover/7]).
+
-include_lib("amqp_client/include/amqp_client.hrl").
-record(state, {
@@ -33,55 +39,71 @@
connection
}).
+%% cowboy_sub_protocol
+upgrade(Req, Env, Handler, HandlerState) ->
+ upgrade(Req, Env, Handler, HandlerState, #{}).
+
+upgrade(Req, Env, Handler, HandlerState, Opts) ->
+ cowboy_websocket:upgrade(Req, Env, Handler, HandlerState, Opts).
+
+takeover(Parent, Ref, Socket, Transport, Opts, Buffer, {Handler, HandlerState}) ->
+ Sock = case HandlerState#state.socket of
+ undefined ->
+ Socket;
+ ProxyInfo ->
+ {rabbit_proxy_socket, Socket, ProxyInfo}
+ end,
+ cowboy_websocket:takeover(Parent, Ref, Socket, Transport, Opts, Buffer,
+ {Handler, HandlerState#state{socket = Sock}}).
+
+%% cowboy_websocket
init(Req, Opts) ->
{PeerAddr, _PeerPort} = maps:get(peer, Req),
{_, KeepaliveSup} = lists:keyfind(keepalive_sup, 1, Opts),
- {_, Sock0} = lists:keyfind(socket, 1, Opts),
- Sock = case maps:get(proxy_header, Req, undefined) of
- undefined ->
- Sock0;
- ProxyInfo ->
- {rabbit_proxy_socket, Sock0, ProxyInfo}
- end,
+ SockInfo = maps:get(proxy_header, Req, undefined),
+ WsOpts0 = proplists:get_value(ws_opts, Opts, #{}),
+ WsOpts = maps:merge(#{compress => true}, WsOpts0),
+ Req2 = case cowboy_req:header(<<"sec-websocket-protocol">>, Req) of
+ undefined -> Req;
+ SecWsProtocol ->
+ cowboy_req:set_resp_header(<<"sec-websocket-protocol">>, SecWsProtocol, Req)
+ end,
+ {?MODULE, Req2, #state{
+ keepalive = {none, none},
+ keepalive_sup = KeepaliveSup,
+ parse_state = rabbit_mqtt_frame:initial_state(),
+ state = running,
+ conserve_resources = false,
+ socket = SockInfo,
+ peername = PeerAddr
+ }, WsOpts}.
+
+websocket_init(State0 = #state{socket = Sock, peername = PeerAddr}) ->
+ ok = file_handle_cache:obtain(),
case rabbit_net:connection_string(Sock, inbound) of
{ok, ConnStr} ->
- Req2 = case cowboy_req:header(<<"sec-websocket-protocol">>, Req) of
- undefined -> Req;
- SecWsProtocol ->
- cowboy_req:set_resp_header(<<"sec-websocket-protocol">>, SecWsProtocol, Req)
- end,
- WsOpts0 = proplists:get_value(ws_opts, Opts, #{}),
- WsOpts = maps:merge(#{compress => true}, WsOpts0),
- {cowboy_websocket, Req2, #state{
- conn_name = ConnStr,
- keepalive = {none, none},
- keepalive_sup = KeepaliveSup,
- parse_state = rabbit_mqtt_frame:initial_state(),
- state = running,
- conserve_resources = false,
- socket = Sock,
- peername = PeerAddr
- }, WsOpts};
+ State = State0#state{
+ conn_name = ConnStr,
+ socket = Sock
+ },
+ rabbit_log_connection:info("accepting Web MQTT connection ~p (~s)", [self(), ConnStr]),
+ AdapterInfo = amqp_connection:socket_adapter_info(Sock, {'Web MQTT', "N/A"}),
+ RealSocket = rabbit_net:unwrap_socket(Sock),
+ ProcessorState = rabbit_mqtt_processor:initial_state(Sock,
+ rabbit_mqtt_reader:ssl_login_name(RealSocket),
+ AdapterInfo,
+ fun send_reply/2,
+ PeerAddr),
+ process_flag(trap_exit, true),
+ {ok,
+ rabbit_event:init_stats_timer(
+ State#state{proc_state = ProcessorState},
+ #state.stats_timer),
+ hibernate};
_ ->
- {stop, Req}
+ {stop, State0}
end.
-websocket_init(State = #state{conn_name = ConnStr, socket = Sock, peername = PeerAddr}) ->
- rabbit_log_connection:info("accepting Web MQTT connection ~p (~s)~n", [self(), ConnStr]),
- AdapterInfo = amqp_connection:socket_adapter_info(Sock, {'Web MQTT', "N/A"}),
- RealSocket = rabbit_net:unwrap_socket(Sock),
- ProcessorState = rabbit_mqtt_processor:initial_state(Sock,
- rabbit_mqtt_reader:ssl_login_name(RealSocket),
- AdapterInfo,
- fun send_reply/2,
- PeerAddr),
- process_flag(trap_exit, true),
- {ok,
- rabbit_event:init_stats_timer(
- State#state{proc_state = ProcessorState},
- #state.stats_timer),
- hibernate}.
-
-spec close_connection(pid(), string()) -> 'ok'.
close_connection(Pid, Reason) ->
rabbit_log_connection:info("Web MQTT: will terminate connection process ~p, reason: ~s",
@@ -98,7 +120,7 @@ websocket_handle(Ping, State) when Ping =:= ping; Ping =:= pong ->
{ok, State, hibernate};
%% Log any other unexpected frames.
websocket_handle(Frame, State) ->
- rabbit_log_connection:info("Web MQTT: unexpected WebSocket frame ~p~n",
+ rabbit_log_connection:info("Web MQTT: unexpected WebSocket frame ~p",
[Frame]),
{ok, State, hibernate}.
@@ -133,12 +155,12 @@ websocket_info({'EXIT', _, _}, State) ->
stop(State);
websocket_info({'$gen_cast', duplicate_id}, State = #state{ proc_state = ProcState,
conn_name = ConnName }) ->
- rabbit_log_connection:warning("Web MQTT disconnecting a client with duplicate ID '~s' (~p)~n",
+ rabbit_log_connection:warning("Web MQTT disconnecting a client with duplicate ID '~s' (~p)",
[rabbit_mqtt_processor:info(client_id, ProcState), ConnName]),
stop(State);
websocket_info({'$gen_cast', {close_connection, Reason}}, State = #state{ proc_state = ProcState,
conn_name = ConnName }) ->
- rabbit_log_connection:warning("Web MQTT disconnecting client with ID '~s' (~p), reason: ~s~n",
+ rabbit_log_connection:warning("Web MQTT disconnecting client with ID '~s' (~p), reason: ~s",
[rabbit_mqtt_processor:info(client_id, ProcState), ConnName, Reason]),
stop(State);
websocket_info({start_keepalives, Keepalive},
@@ -151,14 +173,14 @@ websocket_info({start_keepalives, Keepalive},
KeepaliveSup, Sock, 0, SendFun, Keepalive, ReceiveFun),
{ok, State #state { keepalive = Heartbeater }, hibernate};
websocket_info(keepalive_timeout, State = #state{conn_name = ConnStr}) ->
- rabbit_log_connection:error("closing Web MQTT connection ~p (keepalive timeout)~n", [ConnStr]),
+ rabbit_log_connection:error("closing Web MQTT connection ~p (keepalive timeout)", [ConnStr]),
stop(State);
websocket_info(emit_stats, State) ->
{ok, emit_stats(State), hibernate};
websocket_info({ra_event, _, _}, State) ->
{ok, State, hibernate};
websocket_info(Msg, State) ->
- rabbit_log_connection:info("Web MQTT: unexpected message ~p~n",
+ rabbit_log_connection:info("Web MQTT: unexpected message ~p",
[Msg]),
{ok, State, hibernate}.
@@ -199,7 +221,7 @@ handle_data1(Data, State = #state{ parse_state = ParseState,
proc_state = ProcState1,
connection = ConnPid });
{error, Reason, _} ->
- rabbit_log_connection:info("MQTT protocol error ~p for connection ~p~n",
+ rabbit_log_connection:info("MQTT protocol error ~p for connection ~p",
[Reason, ConnStr]),
stop(State, 1002, Reason);
{error, Error} ->
@@ -215,13 +237,14 @@ stop(State) ->
stop(State, 1000, "MQTT died").
stop(State, CloseCode, Error0) ->
+ ok = file_handle_cache:release(),
stop_rabbit_mqtt_processor(State),
Error1 = rabbit_data_coercion:to_binary(Error0),
{[{close, CloseCode, Error1}], State}.
stop_with_framing_error(State, Error0, ConnStr) ->
Error1 = rabbit_misc:format("~p", [Error0]),
- rabbit_log_connection:error("MQTT detected framing error '~s' for connection ~p~n",
+ rabbit_log_connection:error("MQTT detected framing error '~s' for connection ~p",
[Error1, ConnStr]),
stop(State, 1007, Error1).
@@ -229,7 +252,7 @@ stop_rabbit_mqtt_processor(State = #state{state = running,
proc_state = ProcState,
conn_name = ConnName}) ->
maybe_emit_stats(State),
- rabbit_log_connection:info("closing Web MQTT connection ~p (~s)~n", [self(), ConnName]),
+ rabbit_log_connection:info("closing Web MQTT connection ~p (~s)", [self(), ConnName]),
rabbit_mqtt_processor:send_will(ProcState),
rabbit_mqtt_processor:close_connection(ProcState).
diff --git a/deps/rabbitmq_web_mqtt/src/rabbit_web_mqtt_middleware.erl b/deps/rabbitmq_web_mqtt/src/rabbit_web_mqtt_middleware.erl
index 7bc92099c6..840f45265a 100644
--- a/deps/rabbitmq_web_mqtt/src/rabbit_web_mqtt_middleware.erl
+++ b/deps/rabbitmq_web_mqtt/src/rabbit_web_mqtt_middleware.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_web_mqtt_middleware).
@@ -12,11 +12,9 @@
execute(Req, Env) ->
#{keepalive_sup := KeepaliveSup} = Env,
- Sock = maps:get(socket, Env),
case maps:get(handler_opts, Env, undefined) of
undefined -> {ok, Req, Env};
Opts when is_list(Opts) ->
- {ok, Req, Env#{handler_opts => [{keepalive_sup, KeepaliveSup},
- {socket, Sock}
+ {ok, Req, Env#{handler_opts => [{keepalive_sup, KeepaliveSup}
|Opts]}}
end.
diff --git a/deps/rabbitmq_web_mqtt/src/rabbit_web_mqtt_stream_handler.erl b/deps/rabbitmq_web_mqtt/src/rabbit_web_mqtt_stream_handler.erl
new file mode 100644
index 0000000000..3a3d343600
--- /dev/null
+++ b/deps/rabbitmq_web_mqtt/src/rabbit_web_mqtt_stream_handler.erl
@@ -0,0 +1,41 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+-module(rabbit_web_mqtt_stream_handler).
+
+-behavior(cowboy_stream).
+
+-export([init/3]).
+-export([data/4]).
+-export([info/3]).
+-export([terminate/3]).
+-export([early_error/5]).
+
+
+-record(state, {next}).
+
+init(StreamID, Req, Opts) ->
+ {Commands, Next} = cowboy_stream:init(StreamID, Req, Opts),
+ {Commands, #state{next = Next}}.
+
+data(StreamID, IsFin, Data, State = #state{next = Next0}) ->
+ {Commands, Next} = cowboy_stream:data(StreamID, IsFin, Data, Next0),
+ {Commands, State#state{next = Next}}.
+
+info(StreamID, {switch_protocol, Headers, _, InitialState}, State) ->
+ do_info(StreamID, {switch_protocol, Headers, rabbit_web_mqtt_handler, InitialState}, State);
+info(StreamID, Info, State) ->
+ do_info(StreamID, Info, State).
+
+do_info(StreamID, Info, State = #state{next = Next0}) ->
+ {Commands, Next} = cowboy_stream:info(StreamID, Info, Next0),
+ {Commands, State#state{next = Next}}.
+
+terminate(StreamID, Reason, #state{next = Next}) ->
+ cowboy_stream:terminate(StreamID, Reason, Next).
+
+early_error(StreamID, Reason, PartialReq, Resp, Opts) ->
+ cowboy_stream:early_error(StreamID, Reason, PartialReq, Resp, Opts).
diff --git a/deps/rabbitmq_web_mqtt/test/config_schema_SUITE.erl b/deps/rabbitmq_web_mqtt/test/config_schema_SUITE.erl
index 7d4211797c..410239d493 100644
--- a/deps/rabbitmq_web_mqtt/test/config_schema_SUITE.erl
+++ b/deps/rabbitmq_web_mqtt/test/config_schema_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(config_schema_SUITE).
diff --git a/deps/rabbitmq_web_mqtt/test/proxy_protocol_SUITE.erl b/deps/rabbitmq_web_mqtt/test/proxy_protocol_SUITE.erl
index 9c6b9355e4..423e12cdf0 100644
--- a/deps/rabbitmq_web_mqtt/test/proxy_protocol_SUITE.erl
+++ b/deps/rabbitmq_web_mqtt/test/proxy_protocol_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(proxy_protocol_SUITE).
@@ -94,7 +94,7 @@ proxy_protocol(Config) ->
{binary, _P} = rfc6455_client:recv(WS),
ConnectionName = rabbit_ct_broker_helpers:rpc(Config, 0,
?MODULE, connection_name, []),
- match = re:run(ConnectionName, <<"^192.168.1.1:80 ">>, [{capture, none}]),
+ match = re:run(ConnectionName, <<"^192.168.1.1:80 -> 192.168.1.2:81$">>, [{capture, none}]),
{close, _} = rfc6455_client:close(WS),
ok.
diff --git a/deps/rabbitmq_web_mqtt/test/src/rabbit_ws_test_util.erl b/deps/rabbitmq_web_mqtt/test/src/rabbit_ws_test_util.erl
index fcc91f6e13..f72d6fab69 100644
--- a/deps/rabbitmq_web_mqtt/test/src/rabbit_ws_test_util.erl
+++ b/deps/rabbitmq_web_mqtt/test/src/rabbit_ws_test_util.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_ws_test_util).
diff --git a/deps/rabbitmq_web_mqtt/test/src/rfc6455_client.erl b/deps/rabbitmq_web_mqtt/test/src/rfc6455_client.erl
index f9650fd4a6..ce486dca38 100644
--- a/deps/rabbitmq_web_mqtt/test/src/rfc6455_client.erl
+++ b/deps/rabbitmq_web_mqtt/test/src/rfc6455_client.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rfc6455_client).
diff --git a/deps/rabbitmq_web_mqtt/test/src/system_SUITE.erl b/deps/rabbitmq_web_mqtt/test/system_SUITE.erl
index e91efda12c..d4fe382b7a 100644
--- a/deps/rabbitmq_web_mqtt/test/src/system_SUITE.erl
+++ b/deps/rabbitmq_web_mqtt/test/system_SUITE.erl
@@ -2,13 +2,13 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(system_SUITE).
-include_lib("eunit/include/eunit.hrl").
--include("emqttc_packet.hrl").
+-include("src/emqttc_packet.hrl").
-compile(export_all).
diff --git a/deps/rabbitmq_web_mqtt_examples/BUILD.bazel b/deps/rabbitmq_web_mqtt_examples/BUILD.bazel
new file mode 100644
index 0000000000..ffb91281b2
--- /dev/null
+++ b/deps/rabbitmq_web_mqtt_examples/BUILD.bazel
@@ -0,0 +1,40 @@
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze")
+load(
+ "//:rabbitmq.bzl",
+ "RABBITMQ_DIALYZER_OPTS",
+ "rabbitmq_lib",
+)
+
+APP_NAME = "rabbitmq_web_mqtt_examples"
+
+APP_DESCRIPTION = "Rabbit WEB-MQTT - examples"
+
+APP_MODULE = "rabbit_web_mqtt_examples_app"
+
+APP_ENV = """[
+ {listener, [{port, 15670}]}
+ ]"""
+
+RUNTIME_DEPS = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+ "//deps/rabbit:bazel_erlang_lib",
+ "//deps/rabbitmq_web_dispatch:bazel_erlang_lib",
+ "//deps/rabbitmq_web_mqtt:bazel_erlang_lib",
+]
+
+rabbitmq_lib(
+ app_description = APP_DESCRIPTION,
+ app_env = APP_ENV,
+ app_module = APP_MODULE,
+ app_name = APP_NAME,
+ runtime_deps = RUNTIME_DEPS,
+)
+
+xref(tags = ["xref"])
+
+dialyze(
+ dialyzer_opts = RABBITMQ_DIALYZER_OPTS,
+ plt = "//:base_plt",
+ tags = ["dialyze"],
+)
diff --git a/deps/rabbitmq_web_mqtt_examples/CONTRIBUTING.md b/deps/rabbitmq_web_mqtt_examples/CONTRIBUTING.md
index 23a92fef9c..9722e973fb 100644
--- a/deps/rabbitmq_web_mqtt_examples/CONTRIBUTING.md
+++ b/deps/rabbitmq_web_mqtt_examples/CONTRIBUTING.md
@@ -13,7 +13,7 @@ The process is fairly standard:
* Create a branch with a descriptive name in the relevant repositories
* Make your changes, run tests, commit with a [descriptive message](https://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
* Submit pull requests with an explanation what has been changed and **why**
- * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Submit a filled out and signed [Contributor Agreement](https://cla.pivotal.io/) if needed (see below)
* Be patient. We will get to your pull request eventually
If what you are going to work on is a substantial change, please first ask the core team
@@ -28,7 +28,7 @@ See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
## Contributor Agreement
If you want to contribute a non-trivial change, please submit a signed copy of our
-[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+[Contributor Agreement](https://cla.pivotal.io/) around the time
you submit your pull request. This will make it much easier (in some cases, possible)
for the RabbitMQ team at Pivotal to merge your contribution.
diff --git a/deps/rabbitmq_web_mqtt_examples/Makefile b/deps/rabbitmq_web_mqtt_examples/Makefile
index 8333d61a63..2362ec929e 100644
--- a/deps/rabbitmq_web_mqtt_examples/Makefile
+++ b/deps/rabbitmq_web_mqtt_examples/Makefile
@@ -19,5 +19,5 @@ DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
ERLANG_MK_COMMIT = rabbitmq-tmp
-include rabbitmq-components.mk
-include erlang.mk
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
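
With the per-plugin copies removed, the Makefile now includes the shared build files from the umbrella root two directories up, so a build from the plugin directory would look roughly like this (paths assumed for a monorepo checkout):

    # Run from the repository root of rabbitmq-server.
    cd deps/rabbitmq_web_mqtt_examples
    make            # picks up ../../rabbitmq-components.mk and ../../erlang.mk
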
diff --git a/deps/rabbitmq_web_mqtt_examples/erlang.mk b/deps/rabbitmq_web_mqtt_examples/erlang.mk
deleted file mode 100644
index fce4be0b0a..0000000000
--- a/deps/rabbitmq_web_mqtt_examples/erlang.mk
+++ /dev/null
@@ -1,7808 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
-ERLANG_MK_WITHOUT =
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
-
-# Adding erlang.mk to make Erlang scripts that call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
-
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_HIPE)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
-
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simplify writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating Github-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = erlang library for JSON Web Token
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple non intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is a pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = redis erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += esh_mk
-pkg_esh_mk_name = esh_mk
-pkg_esh_mk_description = esh template engine plugin for erlang.mk
-pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
-pkg_esh_mk_fetch = git
-pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
-pkg_esh_mk_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = TOML language erlang parser
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = http://fixprotocol.org/ implementation.
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - a Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = The guards and for Erlang parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
-pkg_gun_homepage = http//ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang irc client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = erlang driver for rethinkdb
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = library to handle mimetypes
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map your internal port to an external using UNP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang Oauth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = A embeddable document oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transformer for use new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between record and proplist
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = automatic Erlang node discovery via redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = pipelined erlang redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang Rest Client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy as nif for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an id generator for message service.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elastic Search
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX style broken HTML parser in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = transit format for erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU based collation Erlang module
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = a simple erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler svn repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired by rebar xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based yaml loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = Zab protocol implemented in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
-
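The `search` target above matches the q= string case-insensitively against each package's name and description and prints the hits through pkg_print. A usage sketch based on that definition; the query string "pool" is only an example:

# Print every package whose name or description mentions "pool".
make search q=pool

# Without q=, every entry in $(PACKAGES) is printed.
make search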
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
-
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
-
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# They both use the core_dep_plugin macro.
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
-
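For each entry in DEP_EARLY_PLUGINS, the foreach above includes either the dependency's early-plugins.mk (bare name) or the named file itself (entry containing a slash), via core_dep_plugin. A sketch using a hypothetical dependency called my_dep:

# Include $(DEPS_DIR)/my_dep/early-plugins.mk before the rest of the build.
DEP_EARLY_PLUGINS = my_dep

# Or point at a specific file inside that dependency:
DEP_EARLY_PLUGINS = my_dep/mk/early.mk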
-# Query functions.
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
-
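The query functions above read dep_<name> variables where word 1 is the fetch method, word 2 the repository, and word 3 the version, falling back to the pkg_<name>_* entries from the index when no dep_<name> is set. A sketch with an illustrative dependency; the URL and tag are placeholders, and it assumes the git fetch method is defined elsewhere in erlang.mk:

# Hypothetical dependency specification:
DEPS += cowlib
dep_cowlib = git https://github.com/ninenines/cowlib 2.12.1

# With that in place, the queries resolve roughly as:
#   $(call query_fetch_method,cowlib) -> git
#   $(call query_repo,cowlib)         -> https://github.com/ninenines/cowlib
#   $(call query_version,cowlib)      -> 2.12.1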
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
-
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly we don't want to include it here;
-# otherwise it'll be treated both as an app and as a top-level project.
-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
-
-export NO_AUTOPATCH
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
-
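dep_verbose expands to dep_verbose_$(V), so dependency-fetch verbosity follows the usual V= convention. A sketch; deps is the target defined further below:

# Default (V=0): a one-line " DEP <name> (<commit>)" summary per dependency.
make deps

# V=2: run each recipe under `set -x` for full command tracing.
make deps V=2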
-# Optimization: don't recompile deps unless truly necessary.
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create ebin directory for all apps to make sure Erlang recognizes them
-# as proper OTP applications when using -include_lib. This is a temporary
-# fix; a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
-
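Per the comments in the apps:: recipe above, a LOCAL_DEPS list that names apps living under $(APPS_DIR) restricts the top-level build to just those apps. A sketch with hypothetical application names:

# Only build apps/app_a and apps/app_b instead of everything under apps/
# (app_a and app_b are placeholders for real local applications).
LOCAL_DEPS = app_a app_b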
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies after they have been compiled
-# once. If a developer is working on the top-level project and some of its
-# dependencies at the same time, they may want to change this behavior.
-# There are two solutions:
-# 1. Set `FULL=1` so that all dependencies are visited and
-# recursively recompiled if necessary.
-# 2. Set `FORCE_REBUILD=` to the specific list of dependencies that
-# should be recompiled (instead of the whole set).
-
-FORCE_REBUILD ?=
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
-
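Both knobs described in the comment above are ordinary command-line variables; a usage sketch, with ranch and cowlib standing in for whichever dependencies are being worked on:

# Visit every dependency again and recompile whatever is out of date.
make FULL=1

# Force only the listed dependencies to be rebuilt, matched by directory basename.
make FORCE_REBUILD="ranch cowlib"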
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
-
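Because of the ifneq guard at the top of the deps:: rule, setting SKIP_DEPS leaves that target with an empty recipe, so this rule at least does no fetching or building. Sketch:

# Skip the dependency build step for this invocation.
make SKIP_DEPS=1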
-# Deps related targets.
-
-# @todo rename GNUmakefile and makefile to Makefile first, if they exist
-# While the Makefile could also be named GNUmakefile or makefile,
-# in practice only Makefile has been needed so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
-# if given. Do it for all 3 possible Makefile file names.
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
-
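The sed call in dep_autopatch_erlang_mk rewrites a dependency's own `include erlang.mk` line so that the parent project's Erlang.mk is loaded when ERLANG_MK_FILENAME is set. Roughly, a dependency Makefile goes from the first form to the second:

# Before autopatching:
include erlang.mk

# After autopatching:
include $(if $(ERLANG_MK_FILENAME),$(ERLANG_MK_FILENAME),erlang.mk)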
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-# Hex only has a package version. No need to look in the Erlang.mk packages.
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
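-# Illustrative usage in a project Makefile (hypothetical name and version):
-#   DEPS = cowlib
-#   dep_cowlib = hex 2.11.0
-# A third word may name the Hex package when it differs from the application,
-# e.g. dep_myapp = hex 1.0.0 my_hex_pkg (hypothetical).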
-
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility with older Erlang.mk dependency configurations.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
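-# Illustrative example of the deprecated format handled above (hypothetical dep):
-#   dep_cowboy = https://github.com/ninenines/cowboy 1.0.0
-# i.e. "REPO [COMMIT]" with no fetch method; the equivalent modern form would be
-#   dep_cowboy = git https://github.com/ninenines/cowboy 1.0.0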
-
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
-
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- Output0 = [
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ],
- Output = case "é" of
- [233] -> unicode:characters_to_binary(Output0);
- _ -> Output0
- end,
- ok = file:write_file("$(1)", Output),
- halt()
-endef
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
- echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
-
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
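-# For illustration: once a test dependency has been built, the ebin/dep_built
-# marker above lets later runs skip it; "make tests FULL=1" (or deleting the
-# marker) forces the dependency to be rebuilt.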
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
- @:
-
-test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
-test_erlc_verbose_2 = set -x;
-test_erlc_verbose = $(test_erlc_verbose_$(V))
-
-define compile_test_erl
- $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(1)
-endef
-
-ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
-$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
- $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want to fail due to
-# warnings when used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
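-# For illustration (hypothetical dependency), the rebar.config target below
-# renders something along these lines from the DEPS and ERLC_OPTS values:
-#   {deps, [{cowboy,".*",{git,"https://github.com/ninenines/cowboy","2.9.0"}}]}.
-#   {erl_opts, [debug_info,warn_export_vars,warn_shadow_vars,warn_obsolete_guard]}.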
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
- " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
- " list-templates List available templates"
-
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
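-# For illustration: running "make bootstrap SP=4" makes files created from the
-# templates below indent with 4 spaces (via WS above) instead of the default tab.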
-
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
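-# Illustrative usage (hypothetical application names):
-#   make new-app in=my_app    # OTP application with supervisor under $(APPS_DIR)
-#   make new-lib in=my_lib    # library application, no application callback module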
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
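-# Illustrative usage of the target above (hypothetical module names):
-#   make new t=gen_server n=my_server
-#   make new t=cowboy_http n=my_handler in=my_app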
-
-list-templates:
- $(verbose) @echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code. The "gcc" MSYS2 package also doesn't.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
- "The CI_OTP variable must be defined with the Erlang versions" \
- "to be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
-
-# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifdef CONCUERROR_TESTS
-
-.PHONY: concuerror distclean-concuerror
-
-# Configuration
-
-CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
-CONCUERROR_OPTS ?=
-
-# Core targets.
-
-check:: concuerror
-
-ifndef KEEP_LOGS
-distclean:: distclean-concuerror
-endif
-
-# Plugin-specific targets.
-
-$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
- $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
- $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
-
-$(CONCUERROR_LOGS_DIR):
- $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
-
-define concuerror_html_report
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="utf-8">
-<title>Concuerror HTML report</title>
-</head>
-<body>
-<h1>Concuerror HTML report</h1>
-<p>Generated on $(concuerror_date)</p>
-<ul>
-$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
-</ul>
-</body>
-</html>
-endef
-
-concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
- $(eval concuerror_date := $(shell date))
- $(eval concuerror_targets := $^)
- $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
-
-define concuerror_target
-.PHONY: concuerror-$1-$2
-
-concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
- $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
- --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
- -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
- $$(CONCUERROR_OPTS) -m $1 -t $2
-endef
-
-$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
-
-distclean-concuerror:
- $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
-
-endif
-
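The Concuerror plugin above only activates when CONCUERROR_TESTS is defined; each entry is a module:function pair that the colon-splitting rule turns into a concuerror-<module>-<function> target. A minimal sketch of a project Makefile driving it (module and test names are hypothetical):

    # Each entry is <module>:<0-arity test function>; Concuerror itself is
    # cloned and built under $(ERLANG_MK_TMP) the first time it is needed.
    CONCUERROR_TESTS = my_app_locks:no_deadlock my_app_queue:basic_ops
    # Extra flags, passed verbatim to the concuerror executable.
    CONCUERROR_OPTS =

"make concuerror" then runs every pair and renders an HTML index of the per-test logs under $(CONCUERROR_LOGS_DIR).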
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ct apps-ct distclean-ct
-
-# Configuration.
-
-CT_OPTS ?=
-
-ifneq ($(wildcard $(TEST_DIR)),)
-ifndef CT_SUITES
-CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
-endif
-endif
-CT_SUITES ?=
-CT_LOGS_DIR ?= $(CURDIR)/logs
-
-# Core targets.
-
-tests:: ct
-
-ifndef KEEP_LOGS
-distclean:: distclean-ct
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Common_test targets:" \
- " ct Run all the common_test suites for this project" \
- "" \
- "All your common_test suites have their associated targets." \
- "A suite named http_SUITE can be ran using the ct-http target."
-
-# Plugin-specific targets.
-
-CT_RUN = ct_run \
- -no_auto_compile \
- -noinput \
- -pa $(CURDIR)/ebin $(TEST_DIR) \
- -dir $(TEST_DIR) \
- -logdir $(CT_LOGS_DIR)
-
-ifeq ($(CT_SUITES),)
-ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
-else
-# We do not run tests if we are in an apps/* with no test directory.
-ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
-ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
-endif
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-define ct_app_target
-apps-ct-$1: test-build
- $$(MAKE) -C $1 ct IS_APP=1
-endef
-
-$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
-
-apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
-endif
-
-ifdef t
-ifeq (,$(findstring :,$t))
-CT_EXTRA = -group $t
-else
-t_words = $(subst :, ,$t)
-CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
-endif
-else
-ifdef c
-CT_EXTRA = -case $c
-else
-CT_EXTRA =
-endif
-endif
-
-define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
-endef
-
-$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
-
-distclean-ct:
- $(gen_verbose) rm -rf $(CT_LOGS_DIR)
-
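The common_test plugin above auto-detects CT_SUITES from test/*_SUITE.erl and generates one ct-<suite> target per suite, with t= and c= narrowing the run to a group and/or case. A short sketch of typical usage (suite, group and case names are hypothetical):

    # Usually auto-detected; set explicitly to narrow the default `make ct` run.
    CT_SUITES = http amqp
    # Passed straight to ct_run, e.g. repeat every suite three times.
    CT_OPTS = -repeat 3
    # Command-line variants of the generated targets:
    #   make ct                      # all suites in CT_SUITES
    #   make ct-http                 # test/http_SUITE.erl only
    #   make ct-http t=admin         # one group
    #   make ct-http t=admin:login   # one case inside that group
    #   make ct-http c=login         # one case, ignoring groups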
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: plt distclean-plt dialyze
-
-# Configuration.
-
-DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
-export DIALYZER_PLT
-
-PLT_APPS ?=
-DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-DIALYZER_PLT_OPTS ?=
-
-# Core targets.
-
-check:: dialyze
-
-distclean:: distclean-plt
-
-help::
- $(verbose) printf "%s\n" "" \
- "Dialyzer targets:" \
- " plt Build a PLT file for this project" \
- " dialyze Analyze the project using Dialyzer"
-
-# Plugin-specific targets.
-
-define filter_opts.erl
- Opts = init:get_plain_arguments(),
- {Filtered, _} = lists:foldl(fun
- (O, {Os, true}) -> {[O|Os], false};
- (O = "-D", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-I", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-pa", {Os, _}) -> {[O|Os], true};
- (_, Acc) -> Acc
- end, {[], false}, Opts),
- io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
- halt().
-endef
-
-# DIALYZER_PLT is a variable understood directly by Dialyzer.
-#
-# We append the path to erts at the end of the PLT. This works
-# because the PLT file is in the external term format and the
-# function binary_to_term/1 ignores any trailing data.
-$(DIALYZER_PLT): deps app
- $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
- while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
- $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
- erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
- $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
-
-plt: $(DIALYZER_PLT)
-
-distclean-plt:
- $(gen_verbose) rm -f $(DIALYZER_PLT)
-
-ifneq ($(wildcard $(DIALYZER_PLT)),)
-dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
- $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
- grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
- rm $(DIALYZER_PLT); \
- $(MAKE) plt; \
- fi
-else
-dialyze: $(DIALYZER_PLT)
-endif
- $(verbose) dialyzer --no_native `$(ERL) \
- -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
- -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
-
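A sketch of the knobs a project Makefile typically sets for the Dialyzer plugin above; the application names are only examples. The PLT is cached as .$(PROJECT).plt and, thanks to the erts path appended to it, rebuilt automatically when the analysis runs against a different Erlang/OTP installation.

    # Extra OTP applications to add to the PLT on top of erts/kernel/stdlib
    # and the project's own dependencies.
    PLT_APPS = crypto public_key ssl
    # Warning flags passed straight to dialyzer.
    DIALYZER_OPTS = -Werror_handling -Wunmatched_returns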
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
-
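The ErlyDTL plugin above derives module names from the template file names, which is easiest to see with an example (paths are hypothetical):

    # With the defaults (DTL_PATH = templates/, DTL_SUFFIX = _dtl):
    #   templates/login.dtl        -> ebin/login_dtl.beam
    #   templates/mail/welcome.dtl -> ebin/welcome_dtl.beam   (basename only)
    # Keeping the directory in the module name avoids basename clashes:
    DTL_FULL_PATH = 1
    #   templates/mail/welcome.dtl -> ebin/mail_welcome_dtl.beam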
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
- " escript Build an executable escript archive" \
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
- $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
-
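A minimal sketch of configuring the escript plugin above; the tool and module names are hypothetical, and 7z must be on the PATH since the embedded archive is built with the ESCRIPT_ZIP command.

    ESCRIPT_NAME = my_tool
    # main/1 is looked up in this module when the script starts.
    ESCRIPT_EMU_ARGS = -escript main my_tool_cli
    #   make escript   # produces ./my_tool with ebin/ and all deps' ebin/ inside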
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: eunit apps-eunit
-
-# Configuration
-
-EUNIT_OPTS ?=
-EUNIT_ERL_OPTS ?=
-
-# Core targets.
-
-tests:: eunit
-
-help::
- $(verbose) printf "%s\n" "" \
- "EUnit targets:" \
- " eunit Run all the EUnit tests for this project"
-
-# Plugin-specific targets.
-
-define eunit.erl
- $(call cover.erl)
- CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
- ok -> ok;
- error -> halt(2)
- end,
- CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
- halt()
-endef
-
-EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
-else
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
-endif
-else
-EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
-EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
-
-EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
- $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
-
-eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
-ifneq ($(wildcard src/ $(TEST_DIR)),)
- $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-apps-eunit: test-build
- $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
- [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
- exit $$eunit_retcode
-endif
-endif
-
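Typical ways to drive the EUnit plugin above; note that <module>_tests companions are filtered out of the default run because EUnit already picks them up when testing <module>. The config file and names below are hypothetical.

    # Options for eunit:test/2 and extra flags for the test VM.
    EUNIT_OPTS = verbose
    EUNIT_ERL_OPTS = -config test/sys.config
    #   make eunit                         # every module in ebin/ and test/
    #   make eunit t=my_module             # a single module
    #   make eunit t=my_module:some_test   # a single 0-arity test function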
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper)
-.PHONY: proper
-
-# Targets.
-
-tests:: proper
-
-define proper_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- Module = fun(M) ->
- [true] =:= lists:usort([
- case atom_to_list(F) of
- "prop_" ++ _ ->
- io:format("Testing ~p:~p/0~n", [M, F]),
- proper:quickcheck(M:F(), nocolors);
- _ ->
- true
- end
- || {F, 0} <- M:module_info(exports)])
- end,
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]);
- module -> Module($(2));
- function -> proper:quickcheck($(2), nocolors)
- end,
- CoverExport("$(COVER_DATA_DIR)/proper.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-proper: test-build cover-data-dir
- $(verbose) $(call erlang,$(call proper_check.erl,module,$(t)))
-else
-proper: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)()))
-endif
-else
-proper: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
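The PropEr plugin above only hooks into `tests` when proper is a dependency, and it discovers properties by scanning module_info(exports) for 0-arity prop_* functions. A usage sketch with hypothetical names:

    TEST_DEPS += proper
    #   make proper                            # every prop_*/0 in ebin/ and test/
    #   make proper t=prop_my_queue            # every property in one module
    #   make proper t=prop_my_queue:prop_fifo  # a single property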
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Verbosity.
-
-proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
-proto_verbose = $(proto_verbose_$(V))
-
-# Core targets.
-
-ifneq ($(wildcard src/),)
-ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
-PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
-ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
-
-ifeq ($(PROTO_FILES),)
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
- $(verbose) :
-else
-# Rebuild proto files when the Makefile changes.
-# We exclude $(PROJECT).d to avoid a circular dependency.
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(PROTO_FILES); \
- fi
- $(verbose) touch $@
-
-$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
-endif
-
-ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
-define compile_proto.erl
- [begin
- protobuffs_compile:generate_source(F, [
- {output_include_dir, "./include"},
- {output_src_dir, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-else
-define compile_proto.erl
- [begin
- gpb_compile:file(F, [
- {include_as_lib, true},
- {module_name_suffix, "_pb"},
- {o_hrl, "./include"},
- {o_erl, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-endif
-
-ifneq ($(PROTO_FILES),)
-$(PROJECT).d:: $(PROTO_FILES)
- $(verbose) mkdir -p ebin/ include/
- $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
-endif
-endif
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
-
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
-endif
-
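Two Makefile-side knobs for the relx plugin above that are easy to miss, shown as a sketch; the release itself is still described by relx.config.

    # Skip the tarball during development and relocate the output directory.
    RELX_TAR = 0
    RELX_OUTPUT_DIR = _build/rel
    #   make rel           # builds the release from $(RELX_CONFIG)
    #   make run           # builds it and boots it in a console
    #   RELOAD=1 make rel  # pings the running release and hot-loads changed modules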
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
- " shell Run an erlang shell with SHELL_OPTS or reasonable default"
-
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
-
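A sketch of the shell plugin above with one extra tool and a start-up command; observer_cli is only an example of a SHELL_DEPS entry (it is pinned elsewhere in this tree), and the -eval expression is a hypothetical convenience.

    # Extra tools available only in the interactive shell, not shipped in releases.
    SHELL_DEPS = observer_cli
    SHELL_OPTS = -sname dev -eval 'application:ensure_all_started($(PROJECT))'
    #   make shell   # builds SHELL_DEPS if needed, then starts the Erlang shell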
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
- "ReST sources and 'conf.py' file are expected in directory pointed by" \
- "SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists formats to build (only" \
- "'html' format is generated by default); target directory can be specified by" \
- 'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
- "Additional Sphinx options can be set in SPHINX_OPTS."
-
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
-
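A small sketch for the Sphinx plugin above, assuming a doc/conf.py already exists; the output path is hypothetical.

    # Build HTML and man pages instead of HTML only.
    SPHINX_FORMATS = html man
    # Redirect the HTML output; other formats keep their default directories.
    sphinx_html_output = priv/www/doc
    # Extra flags passed to sphinx-build, e.g. treat warnings as errors.
    SPHINX_OPTS = -W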
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
-.PHONY: triq
-
-# Targets.
-
-tests:: triq
-
-define triq_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
- module -> triq:check($(2));
- function -> triq:check($(2))
- end,
- CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-triq: test-build cover-data-dir
- $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
-else
-triq: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
-endif
-else
-triq: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
- ' xref Run Xrefr using $$XREF_CONFIG as config file if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-COVER_REPORT_DIR ?= cover
-COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
-
-ifdef COVER
-COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
-COVER_DEPS ?=
-endif
-
-# Code coverage for Common Test.
-
-ifdef COVER
-ifdef CT_RUN
-ifneq ($(wildcard $(TEST_DIR)),)
-test-build:: $(TEST_DIR)/ct.cover.spec
-
-$(TEST_DIR)/ct.cover.spec: cover-data-dir
- $(gen_verbose) printf "%s\n" \
- "{incl_app, '$(PROJECT)', details}." \
- "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
- $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
- $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
-
-CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
-endif
-endif
-endif
-
-# Code coverage for other tools.
-
-ifdef COVER
-define cover.erl
- CoverSetup = fun() ->
- Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
- $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
- $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
- end
- end || Dir <- Dirs]
- end,
- CoverExport = fun(Filename) -> cover:export(Filename) end,
-endef
-else
-define cover.erl
- CoverSetup = fun() -> ok end,
- CoverExport = fun(_) -> ok end,
-endef
-endif
-
-# Core targets
-
-ifdef COVER
-ifneq ($(COVER_REPORT_DIR),)
-tests::
- $(verbose) $(MAKE) --no-print-directory cover-report
-endif
-
-cover-data-dir: | $(COVER_DATA_DIR)
-
-$(COVER_DATA_DIR):
- $(verbose) mkdir -p $(COVER_DATA_DIR)
-else
-cover-data-dir:
-endif
-
-clean:: coverdata-clean
-
-ifneq ($(COVER_REPORT_DIR),)
-distclean:: cover-report-clean
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Cover targets:" \
- " cover-report Generate a HTML coverage report from previously collected" \
- " cover data." \
- " all.coverdata Merge all coverdata files into all.coverdata." \
- "" \
- "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
- "target tests additionally generates a HTML coverage report from the combined" \
- "coverdata files from each of these testing tools. HTML reports can be disabled" \
- "by setting COVER_REPORT_DIR to empty."
-
-# Plugin specific targets
-
-COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
-
-.PHONY: coverdata-clean
-coverdata-clean:
- $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
-
-# Merge all coverdata files into one.
-define cover_export.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
-endef
-
-all.coverdata: $(COVERDATA) cover-data-dir
- $(gen_verbose) $(call erlang,$(cover_export.erl))
-
-# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
-# empty if you want the coverdata files but not the HTML report.
-ifneq ($(COVER_REPORT_DIR),)
-
-.PHONY: cover-report-clean cover-report
-
-cover-report-clean:
- $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
-ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
- $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
-endif
-
-ifeq ($(COVERDATA),)
-cover-report:
-else
-
-# Modules which include eunit.hrl always contain one line without coverage
-# because eunit defines test/0 which is never called. We compensate for this.
-EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
- grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
- | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
-
-define cover_report.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- Ms = cover:imported_modules(),
- [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
- ++ ".COVER.html", [html]) || M <- Ms],
- Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
- EunitHrlMods = [$(EUNIT_HRL_MODS)],
- Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
- true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
- TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
- TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
- Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
- TotalPerc = Perc(TotalY, TotalN),
- {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
- io:format(F, "<!DOCTYPE html><html>~n"
- "<head><meta charset=\"UTF-8\">~n"
- "<title>Coverage report</title></head>~n"
- "<body>~n", []),
- io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
- io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
- [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
- "<td>~p%</td></tr>~n",
- [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
- How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
- Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
- io:format(F, "</table>~n"
- "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
- "</body></html>", [How, Date]),
- halt().
-endef
-
-cover-report:
- $(verbose) mkdir -p $(COVER_REPORT_DIR)
- $(gen_verbose) $(call erlang,$(cover_report.erl))
-
-endif
-endif # ifneq ($(COVER_REPORT_DIR),)
-
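How the cover plugin above is usually switched on, as a sketch; the dependency names are examples taken from the pinned deps further down.

    COVER = 1
    # Also instrument these dependencies, not just the project and its apps/*.
    COVER_DEPS = cowlib ranch
    #   make tests           # eunit + ct, writes *.coverdata and cover/index.html
    #   make all.coverdata   # merges every coverdata file into one
    # Setting COVER_REPORT_DIR to empty keeps the coverdata but skips the HTML report.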
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
-
-endif
-endif
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
-
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
-
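DEP_PLUGINS entries above come in two shapes: a bare dependency name loads <dep>/plugins.mk, while a name containing a slash loads that exact file. The first line below mirrors how the RabbitMQ applications in this tree use it; the second uses a hypothetical dependency.

    DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
    # Bare name form, loading some_dep/plugins.mk:
    DEP_PLUGINS += some_dep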
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are always included,
-# regardless of the type of dependencies requested.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
-
-# Allow fetch-deps to be used with $(DEP_TYPES) to fetch multiple types of
-# dependencies with a single target.
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
-
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
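The query-* targets above emit one line per dependency, with the columns controlled by QUERY; each word must resolve to a query_<word> macro (the four in the default above, name, fetch_method, repo and version, ship with erlang.mk). A quick sketch:

    # Only print the dependency name and its repository URL.
    QUERY = name repo
    #   make query-deps        # recursive, covers DEPS and BUILD_DEPS
    #   make query-test-deps   # same report for TEST_DEPS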
diff --git a/deps/rabbitmq_web_mqtt_examples/priv/echo.html b/deps/rabbitmq_web_mqtt_examples/priv/echo.html
index 18ccb324ae..fc2f9e1b8f 100644
--- a/deps/rabbitmq_web_mqtt_examples/priv/echo.html
+++ b/deps/rabbitmq_web_mqtt_examples/priv/echo.html
@@ -45,7 +45,7 @@
<link href="main.css" rel="stylesheet" type="text/css"/>
</head>
<body lang="en">
- <h1>RabbitMQ Web MQTT Example</h1>
+ <h1><a href="index.html">RabbitMQ Web MQTT Examples</a> > Echo Server</h1>
<div id="first" class="box">
<h2>Received</h2>
diff --git a/deps/rabbitmq_web_mqtt_examples/rabbitmq-components.mk b/deps/rabbitmq_web_mqtt_examples/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/rabbitmq_web_mqtt_examples/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Define default goal to `all` because this file defines some targets
-# before the inclusion of erlang.mk leading to the wrong target becoming
-# the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
-
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to checkout branches which match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch or fallback to `stable` or `master` whichever was the
-# base of the topic branch.
-
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
-
-# Third-party dependencies version pinning.
-#
-# We do that in this file, which is copied to all projects, to ensure
-# they all use the same versions. It avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# Erlang.mk does not rebuild dependencies after they have been compiled
-# once, except for those listed in the `$(FORCE_REBUILD)` variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this eases
-# the work on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project
-# repository URL, if it's a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name are replaced by the
-#   target's properties:
-# eg. rabbitmq-common is replaced by rabbitmq-codegen
-# eg. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fall back to the
-# RabbitMQ upstream repository on GitHub.
-
-# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following pattern:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
-
-# Macro to replace both the project's name (eg. "rabbit_common") and
-# repository name (eg. "rabbitmq-common") by the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespaces in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level's one.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is mere coincidence that we found a
-# `rabbitmq-components.mk` in one of the upper directories.
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
diff --git a/deps/rabbitmq_web_mqtt_examples/src/rabbit_web_mqtt_examples_app.erl b/deps/rabbitmq_web_mqtt_examples/src/rabbit_web_mqtt_examples_app.erl
index d0e676fd60..3f6930416a 100644
--- a/deps/rabbitmq_web_mqtt_examples/src/rabbit_web_mqtt_examples_app.erl
+++ b/deps/rabbitmq_web_mqtt_examples/src/rabbit_web_mqtt_examples_app.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_web_mqtt_examples_app).
@@ -11,7 +11,7 @@
-export([start/2,stop/1]).
%% Dummy supervisor - see Ulf Wiger's comment at
-%% http://erlang.2086793.n4.nabble.com/initializing-library-applications-without-processes-td2094473.html
+%% http://erlang.org/pipermail/erlang-questions/2010-April/050508.html
-behaviour(supervisor).
-export([init/1]).
diff --git a/deps/rabbitmq_web_stomp/BUILD.bazel b/deps/rabbitmq_web_stomp/BUILD.bazel
new file mode 100644
index 0000000000..3ebf5515e3
--- /dev/null
+++ b/deps/rabbitmq_web_stomp/BUILD.bazel
@@ -0,0 +1,119 @@
+load("@bazel-erlang//:bazel_erlang_lib.bzl", "erlc")
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze")
+load(
+ "//:rabbitmq.bzl",
+ "RABBITMQ_DIALYZER_OPTS",
+ "RABBITMQ_TEST_ERLC_OPTS",
+ "assert_suites",
+ "broker_for_integration_suites",
+ "rabbitmq_integration_suite",
+ "rabbitmq_lib",
+ "rabbitmq_suite",
+)
+
+APP_NAME = "rabbitmq_web_stomp"
+
+APP_DESCRIPTION = "RabbitMQ STOMP-over-WebSockets support"
+
+APP_MODULE = "rabbit_web_stomp_app"
+
+APP_ENV = """[
+ {tcp_config, [{port, 15674}]},
+ {ssl_config, []},
+ {num_tcp_acceptors, 10},
+ {num_ssl_acceptors, 10},
+ {cowboy_opts, []},
+ {proxy_protocol, false},
+ {ws_frame, text},
+ {use_http_auth, false}
+ ]"""
+
+BUILD_DEPS = [
+ "//deps/amqp_client:bazel_erlang_lib",
+ "@ranch//:bazel_erlang_lib",
+]
+
+DEPS = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+ "//deps/rabbitmq_stomp:bazel_erlang_lib",
+ "@cowboy//:bazel_erlang_lib",
+]
+
+RUNTIME_DEPS = [
+ "//deps/rabbit:bazel_erlang_lib",
+]
+
+rabbitmq_lib(
+ app_description = APP_DESCRIPTION,
+ app_env = APP_ENV,
+ app_module = APP_MODULE,
+ app_name = APP_NAME,
+ build_deps = BUILD_DEPS,
+ runtime_deps = RUNTIME_DEPS,
+ deps = DEPS,
+)
+
+xref(tags = ["xref"])
+
+dialyze(
+ dialyzer_opts = RABBITMQ_DIALYZER_OPTS,
+ plt = "//:base_plt",
+ tags = ["dialyze"],
+)
+
+broker_for_integration_suites()
+
+PACKAGE = "deps/rabbitmq_web_stomp"
+
+erlc(
+ name = "test_util",
+ testonly = True,
+ srcs = glob(["test/src/*.erl"]),
+ dest = "test",
+ erlc_opts = RABBITMQ_TEST_ERLC_OPTS,
+)
+
+suites = [
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "amqp_stomp_SUITE",
+ additional_beam = [
+ ":test_util",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "config_schema_SUITE",
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "cowboy_websocket_SUITE",
+ additional_beam = [
+ ":test_util",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "proxy_protocol_SUITE",
+ additional_beam = [
+ ":test_util",
+ ],
+ ),
+ rabbitmq_integration_suite(
+ PACKAGE,
+ name = "raw_websocket_SUITE",
+ additional_beam = [
+ ":test_util",
+ ],
+ ),
+ rabbitmq_suite(
+ name = "unit_SUITE",
+ size = "small",
+ ),
+]
+
+assert_suites(
+ suites,
+ glob(["test/**/*_SUITE.erl"]),
+)
diff --git a/deps/rabbitmq_web_stomp/CONTRIBUTING.md b/deps/rabbitmq_web_stomp/CONTRIBUTING.md
index 23a92fef9c..9722e973fb 100644
--- a/deps/rabbitmq_web_stomp/CONTRIBUTING.md
+++ b/deps/rabbitmq_web_stomp/CONTRIBUTING.md
@@ -13,7 +13,7 @@ The process is fairly standard:
* Create a branch with a descriptive name in the relevant repositories
* Make your changes, run tests, commit with a [descriptive message](https://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork
* Submit pull requests with an explanation of what has been changed and **why**
- * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below)
+ * Submit a filled out and signed [Contributor Agreement](https://cla.pivotal.io/) if needed (see below)
* Be patient. We will get to your pull request eventually
If what you are going to work on is a substantial change, please first ask the core team
@@ -28,7 +28,7 @@ See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
## Contributor Agreement
If you want to contribute a non-trivial change, please submit a signed copy of our
-[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time
+[Contributor Agreement](https://cla.pivotal.io/) around the time
you submit your pull request. This will make it much easier (in some cases, possible)
for the RabbitMQ team at Pivotal to merge your contribution.
diff --git a/deps/rabbitmq_web_stomp/Makefile b/deps/rabbitmq_web_stomp/Makefile
index 90724d9442..c249da064d 100644
--- a/deps/rabbitmq_web_stomp/Makefile
+++ b/deps/rabbitmq_web_stomp/Makefile
@@ -35,5 +35,5 @@ DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
ERLANG_MK_COMMIT = rabbitmq-tmp
-include rabbitmq-components.mk
-include erlang.mk
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
diff --git a/deps/rabbitmq_web_stomp/erlang.mk b/deps/rabbitmq_web_stomp/erlang.mk
deleted file mode 100644
index fce4be0b0a..0000000000
--- a/deps/rabbitmq_web_stomp/erlang.mk
+++ /dev/null
@@ -1,7808 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
-ERLANG_MK_WITHOUT =
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
-
-# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
-
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
-
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simplify writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating Github-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = erlang library for JSON Web Token
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple non intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = redis erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += esh_mk
-pkg_esh_mk_name = esh_mk
-pkg_esh_mk_description = esh template engine plugin for erlang.mk
-pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
-pkg_esh_mk_fetch = git
-pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
-pkg_esh_mk_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = TOML language erlang parser
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = http://fixprotocol.org/ implementation.
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - an Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = The guards and for Erlang parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
-pkg_gun_homepage = http://ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang irc client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding, decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library, written in C, for efficiently decoding and encoding JSON.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-Erlang, open-source implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = erlang driver for rethinkdb
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = library to handle mimetypes
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang Oauth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transformer for using new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between record and proplist
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = automatic Erlang node discovery via redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = pipelined erlang redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang Rest Client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy as nif for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an id generator for message service.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elastic Search
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX style broken HTML parser in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = transit format for erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU based collation Erlang module
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = a simple erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler svn repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based yaml loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = ZAB protocol implemented in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
-
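As a usage sketch (assuming this erlang.mk is included by a project Makefile), the search target above filters the package index by a case-insensitive match on name and description, or prints everything when no query is given:

    make search q=pool    # list only packages whose name or description contains "pool"
    make search           # print the entire package index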
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
-
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
-
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# They both use the core_dep_plugin macro.
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
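For illustration only (my_dep is a hypothetical dependency name), a project Makefile could hook early plugins in either of the two forms the foreach above handles:

    DEP_EARLY_PLUGINS = my_dep                      # includes $(DEPS_DIR)/my_dep/early-plugins.mk
    DEP_EARLY_PLUGINS = my_dep/mk/early-plugins.mk  # includes that exact file under $(DEPS_DIR)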
-
-# Query functions.
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
-
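As a rough sketch of how these functions resolve an entry from the package index above (using the ranch package defined earlier, and assuming the project does not override it with its own dep_ranch), one could print:

    $(info $(call query_fetch_method,ranch))  # git, taken from pkg_ranch_fetch
    $(info $(call query_repo,ranch))          # https://github.com/ninenines/ranch
    $(info $(call query_version,ranch))       # 1.2.1, taken from pkg_ranch_commit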
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
-
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly we don't want to include it here,
-# otherwise it'll be treated both as an app and as the top-level project.
-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
-
-export NO_AUTOPATCH
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
-
-# Optimization: don't recompile deps unless truly necessary.
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create an ebin directory for all apps to make sure Erlang recognizes them
-# as proper OTP applications when using -include_lib. This is a temporary
-# fix; a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
-
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies after they have been compiled
-# once. A developer working on the top-level project and some of its
-# dependencies at the same time may want to change this behavior.
-# There are two solutions:
-# 1. Set `FULL=1` so that all dependencies are visited and
-# recursively recompiled if necessary.
-# 2. Set `FORCE_REBUILD=` to the specific list of dependencies that
-# should be recompiled (instead of the whole set).
-
-FORCE_REBUILD ?=
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
-
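As a usage sketch for the two options described in the comment above:

    make deps FULL=1               # visit every dependency again and recompile where necessary
    make deps FORCE_REBUILD=ranch  # force only the listed dependencies (here ranch) through make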
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
-
-# Deps related targets.
-
-# @todo rename GNUmakefile and makefile into Makefile first, if they exist
-# While the Makefile could also be named GNUmakefile or makefile,
-# in practice only Makefile has been needed so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
-# if given. Do it for all 3 possible Makefile file names.
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
-
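The include-rewriting step above can be disabled entirely; a minimal sketch of doing so from a project Makefile:

    NO_AUTOPATCH_ERLANG_MK = 1  # leave dependencies' own "include erlang.mk" lines untouched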
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-# Hex only has a package version. No need to look in the Erlang.mk packages.
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
-
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility purposes with older Erlang.mk configuration.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
-
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
-
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- Output0 = [
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ],
- Output = case "é" of
- [233] -> unicode:characters_to_binary(Output0);
- _ -> Output0
- end,
- ok = file:write_file("$(1)", Output),
- halt()
-endef
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
-		echo "No {modules, []} entry found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
-
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
- @:
-
-test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
-test_erlc_verbose_2 = set -x;
-test_erlc_verbose = $(test_erlc_verbose_$(V))
-
-define compile_test_erl
- $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(1)
-endef
-
-ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
-$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
- $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want to fail due to
-# warnings when used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
- " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
- " list-templates List available templates"
-
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
-
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
-
-list-templates:
- $(verbose) @echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code. The "gcc" MSYS2 package also doesn't.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
- "The CI_OTP variable must be defined with the Erlang versions" \
- "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
-
-# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifdef CONCUERROR_TESTS
-
-.PHONY: concuerror distclean-concuerror
-
-# Configuration
-
-CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
-CONCUERROR_OPTS ?=
-
-# Core targets.
-
-check:: concuerror
-
-ifndef KEEP_LOGS
-distclean:: distclean-concuerror
-endif
-
-# Plugin-specific targets.
-
-$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
- $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
- $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
-
-$(CONCUERROR_LOGS_DIR):
- $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
-
-define concuerror_html_report
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="utf-8">
-<title>Concuerror HTML report</title>
-</head>
-<body>
-<h1>Concuerror HTML report</h1>
-<p>Generated on $(concuerror_date)</p>
-<ul>
-$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
-</ul>
-</body>
-</html>
-endef
-
-concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
- $(eval concuerror_date := $(shell date))
- $(eval concuerror_targets := $^)
- $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
-
-define concuerror_target
-.PHONY: concuerror-$1-$2
-
-concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
- $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
- --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
- -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
- $$(CONCUERROR_OPTS) -m $1 -t $2
-endef
-
-$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
-
-distclean-concuerror:
- $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
-
-endif
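
For illustration (the module and test names below are hypothetical), CONCUERROR_TESTS is a space-separated list of module:test pairs; each pair gets its own target and log file:

# In the project Makefile:
CONCUERROR_TESTS = my_mod:interleavings my_mod:no_deadlock
# Running `make concuerror` then builds concuerror-my_mod-interleavings and
# concuerror-my_mod-no_deadlock, writes logs/concuerror-my_mod-interleavings.txt
# and logs/concuerror-my_mod-no_deadlock.txt, and generates logs/concuerror.html.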
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ct apps-ct distclean-ct
-
-# Configuration.
-
-CT_OPTS ?=
-
-ifneq ($(wildcard $(TEST_DIR)),)
-ifndef CT_SUITES
-CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
-endif
-endif
-CT_SUITES ?=
-CT_LOGS_DIR ?= $(CURDIR)/logs
-
-# Core targets.
-
-tests:: ct
-
-ifndef KEEP_LOGS
-distclean:: distclean-ct
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Common_test targets:" \
- " ct Run all the common_test suites for this project" \
- "" \
- "All your common_test suites have their associated targets." \
-	"A suite named http_SUITE can be run using the ct-http target."
-
-# Plugin-specific targets.
-
-CT_RUN = ct_run \
- -no_auto_compile \
- -noinput \
- -pa $(CURDIR)/ebin $(TEST_DIR) \
- -dir $(TEST_DIR) \
- -logdir $(CT_LOGS_DIR)
-
-ifeq ($(CT_SUITES),)
-ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
-else
-# We do not run tests if we are in an apps/* directory with no test directory.
-ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
-ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
-endif
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-define ct_app_target
-apps-ct-$1: test-build
- $$(MAKE) -C $1 ct IS_APP=1
-endef
-
-$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
-
-apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
-endif
-
-ifdef t
-ifeq (,$(findstring :,$t))
-CT_EXTRA = -group $t
-else
-t_words = $(subst :, ,$t)
-CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
-endif
-else
-ifdef c
-CT_EXTRA = -case $c
-else
-CT_EXTRA =
-endif
-endif
-
-define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
-endef
-
-$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
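
As a sketch of how the t and c variables above are used (the group and case names are made up; http_SUITE is the example suite from the help text), they translate into common_test's -group and -case flags for a single suite target:

make ct-http t=admin_group           # run only the admin_group group of http_SUITE
make ct-http t=admin_group:add_user  # run a single case within that group
make ct-http c=add_user              # run a single case, regardless of group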
-
-distclean-ct:
- $(gen_verbose) rm -rf $(CT_LOGS_DIR)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: plt distclean-plt dialyze
-
-# Configuration.
-
-DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
-export DIALYZER_PLT
-
-PLT_APPS ?=
-DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-DIALYZER_PLT_OPTS ?=
-
-# Core targets.
-
-check:: dialyze
-
-distclean:: distclean-plt
-
-help::
- $(verbose) printf "%s\n" "" \
- "Dialyzer targets:" \
- " plt Build a PLT file for this project" \
- " dialyze Analyze the project using Dialyzer"
-
-# Plugin-specific targets.
-
-define filter_opts.erl
- Opts = init:get_plain_arguments(),
- {Filtered, _} = lists:foldl(fun
- (O, {Os, true}) -> {[O|Os], false};
- (O = "-D", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-I", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-pa", {Os, _}) -> {[O|Os], true};
- (_, Acc) -> Acc
- end, {[], false}, Opts),
- io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
- halt().
-endef
-
-# DIALYZER_PLT is a variable understood directly by Dialyzer.
-#
-# We append the path to erts at the end of the PLT. This works
-# because the PLT file is in the external term format and the
-# function binary_to_term/1 ignores any trailing data.
-$(DIALYZER_PLT): deps app
- $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
- while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
- $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
- erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
- $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
-
-plt: $(DIALYZER_PLT)
-
-distclean-plt:
- $(gen_verbose) rm -f $(DIALYZER_PLT)
-
-ifneq ($(wildcard $(DIALYZER_PLT)),)
-dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
- $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
- grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
- rm $(DIALYZER_PLT); \
- $(MAKE) plt; \
- fi
-else
-dialyze: $(DIALYZER_PLT)
-endif
- $(verbose) dialyzer --no_native `$(ERL) \
- -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
- -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
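
Since the erts path is appended as plain trailing text, it can be inspected directly; this is the same check the dialyze target performs with tail and grep before deciding whether to rebuild the PLT (the project name below is hypothetical):

tail -n1 .my_project.plt
# Prints the erts lib dir recorded when the PLT was built. If it no longer
# matches code:lib_dir(erts) of the current Erlang/OTP, `make dialyze` deletes
# the PLT and rebuilds it via `make plt`.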
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
- " escript Build an executable escript archive" \
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
- $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: eunit apps-eunit
-
-# Configuration
-
-EUNIT_OPTS ?=
-EUNIT_ERL_OPTS ?=
-
-# Core targets.
-
-tests:: eunit
-
-help::
- $(verbose) printf "%s\n" "" \
- "EUnit targets:" \
- " eunit Run all the EUnit tests for this project"
-
-# Plugin-specific targets.
-
-define eunit.erl
- $(call cover.erl)
- CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
- ok -> ok;
- error -> halt(2)
- end,
- CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
- halt()
-endef
-
-EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
-else
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
-endif
-else
-EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
-EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
-
-EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
- $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
-
-eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
-ifneq ($(wildcard src/ $(TEST_DIR)),)
- $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-apps-eunit: test-build
- $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
- [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
- exit $$eunit_retcode
-endif
-endif
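
A similar shorthand exists for EUnit; in this hypothetical sketch, a value of t without a colon selects a module and one with a colon selects a single arity-0 test or generator function:

make eunit t=my_mod             # eunit:test(['my_mod'], ...)
make eunit t=my_mod:start_test  # eunit:test(fun my_mod:start_test/0, ...)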
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper)
-.PHONY: proper
-
-# Targets.
-
-tests:: proper
-
-define proper_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- Module = fun(M) ->
- [true] =:= lists:usort([
- case atom_to_list(F) of
- "prop_" ++ _ ->
- io:format("Testing ~p:~p/0~n", [M, F]),
- proper:quickcheck(M:F(), nocolors);
- _ ->
- true
- end
- || {F, 0} <- M:module_info(exports)])
- end,
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]);
- module -> Module($(2));
- function -> proper:quickcheck($(2), nocolors)
- end,
- CoverExport("$(COVER_DATA_DIR)/proper.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-proper: test-build cover-data-dir
- $(verbose) $(call erlang,$(call proper_check.erl,module,$(t)))
-else
-proper: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)()))
-endif
-else
-proper: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Verbosity.
-
-proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
-proto_verbose = $(proto_verbose_$(V))
-
-# Core targets.
-
-ifneq ($(wildcard src/),)
-ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
-PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
-ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
-
-ifeq ($(PROTO_FILES),)
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
- $(verbose) :
-else
-# Rebuild proto files when the Makefile changes.
-# We exclude $(PROJECT).d to avoid a circular dependency.
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(PROTO_FILES); \
- fi
- $(verbose) touch $@
-
-$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
-endif
-
-ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
-define compile_proto.erl
- [begin
- protobuffs_compile:generate_source(F, [
- {output_include_dir, "./include"},
- {output_src_dir, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-else
-define compile_proto.erl
- [begin
- gpb_compile:file(F, [
- {include_as_lib, true},
- {module_name_suffix, "_pb"},
- {o_hrl, "./include"},
- {o_erl, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-endif
-
-ifneq ($(PROTO_FILES),)
-$(PROJECT).d:: $(PROTO_FILES)
- $(verbose) mkdir -p ebin/ include/
- $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
-endif
-endif
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
-
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
- " shell Run an erlang shell with SHELL_OPTS or reasonable default"
-
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
-
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
-	"ReST sources and 'conf.py' file are expected in the directory pointed to by" \
- "SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists formats to build (only" \
- "'html' format is generated by default); target directory can be specified by" \
- 'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
- "Additional Sphinx options can be set in SPHINX_OPTS."
-
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
-
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
-.PHONY: triq
-
-# Targets.
-
-tests:: triq
-
-define triq_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
- module -> triq:check($(2));
- function -> triq:check($(2))
- end,
- CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-triq: test-build cover-data-dir
- $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
-else
-triq: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
-endif
-else
-triq: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
- ' xref Run Xrefr using $$XREF_CONFIG as config file if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-COVER_REPORT_DIR ?= cover
-COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
-
-ifdef COVER
-COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
-COVER_DEPS ?=
-endif
-
-# Code coverage for Common Test.
-
-ifdef COVER
-ifdef CT_RUN
-ifneq ($(wildcard $(TEST_DIR)),)
-test-build:: $(TEST_DIR)/ct.cover.spec
-
-$(TEST_DIR)/ct.cover.spec: cover-data-dir
- $(gen_verbose) printf "%s\n" \
- "{incl_app, '$(PROJECT)', details}." \
- "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
- $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
- $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
-
-CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
-endif
-endif
-endif
-
-# Code coverage for other tools.
-
-ifdef COVER
-define cover.erl
- CoverSetup = fun() ->
- Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
- $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
- $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
- end
- end || Dir <- Dirs]
- end,
- CoverExport = fun(Filename) -> cover:export(Filename) end,
-endef
-else
-define cover.erl
- CoverSetup = fun() -> ok end,
- CoverExport = fun(_) -> ok end,
-endef
-endif
-
-# Core targets
-
-ifdef COVER
-ifneq ($(COVER_REPORT_DIR),)
-tests::
- $(verbose) $(MAKE) --no-print-directory cover-report
-endif
-
-cover-data-dir: | $(COVER_DATA_DIR)
-
-$(COVER_DATA_DIR):
- $(verbose) mkdir -p $(COVER_DATA_DIR)
-else
-cover-data-dir:
-endif
-
-clean:: coverdata-clean
-
-ifneq ($(COVER_REPORT_DIR),)
-distclean:: cover-report-clean
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Cover targets:" \
-	"  cover-report  Generate an HTML coverage report from previously collected" \
- " cover data." \
- " all.coverdata Merge all coverdata files into all.coverdata." \
- "" \
-	"If COVER=1 is set, coverage data is generated by the eunit and ct targets. The" \
-	"tests target additionally generates an HTML coverage report from the combined" \
-	"coverdata files from each of these testing tools. HTML reports can be disabled" \
- "by setting COVER_REPORT_DIR to empty."
-
-# Plugin specific targets
-
-COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
-
-.PHONY: coverdata-clean
-coverdata-clean:
- $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
-
-# Merge all coverdata files into one.
-define cover_export.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
-endef
-
-all.coverdata: $(COVERDATA) cover-data-dir
- $(gen_verbose) $(call erlang,$(cover_export.erl))
-
-# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
-# empty if you want the coverdata files but not the HTML report.
-ifneq ($(COVER_REPORT_DIR),)
-
-.PHONY: cover-report-clean cover-report
-
-cover-report-clean:
- $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
-ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
- $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
-endif
-
-ifeq ($(COVERDATA),)
-cover-report:
-else
-
-# Modules which include eunit.hrl always contain one line without coverage
-# because eunit defines test/0 which is never called. We compensate for this.
-EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
- grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
- | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
-
-define cover_report.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- Ms = cover:imported_modules(),
- [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
- ++ ".COVER.html", [html]) || M <- Ms],
- Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
- EunitHrlMods = [$(EUNIT_HRL_MODS)],
- Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
- true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
- TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
- TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
- Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
- TotalPerc = Perc(TotalY, TotalN),
- {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
- io:format(F, "<!DOCTYPE html><html>~n"
- "<head><meta charset=\"UTF-8\">~n"
- "<title>Coverage report</title></head>~n"
- "<body>~n", []),
- io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
- io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
- [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
- "<td>~p%</td></tr>~n",
- [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
- How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
- Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
- io:format(F, "</table>~n"
- "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
- "</body></html>", [How, Date]),
- halt().
-endef
-
-cover-report:
- $(verbose) mkdir -p $(COVER_REPORT_DIR)
- $(gen_verbose) $(call erlang,$(cover_report.erl))
-
-endif
-endif # ifneq ($(COVER_REPORT_DIR),)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
-
-endif
-endif
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
-
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
-
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are also included no
-# matter the type of requested dependencies.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
-
-# Allow fetch-deps and $(DEP_TYPES) to be used to fetch multiple types of
-# dependencies with a single target.
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
-
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
diff --git a/deps/rabbitmq_web_stomp/rabbitmq-components.mk b/deps/rabbitmq_web_stomp/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/rabbitmq_web_stomp/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Define the default goal as `all` because this file defines some targets
-# before the inclusion of erlang.mk, leading to the wrong target becoming
-# the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
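
For example (the describe output below is hypothetical), the sed pipeline above rewrites git describe output as follows:

# git describe output      -> PROJECT_VERSION
# v3.9.11                  -> 3.9.11
# v3.9.11-8-gabc1234       -> 3.9.11+8.gabc1234
# (describe fails)         -> rabbitmq_v0_0_0 -> 0.0.0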
-
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to check out branches that match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch or fall back to `stable` or `master`, whichever was the
-# base of the topic branch.
-
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
-
-# Third-party dependencies version pinning.
-#
-# We do that in this file, which is copied in all projects, to ensure
-# all projects use the same versions. It avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# Erlang.mk does not rebuild dependencies by default once they have
-# been compiled, except for those listed in the `$(FORCE_REBUILD)`
-# variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this eases
-# working on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project
-# repository URL, if it's a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name are replaced by the
-# target's properties:
-# eg. rabbitmq-common is replaced by rabbitmq-codegen
-# eg. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fall back to the RabbitMQ
-# upstream on GitHub.
-
-# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following pattern:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
-
-# Macro to replace both the project's name (eg. "rabbit_common") and
-# repository name (eg. "rabbitmq-common") by the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespaces in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
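
A hypothetical example of the URL rewriting above, assuming the top-level project is rabbit_common and its origin remote points at a fork named myfork:

# RABBITMQ_CURRENT_FETCH_URL = https://github.com/myfork/rabbitmq-common.git
# $(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),rabbitmq_codegen)
#   -> https://github.com/myfork/rabbitmq-codegen.git
# $(call dep_rmq_commits,rabbitmq_codegen)
#   -> $(current_rmq_ref) $(base_rmq_ref) master   (refs tried in that order)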
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level's one.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is a coincidence that we found a
-# `rabbitmq-components.mk` in upper directories.
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
diff --git a/deps/rabbitmq_web_stomp/src/rabbit_web_stomp_app.erl b/deps/rabbitmq_web_stomp/src/rabbit_web_stomp_app.erl
index 61441f077a..ee5324acbd 100644
--- a/deps/rabbitmq_web_stomp/src/rabbit_web_stomp_app.erl
+++ b/deps/rabbitmq_web_stomp/src/rabbit_web_stomp_app.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_web_stomp_app).
diff --git a/deps/rabbitmq_web_stomp/src/rabbit_web_stomp_connection_sup.erl b/deps/rabbitmq_web_stomp/src/rabbit_web_stomp_connection_sup.erl
index 0703681482..771653fa84 100644
--- a/deps/rabbitmq_web_stomp/src/rabbit_web_stomp_connection_sup.erl
+++ b/deps/rabbitmq_web_stomp/src/rabbit_web_stomp_connection_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_web_stomp_connection_sup).
@@ -12,12 +12,12 @@
-include_lib("rabbit_common/include/rabbit.hrl").
--export([start_link/4, start_keepalive_link/0]).
+-export([start_link/3, start_keepalive_link/0]).
-export([init/1]).
%%----------------------------------------------------------------------------
-start_link(Ref, Sock, Transport, CowboyOpts0) ->
+start_link(Ref, Transport, CowboyOpts0) ->
{ok, SupPid} = supervisor2:start_link(?MODULE, []),
{ok, KeepaliveSup} = supervisor2:start_child(
SupPid,
@@ -29,8 +29,8 @@ start_link(Ref, Sock, Transport, CowboyOpts0) ->
%% then have the middleware rabbit_web_mqtt_middleware place it
%% in the initial handler state.
Env = maps:get(env, CowboyOpts0),
- CowboyOpts = CowboyOpts0#{env => Env#{keepalive_sup => KeepaliveSup,
- socket => Sock}},
+ CowboyOpts = CowboyOpts0#{env => Env#{keepalive_sup => KeepaliveSup},
+ stream_handlers => [rabbit_web_stomp_stream_handler, cowboy_stream_h]},
Protocol = case Transport of
ranch_tcp -> cowboy_clear;
ranch_ssl -> cowboy_tls
@@ -38,7 +38,7 @@ start_link(Ref, Sock, Transport, CowboyOpts0) ->
{ok, ReaderPid} = supervisor2:start_child(
SupPid,
{Protocol,
- {Protocol, start_link, [Ref, Sock, Transport, CowboyOpts]},
+ {Protocol, start_link, [Ref, Transport, CowboyOpts]},
intrinsic, ?WORKER_WAIT, worker, [Protocol]}),
{ok, SupPid, ReaderPid}.
diff --git a/deps/rabbitmq_web_stomp/src/rabbit_web_stomp_handler.erl b/deps/rabbitmq_web_stomp/src/rabbit_web_stomp_handler.erl
index 6f417f31a2..ebec3da671 100644
--- a/deps/rabbitmq_web_stomp/src/rabbit_web_stomp_handler.erl
+++ b/deps/rabbitmq_web_stomp/src/rabbit_web_stomp_handler.erl
@@ -2,11 +2,12 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_web_stomp_handler).
-behaviour(cowboy_websocket).
+-behaviour(cowboy_sub_protocol).
-include_lib("rabbitmq_stomp/include/rabbit_stomp.hrl").
-include_lib("rabbitmq_stomp/include/rabbit_stomp_frame.hrl").
@@ -22,6 +23,11 @@
]).
-export([close_connection/2]).
+%% cowboy_sub_protocol
+-export([upgrade/4,
+ upgrade/5,
+ takeover/7]).
+
-record(state, {
frame_type,
heartbeat_mode,
@@ -38,17 +44,30 @@
connection
}).
+%% cowboy_sub_protocol
+upgrade(Req, Env, Handler, HandlerState) ->
+ upgrade(Req, Env, Handler, HandlerState, #{}).
+
+upgrade(Req, Env, Handler, HandlerState, Opts) ->
+ cowboy_websocket:upgrade(Req, Env, Handler, HandlerState, Opts).
+
+takeover(Parent, Ref, Socket, Transport, Opts, Buffer, {Handler, HandlerState}) ->
+ Sock = case HandlerState#state.socket of
+ undefined ->
+ Socket;
+ ProxyInfo ->
+ {rabbit_proxy_socket, Socket, ProxyInfo}
+ end,
+ Env0 = maps:get(env, Opts, #{}),
+ Env = Env0#{socket => Sock},
+ cowboy_websocket:takeover(Parent, Ref, Socket, Transport, maps:put(env, Env, Opts), Buffer,
+ {Handler, HandlerState#state{socket = Sock}}).
+
%% Websocket.
init(Req0, Opts) ->
{PeerAddr, _PeerPort} = maps:get(peer, Req0),
{_, KeepaliveSup} = lists:keyfind(keepalive_sup, 1, Opts),
- {_, Sock0} = lists:keyfind(socket, 1, Opts),
- Sock = case maps:get(proxy_header, Req0, undefined) of
- undefined ->
- Sock0;
- ProxyInfo ->
- {rabbit_proxy_socket, Sock0, ProxyInfo}
- end,
+ SockInfo = maps:get(proxy_header, Req0, undefined),
Req = case cowboy_req:parse_header(<<"sec-websocket-protocol">>, Req0) of
undefined -> Req0;
Protocols ->
@@ -61,14 +80,14 @@ init(Req0, Opts) ->
end,
WsOpts0 = proplists:get_value(ws_opts, Opts, #{}),
WsOpts = maps:merge(#{compress => true}, WsOpts0),
- {cowboy_websocket, Req, #state{
+ {?MODULE, Req, #state{
frame_type = proplists:get_value(type, Opts, text),
heartbeat_sup = KeepaliveSup,
heartbeat = {none, none},
heartbeat_mode = heartbeat,
state = running,
conserve_resources = false,
- socket = Sock,
+ socket = SockInfo,
peername = PeerAddr,
auth_hd = cowboy_req:header(<<"authorization">>, Req)
}, WsOpts}.
@@ -164,6 +183,15 @@ websocket_info({Delivery = #'basic.deliver'{},
DeliveryCtx,
ProcState0),
{ok, State#state{ proc_state = ProcState }};
+websocket_info({Delivery = #'basic.deliver'{},
+ #amqp_msg{props = Props, payload = Payload}},
+ State=#state{ proc_state = ProcState0 }) ->
+ ProcState = rabbit_stomp_processor:send_delivery(Delivery,
+ Props,
+ Payload,
+ undefined,
+ ProcState0),
+ {ok, State#state{ proc_state = ProcState }};
websocket_info(#'basic.cancel'{consumer_tag = Ctag},
State=#state{ proc_state = ProcState0 }) ->
case rabbit_stomp_processor:cancel_consumer(Ctag, ProcState0) of
@@ -207,7 +235,7 @@ websocket_info(emit_stats, State) ->
{ok, emit_stats(State)};
websocket_info(Msg, State) ->
- rabbit_log_connection:info("Web STOMP: unexpected message ~p~n",
+ rabbit_log_connection:info("Web STOMP: unexpected message ~p",
[Msg]),
{ok, State}.
@@ -239,7 +267,7 @@ handle_data(Data, State0) ->
{[{active, false}], State1};
{error, Error0} ->
Error1 = rabbit_misc:format("~p", [Error0]),
- rabbit_log_connection:error("STOMP detected framing error '~s'~n", [Error1]),
+ rabbit_log_connection:error("STOMP detected framing error '~s'", [Error1]),
stop(State0, 1007, Error1);
Other ->
Other
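
A note on the handler changes above: rabbit_web_stomp_handler now implements the cowboy_sub_protocol behaviour in addition to cowboy_websocket. init/2 returns {?MODULE, Req, State, WsOpts} instead of {cowboy_websocket, ...}, so Cowboy routes the protocol upgrade through this module's upgrade/4,5, and the real socket only becomes available in takeover/7, where any PROXY protocol information is wrapped into {rabbit_proxy_socket, Socket, ProxyInfo}. takeover/7 is reached because the new rabbit_web_stomp_stream_handler (added further below) rewrites the switch_protocol command to point at this module. The following minimal module is only an illustrative sketch of that pattern, not part of the plugin; the name example_ws_sub_protocol and its no-op websocket callbacks are made up.

-module(example_ws_sub_protocol).
-behaviour(cowboy_websocket).
-behaviour(cowboy_sub_protocol).

-export([init/2, websocket_init/1, websocket_handle/2, websocket_info/2]).
%% cowboy_sub_protocol callbacks, plus takeover/7, which is only called once a
%% stream handler has pointed the switch_protocol command at this module.
-export([upgrade/4, upgrade/5, takeover/7]).

init(Req, State) ->
    %% Returning ?MODULE rather than cowboy_websocket makes Cowboy perform
    %% the upgrade via this module's cowboy_sub_protocol callbacks.
    {?MODULE, Req, State}.

upgrade(Req, Env, Handler, HandlerState) ->
    upgrade(Req, Env, Handler, HandlerState, #{}).

upgrade(Req, Env, Handler, HandlerState, Opts) ->
    %% Delegate the actual upgrade to the stock websocket protocol.
    cowboy_websocket:upgrade(Req, Env, Handler, HandlerState, Opts).

takeover(Parent, Ref, Socket, Transport, Opts, Buffer, {Handler, HandlerState}) ->
    %% First point where the raw socket is available; a real handler can wrap
    %% or record it here before the websocket loop starts.
    cowboy_websocket:takeover(Parent, Ref, Socket, Transport, Opts, Buffer,
                              {Handler, HandlerState}).

websocket_init(State)           -> {ok, State}.
websocket_handle(_Frame, State) -> {ok, State}.
websocket_info(_Info, State)    -> {ok, State}.
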
diff --git a/deps/rabbitmq_web_stomp/src/rabbit_web_stomp_internal_event_handler.erl b/deps/rabbitmq_web_stomp/src/rabbit_web_stomp_internal_event_handler.erl
index 5e129de049..1d0e274cd2 100644
--- a/deps/rabbitmq_web_stomp/src/rabbit_web_stomp_internal_event_handler.erl
+++ b/deps/rabbitmq_web_stomp/src/rabbit_web_stomp_internal_event_handler.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_web_stomp_internal_event_handler).
diff --git a/deps/rabbitmq_web_stomp/src/rabbit_web_stomp_listener.erl b/deps/rabbitmq_web_stomp/src/rabbit_web_stomp_listener.erl
index b644329d1d..fd4141c46b 100644
--- a/deps/rabbitmq_web_stomp/src/rabbit_web_stomp_listener.erl
+++ b/deps/rabbitmq_web_stomp/src/rabbit_web_stomp_listener.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_web_stomp_listener).
@@ -32,7 +32,8 @@
init() ->
WsFrame = get_env(ws_frame, text),
CowboyOpts0 = maps:from_list(get_env(cowboy_opts, [])),
- CowboyOpts = CowboyOpts0#{proxy_header => get_env(proxy_protocol, false)},
+ CowboyOpts = CowboyOpts0#{proxy_header => get_env(proxy_protocol, false),
+ stream_handlers => [rabbit_web_stomp_stream_handler, cowboy_stream_h]},
CowboyWsOpts = maps:from_list(get_env(cowboy_ws_opts, [])),
VhostRoutes = [
@@ -97,7 +98,8 @@ start_tcp_listener(TCPConf0, CowboyOpts0, Routes) ->
socket_opts => TCPConf,
connection_type => supervisor,
max_connections => get_max_connections(),
- num_acceptors => NumTcpAcceptors
+ num_acceptors => NumTcpAcceptors,
+ num_conns_sups => 1
},
CowboyOpts = CowboyOpts0#{env => #{dispatch => Routes},
middlewares => [cowboy_router,
@@ -113,13 +115,13 @@ start_tcp_listener(TCPConf0, CowboyOpts0, Routes) ->
{error, ErrTCP} ->
rabbit_log_connection:error(
"Failed to start a WebSocket (HTTP) listener. Error: ~p,"
- " listener settings: ~p~n",
+ " listener settings: ~p",
[ErrTCP, TCPConf]),
throw(ErrTCP)
end,
listener_started(?TCP_PROTOCOL, TCPConf),
rabbit_log_connection:info(
- "rabbit_web_stomp: listening for HTTP connections on ~s:~w~n",
+ "rabbit_web_stomp: listening for HTTP connections on ~s:~w",
[get_binding_address(TCPConf), Port]).
@@ -135,7 +137,8 @@ start_tls_listener(TLSConf0, CowboyOpts0, Routes) ->
socket_opts => TLSConf,
connection_type => supervisor,
max_connections => get_max_connections(),
- num_acceptors => NumSslAcceptors
+ num_acceptors => NumSslAcceptors,
+ num_conns_sups => 1
},
CowboyOpts = CowboyOpts0#{env => #{dispatch => Routes},
middlewares => [cowboy_router,
@@ -151,13 +154,13 @@ start_tls_listener(TLSConf0, CowboyOpts0, Routes) ->
{error, ErrTLS} ->
rabbit_log_connection:error(
"Failed to start a TLS WebSocket (HTTPS) listener. Error: ~p,"
- " listener settings: ~p~n",
+ " listener settings: ~p",
[ErrTLS, TLSConf]),
throw(ErrTLS)
end,
listener_started(?TLS_PROTOCOL, TLSConf),
rabbit_log_connection:info(
- "rabbit_web_stomp: listening for HTTPS connections on ~s:~w~n",
+ "rabbit_web_stomp: listening for HTTPS connections on ~s:~w",
[get_binding_address(TLSConf), TLSPort]).
listener_started(Protocol, Listener) ->
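
The listener changes above do two things: the Ranch transport options gain num_conns_sups => 1 (Ranch 2.x can run several connection supervisors per listener; pinning it to one keeps the previous supervision shape), and the Cowboy protocol options gain stream_handlers so that rabbit_web_stomp_stream_handler runs in front of the default cowboy_stream_h. A rough sketch of how such options fit together follows; the listener name, port, dispatch path, handler options and numeric values are illustrative assumptions, not the plugin's actual configuration (in the plugin, routes and handler options are built in init/0 above).

%% Hypothetical example of the Ranch/Cowboy option shape used above.
start_example_listener() ->
    Dispatch = cowboy_router:compile(
                 [{'_', [{"/ws", rabbit_web_stomp_handler, []}]}]),
    RanchOpts = #{socket_opts     => [{port, 15674}],
                  max_connections => 1024,
                  num_acceptors   => 10,
                  %% a single connection supervisor, as in the diff above
                  num_conns_sups  => 1},
    CowboyOpts = #{env             => #{dispatch => Dispatch},
                   %% read a PROXY protocol header, if the proxy sends one
                   proxy_header    => true,
                   stream_handlers => [rabbit_web_stomp_stream_handler,
                                       cowboy_stream_h]},
    cowboy:start_clear(example_web_stomp_listener, RanchOpts, CowboyOpts).
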
diff --git a/deps/rabbitmq_web_stomp/src/rabbit_web_stomp_middleware.erl b/deps/rabbitmq_web_stomp/src/rabbit_web_stomp_middleware.erl
index 1a38493444..2fcdee2f2a 100644
--- a/deps/rabbitmq_web_stomp/src/rabbit_web_stomp_middleware.erl
+++ b/deps/rabbitmq_web_stomp/src/rabbit_web_stomp_middleware.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_web_stomp_middleware).
@@ -12,11 +12,9 @@
execute(Req, Env) ->
#{keepalive_sup := KeepaliveSup} = Env,
- Sock = maps:get(socket, Env),
case maps:get(handler_opts, Env, undefined) of
undefined -> {ok, Req, Env};
Opts when is_list(Opts) ->
- {ok, Req, Env#{handler_opts => [{keepalive_sup, KeepaliveSup},
- {socket, Sock}
+ {ok, Req, Env#{handler_opts => [{keepalive_sup, KeepaliveSup}
|Opts]}}
end.
diff --git a/deps/rabbitmq_web_stomp/src/rabbit_web_stomp_stream_handler.erl b/deps/rabbitmq_web_stomp/src/rabbit_web_stomp_stream_handler.erl
new file mode 100644
index 0000000000..fbaad3f4dc
--- /dev/null
+++ b/deps/rabbitmq_web_stomp/src/rabbit_web_stomp_stream_handler.erl
@@ -0,0 +1,41 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
+%%
+-module(rabbit_web_stomp_stream_handler).
+
+-behavior(cowboy_stream).
+
+-export([init/3]).
+-export([data/4]).
+-export([info/3]).
+-export([terminate/3]).
+-export([early_error/5]).
+
+
+-record(state, {next}).
+
+init(StreamID, Req, Opts) ->
+ {Commands, Next} = cowboy_stream:init(StreamID, Req, Opts),
+ {Commands, #state{next = Next}}.
+
+data(StreamID, IsFin, Data, State = #state{next = Next0}) ->
+ {Commands, Next} = cowboy_stream:data(StreamID, IsFin, Data, Next0),
+ {Commands, State#state{next = Next}}.
+
+info(StreamID, {switch_protocol, Headers, _, InitialState}, State) ->
+ do_info(StreamID, {switch_protocol, Headers, rabbit_web_stomp_handler, InitialState}, State);
+info(StreamID, Info, State) ->
+ do_info(StreamID, Info, State).
+
+do_info(StreamID, Info, State = #state{next = Next0}) ->
+ {Commands, Next} = cowboy_stream:info(StreamID, Info, Next0),
+ {Commands, State#state{next = Next}}.
+
+terminate(StreamID, Reason, #state{next = Next}) ->
+ cowboy_stream:terminate(StreamID, Reason, Next).
+
+early_error(StreamID, Reason, PartialReq, Resp, Opts) ->
+ cowboy_stream:early_error(StreamID, Reason, PartialReq, Resp, Opts).
diff --git a/deps/rabbitmq_web_stomp/src/rabbit_web_stomp_sup.erl b/deps/rabbitmq_web_stomp/src/rabbit_web_stomp_sup.erl
index 97f1659806..e4468728d0 100644
--- a/deps/rabbitmq_web_stomp/src/rabbit_web_stomp_sup.erl
+++ b/deps/rabbitmq_web_stomp/src/rabbit_web_stomp_sup.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_web_stomp_sup).
diff --git a/deps/rabbitmq_web_stomp/test/amqp_stomp_SUITE.erl b/deps/rabbitmq_web_stomp/test/amqp_stomp_SUITE.erl
index 695d993149..4d0bf6fecf 100644
--- a/deps/rabbitmq_web_stomp/test/amqp_stomp_SUITE.erl
+++ b/deps/rabbitmq_web_stomp/test/amqp_stomp_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(amqp_stomp_SUITE).
diff --git a/deps/rabbitmq_web_stomp/test/config_schema_SUITE.erl b/deps/rabbitmq_web_stomp/test/config_schema_SUITE.erl
index 391b43dfb4..6a6bec7a1a 100644
--- a/deps/rabbitmq_web_stomp/test/config_schema_SUITE.erl
+++ b/deps/rabbitmq_web_stomp/test/config_schema_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(config_schema_SUITE).
diff --git a/deps/rabbitmq_web_stomp/test/cowboy_websocket_SUITE.erl b/deps/rabbitmq_web_stomp/test/cowboy_websocket_SUITE.erl
index 36d05849ac..3d54ccf58a 100644
--- a/deps/rabbitmq_web_stomp/test/cowboy_websocket_SUITE.erl
+++ b/deps/rabbitmq_web_stomp/test/cowboy_websocket_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(cowboy_websocket_SUITE).
diff --git a/deps/rabbitmq_web_stomp/test/proxy_protocol_SUITE.erl b/deps/rabbitmq_web_stomp/test/proxy_protocol_SUITE.erl
index 4a153d37cc..13b0aa5b0a 100644
--- a/deps/rabbitmq_web_stomp/test/proxy_protocol_SUITE.erl
+++ b/deps/rabbitmq_web_stomp/test/proxy_protocol_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(proxy_protocol_SUITE).
@@ -91,7 +91,7 @@ proxy_protocol(Config) ->
{ok, _P} = rfc6455_client:recv(WS),
ConnectionName = rabbit_ct_broker_helpers:rpc(Config, 0,
?MODULE, connection_name, []),
- match = re:run(ConnectionName, <<"^192.168.1.1:80 ">>, [{capture, none}]),
+ match = re:run(ConnectionName, <<"^192.168.1.1:80 -> 192.168.1.2:81$">>, [{capture, none}]),
{close, _} = rfc6455_client:close(WS),
ok.
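
For context on the assertion change above: when the PROXY protocol is in use, the broker reports the client address and port advertised in the PROXY header rather than those of the proxy itself, and the connection name now carries both the advertised source and destination ("192.168.1.1:80 -> 192.168.1.2:81"). A PROXY protocol v1 header is a single text line sent before the HTTP/WebSocket handshake. A minimal sketch, assuming the default Web STOMP port 15674 on localhost (the helper name is made up):

%% Hypothetical helper: send a PROXY protocol v1 header, then continue with
%% the usual WebSocket upgrade and STOMP frames over the same socket.
connect_with_proxy_v1() ->
    {ok, Sock} = gen_tcp:connect("127.0.0.1", 15674, [binary, {active, false}]),
    ok = gen_tcp:send(Sock, <<"PROXY TCP4 192.168.1.1 192.168.1.2 80 81\r\n">>),
    %% ...the broker will then report this connection as
    %% "192.168.1.1:80 -> 192.168.1.2:81".
    Sock.
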
diff --git a/deps/rabbitmq_web_stomp/test/raw_websocket_SUITE.erl b/deps/rabbitmq_web_stomp/test/raw_websocket_SUITE.erl
index f05b87751c..bc0f15db74 100644
--- a/deps/rabbitmq_web_stomp/test/raw_websocket_SUITE.erl
+++ b/deps/rabbitmq_web_stomp/test/raw_websocket_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(raw_websocket_SUITE).
diff --git a/deps/rabbitmq_web_stomp/test/src/rabbit_ws_test_util.erl b/deps/rabbitmq_web_stomp/test/src/rabbit_ws_test_util.erl
index 822d9c27f1..9561948ece 100644
--- a/deps/rabbitmq_web_stomp/test/src/rabbit_ws_test_util.erl
+++ b/deps/rabbitmq_web_stomp/test/src/rabbit_ws_test_util.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_ws_test_util).
diff --git a/deps/rabbitmq_web_stomp/test/src/rfc6455_client.erl b/deps/rabbitmq_web_stomp/test/src/rfc6455_client.erl
index 5ef4c40099..8d6e43b2d3 100644
--- a/deps/rabbitmq_web_stomp/test/src/rfc6455_client.erl
+++ b/deps/rabbitmq_web_stomp/test/src/rfc6455_client.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rfc6455_client).
diff --git a/deps/rabbitmq_web_stomp/test/src/stomp.erl b/deps/rabbitmq_web_stomp/test/src/stomp.erl
index 5ee1a2c43a..c9cef3f23e 100644
--- a/deps/rabbitmq_web_stomp/test/src/stomp.erl
+++ b/deps/rabbitmq_web_stomp/test/src/stomp.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(stomp).
diff --git a/deps/rabbitmq_web_stomp/test/unit_SUITE.erl b/deps/rabbitmq_web_stomp/test/unit_SUITE.erl
index 4e7ad10861..7902f1ce9a 100644
--- a/deps/rabbitmq_web_stomp/test/unit_SUITE.erl
+++ b/deps/rabbitmq_web_stomp/test/unit_SUITE.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(unit_SUITE).
diff --git a/deps/rabbitmq_web_stomp_examples/BUILD.bazel b/deps/rabbitmq_web_stomp_examples/BUILD.bazel
new file mode 100644
index 0000000000..7593e205e6
--- /dev/null
+++ b/deps/rabbitmq_web_stomp_examples/BUILD.bazel
@@ -0,0 +1,40 @@
+load("@bazel-erlang//:xref.bzl", "xref")
+load("@bazel-erlang//:dialyze.bzl", "dialyze")
+load(
+ "//:rabbitmq.bzl",
+ "RABBITMQ_DIALYZER_OPTS",
+ "rabbitmq_lib",
+)
+
+APP_NAME = "rabbitmq_web_stomp_examples"
+
+APP_DESCRIPTION = "Rabbit WEB-STOMP - examples"
+
+APP_MODULE = "rabbit_web_stomp_examples_app"
+
+APP_ENV = """[
+ {listener, [{port, 15670}]}
+ ]"""
+
+RUNTIME_DEPS = [
+ "//deps/rabbit_common:bazel_erlang_lib",
+ "//deps/rabbit:bazel_erlang_lib",
+ "//deps/rabbitmq_web_dispatch:bazel_erlang_lib",
+ "//deps/rabbitmq_web_stomp:bazel_erlang_lib",
+]
+
+rabbitmq_lib(
+ app_description = APP_DESCRIPTION,
+ app_env = APP_ENV,
+ app_module = APP_MODULE,
+ app_name = APP_NAME,
+ runtime_deps = RUNTIME_DEPS,
+)
+
+xref(tags = ["xref"])
+
+dialyze(
+ dialyzer_opts = RABBITMQ_DIALYZER_OPTS,
+ plt = "//:base_plt",
+ tags = ["dialyze"],
+)
diff --git a/deps/rabbitmq_web_stomp_examples/Makefile b/deps/rabbitmq_web_stomp_examples/Makefile
index 61b30b8707..d845375694 100644
--- a/deps/rabbitmq_web_stomp_examples/Makefile
+++ b/deps/rabbitmq_web_stomp_examples/Makefile
@@ -19,5 +19,5 @@ DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
ERLANG_MK_COMMIT = rabbitmq-tmp
-include rabbitmq-components.mk
-include erlang.mk
+include ../../rabbitmq-components.mk
+include ../../erlang.mk
diff --git a/deps/rabbitmq_web_stomp_examples/erlang.mk b/deps/rabbitmq_web_stomp_examples/erlang.mk
deleted file mode 100644
index fce4be0b0a..0000000000
--- a/deps/rabbitmq_web_stomp_examples/erlang.mk
+++ /dev/null
@@ -1,7808 +0,0 @@
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
-
-ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
-export ERLANG_MK_FILENAME
-
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
-ERLANG_MK_WITHOUT =
-
-# Make 3.81 and 3.82 are deprecated.
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
-$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
-endif
-
-# Core configuration.
-
-PROJECT ?= $(notdir $(CURDIR))
-PROJECT := $(strip $(PROJECT))
-
-PROJECT_VERSION ?= rolling
-PROJECT_MOD ?= $(PROJECT)_app
-PROJECT_ENV ?= []
-
-# Verbosity.
-
-V ?= 0
-
-verbose_0 = @
-verbose_2 = set -x;
-verbose = $(verbose_$(V))
-
-ifeq ($(V),3)
-SHELL := $(SHELL) -x
-endif
-
-gen_verbose_0 = @echo " GEN " $@;
-gen_verbose_2 = set -x;
-gen_verbose = $(gen_verbose_$(V))
-
-gen_verbose_esc_0 = @echo " GEN " $$@;
-gen_verbose_esc_2 = set -x;
-gen_verbose_esc = $(gen_verbose_esc_$(V))
-
-# Temporary files directory.
-
-ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
-export ERLANG_MK_TMP
-
-# "erl" command.
-
-ERL = erl +A1 -noinput -boot no_dot_erlang
-
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-
-export PLATFORM
-endif
-
-# Core targets.
-
-all:: deps app rel
-
-# Noop to avoid a Make warning when there's nothing to do.
-rel::
- $(verbose) :
-
-relup:: deps app
-
-check:: tests
-
-clean:: clean-crashdump
-
-clean-crashdump:
-ifneq ($(wildcard erl_crash.dump),)
- $(gen_verbose) rm -f erl_crash.dump
-endif
-
-distclean:: clean distclean-tmp
-
-$(ERLANG_MK_TMP):
- $(verbose) mkdir -p $(ERLANG_MK_TMP)
-
-distclean-tmp:
- $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
-
-help::
- $(verbose) printf "%s\n" \
- "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
- "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
- "" \
- "Usage: [V=1] $(MAKE) [target]..." \
- "" \
- "Core targets:" \
- " all Run deps, app and rel targets in that order" \
- " app Compile the project" \
- " deps Fetch dependencies (if needed) and compile them" \
- " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
- " list-deps List dependencies recursively on stdout" \
- " search q=... Search for a package in the built-in index" \
- " rel Build a release for this project, if applicable" \
- " docs Build the documentation for this project" \
- " install-docs Install the man pages for this project" \
- " check Compile and run all tests and analysis for this project" \
- " tests Run the tests for this project" \
- " clean Delete temporary and output files from most targets" \
- " distclean Delete all temporary and output files" \
- " help Display this help and exit" \
- " erlang-mk Update erlang.mk to the latest version"
-
-# Core functions.
-
-empty :=
-space := $(empty) $(empty)
-tab := $(empty) $(empty)
-comma := ,
-
-define newline
-
-
-endef
-
-define comma_list
-$(subst $(space),$(comma),$(strip $(1)))
-endef
-
-define escape_dquotes
-$(subst ",\",$1)
-endef
-
-# Adding erlang.mk to make Erlang scripts that call init:get_plain_arguments() happy.
-define erlang
-$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
-endef
-
-ifeq ($(PLATFORM),msys2)
-core_native_path = $(shell cygpath -m $1)
-else
-core_native_path = $1
-endif
-
-core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
-
-core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
-
-# We skip files that contain spaces because they end up causing issues.
-core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
-
-core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
-
-core_ls = $(filter-out $(1),$(shell echo $(1)))
-
-# @todo Use a solution that does not require using perl.
-core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
-
-define core_render
- printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
-endef
-
-# Automated update.
-
-ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
-ERLANG_MK_COMMIT ?=
-ERLANG_MK_BUILD_CONFIG ?= build.config
-ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
-
-erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
-erlang-mk:
-ifdef ERLANG_MK_COMMIT
- $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
- $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
-else
- $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
-endif
- $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
- $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
- $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
- $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
- $(verbose) rm -rf $(ERLANG_MK_TMP)
-
-# The erlang.mk package index is bundled in the default erlang.mk build.
-# Search for the string "copyright" to skip to the rest of the code.
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-kerl
-
-KERL_INSTALL_DIR ?= $(HOME)/erlang
-
-ifeq ($(strip $(KERL)),)
-KERL := $(ERLANG_MK_TMP)/kerl/kerl
-endif
-
-KERL_DIR = $(ERLANG_MK_TMP)/kerl
-
-export KERL
-
-KERL_GIT ?= https://github.com/kerl/kerl
-KERL_COMMIT ?= master
-
-KERL_MAKEFLAGS ?=
-
-OTP_GIT ?= https://github.com/erlang/otp
-
-define kerl_otp_target
-$(KERL_INSTALL_DIR)/$(1): $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
- $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
- fi
-endef
-
-define kerl_hipe_target
-$(KERL_INSTALL_DIR)/$1-native: $(KERL)
- $(verbose) if [ ! -d $$@ ]; then \
- KERL_CONFIGURE_OPTIONS=--enable-native-libs \
- MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
- $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
- fi
-endef
-
-$(KERL): $(KERL_DIR)
-
-$(KERL_DIR): | $(ERLANG_MK_TMP)
- $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
- $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
- $(verbose) chmod +x $(KERL)
-
-distclean:: distclean-kerl
-
-distclean-kerl:
- $(gen_verbose) rm -rf $(KERL_DIR)
-
-# Allow users to select which version of Erlang/OTP to use for a project.
-
-ifneq ($(strip $(LATEST_ERLANG_OTP)),)
-# In some environments it is necessary to filter out master.
-ERLANG_OTP := $(notdir $(lastword $(sort\
- $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
- $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
-endif
-
-ERLANG_OTP ?=
-ERLANG_HIPE ?=
-
-# Use kerl to enforce a specific Erlang/OTP version for a project.
-ifneq ($(strip $(ERLANG_OTP)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
-$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-else
-# Same for a HiPE enabled VM.
-ifneq ($(strip $(ERLANG_HIPE)),)
-export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
-SHELL := env PATH=$(PATH) $(SHELL)
-$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
-
-# Build Erlang/OTP only if it doesn't already exist.
-ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
-$(info Building HiPE-enabled Erlang/OTP $(ERLANG_HIPE)... Please wait...)
-$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
-endif
-
-endif
-endif
-
-PACKAGES += aberth
-pkg_aberth_name = aberth
-pkg_aberth_description = Generic BERT-RPC server in Erlang
-pkg_aberth_homepage = https://github.com/a13x/aberth
-pkg_aberth_fetch = git
-pkg_aberth_repo = https://github.com/a13x/aberth
-pkg_aberth_commit = master
-
-PACKAGES += active
-pkg_active_name = active
-pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
-pkg_active_homepage = https://github.com/proger/active
-pkg_active_fetch = git
-pkg_active_repo = https://github.com/proger/active
-pkg_active_commit = master
-
-PACKAGES += actordb_core
-pkg_actordb_core_name = actordb_core
-pkg_actordb_core_description = ActorDB main source
-pkg_actordb_core_homepage = http://www.actordb.com/
-pkg_actordb_core_fetch = git
-pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
-pkg_actordb_core_commit = master
-
-PACKAGES += actordb_thrift
-pkg_actordb_thrift_name = actordb_thrift
-pkg_actordb_thrift_description = Thrift API for ActorDB
-pkg_actordb_thrift_homepage = http://www.actordb.com/
-pkg_actordb_thrift_fetch = git
-pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
-pkg_actordb_thrift_commit = master
-
-PACKAGES += aleppo
-pkg_aleppo_name = aleppo
-pkg_aleppo_description = Alternative Erlang Pre-Processor
-pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
-pkg_aleppo_fetch = git
-pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
-pkg_aleppo_commit = master
-
-PACKAGES += alog
-pkg_alog_name = alog
-pkg_alog_description = Simply the best logging framework for Erlang
-pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
-pkg_alog_fetch = git
-pkg_alog_repo = https://github.com/siberian-fast-food/alogger
-pkg_alog_commit = master
-
-PACKAGES += amqp_client
-pkg_amqp_client_name = amqp_client
-pkg_amqp_client_description = RabbitMQ Erlang AMQP client
-pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
-pkg_amqp_client_fetch = git
-pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
-pkg_amqp_client_commit = master
-
-PACKAGES += annotations
-pkg_annotations_name = annotations
-pkg_annotations_description = Simple code instrumentation utilities
-pkg_annotations_homepage = https://github.com/hyperthunk/annotations
-pkg_annotations_fetch = git
-pkg_annotations_repo = https://github.com/hyperthunk/annotations
-pkg_annotations_commit = master
-
-PACKAGES += antidote
-pkg_antidote_name = antidote
-pkg_antidote_description = Large-scale computation without synchronisation
-pkg_antidote_homepage = https://syncfree.lip6.fr/
-pkg_antidote_fetch = git
-pkg_antidote_repo = https://github.com/SyncFree/antidote
-pkg_antidote_commit = master
-
-PACKAGES += apns
-pkg_apns_name = apns
-pkg_apns_description = Apple Push Notification Server for Erlang
-pkg_apns_homepage = http://inaka.github.com/apns4erl
-pkg_apns_fetch = git
-pkg_apns_repo = https://github.com/inaka/apns4erl
-pkg_apns_commit = master
-
-PACKAGES += asciideck
-pkg_asciideck_name = asciideck
-pkg_asciideck_description = Asciidoc for Erlang.
-pkg_asciideck_homepage = https://ninenines.eu
-pkg_asciideck_fetch = git
-pkg_asciideck_repo = https://github.com/ninenines/asciideck
-pkg_asciideck_commit = master
-
-PACKAGES += azdht
-pkg_azdht_name = azdht
-pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
-pkg_azdht_homepage = https://github.com/arcusfelis/azdht
-pkg_azdht_fetch = git
-pkg_azdht_repo = https://github.com/arcusfelis/azdht
-pkg_azdht_commit = master
-
-PACKAGES += backoff
-pkg_backoff_name = backoff
-pkg_backoff_description = Simple exponential backoffs in Erlang
-pkg_backoff_homepage = https://github.com/ferd/backoff
-pkg_backoff_fetch = git
-pkg_backoff_repo = https://github.com/ferd/backoff
-pkg_backoff_commit = master
-
-PACKAGES += barrel_tcp
-pkg_barrel_tcp_name = barrel_tcp
-pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
-pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_fetch = git
-pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
-pkg_barrel_tcp_commit = master
-
-PACKAGES += basho_bench
-pkg_basho_bench_name = basho_bench
-pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
-pkg_basho_bench_homepage = https://github.com/basho/basho_bench
-pkg_basho_bench_fetch = git
-pkg_basho_bench_repo = https://github.com/basho/basho_bench
-pkg_basho_bench_commit = master
-
-PACKAGES += bcrypt
-pkg_bcrypt_name = bcrypt
-pkg_bcrypt_description = Bcrypt Erlang / C library
-pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
-pkg_bcrypt_fetch = git
-pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
-pkg_bcrypt_commit = master
-
-PACKAGES += beam
-pkg_beam_name = beam
-pkg_beam_description = BEAM emulator written in Erlang
-pkg_beam_homepage = https://github.com/tonyrog/beam
-pkg_beam_fetch = git
-pkg_beam_repo = https://github.com/tonyrog/beam
-pkg_beam_commit = master
-
-PACKAGES += beanstalk
-pkg_beanstalk_name = beanstalk
-pkg_beanstalk_description = An Erlang client for beanstalkd
-pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_fetch = git
-pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
-pkg_beanstalk_commit = master
-
-PACKAGES += bear
-pkg_bear_name = bear
-pkg_bear_description = a set of statistics functions for erlang
-pkg_bear_homepage = https://github.com/boundary/bear
-pkg_bear_fetch = git
-pkg_bear_repo = https://github.com/boundary/bear
-pkg_bear_commit = master
-
-PACKAGES += bertconf
-pkg_bertconf_name = bertconf
-pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
-pkg_bertconf_homepage = https://github.com/ferd/bertconf
-pkg_bertconf_fetch = git
-pkg_bertconf_repo = https://github.com/ferd/bertconf
-pkg_bertconf_commit = master
-
-PACKAGES += bifrost
-pkg_bifrost_name = bifrost
-pkg_bifrost_description = Erlang FTP Server Framework
-pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
-pkg_bifrost_fetch = git
-pkg_bifrost_repo = https://github.com/thorstadt/bifrost
-pkg_bifrost_commit = master
-
-PACKAGES += binpp
-pkg_binpp_name = binpp
-pkg_binpp_description = Erlang Binary Pretty Printer
-pkg_binpp_homepage = https://github.com/jtendo/binpp
-pkg_binpp_fetch = git
-pkg_binpp_repo = https://github.com/jtendo/binpp
-pkg_binpp_commit = master
-
-PACKAGES += bisect
-pkg_bisect_name = bisect
-pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
-pkg_bisect_homepage = https://github.com/knutin/bisect
-pkg_bisect_fetch = git
-pkg_bisect_repo = https://github.com/knutin/bisect
-pkg_bisect_commit = master
-
-PACKAGES += bitcask
-pkg_bitcask_name = bitcask
-pkg_bitcask_description = because you need another key/value storage engine
-pkg_bitcask_homepage = https://github.com/basho/bitcask
-pkg_bitcask_fetch = git
-pkg_bitcask_repo = https://github.com/basho/bitcask
-pkg_bitcask_commit = develop
-
-PACKAGES += bitstore
-pkg_bitstore_name = bitstore
-pkg_bitstore_description = A document based ontology development environment
-pkg_bitstore_homepage = https://github.com/bdionne/bitstore
-pkg_bitstore_fetch = git
-pkg_bitstore_repo = https://github.com/bdionne/bitstore
-pkg_bitstore_commit = master
-
-PACKAGES += bootstrap
-pkg_bootstrap_name = bootstrap
-pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
-pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
-pkg_bootstrap_fetch = git
-pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
-pkg_bootstrap_commit = master
-
-PACKAGES += boss
-pkg_boss_name = boss
-pkg_boss_description = Erlang web MVC, now featuring Comet
-pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_fetch = git
-pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
-pkg_boss_commit = master
-
-PACKAGES += boss_db
-pkg_boss_db_name = boss_db
-pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
-pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
-pkg_boss_db_fetch = git
-pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
-pkg_boss_db_commit = master
-
-PACKAGES += brod
-pkg_brod_name = brod
-pkg_brod_description = Kafka client in Erlang
-pkg_brod_homepage = https://github.com/klarna/brod
-pkg_brod_fetch = git
-pkg_brod_repo = https://github.com/klarna/brod.git
-pkg_brod_commit = master
-
-PACKAGES += bson
-pkg_bson_name = bson
-pkg_bson_description = BSON documents in Erlang, see bsonspec.org
-pkg_bson_homepage = https://github.com/comtihon/bson-erlang
-pkg_bson_fetch = git
-pkg_bson_repo = https://github.com/comtihon/bson-erlang
-pkg_bson_commit = master
-
-PACKAGES += bullet
-pkg_bullet_name = bullet
-pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
-pkg_bullet_homepage = http://ninenines.eu
-pkg_bullet_fetch = git
-pkg_bullet_repo = https://github.com/ninenines/bullet
-pkg_bullet_commit = master
-
-PACKAGES += cache
-pkg_cache_name = cache
-pkg_cache_description = Erlang in-memory cache
-pkg_cache_homepage = https://github.com/fogfish/cache
-pkg_cache_fetch = git
-pkg_cache_repo = https://github.com/fogfish/cache
-pkg_cache_commit = master
-
-PACKAGES += cake
-pkg_cake_name = cake
-pkg_cake_description = Really simple terminal colorization
-pkg_cake_homepage = https://github.com/darach/cake-erl
-pkg_cake_fetch = git
-pkg_cake_repo = https://github.com/darach/cake-erl
-pkg_cake_commit = master
-
-PACKAGES += carotene
-pkg_carotene_name = carotene
-pkg_carotene_description = Real-time server
-pkg_carotene_homepage = https://github.com/carotene/carotene
-pkg_carotene_fetch = git
-pkg_carotene_repo = https://github.com/carotene/carotene
-pkg_carotene_commit = master
-
-PACKAGES += cberl
-pkg_cberl_name = cberl
-pkg_cberl_description = NIF based Erlang bindings for Couchbase
-pkg_cberl_homepage = https://github.com/chitika/cberl
-pkg_cberl_fetch = git
-pkg_cberl_repo = https://github.com/chitika/cberl
-pkg_cberl_commit = master
-
-PACKAGES += cecho
-pkg_cecho_name = cecho
-pkg_cecho_description = An ncurses library for Erlang
-pkg_cecho_homepage = https://github.com/mazenharake/cecho
-pkg_cecho_fetch = git
-pkg_cecho_repo = https://github.com/mazenharake/cecho
-pkg_cecho_commit = master
-
-PACKAGES += cferl
-pkg_cferl_name = cferl
-pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client
-pkg_cferl_homepage = https://github.com/ddossot/cferl
-pkg_cferl_fetch = git
-pkg_cferl_repo = https://github.com/ddossot/cferl
-pkg_cferl_commit = master
-
-PACKAGES += chaos_monkey
-pkg_chaos_monkey_name = chaos_monkey
-pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
-pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_fetch = git
-pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
-pkg_chaos_monkey_commit = master
-
-PACKAGES += check_node
-pkg_check_node_name = check_node
-pkg_check_node_description = Nagios Scripts for monitoring Riak
-pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
-pkg_check_node_fetch = git
-pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
-pkg_check_node_commit = master
-
-PACKAGES += chronos
-pkg_chronos_name = chronos
-pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
-pkg_chronos_homepage = https://github.com/lehoff/chronos
-pkg_chronos_fetch = git
-pkg_chronos_repo = https://github.com/lehoff/chronos
-pkg_chronos_commit = master
-
-PACKAGES += chumak
-pkg_chumak_name = chumak
-pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
-pkg_chumak_homepage = http://choven.ca
-pkg_chumak_fetch = git
-pkg_chumak_repo = https://github.com/chovencorp/chumak
-pkg_chumak_commit = master
-
-PACKAGES += cl
-pkg_cl_name = cl
-pkg_cl_description = OpenCL binding for Erlang
-pkg_cl_homepage = https://github.com/tonyrog/cl
-pkg_cl_fetch = git
-pkg_cl_repo = https://github.com/tonyrog/cl
-pkg_cl_commit = master
-
-PACKAGES += clique
-pkg_clique_name = clique
-pkg_clique_description = CLI Framework for Erlang
-pkg_clique_homepage = https://github.com/basho/clique
-pkg_clique_fetch = git
-pkg_clique_repo = https://github.com/basho/clique
-pkg_clique_commit = develop
-
-PACKAGES += cloudi_core
-pkg_cloudi_core_name = cloudi_core
-pkg_cloudi_core_description = CloudI internal service runtime
-pkg_cloudi_core_homepage = http://cloudi.org/
-pkg_cloudi_core_fetch = git
-pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
-pkg_cloudi_core_commit = master
-
-PACKAGES += cloudi_service_api_requests
-pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
-pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
-pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
-pkg_cloudi_service_api_requests_fetch = git
-pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
-pkg_cloudi_service_api_requests_commit = master
-
-PACKAGES += cloudi_service_db
-pkg_cloudi_service_db_name = cloudi_service_db
-pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
-pkg_cloudi_service_db_homepage = http://cloudi.org/
-pkg_cloudi_service_db_fetch = git
-pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
-pkg_cloudi_service_db_commit = master
-
-PACKAGES += cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
-pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_fetch = git
-pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
-pkg_cloudi_service_db_cassandra_commit = master
-
-PACKAGES += cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
-pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_cassandra_cql_fetch = git
-pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
-pkg_cloudi_service_db_cassandra_cql_commit = master
-
-PACKAGES += cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
-pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
-pkg_cloudi_service_db_couchdb_fetch = git
-pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
-pkg_cloudi_service_db_couchdb_commit = master
-
-PACKAGES += cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
-pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
-pkg_cloudi_service_db_elasticsearch_fetch = git
-pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
-pkg_cloudi_service_db_elasticsearch_commit = master
-
-PACKAGES += cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_description = memcached CloudI Service
-pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
-pkg_cloudi_service_db_memcached_fetch = git
-pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
-pkg_cloudi_service_db_memcached_commit = master
-
-PACKAGES += cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
-pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_mysql_fetch = git
-pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
-pkg_cloudi_service_db_mysql_commit = master
-
-PACKAGES += cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
-pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
-pkg_cloudi_service_db_pgsql_fetch = git
-pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
-pkg_cloudi_service_db_pgsql_commit = master
-
-PACKAGES += cloudi_service_db_riak
-pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
-pkg_cloudi_service_db_riak_description = Riak CloudI Service
-pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
-pkg_cloudi_service_db_riak_fetch = git
-pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
-pkg_cloudi_service_db_riak_commit = master
-
-PACKAGES += cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
-pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
-pkg_cloudi_service_db_tokyotyrant_fetch = git
-pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
-pkg_cloudi_service_db_tokyotyrant_commit = master
-
-PACKAGES += cloudi_service_filesystem
-pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
-pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
-pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
-pkg_cloudi_service_filesystem_fetch = git
-pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
-pkg_cloudi_service_filesystem_commit = master
-
-PACKAGES += cloudi_service_http_client
-pkg_cloudi_service_http_client_name = cloudi_service_http_client
-pkg_cloudi_service_http_client_description = HTTP client CloudI Service
-pkg_cloudi_service_http_client_homepage = http://cloudi.org/
-pkg_cloudi_service_http_client_fetch = git
-pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
-pkg_cloudi_service_http_client_commit = master
-
-PACKAGES += cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
-pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
-pkg_cloudi_service_http_cowboy_fetch = git
-pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
-pkg_cloudi_service_http_cowboy_commit = master
-
-PACKAGES += cloudi_service_http_elli
-pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
-pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
-pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
-pkg_cloudi_service_http_elli_fetch = git
-pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
-pkg_cloudi_service_http_elli_commit = master
-
-PACKAGES += cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
-pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
-pkg_cloudi_service_map_reduce_fetch = git
-pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
-pkg_cloudi_service_map_reduce_commit = master
-
-PACKAGES += cloudi_service_oauth1
-pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
-pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
-pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
-pkg_cloudi_service_oauth1_fetch = git
-pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
-pkg_cloudi_service_oauth1_commit = master
-
-PACKAGES += cloudi_service_queue
-pkg_cloudi_service_queue_name = cloudi_service_queue
-pkg_cloudi_service_queue_description = Persistent Queue Service
-pkg_cloudi_service_queue_homepage = http://cloudi.org/
-pkg_cloudi_service_queue_fetch = git
-pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
-pkg_cloudi_service_queue_commit = master
-
-PACKAGES += cloudi_service_quorum
-pkg_cloudi_service_quorum_name = cloudi_service_quorum
-pkg_cloudi_service_quorum_description = CloudI Quorum Service
-pkg_cloudi_service_quorum_homepage = http://cloudi.org/
-pkg_cloudi_service_quorum_fetch = git
-pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
-pkg_cloudi_service_quorum_commit = master
-
-PACKAGES += cloudi_service_router
-pkg_cloudi_service_router_name = cloudi_service_router
-pkg_cloudi_service_router_description = CloudI Router Service
-pkg_cloudi_service_router_homepage = http://cloudi.org/
-pkg_cloudi_service_router_fetch = git
-pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
-pkg_cloudi_service_router_commit = master
-
-PACKAGES += cloudi_service_tcp
-pkg_cloudi_service_tcp_name = cloudi_service_tcp
-pkg_cloudi_service_tcp_description = TCP CloudI Service
-pkg_cloudi_service_tcp_homepage = http://cloudi.org/
-pkg_cloudi_service_tcp_fetch = git
-pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
-pkg_cloudi_service_tcp_commit = master
-
-PACKAGES += cloudi_service_timers
-pkg_cloudi_service_timers_name = cloudi_service_timers
-pkg_cloudi_service_timers_description = Timers CloudI Service
-pkg_cloudi_service_timers_homepage = http://cloudi.org/
-pkg_cloudi_service_timers_fetch = git
-pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
-pkg_cloudi_service_timers_commit = master
-
-PACKAGES += cloudi_service_udp
-pkg_cloudi_service_udp_name = cloudi_service_udp
-pkg_cloudi_service_udp_description = UDP CloudI Service
-pkg_cloudi_service_udp_homepage = http://cloudi.org/
-pkg_cloudi_service_udp_fetch = git
-pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
-pkg_cloudi_service_udp_commit = master
-
-PACKAGES += cloudi_service_validate
-pkg_cloudi_service_validate_name = cloudi_service_validate
-pkg_cloudi_service_validate_description = CloudI Validate Service
-pkg_cloudi_service_validate_homepage = http://cloudi.org/
-pkg_cloudi_service_validate_fetch = git
-pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
-pkg_cloudi_service_validate_commit = master
-
-PACKAGES += cloudi_service_zeromq
-pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
-pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
-pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
-pkg_cloudi_service_zeromq_fetch = git
-pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
-pkg_cloudi_service_zeromq_commit = master
-
-PACKAGES += cluster_info
-pkg_cluster_info_name = cluster_info
-pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
-pkg_cluster_info_homepage = https://github.com/basho/cluster_info
-pkg_cluster_info_fetch = git
-pkg_cluster_info_repo = https://github.com/basho/cluster_info
-pkg_cluster_info_commit = master
-
-PACKAGES += color
-pkg_color_name = color
-pkg_color_description = ANSI colors for your Erlang
-pkg_color_homepage = https://github.com/julianduque/erlang-color
-pkg_color_fetch = git
-pkg_color_repo = https://github.com/julianduque/erlang-color
-pkg_color_commit = master
-
-PACKAGES += confetti
-pkg_confetti_name = confetti
-pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
-pkg_confetti_homepage = https://github.com/jtendo/confetti
-pkg_confetti_fetch = git
-pkg_confetti_repo = https://github.com/jtendo/confetti
-pkg_confetti_commit = master
-
-PACKAGES += couchbeam
-pkg_couchbeam_name = couchbeam
-pkg_couchbeam_description = Apache CouchDB client in Erlang
-pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
-pkg_couchbeam_fetch = git
-pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
-pkg_couchbeam_commit = master
-
-PACKAGES += covertool
-pkg_covertool_name = covertool
-pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
-pkg_covertool_homepage = https://github.com/idubrov/covertool
-pkg_covertool_fetch = git
-pkg_covertool_repo = https://github.com/idubrov/covertool
-pkg_covertool_commit = master
-
-PACKAGES += cowboy
-pkg_cowboy_name = cowboy
-pkg_cowboy_description = Small, fast and modular HTTP server.
-pkg_cowboy_homepage = http://ninenines.eu
-pkg_cowboy_fetch = git
-pkg_cowboy_repo = https://github.com/ninenines/cowboy
-pkg_cowboy_commit = 1.0.4
-
-PACKAGES += cowdb
-pkg_cowdb_name = cowdb
-pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
-pkg_cowdb_homepage = https://github.com/refuge/cowdb
-pkg_cowdb_fetch = git
-pkg_cowdb_repo = https://github.com/refuge/cowdb
-pkg_cowdb_commit = master
-
-PACKAGES += cowlib
-pkg_cowlib_name = cowlib
-pkg_cowlib_description = Support library for manipulating Web protocols.
-pkg_cowlib_homepage = http://ninenines.eu
-pkg_cowlib_fetch = git
-pkg_cowlib_repo = https://github.com/ninenines/cowlib
-pkg_cowlib_commit = 1.0.2
-
-PACKAGES += cpg
-pkg_cpg_name = cpg
-pkg_cpg_description = CloudI Process Groups
-pkg_cpg_homepage = https://github.com/okeuday/cpg
-pkg_cpg_fetch = git
-pkg_cpg_repo = https://github.com/okeuday/cpg
-pkg_cpg_commit = master
-
-PACKAGES += cqerl
-pkg_cqerl_name = cqerl
-pkg_cqerl_description = Native Erlang CQL client for Cassandra
-pkg_cqerl_homepage = https://matehat.github.io/cqerl/
-pkg_cqerl_fetch = git
-pkg_cqerl_repo = https://github.com/matehat/cqerl
-pkg_cqerl_commit = master
-
-PACKAGES += cr
-pkg_cr_name = cr
-pkg_cr_description = Chain Replication
-pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
-pkg_cr_fetch = git
-pkg_cr_repo = https://github.com/spawnproc/cr
-pkg_cr_commit = master
-
-PACKAGES += cuttlefish
-pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
-pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
-pkg_cuttlefish_commit = master
-
-PACKAGES += damocles
-pkg_damocles_name = damocles
-pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
-pkg_damocles_homepage = https://github.com/lostcolony/damocles
-pkg_damocles_fetch = git
-pkg_damocles_repo = https://github.com/lostcolony/damocles
-pkg_damocles_commit = master
-
-PACKAGES += debbie
-pkg_debbie_name = debbie
-pkg_debbie_description = .DEB Built In Erlang
-pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
-pkg_debbie_fetch = git
-pkg_debbie_repo = https://github.com/crownedgrouse/debbie
-pkg_debbie_commit = master
-
-PACKAGES += decimal
-pkg_decimal_name = decimal
-pkg_decimal_description = An Erlang decimal arithmetic library
-pkg_decimal_homepage = https://github.com/tim/erlang-decimal
-pkg_decimal_fetch = git
-pkg_decimal_repo = https://github.com/tim/erlang-decimal
-pkg_decimal_commit = master
-
-PACKAGES += detergent
-pkg_detergent_name = detergent
-pkg_detergent_description = An emulsifying Erlang SOAP library
-pkg_detergent_homepage = https://github.com/devinus/detergent
-pkg_detergent_fetch = git
-pkg_detergent_repo = https://github.com/devinus/detergent
-pkg_detergent_commit = master
-
-PACKAGES += detest
-pkg_detest_name = detest
-pkg_detest_description = Tool for running tests on a cluster of erlang nodes
-pkg_detest_homepage = https://github.com/biokoda/detest
-pkg_detest_fetch = git
-pkg_detest_repo = https://github.com/biokoda/detest
-pkg_detest_commit = master
-
-PACKAGES += dh_date
-pkg_dh_date_name = dh_date
-pkg_dh_date_description = Date formatting / parsing library for erlang
-pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
-pkg_dh_date_fetch = git
-pkg_dh_date_repo = https://github.com/daleharvey/dh_date
-pkg_dh_date_commit = master
-
-PACKAGES += dirbusterl
-pkg_dirbusterl_name = dirbusterl
-pkg_dirbusterl_description = DirBuster successor in Erlang
-pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_fetch = git
-pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
-pkg_dirbusterl_commit = master
-
-PACKAGES += dispcount
-pkg_dispcount_name = dispcount
-pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
-pkg_dispcount_homepage = https://github.com/ferd/dispcount
-pkg_dispcount_fetch = git
-pkg_dispcount_repo = https://github.com/ferd/dispcount
-pkg_dispcount_commit = master
-
-PACKAGES += dlhttpc
-pkg_dlhttpc_name = dlhttpc
-pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
-pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_fetch = git
-pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
-pkg_dlhttpc_commit = master
-
-PACKAGES += dns
-pkg_dns_name = dns
-pkg_dns_description = Erlang DNS library
-pkg_dns_homepage = https://github.com/aetrion/dns_erlang
-pkg_dns_fetch = git
-pkg_dns_repo = https://github.com/aetrion/dns_erlang
-pkg_dns_commit = master
-
-PACKAGES += dnssd
-pkg_dnssd_name = dnssd
-pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
-pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_fetch = git
-pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
-pkg_dnssd_commit = master
-
-PACKAGES += dynamic_compile
-pkg_dynamic_compile_name = dynamic_compile
-pkg_dynamic_compile_description = compile and load erlang modules from string input
-pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_fetch = git
-pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
-pkg_dynamic_compile_commit = master
-
-PACKAGES += e2
-pkg_e2_name = e2
-pkg_e2_description = Library to simplify writing correct OTP applications.
-pkg_e2_homepage = http://e2project.org
-pkg_e2_fetch = git
-pkg_e2_repo = https://github.com/gar1t/e2
-pkg_e2_commit = master
-
-PACKAGES += eamf
-pkg_eamf_name = eamf
-pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
-pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_fetch = git
-pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
-pkg_eamf_commit = master
-
-PACKAGES += eavro
-pkg_eavro_name = eavro
-pkg_eavro_description = Apache Avro encoder/decoder
-pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_fetch = git
-pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
-pkg_eavro_commit = master
-
-PACKAGES += ecapnp
-pkg_ecapnp_name = ecapnp
-pkg_ecapnp_description = Cap'n Proto library for Erlang
-pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
-pkg_ecapnp_fetch = git
-pkg_ecapnp_repo = https://github.com/kaos/ecapnp
-pkg_ecapnp_commit = master
-
-PACKAGES += econfig
-pkg_econfig_name = econfig
-pkg_econfig_description = simple Erlang config handler using INI files
-pkg_econfig_homepage = https://github.com/benoitc/econfig
-pkg_econfig_fetch = git
-pkg_econfig_repo = https://github.com/benoitc/econfig
-pkg_econfig_commit = master
-
-PACKAGES += edate
-pkg_edate_name = edate
-pkg_edate_description = date manipulation library for erlang
-pkg_edate_homepage = https://github.com/dweldon/edate
-pkg_edate_fetch = git
-pkg_edate_repo = https://github.com/dweldon/edate
-pkg_edate_commit = master
-
-PACKAGES += edgar
-pkg_edgar_name = edgar
-pkg_edgar_description = Erlang Does GNU AR
-pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
-pkg_edgar_fetch = git
-pkg_edgar_repo = https://github.com/crownedgrouse/edgar
-pkg_edgar_commit = master
-
-PACKAGES += edis
-pkg_edis_name = edis
-pkg_edis_description = An Erlang implementation of Redis KV Store
-pkg_edis_homepage = http://inaka.github.com/edis/
-pkg_edis_fetch = git
-pkg_edis_repo = https://github.com/inaka/edis
-pkg_edis_commit = master
-
-PACKAGES += edns
-pkg_edns_name = edns
-pkg_edns_description = Erlang/OTP DNS server
-pkg_edns_homepage = https://github.com/hcvst/erlang-dns
-pkg_edns_fetch = git
-pkg_edns_repo = https://github.com/hcvst/erlang-dns
-pkg_edns_commit = master
-
-PACKAGES += edown
-pkg_edown_name = edown
-pkg_edown_description = EDoc extension for generating Github-flavored Markdown
-pkg_edown_homepage = https://github.com/uwiger/edown
-pkg_edown_fetch = git
-pkg_edown_repo = https://github.com/uwiger/edown
-pkg_edown_commit = master
-
-PACKAGES += eep
-pkg_eep_name = eep
-pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
-pkg_eep_homepage = https://github.com/virtan/eep
-pkg_eep_fetch = git
-pkg_eep_repo = https://github.com/virtan/eep
-pkg_eep_commit = master
-
-PACKAGES += eep_app
-pkg_eep_app_name = eep_app
-pkg_eep_app_description = Embedded Event Processing
-pkg_eep_app_homepage = https://github.com/darach/eep-erl
-pkg_eep_app_fetch = git
-pkg_eep_app_repo = https://github.com/darach/eep-erl
-pkg_eep_app_commit = master
-
-PACKAGES += efene
-pkg_efene_name = efene
-pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
-pkg_efene_homepage = https://github.com/efene/efene
-pkg_efene_fetch = git
-pkg_efene_repo = https://github.com/efene/efene
-pkg_efene_commit = master
-
-PACKAGES += egeoip
-pkg_egeoip_name = egeoip
-pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
-pkg_egeoip_homepage = https://github.com/mochi/egeoip
-pkg_egeoip_fetch = git
-pkg_egeoip_repo = https://github.com/mochi/egeoip
-pkg_egeoip_commit = master
-
-PACKAGES += ehsa
-pkg_ehsa_name = ehsa
-pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
-pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_fetch = hg
-pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
-pkg_ehsa_commit = default
-
-PACKAGES += ej
-pkg_ej_name = ej
-pkg_ej_description = Helper module for working with Erlang terms representing JSON
-pkg_ej_homepage = https://github.com/seth/ej
-pkg_ej_fetch = git
-pkg_ej_repo = https://github.com/seth/ej
-pkg_ej_commit = master
-
-PACKAGES += ejabberd
-pkg_ejabberd_name = ejabberd
-pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
-pkg_ejabberd_homepage = https://github.com/processone/ejabberd
-pkg_ejabberd_fetch = git
-pkg_ejabberd_repo = https://github.com/processone/ejabberd
-pkg_ejabberd_commit = master
-
-PACKAGES += ejwt
-pkg_ejwt_name = ejwt
-pkg_ejwt_description = erlang library for JSON Web Token
-pkg_ejwt_homepage = https://github.com/artefactop/ejwt
-pkg_ejwt_fetch = git
-pkg_ejwt_repo = https://github.com/artefactop/ejwt
-pkg_ejwt_commit = master
-
-PACKAGES += ekaf
-pkg_ekaf_name = ekaf
-pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
-pkg_ekaf_homepage = https://github.com/helpshift/ekaf
-pkg_ekaf_fetch = git
-pkg_ekaf_repo = https://github.com/helpshift/ekaf
-pkg_ekaf_commit = master
-
-PACKAGES += elarm
-pkg_elarm_name = elarm
-pkg_elarm_description = Alarm Manager for Erlang.
-pkg_elarm_homepage = https://github.com/esl/elarm
-pkg_elarm_fetch = git
-pkg_elarm_repo = https://github.com/esl/elarm
-pkg_elarm_commit = master
-
-PACKAGES += eleveldb
-pkg_eleveldb_name = eleveldb
-pkg_eleveldb_description = Erlang LevelDB API
-pkg_eleveldb_homepage = https://github.com/basho/eleveldb
-pkg_eleveldb_fetch = git
-pkg_eleveldb_repo = https://github.com/basho/eleveldb
-pkg_eleveldb_commit = master
-
-PACKAGES += elixir
-pkg_elixir_name = elixir
-pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
-pkg_elixir_homepage = https://elixir-lang.org/
-pkg_elixir_fetch = git
-pkg_elixir_repo = https://github.com/elixir-lang/elixir
-pkg_elixir_commit = master
-
-PACKAGES += elli
-pkg_elli_name = elli
-pkg_elli_description = Simple, robust and performant Erlang web server
-pkg_elli_homepage = https://github.com/elli-lib/elli
-pkg_elli_fetch = git
-pkg_elli_repo = https://github.com/elli-lib/elli
-pkg_elli_commit = master
-
-PACKAGES += elvis
-pkg_elvis_name = elvis
-pkg_elvis_description = Erlang Style Reviewer
-pkg_elvis_homepage = https://github.com/inaka/elvis
-pkg_elvis_fetch = git
-pkg_elvis_repo = https://github.com/inaka/elvis
-pkg_elvis_commit = master
-
-PACKAGES += emagick
-pkg_emagick_name = emagick
-pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
-pkg_emagick_homepage = https://github.com/kivra/emagick
-pkg_emagick_fetch = git
-pkg_emagick_repo = https://github.com/kivra/emagick
-pkg_emagick_commit = master
-
-PACKAGES += emysql
-pkg_emysql_name = emysql
-pkg_emysql_description = Stable, pure Erlang MySQL driver.
-pkg_emysql_homepage = https://github.com/Eonblast/Emysql
-pkg_emysql_fetch = git
-pkg_emysql_repo = https://github.com/Eonblast/Emysql
-pkg_emysql_commit = master
-
-PACKAGES += enm
-pkg_enm_name = enm
-pkg_enm_description = Erlang driver for nanomsg
-pkg_enm_homepage = https://github.com/basho/enm
-pkg_enm_fetch = git
-pkg_enm_repo = https://github.com/basho/enm
-pkg_enm_commit = master
-
-PACKAGES += entop
-pkg_entop_name = entop
-pkg_entop_description = A top-like tool for monitoring an Erlang node
-pkg_entop_homepage = https://github.com/mazenharake/entop
-pkg_entop_fetch = git
-pkg_entop_repo = https://github.com/mazenharake/entop
-pkg_entop_commit = master
-
-PACKAGES += epcap
-pkg_epcap_name = epcap
-pkg_epcap_description = Erlang packet capture interface using pcap
-pkg_epcap_homepage = https://github.com/msantos/epcap
-pkg_epcap_fetch = git
-pkg_epcap_repo = https://github.com/msantos/epcap
-pkg_epcap_commit = master
-
-PACKAGES += eper
-pkg_eper_name = eper
-pkg_eper_description = Erlang performance and debugging tools.
-pkg_eper_homepage = https://github.com/massemanet/eper
-pkg_eper_fetch = git
-pkg_eper_repo = https://github.com/massemanet/eper
-pkg_eper_commit = master
-
-PACKAGES += epgsql
-pkg_epgsql_name = epgsql
-pkg_epgsql_description = Erlang PostgreSQL client library.
-pkg_epgsql_homepage = https://github.com/epgsql/epgsql
-pkg_epgsql_fetch = git
-pkg_epgsql_repo = https://github.com/epgsql/epgsql
-pkg_epgsql_commit = master
-
-PACKAGES += episcina
-pkg_episcina_name = episcina
-pkg_episcina_description = A simple non intrusive resource pool for connections
-pkg_episcina_homepage = https://github.com/erlware/episcina
-pkg_episcina_fetch = git
-pkg_episcina_repo = https://github.com/erlware/episcina
-pkg_episcina_commit = master
-
-PACKAGES += eplot
-pkg_eplot_name = eplot
-pkg_eplot_description = A plot engine written in erlang.
-pkg_eplot_homepage = https://github.com/psyeugenic/eplot
-pkg_eplot_fetch = git
-pkg_eplot_repo = https://github.com/psyeugenic/eplot
-pkg_eplot_commit = master
-
-PACKAGES += epocxy
-pkg_epocxy_name = epocxy
-pkg_epocxy_description = Erlang Patterns of Concurrency
-pkg_epocxy_homepage = https://github.com/duomark/epocxy
-pkg_epocxy_fetch = git
-pkg_epocxy_repo = https://github.com/duomark/epocxy
-pkg_epocxy_commit = master
-
-PACKAGES += epubnub
-pkg_epubnub_name = epubnub
-pkg_epubnub_description = Erlang PubNub API
-pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
-pkg_epubnub_fetch = git
-pkg_epubnub_repo = https://github.com/tsloughter/epubnub
-pkg_epubnub_commit = master
-
-PACKAGES += eqm
-pkg_eqm_name = eqm
-pkg_eqm_description = Erlang pub sub with supply-demand channels
-pkg_eqm_homepage = https://github.com/loucash/eqm
-pkg_eqm_fetch = git
-pkg_eqm_repo = https://github.com/loucash/eqm
-pkg_eqm_commit = master
-
-PACKAGES += eredis
-pkg_eredis_name = eredis
-pkg_eredis_description = Erlang Redis client
-pkg_eredis_homepage = https://github.com/wooga/eredis
-pkg_eredis_fetch = git
-pkg_eredis_repo = https://github.com/wooga/eredis
-pkg_eredis_commit = master
-
-PACKAGES += eredis_pool
-pkg_eredis_pool_name = eredis_pool
-pkg_eredis_pool_description = eredis_pool is a pool of Redis clients, using eredis and poolboy.
-pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_fetch = git
-pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
-pkg_eredis_pool_commit = master
-
-PACKAGES += erl_streams
-pkg_erl_streams_name = erl_streams
-pkg_erl_streams_description = Streams in Erlang
-pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
-pkg_erl_streams_fetch = git
-pkg_erl_streams_repo = https://github.com/epappas/erl_streams
-pkg_erl_streams_commit = master
-
-PACKAGES += erlang_cep
-pkg_erlang_cep_name = erlang_cep
-pkg_erlang_cep_description = A basic CEP package written in erlang
-pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_fetch = git
-pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
-pkg_erlang_cep_commit = master
-
-PACKAGES += erlang_js
-pkg_erlang_js_name = erlang_js
-pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
-pkg_erlang_js_homepage = https://github.com/basho/erlang_js
-pkg_erlang_js_fetch = git
-pkg_erlang_js_repo = https://github.com/basho/erlang_js
-pkg_erlang_js_commit = master
-
-PACKAGES += erlang_localtime
-pkg_erlang_localtime_name = erlang_localtime
-pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
-pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_fetch = git
-pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
-pkg_erlang_localtime_commit = master
-
-PACKAGES += erlang_smtp
-pkg_erlang_smtp_name = erlang_smtp
-pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
-pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_fetch = git
-pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
-pkg_erlang_smtp_commit = master
-
-PACKAGES += erlang_term
-pkg_erlang_term_name = erlang_term
-pkg_erlang_term_description = Erlang Term Info
-pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
-pkg_erlang_term_fetch = git
-pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
-pkg_erlang_term_commit = master
-
-PACKAGES += erlastic_search
-pkg_erlastic_search_name = erlastic_search
-pkg_erlastic_search_description = An Erlang app for communicating with Elasticsearch's REST interface.
-pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_fetch = git
-pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
-pkg_erlastic_search_commit = master
-
-PACKAGES += erlasticsearch
-pkg_erlasticsearch_name = erlasticsearch
-pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
-pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_fetch = git
-pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
-pkg_erlasticsearch_commit = master
-
-PACKAGES += erlbrake
-pkg_erlbrake_name = erlbrake
-pkg_erlbrake_description = Erlang Airbrake notification client
-pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_fetch = git
-pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
-pkg_erlbrake_commit = master
-
-PACKAGES += erlcloud
-pkg_erlcloud_name = erlcloud
-pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
-pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
-pkg_erlcloud_fetch = git
-pkg_erlcloud_repo = https://github.com/gleber/erlcloud
-pkg_erlcloud_commit = master
-
-PACKAGES += erlcron
-pkg_erlcron_name = erlcron
-pkg_erlcron_description = Erlang cronish system
-pkg_erlcron_homepage = https://github.com/erlware/erlcron
-pkg_erlcron_fetch = git
-pkg_erlcron_repo = https://github.com/erlware/erlcron
-pkg_erlcron_commit = master
-
-PACKAGES += erldb
-pkg_erldb_name = erldb
-pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
-pkg_erldb_homepage = http://erldb.org
-pkg_erldb_fetch = git
-pkg_erldb_repo = https://github.com/erldb/erldb
-pkg_erldb_commit = master
-
-PACKAGES += erldis
-pkg_erldis_name = erldis
-pkg_erldis_description = redis erlang client library
-pkg_erldis_homepage = https://github.com/cstar/erldis
-pkg_erldis_fetch = git
-pkg_erldis_repo = https://github.com/cstar/erldis
-pkg_erldis_commit = master
-
-PACKAGES += erldns
-pkg_erldns_name = erldns
-pkg_erldns_description = DNS server, in erlang.
-pkg_erldns_homepage = https://github.com/aetrion/erl-dns
-pkg_erldns_fetch = git
-pkg_erldns_repo = https://github.com/aetrion/erl-dns
-pkg_erldns_commit = master
-
-PACKAGES += erldocker
-pkg_erldocker_name = erldocker
-pkg_erldocker_description = Docker Remote API client for Erlang
-pkg_erldocker_homepage = https://github.com/proger/erldocker
-pkg_erldocker_fetch = git
-pkg_erldocker_repo = https://github.com/proger/erldocker
-pkg_erldocker_commit = master
-
-PACKAGES += erlfsmon
-pkg_erlfsmon_name = erlfsmon
-pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
-pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
-pkg_erlfsmon_fetch = git
-pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
-pkg_erlfsmon_commit = master
-
-PACKAGES += erlgit
-pkg_erlgit_name = erlgit
-pkg_erlgit_description = Erlang convenience wrapper around git executable
-pkg_erlgit_homepage = https://github.com/gleber/erlgit
-pkg_erlgit_fetch = git
-pkg_erlgit_repo = https://github.com/gleber/erlgit
-pkg_erlgit_commit = master
-
-PACKAGES += erlguten
-pkg_erlguten_name = erlguten
-pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
-pkg_erlguten_homepage = https://github.com/richcarl/erlguten
-pkg_erlguten_fetch = git
-pkg_erlguten_repo = https://github.com/richcarl/erlguten
-pkg_erlguten_commit = master
-
-PACKAGES += erlmc
-pkg_erlmc_name = erlmc
-pkg_erlmc_description = Erlang memcached binary protocol client
-pkg_erlmc_homepage = https://github.com/jkvor/erlmc
-pkg_erlmc_fetch = git
-pkg_erlmc_repo = https://github.com/jkvor/erlmc
-pkg_erlmc_commit = master
-
-PACKAGES += erlmongo
-pkg_erlmongo_name = erlmongo
-pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
-pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_fetch = git
-pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
-pkg_erlmongo_commit = master
-
-PACKAGES += erlog
-pkg_erlog_name = erlog
-pkg_erlog_description = Prolog interpreter in and for Erlang
-pkg_erlog_homepage = https://github.com/rvirding/erlog
-pkg_erlog_fetch = git
-pkg_erlog_repo = https://github.com/rvirding/erlog
-pkg_erlog_commit = master
-
-PACKAGES += erlpass
-pkg_erlpass_name = erlpass
-pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
-pkg_erlpass_homepage = https://github.com/ferd/erlpass
-pkg_erlpass_fetch = git
-pkg_erlpass_repo = https://github.com/ferd/erlpass
-pkg_erlpass_commit = master
-
-PACKAGES += erlport
-pkg_erlport_name = erlport
-pkg_erlport_description = ErlPort - connect Erlang to other languages
-pkg_erlport_homepage = https://github.com/hdima/erlport
-pkg_erlport_fetch = git
-pkg_erlport_repo = https://github.com/hdima/erlport
-pkg_erlport_commit = master
-
-PACKAGES += erlsh
-pkg_erlsh_name = erlsh
-pkg_erlsh_description = Erlang shell tools
-pkg_erlsh_homepage = https://github.com/proger/erlsh
-pkg_erlsh_fetch = git
-pkg_erlsh_repo = https://github.com/proger/erlsh
-pkg_erlsh_commit = master
-
-PACKAGES += erlsha2
-pkg_erlsha2_name = erlsha2
-pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
-pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
-pkg_erlsha2_fetch = git
-pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
-pkg_erlsha2_commit = master
-
-PACKAGES += erlsom
-pkg_erlsom_name = erlsom
-pkg_erlsom_description = XML parser for Erlang
-pkg_erlsom_homepage = https://github.com/willemdj/erlsom
-pkg_erlsom_fetch = git
-pkg_erlsom_repo = https://github.com/willemdj/erlsom
-pkg_erlsom_commit = master
-
-PACKAGES += erlubi
-pkg_erlubi_name = erlubi
-pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
-pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
-pkg_erlubi_fetch = git
-pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
-pkg_erlubi_commit = master
-
-PACKAGES += erlvolt
-pkg_erlvolt_name = erlvolt
-pkg_erlvolt_description = VoltDB Erlang Client Driver
-pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_fetch = git
-pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
-pkg_erlvolt_commit = master
-
-PACKAGES += erlware_commons
-pkg_erlware_commons_name = erlware_commons
-pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
-pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_fetch = git
-pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
-pkg_erlware_commons_commit = master
-
-PACKAGES += erlydtl
-pkg_erlydtl_name = erlydtl
-pkg_erlydtl_description = Django Template Language for Erlang.
-pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_fetch = git
-pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
-pkg_erlydtl_commit = master
-
-PACKAGES += errd
-pkg_errd_name = errd
-pkg_errd_description = Erlang RRDTool library
-pkg_errd_homepage = https://github.com/archaelus/errd
-pkg_errd_fetch = git
-pkg_errd_repo = https://github.com/archaelus/errd
-pkg_errd_commit = master
-
-PACKAGES += erserve
-pkg_erserve_name = erserve
-pkg_erserve_description = Erlang/Rserve communication interface
-pkg_erserve_homepage = https://github.com/del/erserve
-pkg_erserve_fetch = git
-pkg_erserve_repo = https://github.com/del/erserve
-pkg_erserve_commit = master
-
-PACKAGES += erwa
-pkg_erwa_name = erwa
-pkg_erwa_description = A WAMP router and client written in Erlang.
-pkg_erwa_homepage = https://github.com/bwegh/erwa
-pkg_erwa_fetch = git
-pkg_erwa_repo = https://github.com/bwegh/erwa
-pkg_erwa_commit = master
-
-PACKAGES += escalus
-pkg_escalus_name = escalus
-pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
-pkg_escalus_homepage = https://github.com/esl/escalus
-pkg_escalus_fetch = git
-pkg_escalus_repo = https://github.com/esl/escalus
-pkg_escalus_commit = master
-
-PACKAGES += esh_mk
-pkg_esh_mk_name = esh_mk
-pkg_esh_mk_description = esh template engine plugin for erlang.mk
-pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
-pkg_esh_mk_fetch = git
-pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
-pkg_esh_mk_commit = master
-
-PACKAGES += espec
-pkg_espec_name = espec
-pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
-pkg_espec_homepage = https://github.com/lucaspiller/espec
-pkg_espec_fetch = git
-pkg_espec_repo = https://github.com/lucaspiller/espec
-pkg_espec_commit = master
-
-PACKAGES += estatsd
-pkg_estatsd_name = estatsd
-pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
-pkg_estatsd_homepage = https://github.com/RJ/estatsd
-pkg_estatsd_fetch = git
-pkg_estatsd_repo = https://github.com/RJ/estatsd
-pkg_estatsd_commit = master
-
-PACKAGES += etap
-pkg_etap_name = etap
-pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
-pkg_etap_homepage = https://github.com/ngerakines/etap
-pkg_etap_fetch = git
-pkg_etap_repo = https://github.com/ngerakines/etap
-pkg_etap_commit = master
-
-PACKAGES += etest
-pkg_etest_name = etest
-pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
-pkg_etest_homepage = https://github.com/wooga/etest
-pkg_etest_fetch = git
-pkg_etest_repo = https://github.com/wooga/etest
-pkg_etest_commit = master
-
-PACKAGES += etest_http
-pkg_etest_http_name = etest_http
-pkg_etest_http_description = etest Assertions around HTTP (client-side)
-pkg_etest_http_homepage = https://github.com/wooga/etest_http
-pkg_etest_http_fetch = git
-pkg_etest_http_repo = https://github.com/wooga/etest_http
-pkg_etest_http_commit = master
-
-PACKAGES += etoml
-pkg_etoml_name = etoml
-pkg_etoml_description = TOML language erlang parser
-pkg_etoml_homepage = https://github.com/kalta/etoml
-pkg_etoml_fetch = git
-pkg_etoml_repo = https://github.com/kalta/etoml
-pkg_etoml_commit = master
-
-PACKAGES += eunit
-pkg_eunit_name = eunit
-pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
-pkg_eunit_homepage = https://github.com/richcarl/eunit
-pkg_eunit_fetch = git
-pkg_eunit_repo = https://github.com/richcarl/eunit
-pkg_eunit_commit = master
-
-PACKAGES += eunit_formatters
-pkg_eunit_formatters_name = eunit_formatters
-pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
-pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_fetch = git
-pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
-pkg_eunit_formatters_commit = master
-
-PACKAGES += euthanasia
-pkg_euthanasia_name = euthanasia
-pkg_euthanasia_description = Merciful killer for your Erlang processes
-pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_fetch = git
-pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
-pkg_euthanasia_commit = master
-
-PACKAGES += evum
-pkg_evum_name = evum
-pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
-pkg_evum_homepage = https://github.com/msantos/evum
-pkg_evum_fetch = git
-pkg_evum_repo = https://github.com/msantos/evum
-pkg_evum_commit = master
-
-PACKAGES += exec
-pkg_exec_name = erlexec
-pkg_exec_description = Execute and control OS processes from Erlang/OTP.
-pkg_exec_homepage = http://saleyn.github.com/erlexec
-pkg_exec_fetch = git
-pkg_exec_repo = https://github.com/saleyn/erlexec
-pkg_exec_commit = master
-
-PACKAGES += exml
-pkg_exml_name = exml
-pkg_exml_description = XML parsing library in Erlang
-pkg_exml_homepage = https://github.com/paulgray/exml
-pkg_exml_fetch = git
-pkg_exml_repo = https://github.com/paulgray/exml
-pkg_exml_commit = master
-
-PACKAGES += exometer
-pkg_exometer_name = exometer
-pkg_exometer_description = Basic measurement objects and probe behavior
-pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
-pkg_exometer_fetch = git
-pkg_exometer_repo = https://github.com/Feuerlabs/exometer
-pkg_exometer_commit = master
-
-PACKAGES += exs1024
-pkg_exs1024_name = exs1024
-pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
-pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
-pkg_exs1024_fetch = git
-pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
-pkg_exs1024_commit = master
-
-PACKAGES += exs64
-pkg_exs64_name = exs64
-pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
-pkg_exs64_homepage = https://github.com/jj1bdx/exs64
-pkg_exs64_fetch = git
-pkg_exs64_repo = https://github.com/jj1bdx/exs64
-pkg_exs64_commit = master
-
-PACKAGES += exsplus116
-pkg_exsplus116_name = exsplus116
-pkg_exsplus116_description = Xorshift116plus for Erlang
-pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_fetch = git
-pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
-pkg_exsplus116_commit = master
-
-PACKAGES += exsplus128
-pkg_exsplus128_name = exsplus128
-pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
-pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_fetch = git
-pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
-pkg_exsplus128_commit = master
-
-PACKAGES += ezmq
-pkg_ezmq_name = ezmq
-pkg_ezmq_description = zMQ implemented in Erlang
-pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_fetch = git
-pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
-pkg_ezmq_commit = master
-
-PACKAGES += ezmtp
-pkg_ezmtp_name = ezmtp
-pkg_ezmtp_description = ZMTP protocol in pure Erlang.
-pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
-pkg_ezmtp_fetch = git
-pkg_ezmtp_repo = https://github.com/a13x/ezmtp
-pkg_ezmtp_commit = master
-
-PACKAGES += fast_disk_log
-pkg_fast_disk_log_name = fast_disk_log
-pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
-pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_fetch = git
-pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
-pkg_fast_disk_log_commit = master
-
-PACKAGES += feeder
-pkg_feeder_name = feeder
-pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
-pkg_feeder_homepage = https://github.com/michaelnisi/feeder
-pkg_feeder_fetch = git
-pkg_feeder_repo = https://github.com/michaelnisi/feeder
-pkg_feeder_commit = master
-
-PACKAGES += find_crate
-pkg_find_crate_name = find_crate
-pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
-pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
-pkg_find_crate_fetch = git
-pkg_find_crate_repo = https://github.com/goertzenator/find_crate
-pkg_find_crate_commit = master
-
-PACKAGES += fix
-pkg_fix_name = fix
-pkg_fix_description = http://fixprotocol.org/ implementation.
-pkg_fix_homepage = https://github.com/maxlapshin/fix
-pkg_fix_fetch = git
-pkg_fix_repo = https://github.com/maxlapshin/fix
-pkg_fix_commit = master
-
-PACKAGES += flower
-pkg_flower_name = flower
-pkg_flower_description = FlowER - an Erlang OpenFlow development platform
-pkg_flower_homepage = https://github.com/travelping/flower
-pkg_flower_fetch = git
-pkg_flower_repo = https://github.com/travelping/flower
-pkg_flower_commit = master
-
-PACKAGES += fn
-pkg_fn_name = fn
-pkg_fn_description = Function utilities for Erlang
-pkg_fn_homepage = https://github.com/reiddraper/fn
-pkg_fn_fetch = git
-pkg_fn_repo = https://github.com/reiddraper/fn
-pkg_fn_commit = master
-
-PACKAGES += folsom
-pkg_folsom_name = folsom
-pkg_folsom_description = Expose Erlang Events and Metrics
-pkg_folsom_homepage = https://github.com/boundary/folsom
-pkg_folsom_fetch = git
-pkg_folsom_repo = https://github.com/boundary/folsom
-pkg_folsom_commit = master
-
-PACKAGES += folsom_cowboy
-pkg_folsom_cowboy_name = folsom_cowboy
-pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
-pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_fetch = git
-pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
-pkg_folsom_cowboy_commit = master
-
-PACKAGES += folsomite
-pkg_folsomite_name = folsomite
-pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
-pkg_folsomite_homepage = https://github.com/campanja/folsomite
-pkg_folsomite_fetch = git
-pkg_folsomite_repo = https://github.com/campanja/folsomite
-pkg_folsomite_commit = master
-
-PACKAGES += fs
-pkg_fs_name = fs
-pkg_fs_description = Erlang FileSystem Listener
-pkg_fs_homepage = https://github.com/synrc/fs
-pkg_fs_fetch = git
-pkg_fs_repo = https://github.com/synrc/fs
-pkg_fs_commit = master
-
-PACKAGES += fuse
-pkg_fuse_name = fuse
-pkg_fuse_description = A Circuit Breaker for Erlang
-pkg_fuse_homepage = https://github.com/jlouis/fuse
-pkg_fuse_fetch = git
-pkg_fuse_repo = https://github.com/jlouis/fuse
-pkg_fuse_commit = master
-
-PACKAGES += gcm
-pkg_gcm_name = gcm
-pkg_gcm_description = An Erlang application for Google Cloud Messaging
-pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
-pkg_gcm_fetch = git
-pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
-pkg_gcm_commit = master
-
-PACKAGES += gcprof
-pkg_gcprof_name = gcprof
-pkg_gcprof_description = Garbage Collection profiler for Erlang
-pkg_gcprof_homepage = https://github.com/knutin/gcprof
-pkg_gcprof_fetch = git
-pkg_gcprof_repo = https://github.com/knutin/gcprof
-pkg_gcprof_commit = master
-
-PACKAGES += geas
-pkg_geas_name = geas
-pkg_geas_description = Guess Erlang Application Scattering
-pkg_geas_homepage = https://github.com/crownedgrouse/geas
-pkg_geas_fetch = git
-pkg_geas_repo = https://github.com/crownedgrouse/geas
-pkg_geas_commit = master
-
-PACKAGES += geef
-pkg_geef_name = geef
-pkg_geef_description = Git NEEEEF (Erlang NIF)
-pkg_geef_homepage = https://github.com/carlosmn/geef
-pkg_geef_fetch = git
-pkg_geef_repo = https://github.com/carlosmn/geef
-pkg_geef_commit = master
-
-PACKAGES += gen_coap
-pkg_gen_coap_name = gen_coap
-pkg_gen_coap_description = Generic Erlang CoAP Client/Server
-pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_fetch = git
-pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
-pkg_gen_coap_commit = master
-
-PACKAGES += gen_cycle
-pkg_gen_cycle_name = gen_cycle
-pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
-pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_fetch = git
-pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
-pkg_gen_cycle_commit = develop
-
-PACKAGES += gen_icmp
-pkg_gen_icmp_name = gen_icmp
-pkg_gen_icmp_description = Erlang interface to ICMP sockets
-pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_fetch = git
-pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
-pkg_gen_icmp_commit = master
-
-PACKAGES += gen_leader
-pkg_gen_leader_name = gen_leader
-pkg_gen_leader_description = leader election behavior
-pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_fetch = git
-pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
-pkg_gen_leader_commit = master
-
-PACKAGES += gen_nb_server
-pkg_gen_nb_server_name = gen_nb_server
-pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
-pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_fetch = git
-pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
-pkg_gen_nb_server_commit = master
-
-PACKAGES += gen_paxos
-pkg_gen_paxos_name = gen_paxos
-pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
-pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_fetch = git
-pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
-pkg_gen_paxos_commit = master
-
-PACKAGES += gen_rpc
-pkg_gen_rpc_name = gen_rpc
-pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
-pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_fetch = git
-pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
-pkg_gen_rpc_commit = master
-
-PACKAGES += gen_smtp
-pkg_gen_smtp_name = gen_smtp
-pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
-pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_fetch = git
-pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
-pkg_gen_smtp_commit = master
-
-PACKAGES += gen_tracker
-pkg_gen_tracker_name = gen_tracker
-pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
-pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_fetch = git
-pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
-pkg_gen_tracker_commit = master
-
-PACKAGES += gen_unix
-pkg_gen_unix_name = gen_unix
-pkg_gen_unix_description = Erlang Unix socket interface
-pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
-pkg_gen_unix_fetch = git
-pkg_gen_unix_repo = https://github.com/msantos/gen_unix
-pkg_gen_unix_commit = master
-
-PACKAGES += geode
-pkg_geode_name = geode
-pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
-pkg_geode_homepage = https://github.com/bradfordw/geode
-pkg_geode_fetch = git
-pkg_geode_repo = https://github.com/bradfordw/geode
-pkg_geode_commit = master
-
-PACKAGES += getopt
-pkg_getopt_name = getopt
-pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
-pkg_getopt_homepage = https://github.com/jcomellas/getopt
-pkg_getopt_fetch = git
-pkg_getopt_repo = https://github.com/jcomellas/getopt
-pkg_getopt_commit = master
-
-PACKAGES += gettext
-pkg_gettext_name = gettext
-pkg_gettext_description = Erlang internationalization library.
-pkg_gettext_homepage = https://github.com/etnt/gettext
-pkg_gettext_fetch = git
-pkg_gettext_repo = https://github.com/etnt/gettext
-pkg_gettext_commit = master
-
-PACKAGES += giallo
-pkg_giallo_name = giallo
-pkg_giallo_description = Small and flexible web framework on top of Cowboy
-pkg_giallo_homepage = https://github.com/kivra/giallo
-pkg_giallo_fetch = git
-pkg_giallo_repo = https://github.com/kivra/giallo
-pkg_giallo_commit = master
-
-PACKAGES += gin
-pkg_gin_name = gin
-pkg_gin_description = The guards and for Erlang parse_transform
-pkg_gin_homepage = https://github.com/mad-cocktail/gin
-pkg_gin_fetch = git
-pkg_gin_repo = https://github.com/mad-cocktail/gin
-pkg_gin_commit = master
-
-PACKAGES += gitty
-pkg_gitty_name = gitty
-pkg_gitty_description = Git access in erlang
-pkg_gitty_homepage = https://github.com/maxlapshin/gitty
-pkg_gitty_fetch = git
-pkg_gitty_repo = https://github.com/maxlapshin/gitty
-pkg_gitty_commit = master
-
-PACKAGES += gold_fever
-pkg_gold_fever_name = gold_fever
-pkg_gold_fever_description = A Treasure Hunt for Erlangers
-pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
-pkg_gold_fever_fetch = git
-pkg_gold_fever_repo = https://github.com/inaka/gold_fever
-pkg_gold_fever_commit = master
-
-PACKAGES += gpb
-pkg_gpb_name = gpb
-pkg_gpb_description = A Google Protobuf implementation for Erlang
-pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_fetch = git
-pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
-pkg_gpb_commit = master
-
-PACKAGES += gproc
-pkg_gproc_name = gproc
-pkg_gproc_description = Extended process registry for Erlang
-pkg_gproc_homepage = https://github.com/uwiger/gproc
-pkg_gproc_fetch = git
-pkg_gproc_repo = https://github.com/uwiger/gproc
-pkg_gproc_commit = master
-
-PACKAGES += grapherl
-pkg_grapherl_name = grapherl
-pkg_grapherl_description = Create graphs of Erlang systems and programs
-pkg_grapherl_homepage = https://github.com/eproxus/grapherl
-pkg_grapherl_fetch = git
-pkg_grapherl_repo = https://github.com/eproxus/grapherl
-pkg_grapherl_commit = master
-
-PACKAGES += grpc
-pkg_grpc_name = grpc
-pkg_grpc_description = gRPC server in Erlang
-pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_fetch = git
-pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
-pkg_grpc_commit = master
-
-PACKAGES += grpc_client
-pkg_grpc_client_name = grpc_client
-pkg_grpc_client_description = gRPC client in Erlang
-pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_fetch = git
-pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
-pkg_grpc_client_commit = master
-
-PACKAGES += gun
-pkg_gun_name = gun
-pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
-pkg_gun_homepage = http://ninenines.eu
-pkg_gun_fetch = git
-pkg_gun_repo = https://github.com/ninenines/gun
-pkg_gun_commit = master
-
-PACKAGES += gut
-pkg_gut_name = gut
-pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
-pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
-pkg_gut_fetch = git
-pkg_gut_repo = https://github.com/unbalancedparentheses/gut
-pkg_gut_commit = master
-
-PACKAGES += hackney
-pkg_hackney_name = hackney
-pkg_hackney_description = simple HTTP client in Erlang
-pkg_hackney_homepage = https://github.com/benoitc/hackney
-pkg_hackney_fetch = git
-pkg_hackney_repo = https://github.com/benoitc/hackney
-pkg_hackney_commit = master
-
-PACKAGES += hamcrest
-pkg_hamcrest_name = hamcrest
-pkg_hamcrest_description = Erlang port of Hamcrest
-pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_fetch = git
-pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
-pkg_hamcrest_commit = master
-
-PACKAGES += hanoidb
-pkg_hanoidb_name = hanoidb
-pkg_hanoidb_description = Erlang LSM BTree Storage
-pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_fetch = git
-pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
-pkg_hanoidb_commit = master
-
-PACKAGES += hottub
-pkg_hottub_name = hottub
-pkg_hottub_description = Permanent Erlang Worker Pool
-pkg_hottub_homepage = https://github.com/bfrog/hottub
-pkg_hottub_fetch = git
-pkg_hottub_repo = https://github.com/bfrog/hottub
-pkg_hottub_commit = master
-
-PACKAGES += hpack
-pkg_hpack_name = hpack
-pkg_hpack_description = HPACK Implementation for Erlang
-pkg_hpack_homepage = https://github.com/joedevivo/hpack
-pkg_hpack_fetch = git
-pkg_hpack_repo = https://github.com/joedevivo/hpack
-pkg_hpack_commit = master
-
-PACKAGES += hyper
-pkg_hyper_name = hyper
-pkg_hyper_description = Erlang implementation of HyperLogLog
-pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
-pkg_hyper_fetch = git
-pkg_hyper_repo = https://github.com/GameAnalytics/hyper
-pkg_hyper_commit = master
-
-PACKAGES += i18n
-pkg_i18n_name = i18n
-pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
-pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
-pkg_i18n_fetch = git
-pkg_i18n_repo = https://github.com/erlang-unicode/i18n
-pkg_i18n_commit = master
-
-PACKAGES += ibrowse
-pkg_ibrowse_name = ibrowse
-pkg_ibrowse_description = Erlang HTTP client
-pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_fetch = git
-pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
-pkg_ibrowse_commit = master
-
-PACKAGES += idna
-pkg_idna_name = idna
-pkg_idna_description = Erlang IDNA lib
-pkg_idna_homepage = https://github.com/benoitc/erlang-idna
-pkg_idna_fetch = git
-pkg_idna_repo = https://github.com/benoitc/erlang-idna
-pkg_idna_commit = master
-
-PACKAGES += ierlang
-pkg_ierlang_name = ierlang
-pkg_ierlang_description = An Erlang language kernel for IPython.
-pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
-pkg_ierlang_fetch = git
-pkg_ierlang_repo = https://github.com/robbielynch/ierlang
-pkg_ierlang_commit = master
-
-PACKAGES += iota
-pkg_iota_name = iota
-pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
-pkg_iota_homepage = https://github.com/jpgneves/iota
-pkg_iota_fetch = git
-pkg_iota_repo = https://github.com/jpgneves/iota
-pkg_iota_commit = master
-
-PACKAGES += irc_lib
-pkg_irc_lib_name = irc_lib
-pkg_irc_lib_description = Erlang irc client library
-pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_fetch = git
-pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
-pkg_irc_lib_commit = master
-
-PACKAGES += ircd
-pkg_ircd_name = ircd
-pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
-pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
-pkg_ircd_fetch = git
-pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
-pkg_ircd_commit = master
-
-PACKAGES += iris
-pkg_iris_name = iris
-pkg_iris_description = Iris Erlang binding
-pkg_iris_homepage = https://github.com/project-iris/iris-erl
-pkg_iris_fetch = git
-pkg_iris_repo = https://github.com/project-iris/iris-erl
-pkg_iris_commit = master
-
-PACKAGES += iso8601
-pkg_iso8601_name = iso8601
-pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
-pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_fetch = git
-pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
-pkg_iso8601_commit = master
-
-PACKAGES += jamdb_sybase
-pkg_jamdb_sybase_name = jamdb_sybase
-pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
-pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_fetch = git
-pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
-pkg_jamdb_sybase_commit = master
-
-PACKAGES += jerg
-pkg_jerg_name = jerg
-pkg_jerg_description = JSON Schema to Erlang Records Generator
-pkg_jerg_homepage = https://github.com/ddossot/jerg
-pkg_jerg_fetch = git
-pkg_jerg_repo = https://github.com/ddossot/jerg
-pkg_jerg_commit = master
-
-PACKAGES += jesse
-pkg_jesse_name = jesse
-pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
-pkg_jesse_homepage = https://github.com/for-GET/jesse
-pkg_jesse_fetch = git
-pkg_jesse_repo = https://github.com/for-GET/jesse
-pkg_jesse_commit = master
-
-PACKAGES += jiffy
-pkg_jiffy_name = jiffy
-pkg_jiffy_description = JSON NIFs for Erlang.
-pkg_jiffy_homepage = https://github.com/davisp/jiffy
-pkg_jiffy_fetch = git
-pkg_jiffy_repo = https://github.com/davisp/jiffy
-pkg_jiffy_commit = master
-
-PACKAGES += jiffy_v
-pkg_jiffy_v_name = jiffy_v
-pkg_jiffy_v_description = JSON validation utility
-pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_fetch = git
-pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
-pkg_jiffy_v_commit = master
-
-PACKAGES += jobs
-pkg_jobs_name = jobs
-pkg_jobs_description = a Job scheduler for load regulation
-pkg_jobs_homepage = https://github.com/esl/jobs
-pkg_jobs_fetch = git
-pkg_jobs_repo = https://github.com/esl/jobs
-pkg_jobs_commit = master
-
-PACKAGES += joxa
-pkg_joxa_name = joxa
-pkg_joxa_description = A Modern Lisp for the Erlang VM
-pkg_joxa_homepage = https://github.com/joxa/joxa
-pkg_joxa_fetch = git
-pkg_joxa_repo = https://github.com/joxa/joxa
-pkg_joxa_commit = master
-
-PACKAGES += json
-pkg_json_name = json
-pkg_json_description = a high level json library for erlang (17.0+)
-pkg_json_homepage = https://github.com/talentdeficit/json
-pkg_json_fetch = git
-pkg_json_repo = https://github.com/talentdeficit/json
-pkg_json_commit = master
-
-PACKAGES += json_rec
-pkg_json_rec_name = json_rec
-pkg_json_rec_description = JSON to erlang record
-pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
-pkg_json_rec_fetch = git
-pkg_json_rec_repo = https://github.com/justinkirby/json_rec
-pkg_json_rec_commit = master
-
-PACKAGES += jsone
-pkg_jsone_name = jsone
-pkg_jsone_description = An Erlang library for encoding and decoding JSON data.
-pkg_jsone_homepage = https://github.com/sile/jsone.git
-pkg_jsone_fetch = git
-pkg_jsone_repo = https://github.com/sile/jsone.git
-pkg_jsone_commit = master
-
-PACKAGES += jsonerl
-pkg_jsonerl_name = jsonerl
-pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
-pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
-pkg_jsonerl_fetch = git
-pkg_jsonerl_repo = https://github.com/lambder/jsonerl
-pkg_jsonerl_commit = master
-
-PACKAGES += jsonpath
-pkg_jsonpath_name = jsonpath
-pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
-pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_fetch = git
-pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
-pkg_jsonpath_commit = master
-
-PACKAGES += jsonx
-pkg_jsonx_name = jsonx
-pkg_jsonx_description = JSONX is an Erlang library for efficiently decoding and encoding JSON, written in C.
-pkg_jsonx_homepage = https://github.com/iskra/jsonx
-pkg_jsonx_fetch = git
-pkg_jsonx_repo = https://github.com/iskra/jsonx
-pkg_jsonx_commit = master
-
-PACKAGES += jsx
-pkg_jsx_name = jsx
-pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
-pkg_jsx_homepage = https://github.com/talentdeficit/jsx
-pkg_jsx_fetch = git
-pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
-
-PACKAGES += kafka
-pkg_kafka_name = kafka
-pkg_kafka_description = Kafka consumer and producer in Erlang
-pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
-pkg_kafka_fetch = git
-pkg_kafka_repo = https://github.com/wooga/kafka-erlang
-pkg_kafka_commit = master
-
-PACKAGES += kafka_protocol
-pkg_kafka_protocol_name = kafka_protocol
-pkg_kafka_protocol_description = Kafka protocol Erlang library
-pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
-pkg_kafka_protocol_fetch = git
-pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
-pkg_kafka_protocol_commit = master
-
-PACKAGES += kai
-pkg_kai_name = kai
-pkg_kai_description = DHT storage by Takeshi Inoue
-pkg_kai_homepage = https://github.com/synrc/kai
-pkg_kai_fetch = git
-pkg_kai_repo = https://github.com/synrc/kai
-pkg_kai_commit = master
-
-PACKAGES += katja
-pkg_katja_name = katja
-pkg_katja_description = A simple Riemann client written in Erlang.
-pkg_katja_homepage = https://github.com/nifoc/katja
-pkg_katja_fetch = git
-pkg_katja_repo = https://github.com/nifoc/katja
-pkg_katja_commit = master
-
-PACKAGES += kdht
-pkg_kdht_name = kdht
-pkg_kdht_description = kdht is an erlang DHT implementation
-pkg_kdht_homepage = https://github.com/kevinlynx/kdht
-pkg_kdht_fetch = git
-pkg_kdht_repo = https://github.com/kevinlynx/kdht
-pkg_kdht_commit = master
-
-PACKAGES += key2value
-pkg_key2value_name = key2value
-pkg_key2value_description = Erlang 2-way map
-pkg_key2value_homepage = https://github.com/okeuday/key2value
-pkg_key2value_fetch = git
-pkg_key2value_repo = https://github.com/okeuday/key2value
-pkg_key2value_commit = master
-
-PACKAGES += keys1value
-pkg_keys1value_name = keys1value
-pkg_keys1value_description = Erlang set associative map for key lists
-pkg_keys1value_homepage = https://github.com/okeuday/keys1value
-pkg_keys1value_fetch = git
-pkg_keys1value_repo = https://github.com/okeuday/keys1value
-pkg_keys1value_commit = master
-
-PACKAGES += kinetic
-pkg_kinetic_name = kinetic
-pkg_kinetic_description = Erlang Kinesis Client
-pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
-pkg_kinetic_fetch = git
-pkg_kinetic_repo = https://github.com/AdRoll/kinetic
-pkg_kinetic_commit = master
-
-PACKAGES += kjell
-pkg_kjell_name = kjell
-pkg_kjell_description = Erlang Shell
-pkg_kjell_homepage = https://github.com/karlll/kjell
-pkg_kjell_fetch = git
-pkg_kjell_repo = https://github.com/karlll/kjell
-pkg_kjell_commit = master
-
-PACKAGES += kraken
-pkg_kraken_name = kraken
-pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
-pkg_kraken_homepage = https://github.com/Asana/kraken
-pkg_kraken_fetch = git
-pkg_kraken_repo = https://github.com/Asana/kraken
-pkg_kraken_commit = master
-
-PACKAGES += kucumberl
-pkg_kucumberl_name = kucumberl
-pkg_kucumberl_description = A pure-Erlang, open-source implementation of Cucumber
-pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
-pkg_kucumberl_fetch = git
-pkg_kucumberl_repo = https://github.com/openshine/kucumberl
-pkg_kucumberl_commit = master
-
-PACKAGES += kvc
-pkg_kvc_name = kvc
-pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
-pkg_kvc_homepage = https://github.com/etrepum/kvc
-pkg_kvc_fetch = git
-pkg_kvc_repo = https://github.com/etrepum/kvc
-pkg_kvc_commit = master
-
-PACKAGES += kvlists
-pkg_kvlists_name = kvlists
-pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
-pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
-pkg_kvlists_fetch = git
-pkg_kvlists_repo = https://github.com/jcomellas/kvlists
-pkg_kvlists_commit = master
-
-PACKAGES += kvs
-pkg_kvs_name = kvs
-pkg_kvs_description = Container and Iterator
-pkg_kvs_homepage = https://github.com/synrc/kvs
-pkg_kvs_fetch = git
-pkg_kvs_repo = https://github.com/synrc/kvs
-pkg_kvs_commit = master
-
-PACKAGES += lager
-pkg_lager_name = lager
-pkg_lager_description = A logging framework for Erlang/OTP.
-pkg_lager_homepage = https://github.com/erlang-lager/lager
-pkg_lager_fetch = git
-pkg_lager_repo = https://github.com/erlang-lager/lager
-pkg_lager_commit = master
-
-PACKAGES += lager_amqp_backend
-pkg_lager_amqp_backend_name = lager_amqp_backend
-pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
-pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_fetch = git
-pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
-pkg_lager_amqp_backend_commit = master
-
-PACKAGES += lager_syslog
-pkg_lager_syslog_name = lager_syslog
-pkg_lager_syslog_description = Syslog backend for lager
-pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_fetch = git
-pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
-pkg_lager_syslog_commit = master
-
-PACKAGES += lambdapad
-pkg_lambdapad_name = lambdapad
-pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
-pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
-pkg_lambdapad_fetch = git
-pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
-pkg_lambdapad_commit = master
-
-PACKAGES += lasp
-pkg_lasp_name = lasp
-pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
-pkg_lasp_homepage = http://lasp-lang.org/
-pkg_lasp_fetch = git
-pkg_lasp_repo = https://github.com/lasp-lang/lasp
-pkg_lasp_commit = master
-
-PACKAGES += lasse
-pkg_lasse_name = lasse
-pkg_lasse_description = SSE handler for Cowboy
-pkg_lasse_homepage = https://github.com/inaka/lasse
-pkg_lasse_fetch = git
-pkg_lasse_repo = https://github.com/inaka/lasse
-pkg_lasse_commit = master
-
-PACKAGES += ldap
-pkg_ldap_name = ldap
-pkg_ldap_description = LDAP server written in Erlang
-pkg_ldap_homepage = https://github.com/spawnproc/ldap
-pkg_ldap_fetch = git
-pkg_ldap_repo = https://github.com/spawnproc/ldap
-pkg_ldap_commit = master
-
-PACKAGES += lethink
-pkg_lethink_name = lethink
-pkg_lethink_description = erlang driver for rethinkdb
-pkg_lethink_homepage = https://github.com/taybin/lethink
-pkg_lethink_fetch = git
-pkg_lethink_repo = https://github.com/taybin/lethink
-pkg_lethink_commit = master
-
-PACKAGES += lfe
-pkg_lfe_name = lfe
-pkg_lfe_description = Lisp Flavoured Erlang (LFE)
-pkg_lfe_homepage = https://github.com/rvirding/lfe
-pkg_lfe_fetch = git
-pkg_lfe_repo = https://github.com/rvirding/lfe
-pkg_lfe_commit = master
-
-PACKAGES += ling
-pkg_ling_name = ling
-pkg_ling_description = Erlang on Xen
-pkg_ling_homepage = https://github.com/cloudozer/ling
-pkg_ling_fetch = git
-pkg_ling_repo = https://github.com/cloudozer/ling
-pkg_ling_commit = master
-
-PACKAGES += live
-pkg_live_name = live
-pkg_live_description = Automated module and configuration reloader.
-pkg_live_homepage = http://ninenines.eu
-pkg_live_fetch = git
-pkg_live_repo = https://github.com/ninenines/live
-pkg_live_commit = master
-
-PACKAGES += lmq
-pkg_lmq_name = lmq
-pkg_lmq_description = Lightweight Message Queue
-pkg_lmq_homepage = https://github.com/iij/lmq
-pkg_lmq_fetch = git
-pkg_lmq_repo = https://github.com/iij/lmq
-pkg_lmq_commit = master
-
-PACKAGES += locker
-pkg_locker_name = locker
-pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
-pkg_locker_homepage = https://github.com/wooga/locker
-pkg_locker_fetch = git
-pkg_locker_repo = https://github.com/wooga/locker
-pkg_locker_commit = master
-
-PACKAGES += locks
-pkg_locks_name = locks
-pkg_locks_description = A scalable, deadlock-resolving resource locker
-pkg_locks_homepage = https://github.com/uwiger/locks
-pkg_locks_fetch = git
-pkg_locks_repo = https://github.com/uwiger/locks
-pkg_locks_commit = master
-
-PACKAGES += log4erl
-pkg_log4erl_name = log4erl
-pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
-pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
-pkg_log4erl_fetch = git
-pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
-pkg_log4erl_commit = master
-
-PACKAGES += lol
-pkg_lol_name = lol
-pkg_lol_description = Lisp on erLang, and programming is fun again
-pkg_lol_homepage = https://github.com/b0oh/lol
-pkg_lol_fetch = git
-pkg_lol_repo = https://github.com/b0oh/lol
-pkg_lol_commit = master
-
-PACKAGES += lucid
-pkg_lucid_name = lucid
-pkg_lucid_description = HTTP/2 server written in Erlang
-pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_fetch = git
-pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
-pkg_lucid_commit = master
-
-PACKAGES += luerl
-pkg_luerl_name = luerl
-pkg_luerl_description = Lua in Erlang
-pkg_luerl_homepage = https://github.com/rvirding/luerl
-pkg_luerl_fetch = git
-pkg_luerl_repo = https://github.com/rvirding/luerl
-pkg_luerl_commit = develop
-
-PACKAGES += luwak
-pkg_luwak_name = luwak
-pkg_luwak_description = Large-object storage interface for Riak
-pkg_luwak_homepage = https://github.com/basho/luwak
-pkg_luwak_fetch = git
-pkg_luwak_repo = https://github.com/basho/luwak
-pkg_luwak_commit = master
-
-PACKAGES += lux
-pkg_lux_name = lux
-pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
-pkg_lux_homepage = https://github.com/hawk/lux
-pkg_lux_fetch = git
-pkg_lux_repo = https://github.com/hawk/lux
-pkg_lux_commit = master
-
-PACKAGES += machi
-pkg_machi_name = machi
-pkg_machi_description = Machi file store
-pkg_machi_homepage = https://github.com/basho/machi
-pkg_machi_fetch = git
-pkg_machi_repo = https://github.com/basho/machi
-pkg_machi_commit = master
-
-PACKAGES += mad
-pkg_mad_name = mad
-pkg_mad_description = Small and Fast Rebar Replacement
-pkg_mad_homepage = https://github.com/synrc/mad
-pkg_mad_fetch = git
-pkg_mad_repo = https://github.com/synrc/mad
-pkg_mad_commit = master
-
-PACKAGES += marina
-pkg_marina_name = marina
-pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
-pkg_marina_homepage = https://github.com/lpgauth/marina
-pkg_marina_fetch = git
-pkg_marina_repo = https://github.com/lpgauth/marina
-pkg_marina_commit = master
-
-PACKAGES += mavg
-pkg_mavg_name = mavg
-pkg_mavg_description = Erlang :: Exponential moving average library
-pkg_mavg_homepage = https://github.com/EchoTeam/mavg
-pkg_mavg_fetch = git
-pkg_mavg_repo = https://github.com/EchoTeam/mavg
-pkg_mavg_commit = master
-
-PACKAGES += mc_erl
-pkg_mc_erl_name = mc_erl
-pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
-pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
-pkg_mc_erl_fetch = git
-pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
-pkg_mc_erl_commit = master
-
-PACKAGES += mcd
-pkg_mcd_name = mcd
-pkg_mcd_description = Fast memcached protocol client in pure Erlang
-pkg_mcd_homepage = https://github.com/EchoTeam/mcd
-pkg_mcd_fetch = git
-pkg_mcd_repo = https://github.com/EchoTeam/mcd
-pkg_mcd_commit = master
-
-PACKAGES += mcerlang
-pkg_mcerlang_name = mcerlang
-pkg_mcerlang_description = The McErlang model checker for Erlang
-pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
-pkg_mcerlang_fetch = git
-pkg_mcerlang_repo = https://github.com/fredlund/McErlang
-pkg_mcerlang_commit = master
-
-PACKAGES += meck
-pkg_meck_name = meck
-pkg_meck_description = A mocking library for Erlang
-pkg_meck_homepage = https://github.com/eproxus/meck
-pkg_meck_fetch = git
-pkg_meck_repo = https://github.com/eproxus/meck
-pkg_meck_commit = master
-
-PACKAGES += mekao
-pkg_mekao_name = mekao
-pkg_mekao_description = SQL constructor
-pkg_mekao_homepage = https://github.com/ddosia/mekao
-pkg_mekao_fetch = git
-pkg_mekao_repo = https://github.com/ddosia/mekao
-pkg_mekao_commit = master
-
-PACKAGES += memo
-pkg_memo_name = memo
-pkg_memo_description = Erlang memoization server
-pkg_memo_homepage = https://github.com/tuncer/memo
-pkg_memo_fetch = git
-pkg_memo_repo = https://github.com/tuncer/memo
-pkg_memo_commit = master
-
-PACKAGES += merge_index
-pkg_merge_index_name = merge_index
-pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
-pkg_merge_index_homepage = https://github.com/basho/merge_index
-pkg_merge_index_fetch = git
-pkg_merge_index_repo = https://github.com/basho/merge_index
-pkg_merge_index_commit = master
-
-PACKAGES += merl
-pkg_merl_name = merl
-pkg_merl_description = Metaprogramming in Erlang
-pkg_merl_homepage = https://github.com/richcarl/merl
-pkg_merl_fetch = git
-pkg_merl_repo = https://github.com/richcarl/merl
-pkg_merl_commit = master
-
-PACKAGES += mimerl
-pkg_mimerl_name = mimerl
-pkg_mimerl_description = library to handle mimetypes
-pkg_mimerl_homepage = https://github.com/benoitc/mimerl
-pkg_mimerl_fetch = git
-pkg_mimerl_repo = https://github.com/benoitc/mimerl
-pkg_mimerl_commit = master
-
-PACKAGES += mimetypes
-pkg_mimetypes_name = mimetypes
-pkg_mimetypes_description = Erlang MIME types library
-pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_fetch = git
-pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
-pkg_mimetypes_commit = master
-
-PACKAGES += mixer
-pkg_mixer_name = mixer
-pkg_mixer_description = Mix in functions from other modules
-pkg_mixer_homepage = https://github.com/chef/mixer
-pkg_mixer_fetch = git
-pkg_mixer_repo = https://github.com/chef/mixer
-pkg_mixer_commit = master
-
-PACKAGES += mochiweb
-pkg_mochiweb_name = mochiweb
-pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
-pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
-pkg_mochiweb_fetch = git
-pkg_mochiweb_repo = https://github.com/mochi/mochiweb
-pkg_mochiweb_commit = master
-
-PACKAGES += mochiweb_xpath
-pkg_mochiweb_xpath_name = mochiweb_xpath
-pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
-pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_fetch = git
-pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
-pkg_mochiweb_xpath_commit = master
-
-PACKAGES += mockgyver
-pkg_mockgyver_name = mockgyver
-pkg_mockgyver_description = A mocking library for Erlang
-pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
-pkg_mockgyver_fetch = git
-pkg_mockgyver_repo = https://github.com/klajo/mockgyver
-pkg_mockgyver_commit = master
-
-PACKAGES += modlib
-pkg_modlib_name = modlib
-pkg_modlib_description = Web framework based on Erlang's inets httpd
-pkg_modlib_homepage = https://github.com/gar1t/modlib
-pkg_modlib_fetch = git
-pkg_modlib_repo = https://github.com/gar1t/modlib
-pkg_modlib_commit = master
-
-PACKAGES += mongodb
-pkg_mongodb_name = mongodb
-pkg_mongodb_description = MongoDB driver for Erlang
-pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_fetch = git
-pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
-pkg_mongodb_commit = master
-
-PACKAGES += mongooseim
-pkg_mongooseim_name = mongooseim
-pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
-pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
-pkg_mongooseim_fetch = git
-pkg_mongooseim_repo = https://github.com/esl/MongooseIM
-pkg_mongooseim_commit = master
-
-PACKAGES += moyo
-pkg_moyo_name = moyo
-pkg_moyo_description = Erlang utility functions library
-pkg_moyo_homepage = https://github.com/dwango/moyo
-pkg_moyo_fetch = git
-pkg_moyo_repo = https://github.com/dwango/moyo
-pkg_moyo_commit = master
-
-PACKAGES += msgpack
-pkg_msgpack_name = msgpack
-pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
-pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_fetch = git
-pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
-pkg_msgpack_commit = master
-
-PACKAGES += mu2
-pkg_mu2_name = mu2
-pkg_mu2_description = Erlang mutation testing tool
-pkg_mu2_homepage = https://github.com/ramsay-t/mu2
-pkg_mu2_fetch = git
-pkg_mu2_repo = https://github.com/ramsay-t/mu2
-pkg_mu2_commit = master
-
-PACKAGES += mustache
-pkg_mustache_name = mustache
-pkg_mustache_description = Mustache template engine for Erlang.
-pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
-pkg_mustache_fetch = git
-pkg_mustache_repo = https://github.com/mojombo/mustache.erl
-pkg_mustache_commit = master
-
-PACKAGES += myproto
-pkg_myproto_name = myproto
-pkg_myproto_description = MySQL Server Protocol in Erlang
-pkg_myproto_homepage = https://github.com/altenwald/myproto
-pkg_myproto_fetch = git
-pkg_myproto_repo = https://github.com/altenwald/myproto
-pkg_myproto_commit = master
-
-PACKAGES += mysql
-pkg_mysql_name = mysql
-pkg_mysql_description = MySQL client library for Erlang/OTP
-pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_fetch = git
-pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
-
-PACKAGES += n2o
-pkg_n2o_name = n2o
-pkg_n2o_description = WebSocket Application Server
-pkg_n2o_homepage = https://github.com/5HT/n2o
-pkg_n2o_fetch = git
-pkg_n2o_repo = https://github.com/5HT/n2o
-pkg_n2o_commit = master
-
-PACKAGES += nat_upnp
-pkg_nat_upnp_name = nat_upnp
-pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
-pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_fetch = git
-pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
-pkg_nat_upnp_commit = master
-
-PACKAGES += neo4j
-pkg_neo4j_name = neo4j
-pkg_neo4j_description = Erlang client library for Neo4J.
-pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_fetch = git
-pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
-pkg_neo4j_commit = master
-
-PACKAGES += neotoma
-pkg_neotoma_name = neotoma
-pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
-pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
-pkg_neotoma_fetch = git
-pkg_neotoma_repo = https://github.com/seancribbs/neotoma
-pkg_neotoma_commit = master
-
-PACKAGES += newrelic
-pkg_newrelic_name = newrelic
-pkg_newrelic_description = Erlang library for sending metrics to New Relic
-pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_fetch = git
-pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
-pkg_newrelic_commit = master
-
-PACKAGES += nifty
-pkg_nifty_name = nifty
-pkg_nifty_description = Erlang NIF wrapper generator
-pkg_nifty_homepage = https://github.com/parapluu/nifty
-pkg_nifty_fetch = git
-pkg_nifty_repo = https://github.com/parapluu/nifty
-pkg_nifty_commit = master
-
-PACKAGES += nitrogen_core
-pkg_nitrogen_core_name = nitrogen_core
-pkg_nitrogen_core_description = The core Nitrogen library.
-pkg_nitrogen_core_homepage = http://nitrogenproject.com/
-pkg_nitrogen_core_fetch = git
-pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
-pkg_nitrogen_core_commit = master
-
-PACKAGES += nkbase
-pkg_nkbase_name = nkbase
-pkg_nkbase_description = NkBASE distributed database
-pkg_nkbase_homepage = https://github.com/Nekso/nkbase
-pkg_nkbase_fetch = git
-pkg_nkbase_repo = https://github.com/Nekso/nkbase
-pkg_nkbase_commit = develop
-
-PACKAGES += nkdocker
-pkg_nkdocker_name = nkdocker
-pkg_nkdocker_description = Erlang Docker client
-pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
-pkg_nkdocker_fetch = git
-pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
-pkg_nkdocker_commit = master
-
-PACKAGES += nkpacket
-pkg_nkpacket_name = nkpacket
-pkg_nkpacket_description = Generic Erlang transport layer
-pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
-pkg_nkpacket_fetch = git
-pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
-pkg_nkpacket_commit = master
-
-PACKAGES += nksip
-pkg_nksip_name = nksip
-pkg_nksip_description = Erlang SIP application server
-pkg_nksip_homepage = https://github.com/kalta/nksip
-pkg_nksip_fetch = git
-pkg_nksip_repo = https://github.com/kalta/nksip
-pkg_nksip_commit = master
-
-PACKAGES += nodefinder
-pkg_nodefinder_name = nodefinder
-pkg_nodefinder_description = automatic node discovery via UDP multicast
-pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
-pkg_nodefinder_fetch = git
-pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
-pkg_nodefinder_commit = master
-
-PACKAGES += nprocreg
-pkg_nprocreg_name = nprocreg
-pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
-pkg_nprocreg_homepage = http://nitrogenproject.com/
-pkg_nprocreg_fetch = git
-pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
-pkg_nprocreg_commit = master
-
-PACKAGES += oauth
-pkg_oauth_name = oauth
-pkg_oauth_description = An Erlang OAuth 1.0 implementation
-pkg_oauth_homepage = https://github.com/tim/erlang-oauth
-pkg_oauth_fetch = git
-pkg_oauth_repo = https://github.com/tim/erlang-oauth
-pkg_oauth_commit = master
-
-PACKAGES += oauth2
-pkg_oauth2_name = oauth2
-pkg_oauth2_description = Erlang OAuth2 implementation
-pkg_oauth2_homepage = https://github.com/kivra/oauth2
-pkg_oauth2_fetch = git
-pkg_oauth2_repo = https://github.com/kivra/oauth2
-pkg_oauth2_commit = master
-
-PACKAGES += observer_cli
-pkg_observer_cli_name = observer_cli
-pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
-pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
-pkg_observer_cli_fetch = git
-pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
-pkg_observer_cli_commit = master
-
-PACKAGES += octopus
-pkg_octopus_name = octopus
-pkg_octopus_description = Small and flexible pool manager written in Erlang
-pkg_octopus_homepage = https://github.com/erlangbureau/octopus
-pkg_octopus_fetch = git
-pkg_octopus_repo = https://github.com/erlangbureau/octopus
-pkg_octopus_commit = master
-
-PACKAGES += of_protocol
-pkg_of_protocol_name = of_protocol
-pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
-pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_fetch = git
-pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
-pkg_of_protocol_commit = master
-
-PACKAGES += opencouch
-pkg_opencouch_name = couch
-pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
-pkg_opencouch_homepage = https://github.com/benoitc/opencouch
-pkg_opencouch_fetch = git
-pkg_opencouch_repo = https://github.com/benoitc/opencouch
-pkg_opencouch_commit = master
-
-PACKAGES += openflow
-pkg_openflow_name = openflow
-pkg_openflow_description = An OpenFlow controller written in pure erlang
-pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_fetch = git
-pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
-pkg_openflow_commit = master
-
-PACKAGES += openid
-pkg_openid_name = openid
-pkg_openid_description = Erlang OpenID
-pkg_openid_homepage = https://github.com/brendonh/erl_openid
-pkg_openid_fetch = git
-pkg_openid_repo = https://github.com/brendonh/erl_openid
-pkg_openid_commit = master
-
-PACKAGES += openpoker
-pkg_openpoker_name = openpoker
-pkg_openpoker_description = Genesis Texas hold'em Game Server
-pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
-pkg_openpoker_fetch = git
-pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
-pkg_openpoker_commit = master
-
-PACKAGES += otpbp
-pkg_otpbp_name = otpbp
-pkg_otpbp_description = Parse transformer for using new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
-pkg_otpbp_homepage = https://github.com/Ledest/otpbp
-pkg_otpbp_fetch = git
-pkg_otpbp_repo = https://github.com/Ledest/otpbp
-pkg_otpbp_commit = master
-
-PACKAGES += pal
-pkg_pal_name = pal
-pkg_pal_description = Pragmatic Authentication Library
-pkg_pal_homepage = https://github.com/manifest/pal
-pkg_pal_fetch = git
-pkg_pal_repo = https://github.com/manifest/pal
-pkg_pal_commit = master
-
-PACKAGES += parse_trans
-pkg_parse_trans_name = parse_trans
-pkg_parse_trans_description = Parse transform utilities for Erlang
-pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
-pkg_parse_trans_fetch = git
-pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
-pkg_parse_trans_commit = master
-
-PACKAGES += parsexml
-pkg_parsexml_name = parsexml
-pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
-pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
-pkg_parsexml_fetch = git
-pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
-pkg_parsexml_commit = master
-
-PACKAGES += partisan
-pkg_partisan_name = partisan
-pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
-pkg_partisan_homepage = http://partisan.cloud
-pkg_partisan_fetch = git
-pkg_partisan_repo = https://github.com/lasp-lang/partisan
-pkg_partisan_commit = master
-
-PACKAGES += pegjs
-pkg_pegjs_name = pegjs
-pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
-pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
-pkg_pegjs_fetch = git
-pkg_pegjs_repo = https://github.com/dmitriid/pegjs
-pkg_pegjs_commit = master
-
-PACKAGES += percept2
-pkg_percept2_name = percept2
-pkg_percept2_description = Concurrent profiling tool for Erlang
-pkg_percept2_homepage = https://github.com/huiqing/percept2
-pkg_percept2_fetch = git
-pkg_percept2_repo = https://github.com/huiqing/percept2
-pkg_percept2_commit = master
-
-PACKAGES += pgo
-pkg_pgo_name = pgo
-pkg_pgo_description = Erlang Postgres client and connection pool
-pkg_pgo_homepage = https://github.com/erleans/pgo.git
-pkg_pgo_fetch = git
-pkg_pgo_repo = https://github.com/erleans/pgo.git
-pkg_pgo_commit = master
-
-PACKAGES += pgsql
-pkg_pgsql_name = pgsql
-pkg_pgsql_description = Erlang PostgreSQL driver
-pkg_pgsql_homepage = https://github.com/semiocast/pgsql
-pkg_pgsql_fetch = git
-pkg_pgsql_repo = https://github.com/semiocast/pgsql
-pkg_pgsql_commit = master
-
-PACKAGES += pkgx
-pkg_pkgx_name = pkgx
-pkg_pkgx_description = Build .deb packages from Erlang releases
-pkg_pkgx_homepage = https://github.com/arjan/pkgx
-pkg_pkgx_fetch = git
-pkg_pkgx_repo = https://github.com/arjan/pkgx
-pkg_pkgx_commit = master
-
-PACKAGES += pkt
-pkg_pkt_name = pkt
-pkg_pkt_description = Erlang network protocol library
-pkg_pkt_homepage = https://github.com/msantos/pkt
-pkg_pkt_fetch = git
-pkg_pkt_repo = https://github.com/msantos/pkt
-pkg_pkt_commit = master
-
-PACKAGES += plain_fsm
-pkg_plain_fsm_name = plain_fsm
-pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
-pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_fetch = git
-pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
-pkg_plain_fsm_commit = master
-
-PACKAGES += plumtree
-pkg_plumtree_name = plumtree
-pkg_plumtree_description = Epidemic Broadcast Trees
-pkg_plumtree_homepage = https://github.com/helium/plumtree
-pkg_plumtree_fetch = git
-pkg_plumtree_repo = https://github.com/helium/plumtree
-pkg_plumtree_commit = master
-
-PACKAGES += pmod_transform
-pkg_pmod_transform_name = pmod_transform
-pkg_pmod_transform_description = Parse transform for parameterized modules
-pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_fetch = git
-pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
-pkg_pmod_transform_commit = master
-
-PACKAGES += pobox
-pkg_pobox_name = pobox
-pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
-pkg_pobox_homepage = https://github.com/ferd/pobox
-pkg_pobox_fetch = git
-pkg_pobox_repo = https://github.com/ferd/pobox
-pkg_pobox_commit = master
-
-PACKAGES += ponos
-pkg_ponos_name = ponos
-pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
-pkg_ponos_homepage = https://github.com/klarna/ponos
-pkg_ponos_fetch = git
-pkg_ponos_repo = https://github.com/klarna/ponos
-pkg_ponos_commit = master
-
-PACKAGES += poolboy
-pkg_poolboy_name = poolboy
-pkg_poolboy_description = A hunky Erlang worker pool factory
-pkg_poolboy_homepage = https://github.com/devinus/poolboy
-pkg_poolboy_fetch = git
-pkg_poolboy_repo = https://github.com/devinus/poolboy
-pkg_poolboy_commit = master
-
-PACKAGES += pooler
-pkg_pooler_name = pooler
-pkg_pooler_description = An OTP Process Pool Application
-pkg_pooler_homepage = https://github.com/seth/pooler
-pkg_pooler_fetch = git
-pkg_pooler_repo = https://github.com/seth/pooler
-pkg_pooler_commit = master
-
-PACKAGES += pqueue
-pkg_pqueue_name = pqueue
-pkg_pqueue_description = Erlang Priority Queues
-pkg_pqueue_homepage = https://github.com/okeuday/pqueue
-pkg_pqueue_fetch = git
-pkg_pqueue_repo = https://github.com/okeuday/pqueue
-pkg_pqueue_commit = master
-
-PACKAGES += procket
-pkg_procket_name = procket
-pkg_procket_description = Erlang interface to low level socket operations
-pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
-pkg_procket_fetch = git
-pkg_procket_repo = https://github.com/msantos/procket
-pkg_procket_commit = master
-
-PACKAGES += prometheus
-pkg_prometheus_name = prometheus
-pkg_prometheus_description = Prometheus.io client in Erlang
-pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_fetch = git
-pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
-pkg_prometheus_commit = master
-
-PACKAGES += prop
-pkg_prop_name = prop
-pkg_prop_description = An Erlang code scaffolding and generator system.
-pkg_prop_homepage = https://github.com/nuex/prop
-pkg_prop_fetch = git
-pkg_prop_repo = https://github.com/nuex/prop
-pkg_prop_commit = master
-
-PACKAGES += proper
-pkg_proper_name = proper
-pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
-pkg_proper_homepage = http://proper.softlab.ntua.gr
-pkg_proper_fetch = git
-pkg_proper_repo = https://github.com/manopapad/proper
-pkg_proper_commit = master
-
-PACKAGES += props
-pkg_props_name = props
-pkg_props_description = Property structure library
-pkg_props_homepage = https://github.com/greyarea/props
-pkg_props_fetch = git
-pkg_props_repo = https://github.com/greyarea/props
-pkg_props_commit = master
-
-PACKAGES += protobuffs
-pkg_protobuffs_name = protobuffs
-pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
-pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_fetch = git
-pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
-pkg_protobuffs_commit = master
-
-PACKAGES += psycho
-pkg_psycho_name = psycho
-pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
-pkg_psycho_homepage = https://github.com/gar1t/psycho
-pkg_psycho_fetch = git
-pkg_psycho_repo = https://github.com/gar1t/psycho
-pkg_psycho_commit = master
-
-PACKAGES += purity
-pkg_purity_name = purity
-pkg_purity_description = A side-effect analyzer for Erlang
-pkg_purity_homepage = https://github.com/mpitid/purity
-pkg_purity_fetch = git
-pkg_purity_repo = https://github.com/mpitid/purity
-pkg_purity_commit = master
-
-PACKAGES += push_service
-pkg_push_service_name = push_service
-pkg_push_service_description = Push service
-pkg_push_service_homepage = https://github.com/hairyhum/push_service
-pkg_push_service_fetch = git
-pkg_push_service_repo = https://github.com/hairyhum/push_service
-pkg_push_service_commit = master
-
-PACKAGES += qdate
-pkg_qdate_name = qdate
-pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
-pkg_qdate_homepage = https://github.com/choptastic/qdate
-pkg_qdate_fetch = git
-pkg_qdate_repo = https://github.com/choptastic/qdate
-pkg_qdate_commit = master
-
-PACKAGES += qrcode
-pkg_qrcode_name = qrcode
-pkg_qrcode_description = QR Code encoder in Erlang
-pkg_qrcode_homepage = https://github.com/komone/qrcode
-pkg_qrcode_fetch = git
-pkg_qrcode_repo = https://github.com/komone/qrcode
-pkg_qrcode_commit = master
-
-PACKAGES += quest
-pkg_quest_name = quest
-pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
-pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
-pkg_quest_fetch = git
-pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
-pkg_quest_commit = master
-
-PACKAGES += quickrand
-pkg_quickrand_name = quickrand
-pkg_quickrand_description = Quick Erlang Random Number Generation
-pkg_quickrand_homepage = https://github.com/okeuday/quickrand
-pkg_quickrand_fetch = git
-pkg_quickrand_repo = https://github.com/okeuday/quickrand
-pkg_quickrand_commit = master
-
-PACKAGES += rabbit
-pkg_rabbit_name = rabbit
-pkg_rabbit_description = RabbitMQ Server
-pkg_rabbit_homepage = https://www.rabbitmq.com/
-pkg_rabbit_fetch = git
-pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
-pkg_rabbit_commit = master
-
-PACKAGES += rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
-pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
-pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_fetch = git
-pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
-pkg_rabbit_exchange_type_riak_commit = master
-
-PACKAGES += rack
-pkg_rack_name = rack
-pkg_rack_description = Rack handler for erlang
-pkg_rack_homepage = https://github.com/erlyvideo/rack
-pkg_rack_fetch = git
-pkg_rack_repo = https://github.com/erlyvideo/rack
-pkg_rack_commit = master
-
-PACKAGES += radierl
-pkg_radierl_name = radierl
-pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
-pkg_radierl_homepage = https://github.com/vances/radierl
-pkg_radierl_fetch = git
-pkg_radierl_repo = https://github.com/vances/radierl
-pkg_radierl_commit = master
-
-PACKAGES += rafter
-pkg_rafter_name = rafter
-pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
-pkg_rafter_homepage = https://github.com/andrewjstone/rafter
-pkg_rafter_fetch = git
-pkg_rafter_repo = https://github.com/andrewjstone/rafter
-pkg_rafter_commit = master
-
-PACKAGES += ranch
-pkg_ranch_name = ranch
-pkg_ranch_description = Socket acceptor pool for TCP protocols.
-pkg_ranch_homepage = http://ninenines.eu
-pkg_ranch_fetch = git
-pkg_ranch_repo = https://github.com/ninenines/ranch
-pkg_ranch_commit = 1.2.1
-
-PACKAGES += rbeacon
-pkg_rbeacon_name = rbeacon
-pkg_rbeacon_description = LAN discovery and presence in Erlang.
-pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
-pkg_rbeacon_fetch = git
-pkg_rbeacon_repo = https://github.com/refuge/rbeacon
-pkg_rbeacon_commit = master
-
-PACKAGES += rebar
-pkg_rebar_name = rebar
-pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
-pkg_rebar_homepage = http://www.rebar3.org
-pkg_rebar_fetch = git
-pkg_rebar_repo = https://github.com/rebar/rebar3
-pkg_rebar_commit = master
-
-PACKAGES += rebus
-pkg_rebus_name = rebus
-pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang.
-pkg_rebus_homepage = https://github.com/olle/rebus
-pkg_rebus_fetch = git
-pkg_rebus_repo = https://github.com/olle/rebus
-pkg_rebus_commit = master
-
-PACKAGES += rec2json
-pkg_rec2json_name = rec2json
-pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
-pkg_rec2json_homepage = https://github.com/lordnull/rec2json
-pkg_rec2json_fetch = git
-pkg_rec2json_repo = https://github.com/lordnull/rec2json
-pkg_rec2json_commit = master
-
-PACKAGES += recon
-pkg_recon_name = recon
-pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
-pkg_recon_homepage = https://github.com/ferd/recon
-pkg_recon_fetch = git
-pkg_recon_repo = https://github.com/ferd/recon
-pkg_recon_commit = master
-
-PACKAGES += record_info
-pkg_record_info_name = record_info
-pkg_record_info_description = Convert between record and proplist
-pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_fetch = git
-pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
-pkg_record_info_commit = master
-
-PACKAGES += redgrid
-pkg_redgrid_name = redgrid
-pkg_redgrid_description = automatic Erlang node discovery via redis
-pkg_redgrid_homepage = https://github.com/jkvor/redgrid
-pkg_redgrid_fetch = git
-pkg_redgrid_repo = https://github.com/jkvor/redgrid
-pkg_redgrid_commit = master
-
-PACKAGES += redo
-pkg_redo_name = redo
-pkg_redo_description = pipelined erlang redis client
-pkg_redo_homepage = https://github.com/jkvor/redo
-pkg_redo_fetch = git
-pkg_redo_repo = https://github.com/jkvor/redo
-pkg_redo_commit = master
-
-PACKAGES += reload_mk
-pkg_reload_mk_name = reload_mk
-pkg_reload_mk_description = Live reload plugin for erlang.mk.
-pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
-pkg_reload_mk_fetch = git
-pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
-pkg_reload_mk_commit = master
-
-PACKAGES += reltool_util
-pkg_reltool_util_name = reltool_util
-pkg_reltool_util_description = Erlang reltool utility functionality application
-pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
-pkg_reltool_util_fetch = git
-pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
-pkg_reltool_util_commit = master
-
-PACKAGES += relx
-pkg_relx_name = relx
-pkg_relx_description = Sane, simple release creation for Erlang
-pkg_relx_homepage = https://github.com/erlware/relx
-pkg_relx_fetch = git
-pkg_relx_repo = https://github.com/erlware/relx
-pkg_relx_commit = master
-
-PACKAGES += resource_discovery
-pkg_resource_discovery_name = resource_discovery
-pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
-pkg_resource_discovery_homepage = http://erlware.org/
-pkg_resource_discovery_fetch = git
-pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
-pkg_resource_discovery_commit = master
-
-PACKAGES += restc
-pkg_restc_name = restc
-pkg_restc_description = Erlang REST client
-pkg_restc_homepage = https://github.com/kivra/restclient
-pkg_restc_fetch = git
-pkg_restc_repo = https://github.com/kivra/restclient
-pkg_restc_commit = master
-
-PACKAGES += rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
-pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
-pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_fetch = git
-pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
-pkg_rfc4627_jsonrpc_commit = master
-
-PACKAGES += riak_control
-pkg_riak_control_name = riak_control
-pkg_riak_control_description = Webmachine-based administration interface for Riak.
-pkg_riak_control_homepage = https://github.com/basho/riak_control
-pkg_riak_control_fetch = git
-pkg_riak_control_repo = https://github.com/basho/riak_control
-pkg_riak_control_commit = master
-
-PACKAGES += riak_core
-pkg_riak_core_name = riak_core
-pkg_riak_core_description = Distributed systems infrastructure used by Riak.
-pkg_riak_core_homepage = https://github.com/basho/riak_core
-pkg_riak_core_fetch = git
-pkg_riak_core_repo = https://github.com/basho/riak_core
-pkg_riak_core_commit = master
-
-PACKAGES += riak_dt
-pkg_riak_dt_name = riak_dt
-pkg_riak_dt_description = Convergent replicated datatypes in Erlang
-pkg_riak_dt_homepage = https://github.com/basho/riak_dt
-pkg_riak_dt_fetch = git
-pkg_riak_dt_repo = https://github.com/basho/riak_dt
-pkg_riak_dt_commit = master
-
-PACKAGES += riak_ensemble
-pkg_riak_ensemble_name = riak_ensemble
-pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
-pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_fetch = git
-pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
-pkg_riak_ensemble_commit = master
-
-PACKAGES += riak_kv
-pkg_riak_kv_name = riak_kv
-pkg_riak_kv_description = Riak Key/Value Store
-pkg_riak_kv_homepage = https://github.com/basho/riak_kv
-pkg_riak_kv_fetch = git
-pkg_riak_kv_repo = https://github.com/basho/riak_kv
-pkg_riak_kv_commit = master
-
-PACKAGES += riak_pg
-pkg_riak_pg_name = riak_pg
-pkg_riak_pg_description = Distributed process groups with riak_core.
-pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_fetch = git
-pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
-pkg_riak_pg_commit = master
-
-PACKAGES += riak_pipe
-pkg_riak_pipe_name = riak_pipe
-pkg_riak_pipe_description = Riak Pipelines
-pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
-pkg_riak_pipe_fetch = git
-pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
-pkg_riak_pipe_commit = master
-
-PACKAGES += riak_sysmon
-pkg_riak_sysmon_name = riak_sysmon
-pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
-pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_fetch = git
-pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
-pkg_riak_sysmon_commit = master
-
-PACKAGES += riak_test
-pkg_riak_test_name = riak_test
-pkg_riak_test_description = I'm in your cluster, testing your riaks
-pkg_riak_test_homepage = https://github.com/basho/riak_test
-pkg_riak_test_fetch = git
-pkg_riak_test_repo = https://github.com/basho/riak_test
-pkg_riak_test_commit = master
-
-PACKAGES += riakc
-pkg_riakc_name = riakc
-pkg_riakc_description = Erlang clients for Riak.
-pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
-pkg_riakc_fetch = git
-pkg_riakc_repo = https://github.com/basho/riak-erlang-client
-pkg_riakc_commit = master
-
-PACKAGES += riakhttpc
-pkg_riakhttpc_name = riakhttpc
-pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
-pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_fetch = git
-pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
-pkg_riakhttpc_commit = master
-
-PACKAGES += riaknostic
-pkg_riaknostic_name = riaknostic
-pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
-pkg_riaknostic_homepage = https://github.com/basho/riaknostic
-pkg_riaknostic_fetch = git
-pkg_riaknostic_repo = https://github.com/basho/riaknostic
-pkg_riaknostic_commit = master
-
-PACKAGES += riakpool
-pkg_riakpool_name = riakpool
-pkg_riakpool_description = erlang riak client pool
-pkg_riakpool_homepage = https://github.com/dweldon/riakpool
-pkg_riakpool_fetch = git
-pkg_riakpool_repo = https://github.com/dweldon/riakpool
-pkg_riakpool_commit = master
-
-PACKAGES += rivus_cep
-pkg_rivus_cep_name = rivus_cep
-pkg_rivus_cep_description = Complex event processing in Erlang
-pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_fetch = git
-pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
-pkg_rivus_cep_commit = master
-
-PACKAGES += rlimit
-pkg_rlimit_name = rlimit
-pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
-pkg_rlimit_homepage = https://github.com/jlouis/rlimit
-pkg_rlimit_fetch = git
-pkg_rlimit_repo = https://github.com/jlouis/rlimit
-pkg_rlimit_commit = master
-
-PACKAGES += rust_mk
-pkg_rust_mk_name = rust_mk
-pkg_rust_mk_description = Build Rust crates in an Erlang application
-pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_fetch = git
-pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
-pkg_rust_mk_commit = master
-
-PACKAGES += safetyvalve
-pkg_safetyvalve_name = safetyvalve
-pkg_safetyvalve_description = A safety valve for your erlang node
-pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_fetch = git
-pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
-pkg_safetyvalve_commit = master
-
-PACKAGES += seestar
-pkg_seestar_name = seestar
-pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
-pkg_seestar_homepage = https://github.com/iamaleksey/seestar
-pkg_seestar_fetch = git
-pkg_seestar_repo = https://github.com/iamaleksey/seestar
-pkg_seestar_commit = master
-
-PACKAGES += service
-pkg_service_name = service
-pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
-pkg_service_homepage = http://cloudi.org/
-pkg_service_fetch = git
-pkg_service_repo = https://github.com/CloudI/service
-pkg_service_commit = master
-
-PACKAGES += setup
-pkg_setup_name = setup
-pkg_setup_description = Generic setup utility for Erlang-based systems
-pkg_setup_homepage = https://github.com/uwiger/setup
-pkg_setup_fetch = git
-pkg_setup_repo = https://github.com/uwiger/setup
-pkg_setup_commit = master
-
-PACKAGES += sext
-pkg_sext_name = sext
-pkg_sext_description = Sortable Erlang Term Serialization
-pkg_sext_homepage = https://github.com/uwiger/sext
-pkg_sext_fetch = git
-pkg_sext_repo = https://github.com/uwiger/sext
-pkg_sext_commit = master
-
-PACKAGES += sfmt
-pkg_sfmt_name = sfmt
-pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
-pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_fetch = git
-pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
-pkg_sfmt_commit = master
-
-PACKAGES += sgte
-pkg_sgte_name = sgte
-pkg_sgte_description = A simple Erlang Template Engine
-pkg_sgte_homepage = https://github.com/filippo/sgte
-pkg_sgte_fetch = git
-pkg_sgte_repo = https://github.com/filippo/sgte
-pkg_sgte_commit = master
-
-PACKAGES += sheriff
-pkg_sheriff_name = sheriff
-pkg_sheriff_description = Parse transform for type based validation.
-pkg_sheriff_homepage = http://ninenines.eu
-pkg_sheriff_fetch = git
-pkg_sheriff_repo = https://github.com/extend/sheriff
-pkg_sheriff_commit = master
-
-PACKAGES += shotgun
-pkg_shotgun_name = shotgun
-pkg_shotgun_description = better than just a gun
-pkg_shotgun_homepage = https://github.com/inaka/shotgun
-pkg_shotgun_fetch = git
-pkg_shotgun_repo = https://github.com/inaka/shotgun
-pkg_shotgun_commit = master
-
-PACKAGES += sidejob
-pkg_sidejob_name = sidejob
-pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
-pkg_sidejob_homepage = https://github.com/basho/sidejob
-pkg_sidejob_fetch = git
-pkg_sidejob_repo = https://github.com/basho/sidejob
-pkg_sidejob_commit = master
-
-PACKAGES += sieve
-pkg_sieve_name = sieve
-pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
-pkg_sieve_homepage = https://github.com/benoitc/sieve
-pkg_sieve_fetch = git
-pkg_sieve_repo = https://github.com/benoitc/sieve
-pkg_sieve_commit = master
-
-PACKAGES += sighandler
-pkg_sighandler_name = sighandler
-pkg_sighandler_description = Handle UNIX signals in Erlang
-pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
-pkg_sighandler_fetch = git
-pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
-pkg_sighandler_commit = master
-
-PACKAGES += simhash
-pkg_simhash_name = simhash
-pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
-pkg_simhash_homepage = https://github.com/ferd/simhash
-pkg_simhash_fetch = git
-pkg_simhash_repo = https://github.com/ferd/simhash
-pkg_simhash_commit = master
-
-PACKAGES += simple_bridge
-pkg_simple_bridge_name = simple_bridge
-pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
-pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_fetch = git
-pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
-pkg_simple_bridge_commit = master
-
-PACKAGES += simple_oauth2
-pkg_simple_oauth2_name = simple_oauth2
-pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
-pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_fetch = git
-pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
-pkg_simple_oauth2_commit = master
-
-PACKAGES += skel
-pkg_skel_name = skel
-pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
-pkg_skel_homepage = https://github.com/ParaPhrase/skel
-pkg_skel_fetch = git
-pkg_skel_repo = https://github.com/ParaPhrase/skel
-pkg_skel_commit = master
-
-PACKAGES += slack
-pkg_slack_name = slack
-pkg_slack_description = Minimal slack notification OTP library.
-pkg_slack_homepage = https://github.com/DonBranson/slack
-pkg_slack_fetch = git
-pkg_slack_repo = https://github.com/DonBranson/slack.git
-pkg_slack_commit = master
-
-PACKAGES += smother
-pkg_smother_name = smother
-pkg_smother_description = Extended code coverage metrics for Erlang.
-pkg_smother_homepage = https://ramsay-t.github.io/Smother/
-pkg_smother_fetch = git
-pkg_smother_repo = https://github.com/ramsay-t/Smother
-pkg_smother_commit = master
-
-PACKAGES += snappyer
-pkg_snappyer_name = snappyer
-pkg_snappyer_description = Snappy as nif for Erlang
-pkg_snappyer_homepage = https://github.com/zmstone/snappyer
-pkg_snappyer_fetch = git
-pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
-pkg_snappyer_commit = master
-
-PACKAGES += social
-pkg_social_name = social
-pkg_social_description = Cowboy handler for social login via OAuth2 providers
-pkg_social_homepage = https://github.com/dvv/social
-pkg_social_fetch = git
-pkg_social_repo = https://github.com/dvv/social
-pkg_social_commit = master
-
-PACKAGES += spapi_router
-pkg_spapi_router_name = spapi_router
-pkg_spapi_router_description = Partially-connected Erlang clustering
-pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
-pkg_spapi_router_fetch = git
-pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
-pkg_spapi_router_commit = master
-
-PACKAGES += sqerl
-pkg_sqerl_name = sqerl
-pkg_sqerl_description = An Erlang-flavoured SQL DSL
-pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
-pkg_sqerl_fetch = git
-pkg_sqerl_repo = https://github.com/hairyhum/sqerl
-pkg_sqerl_commit = master
-
-PACKAGES += srly
-pkg_srly_name = srly
-pkg_srly_description = Native Erlang Unix serial interface
-pkg_srly_homepage = https://github.com/msantos/srly
-pkg_srly_fetch = git
-pkg_srly_repo = https://github.com/msantos/srly
-pkg_srly_commit = master
-
-PACKAGES += sshrpc
-pkg_sshrpc_name = sshrpc
-pkg_sshrpc_description = Erlang SSH RPC module (experimental)
-pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_fetch = git
-pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
-pkg_sshrpc_commit = master
-
-PACKAGES += stable
-pkg_stable_name = stable
-pkg_stable_description = Library of assorted helpers for Cowboy web server.
-pkg_stable_homepage = https://github.com/dvv/stable
-pkg_stable_fetch = git
-pkg_stable_repo = https://github.com/dvv/stable
-pkg_stable_commit = master
-
-PACKAGES += statebox
-pkg_statebox_name = statebox
-pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
-pkg_statebox_homepage = https://github.com/mochi/statebox
-pkg_statebox_fetch = git
-pkg_statebox_repo = https://github.com/mochi/statebox
-pkg_statebox_commit = master
-
-PACKAGES += statebox_riak
-pkg_statebox_riak_name = statebox_riak
-pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
-pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_fetch = git
-pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
-pkg_statebox_riak_commit = master
-
-PACKAGES += statman
-pkg_statman_name = statman
-pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
-pkg_statman_homepage = https://github.com/knutin/statman
-pkg_statman_fetch = git
-pkg_statman_repo = https://github.com/knutin/statman
-pkg_statman_commit = master
-
-PACKAGES += statsderl
-pkg_statsderl_name = statsderl
-pkg_statsderl_description = StatsD client (erlang)
-pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
-pkg_statsderl_fetch = git
-pkg_statsderl_repo = https://github.com/lpgauth/statsderl
-pkg_statsderl_commit = master
-
-PACKAGES += stdinout_pool
-pkg_stdinout_pool_name = stdinout_pool
-pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
-pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_fetch = git
-pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
-pkg_stdinout_pool_commit = master
-
-PACKAGES += stockdb
-pkg_stockdb_name = stockdb
-pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
-pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
-pkg_stockdb_fetch = git
-pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
-pkg_stockdb_commit = master
-
-PACKAGES += stripe
-pkg_stripe_name = stripe
-pkg_stripe_description = Erlang interface to the stripe.com API
-pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
-pkg_stripe_fetch = git
-pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
-pkg_stripe_commit = v1
-
-PACKAGES += subproc
-pkg_subproc_name = subproc
-pkg_subproc_description = unix subprocess manager with {active,once|false} modes
-pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
-pkg_subproc_fetch = git
-pkg_subproc_repo = https://github.com/dozzie/subproc
-pkg_subproc_commit = v0.1.0
-
-PACKAGES += supervisor3
-pkg_supervisor3_name = supervisor3
-pkg_supervisor3_description = OTP supervisor with additional strategies
-pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
-pkg_supervisor3_fetch = git
-pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
-pkg_supervisor3_commit = master
-
-PACKAGES += surrogate
-pkg_surrogate_name = surrogate
-pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
-pkg_surrogate_homepage = https://github.com/skruger/Surrogate
-pkg_surrogate_fetch = git
-pkg_surrogate_repo = https://github.com/skruger/Surrogate
-pkg_surrogate_commit = master
-
-PACKAGES += swab
-pkg_swab_name = swab
-pkg_swab_description = General purpose buffer handling module
-pkg_swab_homepage = https://github.com/crownedgrouse/swab
-pkg_swab_fetch = git
-pkg_swab_repo = https://github.com/crownedgrouse/swab
-pkg_swab_commit = master
-
-PACKAGES += swarm
-pkg_swarm_name = swarm
-pkg_swarm_description = Fast and simple acceptor pool for Erlang
-pkg_swarm_homepage = https://github.com/jeremey/swarm
-pkg_swarm_fetch = git
-pkg_swarm_repo = https://github.com/jeremey/swarm
-pkg_swarm_commit = master
-
-PACKAGES += switchboard
-pkg_switchboard_name = switchboard
-pkg_switchboard_description = A framework for processing email using worker plugins.
-pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
-pkg_switchboard_fetch = git
-pkg_switchboard_repo = https://github.com/thusfresh/switchboard
-pkg_switchboard_commit = master
-
-PACKAGES += syn
-pkg_syn_name = syn
-pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
-pkg_syn_homepage = https://github.com/ostinelli/syn
-pkg_syn_fetch = git
-pkg_syn_repo = https://github.com/ostinelli/syn
-pkg_syn_commit = master
-
-PACKAGES += sync
-pkg_sync_name = sync
-pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
-pkg_sync_homepage = https://github.com/rustyio/sync
-pkg_sync_fetch = git
-pkg_sync_repo = https://github.com/rustyio/sync
-pkg_sync_commit = master
-
-PACKAGES += syntaxerl
-pkg_syntaxerl_name = syntaxerl
-pkg_syntaxerl_description = Syntax checker for Erlang
-pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_fetch = git
-pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
-pkg_syntaxerl_commit = master
-
-PACKAGES += syslog
-pkg_syslog_name = syslog
-pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
-pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_fetch = git
-pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
-pkg_syslog_commit = master
-
-PACKAGES += taskforce
-pkg_taskforce_name = taskforce
-pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
-pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
-pkg_taskforce_fetch = git
-pkg_taskforce_repo = https://github.com/g-andrade/taskforce
-pkg_taskforce_commit = master
-
-PACKAGES += tddreloader
-pkg_tddreloader_name = tddreloader
-pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
-pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
-pkg_tddreloader_fetch = git
-pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
-pkg_tddreloader_commit = master
-
-PACKAGES += tempo
-pkg_tempo_name = tempo
-pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
-pkg_tempo_homepage = https://github.com/selectel/tempo
-pkg_tempo_fetch = git
-pkg_tempo_repo = https://github.com/selectel/tempo
-pkg_tempo_commit = master
-
-PACKAGES += ticktick
-pkg_ticktick_name = ticktick
-pkg_ticktick_description = Ticktick is an ID generator for message services.
-pkg_ticktick_homepage = https://github.com/ericliang/ticktick
-pkg_ticktick_fetch = git
-pkg_ticktick_repo = https://github.com/ericliang/ticktick
-pkg_ticktick_commit = master
-
-PACKAGES += tinymq
-pkg_tinymq_name = tinymq
-pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
-pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_fetch = git
-pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
-pkg_tinymq_commit = master
-
-PACKAGES += tinymt
-pkg_tinymt_name = tinymt
-pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
-pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_fetch = git
-pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
-pkg_tinymt_commit = master
-
-PACKAGES += tirerl
-pkg_tirerl_name = tirerl
-pkg_tirerl_description = Erlang interface to Elastic Search
-pkg_tirerl_homepage = https://github.com/inaka/tirerl
-pkg_tirerl_fetch = git
-pkg_tirerl_repo = https://github.com/inaka/tirerl
-pkg_tirerl_commit = master
-
-PACKAGES += toml
-pkg_toml_name = toml
-pkg_toml_description = TOML (0.4.0) config parser
-pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
-pkg_toml_fetch = git
-pkg_toml_repo = https://github.com/dozzie/toml
-pkg_toml_commit = v0.2.0
-
-PACKAGES += traffic_tools
-pkg_traffic_tools_name = traffic_tools
-pkg_traffic_tools_description = Simple traffic limiting library
-pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
-pkg_traffic_tools_fetch = git
-pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
-pkg_traffic_tools_commit = master
-
-PACKAGES += trails
-pkg_trails_name = trails
-pkg_trails_description = A couple of improvements over Cowboy Routes
-pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
-pkg_trails_fetch = git
-pkg_trails_repo = https://github.com/inaka/cowboy-trails
-pkg_trails_commit = master
-
-PACKAGES += trane
-pkg_trane_name = trane
-pkg_trane_description = SAX-style parser for broken HTML, in Erlang
-pkg_trane_homepage = https://github.com/massemanet/trane
-pkg_trane_fetch = git
-pkg_trane_repo = https://github.com/massemanet/trane
-pkg_trane_commit = master
-
-PACKAGES += transit
-pkg_transit_name = transit
-pkg_transit_description = transit format for erlang
-pkg_transit_homepage = https://github.com/isaiah/transit-erlang
-pkg_transit_fetch = git
-pkg_transit_repo = https://github.com/isaiah/transit-erlang
-pkg_transit_commit = master
-
-PACKAGES += trie
-pkg_trie_name = trie
-pkg_trie_description = Erlang Trie Implementation
-pkg_trie_homepage = https://github.com/okeuday/trie
-pkg_trie_fetch = git
-pkg_trie_repo = https://github.com/okeuday/trie
-pkg_trie_commit = master
-
-PACKAGES += triq
-pkg_triq_name = triq
-pkg_triq_description = Trifork QuickCheck
-pkg_triq_homepage = https://triq.gitlab.io
-pkg_triq_fetch = git
-pkg_triq_repo = https://gitlab.com/triq/triq.git
-pkg_triq_commit = master
-
-PACKAGES += tunctl
-pkg_tunctl_name = tunctl
-pkg_tunctl_description = Erlang TUN/TAP interface
-pkg_tunctl_homepage = https://github.com/msantos/tunctl
-pkg_tunctl_fetch = git
-pkg_tunctl_repo = https://github.com/msantos/tunctl
-pkg_tunctl_commit = master
-
-PACKAGES += twerl
-pkg_twerl_name = twerl
-pkg_twerl_description = Erlang client for the Twitter Streaming API
-pkg_twerl_homepage = https://github.com/lucaspiller/twerl
-pkg_twerl_fetch = git
-pkg_twerl_repo = https://github.com/lucaspiller/twerl
-pkg_twerl_commit = oauth
-
-PACKAGES += twitter_erlang
-pkg_twitter_erlang_name = twitter_erlang
-pkg_twitter_erlang_description = An Erlang twitter client
-pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_fetch = git
-pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
-pkg_twitter_erlang_commit = master
-
-PACKAGES += ucol_nif
-pkg_ucol_nif_name = ucol_nif
-pkg_ucol_nif_description = ICU based collation Erlang module
-pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_fetch = git
-pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
-pkg_ucol_nif_commit = master
-
-PACKAGES += unicorn
-pkg_unicorn_name = unicorn
-pkg_unicorn_description = Generic configuration server
-pkg_unicorn_homepage = https://github.com/shizzard/unicorn
-pkg_unicorn_fetch = git
-pkg_unicorn_repo = https://github.com/shizzard/unicorn
-pkg_unicorn_commit = master
-
-PACKAGES += unsplit
-pkg_unsplit_name = unsplit
-pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
-pkg_unsplit_homepage = https://github.com/uwiger/unsplit
-pkg_unsplit_fetch = git
-pkg_unsplit_repo = https://github.com/uwiger/unsplit
-pkg_unsplit_commit = master
-
-PACKAGES += uuid
-pkg_uuid_name = uuid
-pkg_uuid_description = Erlang UUID Implementation
-pkg_uuid_homepage = https://github.com/okeuday/uuid
-pkg_uuid_fetch = git
-pkg_uuid_repo = https://github.com/okeuday/uuid
-pkg_uuid_commit = master
-
-PACKAGES += ux
-pkg_ux_name = ux
-pkg_ux_description = Unicode eXtension for Erlang (Strings, Collation)
-pkg_ux_homepage = https://github.com/erlang-unicode/ux
-pkg_ux_fetch = git
-pkg_ux_repo = https://github.com/erlang-unicode/ux
-pkg_ux_commit = master
-
-PACKAGES += vert
-pkg_vert_name = vert
-pkg_vert_description = Erlang binding to libvirt virtualization API
-pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
-pkg_vert_fetch = git
-pkg_vert_repo = https://github.com/msantos/erlang-libvirt
-pkg_vert_commit = master
-
-PACKAGES += verx
-pkg_verx_name = verx
-pkg_verx_description = Erlang implementation of the libvirtd remote protocol
-pkg_verx_homepage = https://github.com/msantos/verx
-pkg_verx_fetch = git
-pkg_verx_repo = https://github.com/msantos/verx
-pkg_verx_commit = master
-
-PACKAGES += vmq_acl
-pkg_vmq_acl_name = vmq_acl
-pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_acl_homepage = https://verne.mq/
-pkg_vmq_acl_fetch = git
-pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
-pkg_vmq_acl_commit = master
-
-PACKAGES += vmq_bridge
-pkg_vmq_bridge_name = vmq_bridge
-pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_bridge_homepage = https://verne.mq/
-pkg_vmq_bridge_fetch = git
-pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
-pkg_vmq_bridge_commit = master
-
-PACKAGES += vmq_graphite
-pkg_vmq_graphite_name = vmq_graphite
-pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_graphite_homepage = https://verne.mq/
-pkg_vmq_graphite_fetch = git
-pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
-pkg_vmq_graphite_commit = master
-
-PACKAGES += vmq_passwd
-pkg_vmq_passwd_name = vmq_passwd
-pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_passwd_homepage = https://verne.mq/
-pkg_vmq_passwd_fetch = git
-pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
-pkg_vmq_passwd_commit = master
-
-PACKAGES += vmq_server
-pkg_vmq_server_name = vmq_server
-pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_server_homepage = https://verne.mq/
-pkg_vmq_server_fetch = git
-pkg_vmq_server_repo = https://github.com/erlio/vmq_server
-pkg_vmq_server_commit = master
-
-PACKAGES += vmq_snmp
-pkg_vmq_snmp_name = vmq_snmp
-pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_snmp_homepage = https://verne.mq/
-pkg_vmq_snmp_fetch = git
-pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
-pkg_vmq_snmp_commit = master
-
-PACKAGES += vmq_systree
-pkg_vmq_systree_name = vmq_systree
-pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
-pkg_vmq_systree_homepage = https://verne.mq/
-pkg_vmq_systree_fetch = git
-pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
-pkg_vmq_systree_commit = master
-
-PACKAGES += vmstats
-pkg_vmstats_name = vmstats
-pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
-pkg_vmstats_homepage = https://github.com/ferd/vmstats
-pkg_vmstats_fetch = git
-pkg_vmstats_repo = https://github.com/ferd/vmstats
-pkg_vmstats_commit = master
-
-PACKAGES += walrus
-pkg_walrus_name = walrus
-pkg_walrus_description = Walrus - Mustache-like Templating
-pkg_walrus_homepage = https://github.com/devinus/walrus
-pkg_walrus_fetch = git
-pkg_walrus_repo = https://github.com/devinus/walrus
-pkg_walrus_commit = master
-
-PACKAGES += webmachine
-pkg_webmachine_name = webmachine
-pkg_webmachine_description = A REST-based system for building web applications.
-pkg_webmachine_homepage = https://github.com/basho/webmachine
-pkg_webmachine_fetch = git
-pkg_webmachine_repo = https://github.com/basho/webmachine
-pkg_webmachine_commit = master
-
-PACKAGES += websocket_client
-pkg_websocket_client_name = websocket_client
-pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
-pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_fetch = git
-pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
-pkg_websocket_client_commit = master
-
-PACKAGES += worker_pool
-pkg_worker_pool_name = worker_pool
-pkg_worker_pool_description = a simple erlang worker pool
-pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
-pkg_worker_pool_fetch = git
-pkg_worker_pool_repo = https://github.com/inaka/worker_pool
-pkg_worker_pool_commit = master
-
-PACKAGES += wrangler
-pkg_wrangler_name = wrangler
-pkg_wrangler_description = Import of the Wrangler svn repository.
-pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
-pkg_wrangler_fetch = git
-pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
-pkg_wrangler_commit = master
-
-PACKAGES += wsock
-pkg_wsock_name = wsock
-pkg_wsock_description = Erlang library to build WebSocket clients and servers
-pkg_wsock_homepage = https://github.com/madtrick/wsock
-pkg_wsock_fetch = git
-pkg_wsock_repo = https://github.com/madtrick/wsock
-pkg_wsock_commit = master
-
-PACKAGES += xhttpc
-pkg_xhttpc_name = xhttpc
-pkg_xhttpc_description = Extensible HTTP Client for Erlang
-pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
-pkg_xhttpc_fetch = git
-pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
-pkg_xhttpc_commit = master
-
-PACKAGES += xref_runner
-pkg_xref_runner_name = xref_runner
-pkg_xref_runner_description = Erlang Xref Runner (inspired by rebar xref)
-pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
-pkg_xref_runner_fetch = git
-pkg_xref_runner_repo = https://github.com/inaka/xref_runner
-pkg_xref_runner_commit = master
-
-PACKAGES += yamerl
-pkg_yamerl_name = yamerl
-pkg_yamerl_description = YAML 1.2 parser in pure Erlang
-pkg_yamerl_homepage = https://github.com/yakaz/yamerl
-pkg_yamerl_fetch = git
-pkg_yamerl_repo = https://github.com/yakaz/yamerl
-pkg_yamerl_commit = master
-
-PACKAGES += yamler
-pkg_yamler_name = yamler
-pkg_yamler_description = libyaml-based yaml loader for Erlang
-pkg_yamler_homepage = https://github.com/goertzenator/yamler
-pkg_yamler_fetch = git
-pkg_yamler_repo = https://github.com/goertzenator/yamler
-pkg_yamler_commit = master
-
-PACKAGES += yaws
-pkg_yaws_name = yaws
-pkg_yaws_description = Yaws webserver
-pkg_yaws_homepage = http://yaws.hyber.org
-pkg_yaws_fetch = git
-pkg_yaws_repo = https://github.com/klacke/yaws
-pkg_yaws_commit = master
-
-PACKAGES += zab_engine
-pkg_zab_engine_name = zab_engine
-pkg_zab_engine_description = ZAB protocol implementation in Erlang
-pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_fetch = git
-pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
-pkg_zab_engine_commit = master
-
-PACKAGES += zabbix_sender
-pkg_zabbix_sender_name = zabbix_sender
-pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
-pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
-pkg_zabbix_sender_fetch = git
-pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
-pkg_zabbix_sender_commit = master
-
-PACKAGES += zeta
-pkg_zeta_name = zeta
-pkg_zeta_description = HTTP access log parser in Erlang
-pkg_zeta_homepage = https://github.com/s1n4/zeta
-pkg_zeta_fetch = git
-pkg_zeta_repo = https://github.com/s1n4/zeta
-pkg_zeta_commit = master
-
-PACKAGES += zippers
-pkg_zippers_name = zippers
-pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers
-pkg_zippers_homepage = https://github.com/ferd/zippers
-pkg_zippers_fetch = git
-pkg_zippers_repo = https://github.com/ferd/zippers
-pkg_zippers_commit = master
-
-PACKAGES += zlists
-pkg_zlists_name = zlists
-pkg_zlists_description = Erlang lazy lists library.
-pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
-pkg_zlists_fetch = git
-pkg_zlists_repo = https://github.com/vjache/erlang-zlists
-pkg_zlists_commit = master
-
-PACKAGES += zraft_lib
-pkg_zraft_lib_name = zraft_lib
-pkg_zraft_lib_description = Erlang raft consensus protocol implementation
-pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_fetch = git
-pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
-pkg_zraft_lib_commit = master
-
-PACKAGES += zucchini
-pkg_zucchini_name = zucchini
-pkg_zucchini_description = An Erlang INI parser
-pkg_zucchini_homepage = https://github.com/devinus/zucchini
-pkg_zucchini_fetch = git
-pkg_zucchini_repo = https://github.com/devinus/zucchini
-pkg_zucchini_commit = master
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: search
-
-define pkg_print
- $(verbose) printf "%s\n" \
- $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
- "App name: $(pkg_$(1)_name)" \
- "Description: $(pkg_$(1)_description)" \
- "Home page: $(pkg_$(1)_homepage)" \
- "Fetch with: $(pkg_$(1)_fetch)" \
- "Repository: $(pkg_$(1)_repo)" \
- "Commit: $(pkg_$(1)_commit)" \
- ""
-
-endef
-
-search:
-ifdef q
- $(foreach p,$(PACKAGES), \
- $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
- $(call pkg_print,$(p))))
-else
- $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
-endif
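-
-# Example usage from a project that includes erlang.mk (the keyword below is
-# purely illustrative):
-#
-#   $ make search q=websocket
-#
-# This prints the name, description, home page, repository and commit of every
-# entry in PACKAGES whose name or description matches the keyword.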
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-deps clean-tmp-deps.log
-
-# Configuration.
-
-ifdef OTP_DEPS
-$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
-endif
-
-IGNORE_DEPS ?=
-export IGNORE_DEPS
-
-APPS_DIR ?= $(CURDIR)/apps
-export APPS_DIR
-
-DEPS_DIR ?= $(CURDIR)/deps
-export DEPS_DIR
-
-REBAR_DEPS_DIR = $(DEPS_DIR)
-export REBAR_DEPS_DIR
-
-REBAR_GIT ?= https://github.com/rebar/rebar
-REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
-
-# External "early" plugins (see core/plugins.mk for regular plugins).
-# Both kinds of plugins use the core_dep_plugin macro.
-
-define core_dep_plugin
-ifeq ($(2),$(PROJECT))
--include $$(patsubst $(PROJECT)/%,%,$(1))
-else
--include $(DEPS_DIR)/$(1)
-
-$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
-endif
-endef
-
-DEP_EARLY_PLUGINS ?=
-
-$(foreach p,$(DEP_EARLY_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/early-plugins.mk,$p))))
-
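-# As an illustration, a project Makefile could load early plugins from a
-# dependency (the dependency name "ci_tools" is hypothetical):
-#
-#   DEP_EARLY_PLUGINS = ci_tools              # includes $(DEPS_DIR)/ci_tools/early-plugins.mk
-#   DEP_EARLY_PLUGINS = ci_tools/mk/early.mk  # includes that exact file instead
-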
-# Query functions.
-
-query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
-_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
-_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
-
-query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
-
-query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
-_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
-
-query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
-query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
-query_repo_git-subfolder = $(call query_repo_git,$(1))
-query_repo_git-submodule = -
-query_repo_hg = $(call query_repo_default,$(1))
-query_repo_svn = $(call query_repo_default,$(1))
-query_repo_cp = $(call query_repo_default,$(1))
-query_repo_ln = $(call query_repo_default,$(1))
-query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
-query_repo_fail = -
-query_repo_legacy = -
-
-query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
-_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
-
-query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_git = $(call query_version_default,$(1))
-query_version_git-subfolder = $(call query_version_git,$(1))
-query_version_git-submodule = -
-query_version_hg = $(call query_version_default,$(1))
-query_version_svn = -
-query_version_cp = -
-query_version_ln = -
-query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
-query_version_fail = -
-query_version_legacy = -
-
-query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
-_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
-
-query_extra_git = -
-query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
-query_extra_git-submodule = -
-query_extra_hg = -
-query_extra_svn = -
-query_extra_cp = -
-query_extra_ln = -
-query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
-query_extra_fail = -
-query_extra_legacy = -
-
-query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
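-
-# As an illustration (hypothetical dependency and version), declaring
-#
-#   DEPS = cowlib
-#   dep_cowlib = git https://github.com/ninenines/cowlib 2.12.1
-#
-# makes query_fetch_method return "git", query_repo the URL above and
-# query_version "2.12.1" for that dependency.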
-
-# Deprecated legacy query functions.
-dep_fetch = $(call query_fetch_method,$(1))
-dep_name = $(call query_name,$(1))
-dep_repo = $(call query_repo_git,$(1))
-dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
-
-LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
-ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
-
-# When we are calling an app directly we don't want to include it here,
-# otherwise it'll be treated both as an app and as a top-level project.
-ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
-ifdef ROOT_DIR
-ifndef IS_APP
-ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
-endif
-endif
-
-ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
-ifeq ($(ERL_LIBS),)
- ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
-else
- ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
-endif
-endif
-export ERL_LIBS
-
-export NO_AUTOPATCH
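-
-# A project can opt specific dependencies out of autopatching, for example
-# (the dependency name is illustrative):
-#
-#   NO_AUTOPATCH = my_nif_dep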
-
-# Verbosity.
-
-dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
-dep_verbose_2 = set -x;
-dep_verbose = $(dep_verbose_$(V))
-
-# Optimization: don't recompile deps unless truly necessary.
-
-ifndef IS_DEP
-ifneq ($(MAKELEVEL),0)
-$(shell rm -f ebin/dep_built)
-endif
-endif
-
-# Core targets.
-
-ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
-
-apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
-# Create the ebin directory for all apps to make sure Erlang recognizes them
-# as proper OTP applications when using -include_lib. This is a temporary
-# fix; a proper fix would be to compile apps/* in the right order.
-ifndef IS_APP
-ifneq ($(ALL_APPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- mkdir -p $$dep/ebin; \
- done
-endif
-endif
-# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only
-# compile that list of apps. Otherwise, compile everything.
-# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
-ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
- $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
- fi \
- done
-endif
-
-clean-tmp-deps.log:
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
-endif
-
-# Erlang.mk does not rebuild dependencies after they have been compiled
-# once. A developer working on the top-level project and on some of its
-# dependencies at the same time may want to change this behavior.
-# There are two solutions:
-# 1. Set `FULL=1` so that all dependencies are visited and
-# recursively recompiled if necessary.
-# 2. Set `FORCE_REBUILD=` to the specific list of dependencies that
-# should be recompiled (instead of the whole set).
-
-FORCE_REBUILD ?=
-
-ifeq ($(origin FULL),undefined)
-ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
-define force_rebuild_dep
-echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
-endef
-endif
-endif
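-
-# For example, from the command line (dependency names are illustrative):
-#
-#   $ make FULL=1                        # revisit all deps and rebuild as needed
-#   $ make FORCE_REBUILD="cowlib ranch"  # force only these deps to be rebuilt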
-
-ifneq ($(SKIP_DEPS),)
-deps::
-else
-deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
-ifneq ($(ALL_DEPS_DIRS),)
- $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
- if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
- :; \
- else \
- echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
- if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- else \
- echo "Error: No Makefile to build dependency $$dep." >&2; \
- exit 2; \
- fi \
- fi \
- done
-endif
-endif
-
-# Deps related targets.
-
-# @todo rename GNUmakefile and makefile to Makefile first, if they exist
-# While the Makefile could also be named GNUmakefile or makefile,
-# in practice only Makefile has been needed so far.
-define dep_autopatch
- if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
- rm -rf $(DEPS_DIR)/$1/ebin/; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- $(call dep_autopatch_erlang_mk,$(1)); \
- elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch2,$1); \
- elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
- $(call dep_autopatch2,$(1)); \
- elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
- $(call dep_autopatch2,$(1)); \
- fi \
- else \
- if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
- $(call dep_autopatch_noop,$(1)); \
- else \
- $(call dep_autopatch2,$(1)); \
- fi \
- fi
-endef
-
-define dep_autopatch2
- ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
- mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
- rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
- if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
- $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
- fi; \
- $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
- if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
- $(call dep_autopatch_fetch_rebar); \
- $(call dep_autopatch_rebar,$(1)); \
- else \
- $(call dep_autopatch_gen,$(1)); \
- fi
-endef
-
-define dep_autopatch_noop
- printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
-# if given. Do it for all 3 possible Makefile file names.
-ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
-define dep_autopatch_erlang_mk
- for f in Makefile makefile GNUmakefile; do \
- if [ -f $(DEPS_DIR)/$1/$$f ]; then \
- sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
- fi \
- done
-endef
-else
-define dep_autopatch_erlang_mk
- :
-endef
-endif
-
-define dep_autopatch_gen
- printf "%s\n" \
- "ERLC_OPTS = +debug_info" \
- "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
-endef
-
-# We use flock/lockf when available to avoid concurrency issues.
-define dep_autopatch_fetch_rebar
- if command -v flock >/dev/null; then \
- flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- elif command -v lockf >/dev/null; then \
- lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
- else \
- $(call dep_autopatch_fetch_rebar2); \
- fi
-endef
-
-define dep_autopatch_fetch_rebar2
- if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
- git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
- cd $(ERLANG_MK_TMP)/rebar; \
- git checkout -q $(REBAR_COMMIT); \
- ./bootstrap; \
- cd -; \
- fi
-endef
-
-define dep_autopatch_rebar
- if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
- mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
- fi; \
- $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
- rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
-endef
-
-define dep_autopatch_rebar.erl
- application:load(rebar),
- application:set_env(rebar, log_level, debug),
- rmemo:start(),
- Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
- {ok, Conf0} -> Conf0;
- _ -> []
- end,
- {Conf, OsEnv} = fun() ->
- case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
- false -> {Conf1, []};
- true ->
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
- Before = os:getenv(),
- {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
- {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
- end
- end(),
- Write = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
- end,
- Escape = fun (Text) ->
- re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
- end,
- Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
- "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
- Write("C_SRC_DIR = /path/do/not/exist\n"),
- Write("C_SRC_TYPE = rebar\n"),
- Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
- Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
- ToList = fun
- (V) when is_atom(V) -> atom_to_list(V);
- (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
- end,
- fun() ->
- Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
- case lists:keyfind(erl_opts, 1, Conf) of
- false -> ok;
- {_, ErlOpts} ->
- lists:foreach(fun
- ({d, D}) ->
- Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- ({d, DKey, DVal}) ->
- Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
- ({i, I}) ->
- Write(["ERLC_OPTS += -I ", I, "\n"]);
- ({platform_define, Regex, D}) ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
- false -> ok
- end;
- ({parse_transform, PT}) ->
- Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
- (_) -> ok
- end, ErlOpts)
- end,
- Write("\n")
- end(),
- GetHexVsn = fun(N, NP) ->
- case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
- {ok, Lock} ->
- io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
- io:format("~p~n", [LockPkgs]),
- case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
- {_, {pkg, _, Vsn}, _} ->
- io:format("~p~n", [Vsn]),
- {N, {hex, NP, binary_to_list(Vsn)}};
- _ ->
- false
- end;
- _ ->
- false
- end;
- _ ->
- false
- end
- end,
- SemVsn = fun
- ("~>" ++ S0) ->
- S = case S0 of
- " " ++ S1 -> S1;
- _ -> S0
- end,
- case length([ok || $$. <- S]) of
- 0 -> S ++ ".0.0";
- 1 -> S ++ ".0";
- _ -> S
- end;
- (S) -> S
- end,
- fun() ->
- File = case lists:keyfind(deps, 1, Conf) of
- false -> [];
- {_, Deps} ->
- [begin case case Dep of
- N when is_atom(N) -> GetHexVsn(N, N);
- {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
- {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
- {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
- {N, S} when is_tuple(S) -> {N, S};
- {N, _, S} -> {N, S};
- {N, _, S, _} -> {N, S};
- _ -> false
- end of
- false -> ok;
- {Name, Source} ->
- {Method, Repo, Commit} = case Source of
- {hex, NPV, V} -> {hex, V, NPV};
- {git, R} -> {git, R, master};
- {M, R, {branch, C}} -> {M, R, C};
- {M, R, {ref, C}} -> {M, R, C};
- {M, R, {tag, C}} -> {M, R, C};
- {M, R, C} -> {M, R, C}
- end,
- Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
- end end || Dep <- Deps]
- end
- end(),
- fun() ->
- case lists:keyfind(erl_first_files, 1, Conf) of
- false -> ok;
- {_, Files} ->
- Names = [[" ", case lists:reverse(F) of
- "lre." ++ Elif -> lists:reverse(Elif);
- "lrx." ++ Elif -> lists:reverse(Elif);
- "lry." ++ Elif -> lists:reverse(Elif);
- Elif -> lists:reverse(Elif)
- end] || "src/" ++ F <- Files],
- Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
- end
- end(),
- Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
- Write("\npreprocess::\n"),
- Write("\npre-deps::\n"),
- Write("\npre-app::\n"),
- PatchHook = fun(Cmd) ->
- Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
- case Cmd2 of
- "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
- "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
- _ -> Escape(Cmd)
- end
- end,
- fun() ->
- case lists:keyfind(pre_hooks, 1, Conf) of
- false -> ok;
- {_, Hooks} ->
- [case H of
- {'get-deps', Cmd} ->
- Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
- {compile, Cmd} ->
- Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- {Regex, compile, Cmd} ->
- case rebar_utils:is_arch(Regex) of
- true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
- false -> ok
- end;
- _ -> ok
- end || H <- Hooks]
- end
- end(),
- ShellToMk = fun(V0) ->
- V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
- V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
- re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
- end,
- PortSpecs = fun() ->
- case lists:keyfind(port_specs, 1, Conf) of
- false ->
- case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
- false -> [];
- true ->
- [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
- proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
- end;
- {_, Specs} ->
- lists:flatten([case S of
- {Output, Input} -> {ShellToMk(Output), Input, []};
- {Regex, Output, Input} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, []};
- false -> []
- end;
- {Regex, Output, Input, [{env, Env}]} ->
- case rebar_utils:is_arch(Regex) of
- true -> {ShellToMk(Output), Input, Env};
- false -> []
- end
- end || S <- Specs])
- end
- end(),
- PortSpecWrite = fun (Text) ->
- file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
- end,
- case PortSpecs of
- [] -> ok;
- _ ->
- Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
- PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
- [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
- PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
- [code:lib_dir(erl_interface, lib)])),
- [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
- FilterEnv = fun(Env) ->
- lists:flatten([case E of
- {_, _} -> E;
- {Regex, K, V} ->
- case rebar_utils:is_arch(Regex) of
- true -> {K, V};
- false -> []
- end
- end || E <- Env])
- end,
- MergeEnv = fun(Env) ->
- lists:foldl(fun ({K, V}, Acc) ->
- case lists:keyfind(K, 1, Acc) of
- false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
- {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
- end
- end, [], Env)
- end,
- PortEnv = case lists:keyfind(port_env, 1, Conf) of
- false -> [];
- {_, PortEnv0} -> FilterEnv(PortEnv0)
- end,
- PortSpec = fun ({Output, Input0, Env}) ->
- filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
- Input = [[" ", I] || I <- Input0],
- PortSpecWrite([
- [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
- case $(PLATFORM) of
- darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
- _ -> ""
- end,
- "\n\nall:: ", Output, "\n\t@:\n\n",
- "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
- [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
- Output, ": $$\(foreach ext,.c .C .cc .cpp,",
- "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
- "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
- case {filename:extension(Output), $(PLATFORM)} of
- {[], _} -> "\n";
- {_, darwin} -> "\n";
- _ -> " -shared\n"
- end])
- end,
- [PortSpec(S) || S <- PortSpecs]
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_tuple(P)],
- case lists:keyfind('lfe-compile', 1, Plugins) of
- false -> ok;
- _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
- end
- end
- end(),
- Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
- RunPlugin = fun(Plugin, Step) ->
- case erlang:function_exported(Plugin, Step, 2) of
- false -> ok;
- true ->
- c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
- Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
- dict:store(base_dir, "", dict:new())}, undefined),
- io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
- end
- end,
- fun() ->
- case lists:keyfind(plugins, 1, Conf) of
- false -> ok;
- {_, Plugins0} ->
- Plugins = [P || P <- Plugins0, is_atom(P)],
- [begin
- case lists:keyfind(deps, 1, Conf) of
- false -> ok;
- {_, Deps} ->
- case lists:keyfind(P, 1, Deps) of
- false -> ok;
- _ ->
- Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
- io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
- io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
- code:add_patha(Path ++ "/ebin")
- end
- end
- end || P <- Plugins],
- [case code:load_file(P) of
- {module, P} -> ok;
- _ ->
- case lists:keyfind(plugin_dir, 1, Conf) of
- false -> ok;
- {_, PluginsDir} ->
- ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
- {ok, P, Bin} = compile:file(ErlFile, [binary]),
- {module, P} = code:load_binary(P, ErlFile, Bin)
- end
- end || P <- Plugins],
- [RunPlugin(P, preprocess) || P <- Plugins],
- [RunPlugin(P, pre_compile) || P <- Plugins],
- [RunPlugin(P, compile) || P <- Plugins]
- end
- end(),
- halt()
-endef
-
-define dep_autopatch_appsrc_script.erl
- AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcScript = AppSrc ++ ".script",
- {ok, Conf0} = file:consult(AppSrc),
- Bindings0 = erl_eval:new_bindings(),
- Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
- Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
- Conf = case file:script(AppSrcScript, Bindings) of
- {ok, [C]} -> C;
- {ok, C} -> C
- end,
- ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
- halt()
-endef
-
-define dep_autopatch_appsrc.erl
- AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
- AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
- case filelib:is_regular(AppSrcIn) of
- false -> ok;
- true ->
- {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
- L1 = lists:keystore(modules, 1, L0, {modules, []}),
- L2 = case lists:keyfind(vsn, 1, L1) of
- {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
- {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
- _ -> L1
- end,
- L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
- ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
- case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
- end,
- halt()
-endef
-
-define dep_fetch_git
- git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_git-subfolder
- mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
- git clone -q -n -- $(call dep_repo,$1) \
- $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
- cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
- && git checkout -q $(call dep_commit,$1); \
- ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
- $(DEPS_DIR)/$(call dep_name,$1);
-endef
-
-define dep_fetch_git-submodule
- git submodule update --init -- $(DEPS_DIR)/$1;
-endef
-
-define dep_fetch_hg
- hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
-endef
-
-define dep_fetch_svn
- svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_cp
- cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-define dep_fetch_ln
- ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
-endef
-
-# Hex only has a package version. No need to look in the Erlang.mk packages.
-define dep_fetch_hex
- mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
- $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
- https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
- tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
-endef
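-
-# A Hex dependency is typically declared in the project Makefile as, e.g.
-# (the version shown is illustrative):
-#
-#   DEPS = jsx
-#   dep_jsx = hex 3.1.0
-#
-# An optional third word sets the Hex package name when it differs from the
-# application name.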
-
-define dep_fetch_fail
- echo "Error: Unknown or invalid dependency: $(1)." >&2; \
- exit 78;
-endef
-
-# Kept for compatibility purposes with older Erlang.mk configuration.
-define dep_fetch_legacy
- $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
- git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
- cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
-endef
-
-define dep_target
-$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
- $(eval DEP_NAME := $(call dep_name,$1))
- $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
- $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
- echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
- exit 17; \
- fi
- $(verbose) mkdir -p $(DEPS_DIR)
- $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
- $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
- && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
- echo " AUTO " $(DEP_STR); \
- cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
- fi
- - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
- echo " CONF " $(DEP_STR); \
- cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
- fi
-ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
- $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
-endif
-
-.PHONY: autopatch-$(call dep_name,$1)
-
-autopatch-$(call dep_name,$1)::
- $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi; \
- if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
- echo " PATCH Downloading rabbitmq-server"; \
- git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
- fi; \
- ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
- elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
- if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
- echo " PATCH Downloading rabbitmq-codegen"; \
- git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
- fi \
- elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
- ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
- else \
- $$(call dep_autopatch,$(call dep_name,$1)) \
- fi
-endef
-
-$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
-
-ifndef IS_APP
-clean:: clean-apps
-
-clean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep clean IS_APP=1; \
- done
-
-distclean:: distclean-apps
-
-distclean-apps:
- $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
- $(MAKE) -C $$dep distclean IS_APP=1; \
- done
-endif
-
-ifndef SKIP_DEPS
-distclean:: distclean-deps
-
-distclean-deps:
- $(gen_verbose) rm -rf $(DEPS_DIR)
-endif
-
-# Forward-declare variables used in core/deps-tools.mk. This is required
-# in case plugins use them.
-
-ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
-ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
-ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
-ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
-ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
-
-ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
-ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
-ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
-ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
-ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-app
-
-# Configuration.
-
-ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
- +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
-COMPILE_FIRST ?=
-COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
-ERLC_EXCLUDE ?=
-ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
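-
-# Projects typically override these variables in their own Makefile, e.g.
-# (module names are illustrative):
-#
-#   ERLC_OPTS += +warn_missing_spec
-#   COMPILE_FIRST = my_behaviour
-#   ERLC_EXCLUDE = my_generated_module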
-
-ERLC_ASN1_OPTS ?=
-
-ERLC_MIB_OPTS ?=
-COMPILE_MIB_FIRST ?=
-COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
-
-# Verbosity.
-
-app_verbose_0 = @echo " APP " $(PROJECT);
-app_verbose_2 = set -x;
-app_verbose = $(app_verbose_$(V))
-
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
-appsrc_verbose_2 = set -x;
-appsrc_verbose = $(appsrc_verbose_$(V))
-
-makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
-makedep_verbose_2 = set -x;
-makedep_verbose = $(makedep_verbose_$(V))
-
-erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(?F)));
-erlc_verbose_2 = set -x;
-erlc_verbose = $(erlc_verbose_$(V))
-
-xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
-xyrl_verbose_2 = set -x;
-xyrl_verbose = $(xyrl_verbose_$(V))
-
-asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
-asn1_verbose_2 = set -x;
-asn1_verbose = $(asn1_verbose_$(V))
-
-mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
-mib_verbose_2 = set -x;
-mib_verbose = $(mib_verbose_$(V))
-
-ifneq ($(wildcard src/),)
-
-# Targets.
-
-app:: $(if $(wildcard ebin/test),clean) deps
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
- $(verbose) $(MAKE) --no-print-directory app-build
-
-ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, []},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-else
-define app_file
-{application, '$(PROJECT)', [
- {description, "$(PROJECT_DESCRIPTION)"},
- {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
- {id$(comma)$(space)"$(1)"}$(comma))
- {modules, [$(call comma_list,$(2))]},
- {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
- {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
- {mod, {$(PROJECT_MOD), []}},
- {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
-]}.
-endef
-endif
-
-app-build: ebin/$(PROJECT).app
- $(verbose) :
-
-# Source files.
-
-ALL_SRC_FILES := $(sort $(call core_find,src/,*))
-
-ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
-CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
-
-# ASN.1 files.
-
-ifneq ($(wildcard asn1/),)
-ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
-ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-define compile_asn1
- $(verbose) mkdir -p include/
- $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
- $(verbose) mv asn1/*.erl src/
- -$(verbose) mv asn1/*.hrl include/
- $(verbose) mv asn1/*.asn1db include/
-endef
-
-$(PROJECT).d:: $(ASN1_FILES)
- $(if $(strip $?),$(call compile_asn1,$?))
-endif
-
-# SNMP MIB files.
-
-ifneq ($(wildcard mibs/),)
-MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
-
-$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
- $(verbose) mkdir -p include/ priv/mibs/
- $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
- $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
-endif
-
-# Leex and Yecc files.
-
-XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
-XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
-ERL_FILES += $(XRL_ERL_FILES)
-
-YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
-YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
-ERL_FILES += $(YRL_ERL_FILES)
-
-$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
- $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
-
-# Erlang and Core Erlang files.
-
-define makedep.erl
- E = ets:new(makedep, [bag]),
- G = digraph:new([acyclic]),
- ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
- DepsDir = "$(call core_native_path,$(DEPS_DIR))",
- AppsDir = "$(call core_native_path,$(APPS_DIR))",
- DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
- DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
- AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
- AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
- DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
- AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
- Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
- Add = fun (Mod, Dep) ->
- case lists:keyfind(Dep, 1, Modules) of
- false -> ok;
- {_, DepFile} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- ets:insert(E, {ModFile, DepFile}),
- digraph:add_vertex(G, Mod),
- digraph:add_vertex(G, Dep),
- digraph:add_edge(G, Mod, Dep)
- end
- end,
- AddHd = fun (F, Mod, DepFile) ->
- case file:open(DepFile, [read]) of
- {error, enoent} ->
- ok;
- {ok, Fd} ->
- {_, ModFile} = lists:keyfind(Mod, 1, Modules),
- case ets:match(E, {ModFile, DepFile}) of
- [] ->
- ets:insert(E, {ModFile, DepFile}),
- F(F, Fd, Mod,0);
- _ -> ok
- end
- end
- end,
- SearchHrl = fun
- F(_Hrl, []) -> {error,enoent};
- F(Hrl, [Dir|Dirs]) ->
- HrlF = filename:join([Dir,Hrl]),
- case filelib:is_file(HrlF) of
- true ->
- {ok, HrlF};
- false -> F(Hrl,Dirs)
- end
- end,
- Attr = fun
- (_F, Mod, behavior, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, behaviour, Dep) ->
- Add(Mod, Dep);
- (_F, Mod, compile, {parse_transform, Dep}) ->
- Add(Mod, Dep);
- (_F, Mod, compile, Opts) when is_list(Opts) ->
- case proplists:get_value(parse_transform, Opts) of
- undefined -> ok;
- Dep -> Add(Mod, Dep)
- end;
- (F, Mod, include, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, include_lib, Hrl) ->
- case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
- {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
- {error, _} -> false
- end;
- (F, Mod, import, {Imp, _}) ->
- IsFile =
- case lists:keyfind(Imp, 1, Modules) of
- false -> false;
- {_, FilePath} -> filelib:is_file(FilePath)
- end,
- case IsFile of
- false -> ok;
- true -> Add(Mod, Imp)
- end;
- (_, _, _, _) -> ok
- end,
- MakeDepend = fun
- (F, Fd, Mod, StartLocation) ->
- {ok, Filename} = file:pid2name(Fd),
- case io:parse_erl_form(Fd, undefined, StartLocation) of
- {ok, AbsData, EndLocation} ->
- case AbsData of
- {attribute, _, Key, Value} ->
- Attr(F, Mod, Key, Value),
- F(F, Fd, Mod, EndLocation);
- _ -> F(F, Fd, Mod, EndLocation)
- end;
- {eof, _ } -> file:close(Fd);
- {error, ErrorDescription } ->
- file:close(Fd);
- {error, ErrorInfo, ErrorLocation} ->
- F(F, Fd, Mod, ErrorLocation)
- end,
- ok
- end,
- [begin
- Mod = list_to_atom(filename:basename(F, ".erl")),
- case file:open(F, [read]) of
- {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
- {error, enoent} -> ok
- end
- end || F <- ErlFiles],
- Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
- CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
- TargetPath = fun(Target) ->
- case lists:keyfind(Target, 1, Modules) of
- false -> "";
- {_, DepFile} ->
- DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
- string:join(DirSubname ++ [atom_to_list(Target)], "/")
- end
- end,
- Output0 = [
- "# Generated by Erlang.mk. Edit at your own risk!\n\n",
- [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
- "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
- ],
- Output = case "é" of
- [233] -> unicode:characters_to_binary(Output0);
- _ -> Output0
- end,
- ok = file:write_file("$(1)", Output),
- halt()
-endef
-
-ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
-$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
- $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
-endif
-
-ifeq ($(IS_APP)$(IS_DEP),)
-ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
-# Rebuild everything when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
- touch -c $(PROJECT).d; \
- fi
- $(verbose) touch $@
-
-$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
-endif
-endif
-
-$(PROJECT).d::
- $(verbose) :
-
-include $(wildcard $(PROJECT).d)
-
-ebin/$(PROJECT).app:: ebin/
-
-ebin/:
- $(verbose) mkdir -p ebin/
-
-define compile_erl
- $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
- -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
-endef
-
-define validate_app_file
- case file:consult("ebin/$(PROJECT).app") of
- {ok, _} -> halt();
- _ -> halt(1)
- end
-endef
-
-ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
- $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
-# Older git versions do not have the --first-parent flag. Do without in that case.
- $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
- || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
-ifeq ($(wildcard src/$(PROJECT).app.src),)
- $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
- > ebin/$(PROJECT).app
- $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
- echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
- exit 1; \
- fi
-else
- $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
- echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
- exit 1; \
- fi
- $(appsrc_verbose) cat src/$(PROJECT).app.src \
- | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
- | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
- > ebin/$(PROJECT).app
-endif
-ifneq ($(wildcard src/$(PROJECT).appup),)
- $(verbose) cp src/$(PROJECT).appup ebin/
-endif
-
-clean:: clean-app
-
-clean-app:
- $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
- $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
- $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
- $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: docs-deps
-
-# Configuration.
-
-ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
-
-# Targets.
-
-$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-doc-deps:
-else
-doc-deps: $(ALL_DOC_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rel-deps
-
-# Configuration.
-
-ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
-
-# Targets.
-
-$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-rel-deps:
-else
-rel-deps: $(ALL_REL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: test-deps test-dir test-build clean-test-dir
-
-# Configuration.
-
-TEST_DIR ?= $(CURDIR)/test
-
-ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
-
-TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
-TEST_ERLC_OPTS += -DTEST=1
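-
-# Because TEST is defined, test-only code in modules can be guarded with the
-# usual conditional (illustrative Erlang snippet):
-#
-#   -ifdef(TEST).
-#   -export([internal_state/1]).
-#   -endif.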
-
-# Targets.
-
-$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-test-deps:
-else
-test-deps: $(ALL_TEST_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-ifneq ($(wildcard $(TEST_DIR)),)
-test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
- @:
-
-test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
- $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
-test_erlc_verbose_2 = set -x;
-test_erlc_verbose = $(test_erlc_verbose_$(V))
-
-define compile_test_erl
- $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
- -pa ebin/ -I include/ $(1)
-endef
-
-ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
-$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
- $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
- $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
-endif
-
-test-build:: IS_TEST=1
-test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
-# We already compiled everything when IS_APP=1.
-ifndef IS_APP
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-# Roughly the same as test-build, but when IS_APP=1.
-# We only care about compiling the current application.
-ifdef IS_APP
-test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
-test-build-app:: deps test-deps
-ifneq ($(wildcard src),)
- $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
- $(gen_verbose) touch ebin/test
-endif
-ifneq ($(wildcard $(TEST_DIR)),)
- $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
-endif
-endif
-
-clean:: clean-test-dir
-
-clean-test-dir:
-ifneq ($(wildcard $(TEST_DIR)/*.beam),)
- $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: rebar.config
-
-# We strip out -Werror because we don't want to fail due to
-# warnings when used as a dependency.
-
-compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
-
-define compat_convert_erlc_opts
-$(if $(filter-out -Werror,$1),\
- $(if $(findstring +,$1),\
- $(shell echo $1 | cut -b 2-)))
-endef
-
-define compat_erlc_opts_to_list
-[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
-endef
-
-define compat_rebar_config
-{deps, [
-$(call comma_list,$(foreach d,$(DEPS),\
- $(if $(filter hex,$(call dep_fetch,$d)),\
- {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
- {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
-]}.
-{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
-endef
-
-rebar.config:
- $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
-
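-# Running "make rebar.config" renders the template above into a rebar.config
-# file, producing something like (the dependency shown is illustrative):
-#
-#   {deps, [
-#       {cowlib,".*",{git,"https://github.com/ninenines/cowlib","2.12.1"}}
-#   ]}.
-#   {erl_opts, [debug_info,warn_export_vars,warn_shadow_vars,warn_obsolete_guard]}.
-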
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
-
-.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Core targets.
-
-docs:: asciidoc
-
-distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
-
-# Plugin-specific targets.
-
-asciidoc: asciidoc-guide asciidoc-manual
-
-# User guide.
-
-ifeq ($(wildcard doc/src/guide/book.asciidoc),)
-asciidoc-guide:
-else
-asciidoc-guide: distclean-asciidoc-guide doc-deps
- a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
- a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
-
-distclean-asciidoc-guide:
- $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
-endif
-
-# Man pages.
-
-ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
-
-ifeq ($(ASCIIDOC_MANUAL_FILES),)
-asciidoc-manual:
-else
-
-# Configuration.
-
-MAN_INSTALL_PATH ?= /usr/local/share/man
-MAN_SECTIONS ?= 3 7
-MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
-MAN_VERSION ?= $(PROJECT_VERSION)
-
-# Plugin-specific targets.
-
-define asciidoc2man.erl
-try
- [begin
- io:format(" ADOC ~s~n", [F]),
- ok = asciideck:to_manpage(asciideck:parse_file(F), #{
- compress => gzip,
- outdir => filename:dirname(F),
- extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
- extra3 => "$(MAN_PROJECT) Function Reference"
- })
- end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
- halt(0)
-catch C:E ->
- io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
- halt(1)
-end.
-endef
-
-asciidoc-manual:: doc-deps
-
-asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
- $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
- $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
-
-install-docs:: install-asciidoc
-
-install-asciidoc: asciidoc-manual
- $(foreach s,$(MAN_SECTIONS),\
- mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
- install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
-
-distclean-asciidoc-manual:
- $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
-endif
-endif
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
-
-# Core targets.
-
-help::
- $(verbose) printf "%s\n" "" \
- "Bootstrap targets:" \
- " bootstrap Generate a skeleton of an OTP application" \
- " bootstrap-lib Generate a skeleton of an OTP library" \
- " bootstrap-rel Generate the files needed to build a release" \
- " new-app in=NAME Create a new local OTP application NAME" \
- " new-lib in=NAME Create a new local OTP library NAME" \
- " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
- " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
- " list-templates List available templates"
-
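-# Typical invocations (application and module names are illustrative):
-#
-#   $ make -f erlang.mk bootstrap bootstrap-rel   # new OTP application plus release files
-#   $ make new t=gen_server n=my_worker           # generate a gen_server module from a template
-#   $ make new-lib in=my_lib                      # create a local OTP library under APPS_DIR
-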
-# Bootstrap templates.
-
-define bs_appsrc
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]},
- {mod, {$p_app, []}},
- {env, []}
-]}.
-endef
-
-define bs_appsrc_lib
-{application, $p, [
- {description, ""},
- {vsn, "0.1.0"},
- {id, "git"},
- {modules, []},
- {registered, []},
- {applications, [
- kernel,
- stdlib
- ]}
-]}.
-endef
-
-# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
-# separately during the actual bootstrap.
-define bs_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-endef
-
-define bs_apps_Makefile
-PROJECT = $p
-PROJECT_DESCRIPTION = New project
-PROJECT_VERSION = 0.1.0
-$(if $(SP),
-# Whitespace to be used when creating files from templates.
-SP = $(SP)
-)
-# Make sure we know where the applications are located.
-ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
-APPS_DIR ?= ..
-DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
-
-include $$(ROOT_DIR)/erlang.mk
-endef
-
-define bs_app
--module($p_app).
--behaviour(application).
-
--export([start/2]).
--export([stop/1]).
-
-start(_Type, _Args) ->
- $p_sup:start_link().
-
-stop(_State) ->
- ok.
-endef
-
-define bs_relx_config
-{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
-{extended_start_script, true}.
-{sys_config, "config/sys.config"}.
-{vm_args, "config/vm.args"}.
-endef
-
-define bs_sys_config
-[
-].
-endef
-
-define bs_vm_args
--name $p@127.0.0.1
--setcookie $p
--heart
-endef
-
-# Normal templates.
-
-define tpl_supervisor
--module($(n)).
--behaviour(supervisor).
-
--export([start_link/0]).
--export([init/1]).
-
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-init([]) ->
- Procs = [],
- {ok, {{one_for_one, 1, 5}, Procs}}.
-endef
-
-define tpl_gen_server
--module($(n)).
--behaviour(gen_server).
-
-%% API.
--export([start_link/0]).
-
-%% gen_server.
--export([init/1]).
--export([handle_call/3]).
--export([handle_cast/2]).
--export([handle_info/2]).
--export([terminate/2]).
--export([code_change/3]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-%% gen_server.
-
-init([]) ->
- {ok, #state{}}.
-
-handle_call(_Request, _From, State) ->
- {reply, ignored, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-endef
-
-define tpl_module
--module($(n)).
--export([]).
-endef
-
-define tpl_cowboy_http
--module($(n)).
--behaviour(cowboy_http_handler).
-
--export([init/3]).
--export([handle/2]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {ok, Req, #state{}}.
-
-handle(Req, State=#state{}) ->
- {ok, Req2} = cowboy_req:reply(200, Req),
- {ok, Req2, State}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_gen_fsm
--module($(n)).
--behaviour(gen_fsm).
-
-%% API.
--export([start_link/0]).
-
-%% gen_fsm.
--export([init/1]).
--export([state_name/2]).
--export([handle_event/3]).
--export([state_name/3]).
--export([handle_sync_event/4]).
--export([handle_info/3]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_fsm:start_link(?MODULE, [], []).
-
-%% gen_fsm.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_Event, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_Event, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-state_name(_Event, _From, StateData) ->
- {reply, ignored, state_name, StateData}.
-
-handle_sync_event(_Event, _From, StateName, StateData) ->
- {reply, ignored, StateName, StateData}.
-
-handle_info(_Info, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_gen_statem
--module($(n)).
--behaviour(gen_statem).
-
-%% API.
--export([start_link/0]).
-
-%% gen_statem.
--export([callback_mode/0]).
--export([init/1]).
--export([state_name/3]).
--export([handle_event/4]).
--export([terminate/3]).
--export([code_change/4]).
-
--record(state, {
-}).
-
-%% API.
-
--spec start_link() -> {ok, pid()}.
-start_link() ->
- gen_statem:start_link(?MODULE, [], []).
-
-%% gen_statem.
-
-callback_mode() ->
- state_functions.
-
-init([]) ->
- {ok, state_name, #state{}}.
-
-state_name(_EventType, _EventData, StateData) ->
- {next_state, state_name, StateData}.
-
-handle_event(_EventType, _EventData, StateName, StateData) ->
- {next_state, StateName, StateData}.
-
-terminate(_Reason, _StateName, _StateData) ->
- ok.
-
-code_change(_OldVsn, StateName, StateData, _Extra) ->
- {ok, StateName, StateData}.
-endef
-
-define tpl_cowboy_loop
--module($(n)).
--behaviour(cowboy_loop_handler).
-
--export([init/3]).
--export([info/3]).
--export([terminate/3]).
-
--record(state, {
-}).
-
-init(_, Req, _Opts) ->
- {loop, Req, #state{}, 5000, hibernate}.
-
-info(_Info, Req, State) ->
- {loop, Req, State, hibernate}.
-
-terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_cowboy_rest
--module($(n)).
-
--export([init/3]).
--export([content_types_provided/2]).
--export([get_html/2]).
-
-init(_, _Req, _Opts) ->
- {upgrade, protocol, cowboy_rest}.
-
-content_types_provided(Req, State) ->
- {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
-
-get_html(Req, State) ->
- {<<"<html><body>This is REST!</body></html>">>, Req, State}.
-endef
-
-define tpl_cowboy_ws
--module($(n)).
--behaviour(cowboy_websocket_handler).
-
--export([init/3]).
--export([websocket_init/3]).
--export([websocket_handle/3]).
--export([websocket_info/3]).
--export([websocket_terminate/3]).
-
--record(state, {
-}).
-
-init(_, _, _) ->
- {upgrade, protocol, cowboy_websocket}.
-
-websocket_init(_, Req, _Opts) ->
- Req2 = cowboy_req:compact(Req),
- {ok, Req2, #state{}}.
-
-websocket_handle({text, Data}, Req, State) ->
- {reply, {text, Data}, Req, State};
-websocket_handle({binary, Data}, Req, State) ->
- {reply, {binary, Data}, Req, State};
-websocket_handle(_Frame, Req, State) ->
- {ok, Req, State}.
-
-websocket_info(_Info, Req, State) ->
- {ok, Req, State}.
-
-websocket_terminate(_Reason, _Req, _State) ->
- ok.
-endef
-
-define tpl_ranch_protocol
--module($(n)).
--behaviour(ranch_protocol).
-
--export([start_link/4]).
--export([init/4]).
-
--type opts() :: [].
--export_type([opts/0]).
-
--record(state, {
- socket :: inet:socket(),
- transport :: module()
-}).
-
-start_link(Ref, Socket, Transport, Opts) ->
- Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
- {ok, Pid}.
-
--spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
-init(Ref, Socket, Transport, _Opts) ->
- ok = ranch:accept_ack(Ref),
- loop(#state{socket=Socket, transport=Transport}).
-
-loop(State) ->
- loop(State).
-endef
-
-# Plugin-specific targets.
-
-ifndef WS
-ifdef SP
-WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
-else
-WS = $(tab)
-endif
-endif
-
-bootstrap:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(PROJECT)_sup)
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
-endif
- $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
-
-bootstrap-lib:
-ifneq ($(wildcard src/),)
- $(error Error: src/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) $(call core_render,bs_Makefile,Makefile)
- $(verbose) echo "include erlang.mk" >> Makefile
- $(verbose) mkdir src/
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
-endif
-
-bootstrap-rel:
-ifneq ($(wildcard relx.config),)
- $(error Error: relx.config already exists)
-endif
-ifneq ($(wildcard config/),)
- $(error Error: config/ directory already exists)
-endif
- $(eval p := $(PROJECT))
- $(verbose) $(call core_render,bs_relx_config,relx.config)
- $(verbose) mkdir config/
- $(verbose) $(call core_render,bs_sys_config,config/sys.config)
- $(verbose) $(call core_render,bs_vm_args,config/vm.args)
-
-new-app:
-ifndef in
- $(error Usage: $(MAKE) new-app in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(eval n := $(in)_sup)
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
-endif
- $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
- $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
-
-new-lib:
-ifndef in
- $(error Usage: $(MAKE) new-lib in=APP)
-endif
-ifneq ($(wildcard $(APPS_DIR)/$in),)
- $(error Error: Application $in already exists)
-endif
- $(eval p := $(in))
- $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
- $(error Error: Invalid characters in the application name))
- $(verbose) mkdir -p $(APPS_DIR)/$p/src/
- $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
-ifdef LEGACY
- $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
-endif
-
-new:
-ifeq ($(wildcard src/)$(in),)
- $(error Error: src/ directory does not exist)
-endif
-ifndef t
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifndef n
- $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
-else
- $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
-endif
-
-list-templates:
- $(verbose) @echo Available templates:
- $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
-
-# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: clean-c_src distclean-c_src-env
-
-# Configuration.
-
-C_SRC_DIR ?= $(CURDIR)/c_src
-C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
-C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
-C_SRC_TYPE ?= shared
-
-# System type and C compiler/flags.
-
-ifeq ($(PLATFORM),msys2)
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
-else
- C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
- C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
-endif
-
-ifeq ($(C_SRC_TYPE),shared)
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
-else
- C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
-endif
-
-ifeq ($(PLATFORM),msys2)
-# We hardcode the compiler used on MSYS2. The default CC=cc does
-# not produce working code. The "gcc" MSYS2 package also doesn't.
- CC = /mingw64/bin/gcc
- export CC
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),darwin)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -arch x86_64 -Wall
- LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
-else ifeq ($(PLATFORM),freebsd)
- CC ?= cc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-else ifeq ($(PLATFORM),linux)
- CC ?= gcc
- CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
- CXXFLAGS ?= -O3 -finline-functions -Wall
-endif
-
-ifneq ($(PLATFORM),msys2)
- CFLAGS += -fPIC
- CXXFLAGS += -fPIC
-endif
-
-CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
-
-LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
-
-# Verbosity.
-
-c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-c_verbose = $(c_verbose_$(V))
-
-cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
-cpp_verbose = $(cpp_verbose_$(V))
-
-link_verbose_0 = @echo " LD " $(@F);
-link_verbose = $(link_verbose_$(V))
-
-# Targets.
-
-ifeq ($(wildcard $(C_SRC_DIR)),)
-else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
-app:: app-c_src
-
-test-build:: app-c_src
-
-app-c_src:
- $(MAKE) -C $(C_SRC_DIR)
-
-clean::
- $(MAKE) -C $(C_SRC_DIR) clean
-
-else
-
-ifeq ($(SOURCES),)
-SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
-endif
-OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
-
-COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
-COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
-
-app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
-
-$(C_SRC_OUTPUT_FILE): $(OBJECTS)
- $(verbose) mkdir -p $(dir $@)
- $(link_verbose) $(CC) $(OBJECTS) \
- $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
- -o $(C_SRC_OUTPUT_FILE)
-
-$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
-
-%.o: %.c
- $(COMPILE_C) $(OUTPUT_OPTION) $<
-
-%.o: %.cc
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.C
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-%.o: %.cpp
- $(COMPILE_CPP) $(OUTPUT_OPTION) $<
-
-clean:: clean-c_src
-
-clean-c_src:
- $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
-
-endif
-
-ifneq ($(wildcard $(C_SRC_DIR)),)
-ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
-
-$(C_SRC_ENV):
- $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
- io_lib:format( \
- \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
- \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
- \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
- \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
- \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
- [code:root_dir(), erlang:system_info(version), \
- code:lib_dir(erl_interface, include), \
- code:lib_dir(erl_interface, lib)])), \
- halt()."
-
-distclean:: distclean-c_src-env
-
-distclean-c_src-env:
- $(gen_verbose) rm -f $(C_SRC_ENV)
-
--include $(C_SRC_ENV)
-
-ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
-$(shell rm -f $(C_SRC_ENV))
-endif
-endif
-
-# Templates.
-
-define bs_c_nif
-#include "erl_nif.h"
-
-static int loads = 0;
-
-static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
-{
- /* Initialize private data. */
- *priv_data = NULL;
-
- loads++;
-
- return 0;
-}
-
-static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
-{
- /* Convert the private data to the new version. */
- *priv_data = *old_priv_data;
-
- loads++;
-
- return 0;
-}
-
-static void unload(ErlNifEnv* env, void* priv_data)
-{
- if (loads == 1) {
- /* Destroy the private data. */
- }
-
- loads--;
-}
-
-static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{
- if (enif_is_atom(env, argv[0])) {
- return enif_make_tuple2(env,
- enif_make_atom(env, "hello"),
- argv[0]);
- }
-
- return enif_make_tuple2(env,
- enif_make_atom(env, "error"),
- enif_make_atom(env, "badarg"));
-}
-
-static ErlNifFunc nif_funcs[] = {
- {"hello", 1, hello}
-};
-
-ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
-endef
-
-define bs_erl_nif
--module($n).
-
--export([hello/1]).
-
--on_load(on_load/0).
-on_load() ->
- PrivDir = case code:priv_dir(?MODULE) of
- {error, _} ->
- AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
- filename:join(AppPath, "priv");
- Path ->
- Path
- end,
- erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
-
-hello(_) ->
- erlang:nif_error({not_loaded, ?MODULE}).
-endef
-
-new-nif:
-ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
- $(error Error: $(C_SRC_DIR)/$n.c already exists)
-endif
-ifneq ($(wildcard src/$n.erl),)
- $(error Error: src/$n.erl already exists)
-endif
-ifndef n
- $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
-endif
-ifdef in
- $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
-else
- $(verbose) mkdir -p $(C_SRC_DIR) src/
- $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
- $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
-endif
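-
-# Example (hedged; the NIF name is illustrative): `make new-nif n=hello_nif`
-# renders bs_c_nif into $(C_SRC_DIR)/hello_nif.c and bs_erl_nif into
-# src/hello_nif.erl; a subsequent `make` then compiles them into
-# $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION) via the c_src rules above.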
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ci ci-prepare ci-setup
-
-CI_OTP ?=
-CI_HIPE ?=
-CI_ERLLVM ?=
-
-ifeq ($(CI_VM),native)
-ERLC_OPTS += +native
-TEST_ERLC_OPTS += +native
-else ifeq ($(CI_VM),erllvm)
-ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
-endif
-
-ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
-ci::
-else
-
-ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
-
-ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
-
-ci-setup::
- $(verbose) :
-
-ci-extra::
- $(verbose) :
-
-ci_verbose_0 = @echo " CI " $(1);
-ci_verbose = $(ci_verbose_$(V))
-
-define ci_target
-ci-$1: $(KERL_INSTALL_DIR)/$2
- $(verbose) $(MAKE) --no-print-directory clean
- $(ci_verbose) \
- PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
- CI_OTP_RELEASE="$1" \
- CT_OPTS="-label $1" \
- CI_VM="$3" \
- $(MAKE) ci-setup tests
- $(verbose) $(MAKE) --no-print-directory ci-extra
-endef
-
-$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
-$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
-$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
-
-$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
-$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
-
-help::
- $(verbose) printf "%s\n" "" \
- "Continuous Integration targets:" \
- " ci Run '$(MAKE) tests' on all configured Erlang versions." \
- "" \
- "The CI_OTP variable must be defined with the Erlang versions" \
- "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
-
-endif
-
-# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifdef CONCUERROR_TESTS
-
-.PHONY: concuerror distclean-concuerror
-
-# Configuration
-
-CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
-CONCUERROR_OPTS ?=
-
-# Core targets.
-
-check:: concuerror
-
-ifndef KEEP_LOGS
-distclean:: distclean-concuerror
-endif
-
-# Plugin-specific targets.
-
-$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
- $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
- $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
-
-$(CONCUERROR_LOGS_DIR):
- $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
-
-define concuerror_html_report
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="utf-8">
-<title>Concuerror HTML report</title>
-</head>
-<body>
-<h1>Concuerror HTML report</h1>
-<p>Generated on $(concuerror_date)</p>
-<ul>
-$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
-</ul>
-</body>
-</html>
-endef
-
-concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
- $(eval concuerror_date := $(shell date))
- $(eval concuerror_targets := $^)
- $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
-
-define concuerror_target
-.PHONY: concuerror-$1-$2
-
-concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
- $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
- --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
- -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
- $$(CONCUERROR_OPTS) -m $1 -t $2
-endef
-
-$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
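-
-# Example (hedged; module and test names are illustrative): with
-# CONCUERROR_TESTS = my_SUITE:race, the foreach above generates a
-# concuerror-my_SUITE-race target whose output is written to
-# $(CONCUERROR_LOGS_DIR)/concuerror-my_SUITE-race.txt.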
-
-distclean-concuerror:
- $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
-
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: ct apps-ct distclean-ct
-
-# Configuration.
-
-CT_OPTS ?=
-
-ifneq ($(wildcard $(TEST_DIR)),)
-ifndef CT_SUITES
-CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
-endif
-endif
-CT_SUITES ?=
-CT_LOGS_DIR ?= $(CURDIR)/logs
-
-# Core targets.
-
-tests:: ct
-
-ifndef KEEP_LOGS
-distclean:: distclean-ct
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Common_test targets:" \
- " ct Run all the common_test suites for this project" \
- "" \
- "All your common_test suites have their associated targets." \
-		"A suite named http_SUITE can be run using the ct-http target."
-
-# Plugin-specific targets.
-
-CT_RUN = ct_run \
- -no_auto_compile \
- -noinput \
- -pa $(CURDIR)/ebin $(TEST_DIR) \
- -dir $(TEST_DIR) \
- -logdir $(CT_LOGS_DIR)
-
-ifeq ($(CT_SUITES),)
-ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
-else
-# We do not run tests if we are in an apps/* application with no test directory.
-ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
-ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
-endif
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-define ct_app_target
-apps-ct-$1: test-build
- $$(MAKE) -C $1 ct IS_APP=1
-endef
-
-$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
-
-apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
-endif
-
-ifdef t
-ifeq (,$(findstring :,$t))
-CT_EXTRA = -group $t
-else
-t_words = $(subst :, ,$t)
-CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
-endif
-else
-ifdef c
-CT_EXTRA = -case $c
-else
-CT_EXTRA =
-endif
-endif
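-
-# Example (hedged; group and case names are illustrative): for a suite named
-# http_SUITE, `make ct-http t=admin` runs only the admin group, while
-# `make ct-http t=admin:list_users` narrows the run to a single test case.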
-
-define ct_suite_target
-ct-$(1): test-build
- $(verbose) mkdir -p $(CT_LOGS_DIR)
- $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
-endef
-
-$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
-
-distclean-ct:
- $(gen_verbose) rm -rf $(CT_LOGS_DIR)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: plt distclean-plt dialyze
-
-# Configuration.
-
-DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
-export DIALYZER_PLT
-
-PLT_APPS ?=
-DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
-DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
-DIALYZER_PLT_OPTS ?=
-
-# Core targets.
-
-check:: dialyze
-
-distclean:: distclean-plt
-
-help::
- $(verbose) printf "%s\n" "" \
- "Dialyzer targets:" \
- " plt Build a PLT file for this project" \
- " dialyze Analyze the project using Dialyzer"
-
-# Plugin-specific targets.
-
-define filter_opts.erl
- Opts = init:get_plain_arguments(),
- {Filtered, _} = lists:foldl(fun
- (O, {Os, true}) -> {[O|Os], false};
- (O = "-D", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-I", {Os, _}) -> {[O|Os], true};
- (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
- (O = "-pa", {Os, _}) -> {[O|Os], true};
- (_, Acc) -> Acc
- end, {[], false}, Opts),
-		" new t=T n=N in=APP Generate a module N based on the template T in APP" \
- halt().
-endef
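-# Usage examples (hedged; template and module names are illustrative):
-#   make bootstrap                         # application skeleton with a supervisor
-#   make bootstrap-lib                     # library skeleton (no application module)
-#   make new t=gen_server n=my_worker      # render tpl_gen_server into src/my_worker.erl
-#   make new t=cowboy_http n=my_handler in=my_app
-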
-
-# DIALYZER_PLT is a variable understood directly by Dialyzer.
-#
-# We append the path to erts at the end of the PLT. This works
-# because the PLT file is in the external term format and the
-# function binary_to_term/1 ignores any trailing data.
-$(DIALYZER_PLT): deps app
- $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
- while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
- $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
- erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
- $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
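-
-# A minimal illustration of the trailing-data behaviour described above (hedged
-# sketch; the appended erts path is hypothetical):
-#
-#   1> Plt = term_to_binary({plt, info}).
-#   2> binary_to_term(<<Plt/binary, "\n/usr/lib/erlang/lib/erts-12.2\n">>).
-#   {plt,info}
-#
-# Dialyzer only decodes the leading term, while the dialyze target below reads
-# the appended path back with `tail -n1` to detect an Erlang/OTP change.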
-
-plt: $(DIALYZER_PLT)
-
-distclean-plt:
- $(gen_verbose) rm -f $(DIALYZER_PLT)
-
-ifneq ($(wildcard $(DIALYZER_PLT)),)
-dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
- $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
- grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
- rm $(DIALYZER_PLT); \
- $(MAKE) plt; \
- fi
-else
-dialyze: $(DIALYZER_PLT)
-endif
- $(verbose) dialyzer --no_native `$(ERL) \
- -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
- -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-edoc edoc
-
-# Configuration.
-
-EDOC_OPTS ?=
-EDOC_SRC_DIRS ?=
-EDOC_OUTPUT ?= doc
-
-define edoc.erl
- SrcPaths = lists:foldl(fun(P, Acc) ->
- filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
- end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
- DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
- edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
- halt(0).
-endef
-
-# Core targets.
-
-ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
-docs:: edoc
-endif
-
-distclean:: distclean-edoc
-
-# Plugin-specific targets.
-
-edoc: distclean-edoc doc-deps
- $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
-
-distclean-edoc:
- $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Configuration.
-
-DTL_FULL_PATH ?=
-DTL_PATH ?= templates/
-DTL_PREFIX ?=
-DTL_SUFFIX ?= _dtl
-DTL_OPTS ?=
-
-# Verbosity.
-
-dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
-dtl_verbose = $(dtl_verbose_$(V))
-
-# Core targets.
-
-DTL_PATH := $(abspath $(DTL_PATH))
-DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
-
-ifneq ($(DTL_FILES),)
-
-DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
-DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
-BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
-
-ifneq ($(words $(DTL_FILES)),0)
-# Rebuild templates when the Makefile changes.
-$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(DTL_FILES); \
- fi
- $(verbose) touch $@
-
-ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
-endif
-
-define erlydtl_compile.erl
- [begin
- Module0 = case "$(strip $(DTL_FULL_PATH))" of
- "" ->
- filename:basename(F, ".dtl");
- _ ->
- "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
- re:replace(F2, "/", "_", [{return, list}, global])
- end,
- Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
- case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
- ok -> ok;
- {ok, _} -> ok
- end
- end || F <- string:tokens("$(1)", " ")],
- halt().
-endef
-
-ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
- $(if $(strip $?),\
- $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
- -pa ebin/))
-
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-escript escript escript-zip
-
-# Configuration.
-
-ESCRIPT_NAME ?= $(PROJECT)
-ESCRIPT_FILE ?= $(ESCRIPT_NAME)
-
-ESCRIPT_SHEBANG ?= /usr/bin/env escript
-ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
-ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
-
-ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
-ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
-
-# Core targets.
-
-distclean:: distclean-escript
-
-help::
- $(verbose) printf "%s\n" "" \
- "Escript targets:" \
- " escript Build an executable escript archive" \
-
-# Plugin-specific targets.
-
-escript-zip:: FULL=1
-escript-zip:: deps app
- $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP))
- $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
- $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
-ifneq ($(DEPS),)
- $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
- $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
- $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
-endif
-
-escript:: escript-zip
- $(gen_verbose) printf "%s\n" \
- "#!$(ESCRIPT_SHEBANG)" \
- "%% $(ESCRIPT_COMMENT)" \
- "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
- $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
- $(verbose) chmod +x $(ESCRIPT_FILE)
-
-distclean-escript:
- $(gen_verbose) rm -f $(ESCRIPT_FILE)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: eunit apps-eunit
-
-# Configuration
-
-EUNIT_OPTS ?=
-EUNIT_ERL_OPTS ?=
-
-# Core targets.
-
-tests:: eunit
-
-help::
- $(verbose) printf "%s\n" "" \
- "EUnit targets:" \
- " eunit Run all the EUnit tests for this project"
-
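-# Examples (hedged; module and test names are illustrative):
-#   make eunit                        # run every EUnit test in the project
-#   make eunit t=my_module            # run a single module's tests
-#   make eunit t=my_module:my_test    # run one 0-arity test function
-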
-# Plugin-specific targets.
-
-define eunit.erl
- $(call cover.erl)
- CoverSetup(),
- case eunit:test($1, [$(EUNIT_OPTS)]) of
- ok -> ok;
- error -> halt(2)
- end,
- CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
- halt()
-endef
-
-EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
-else
-eunit: test-build cover-data-dir
- $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
-endif
-else
-EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
-EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
-
-EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
- $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
-
-eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
-ifneq ($(wildcard src/ $(TEST_DIR)),)
- $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
-endif
-
-ifneq ($(ALL_APPS_DIRS),)
-apps-eunit: test-build
- $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
- [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
- exit $$eunit_retcode
-endif
-endif
-
-# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper)
-.PHONY: proper
-
-# Targets.
-
-tests:: proper
-
-define proper_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- Module = fun(M) ->
- [true] =:= lists:usort([
- case atom_to_list(F) of
- "prop_" ++ _ ->
- io:format("Testing ~p:~p/0~n", [M, F]),
- proper:quickcheck(M:F(), nocolors);
- _ ->
- true
- end
- || {F, 0} <- M:module_info(exports)])
- end,
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]);
- module -> Module($(2));
- function -> proper:quickcheck($(2), nocolors)
- end,
- CoverExport("$(COVER_DATA_DIR)/proper.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
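-# Examples (hedged; module and property names are illustrative):
-#   make proper                             # check every exported prop_* function
-#   make proper t=prop_http                 # check all properties of one module
-#   make proper t=prop_http:prop_keepalive  # check a single property
-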
-ifdef t
-ifeq (,$(findstring :,$(t)))
-proper: test-build cover-data-dir
- $(verbose) $(call erlang,$(call proper_check.erl,module,$(t)))
-else
-proper: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)()))
-endif
-else
-proper: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Verbosity.
-
-proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
-proto_verbose = $(proto_verbose_$(V))
-
-# Core targets.
-
-ifneq ($(wildcard src/),)
-ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
-PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
-ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
-
-ifeq ($(PROTO_FILES),)
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
- $(verbose) :
-else
-# Rebuild proto files when the Makefile changes.
-# We exclude $(PROJECT).d to avoid a circular dependency.
-$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
- $(verbose) if test -f $@; then \
- touch $(PROTO_FILES); \
- fi
- $(verbose) touch $@
-
-$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
-endif
-
-ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
-define compile_proto.erl
- [begin
- protobuffs_compile:generate_source(F, [
- {output_include_dir, "./include"},
- {output_src_dir, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-else
-define compile_proto.erl
- [begin
- gpb_compile:file(F, [
- {include_as_lib, true},
- {module_name_suffix, "_pb"},
- {o_hrl, "./include"},
- {o_erl, "./src"}])
- end || F <- string:tokens("$1", " ")],
- halt().
-endef
-endif
-
-ifneq ($(PROTO_FILES),)
-$(PROJECT).d:: $(PROTO_FILES)
- $(verbose) mkdir -p ebin/ include/
- $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
-endif
-endif
-endif
-
-# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: relx-rel relx-relup distclean-relx-rel run
-
-# Configuration.
-
-RELX ?= $(ERLANG_MK_TMP)/relx
-RELX_CONFIG ?= $(CURDIR)/relx.config
-
-RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
-RELX_OPTS ?=
-RELX_OUTPUT_DIR ?= _rel
-RELX_REL_EXT ?=
-RELX_TAR ?= 1
-
-ifdef SFX
- RELX_TAR = 1
-endif
-
-ifeq ($(firstword $(RELX_OPTS)),-o)
- RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
-else
- RELX_OPTS += -o $(RELX_OUTPUT_DIR)
-endif
-
-# Core targets.
-
-ifeq ($(IS_DEP),)
-ifneq ($(wildcard $(RELX_CONFIG)),)
-rel:: relx-rel
-
-relup:: relx-relup
-endif
-endif
-
-distclean:: distclean-relx-rel
-
-# Plugin-specific targets.
-
-$(RELX): | $(ERLANG_MK_TMP)
- $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
- $(verbose) chmod +x $(RELX)
-
-relx-rel: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(verbose) $(MAKE) relx-post-rel
-ifeq ($(RELX_TAR),1)
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
-endif
-
-relx-relup: $(RELX) rel-deps app
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
- $(MAKE) relx-post-rel
- $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
-
-distclean-relx-rel:
- $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
-
-# Default hooks.
-relx-post-rel::
- $(verbose) :
-
-# Run target.
-
-ifeq ($(wildcard $(RELX_CONFIG)),)
-run::
-else
-
-define get_relx_release.erl
- {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
- {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
- Vsn = case Vsn0 of
- {cmd, Cmd} -> os:cmd(Cmd);
- semver -> "";
- {semver, _} -> "";
- VsnStr -> Vsn0
- end,
- Extended = case lists:keyfind(extended_start_script, 1, Config) of
- {_, true} -> "1";
- _ -> ""
- end,
- io:format("~s ~s ~s", [Name, Vsn, Extended]),
- halt(0).
-endef
-
-RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
-RELX_REL_NAME := $(word 1,$(RELX_REL))
-RELX_REL_VSN := $(word 2,$(RELX_REL))
-RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
-
-ifeq ($(PLATFORM),msys2)
-RELX_REL_EXT := .cmd
-endif
-
-run:: all
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
-
-ifdef RELOAD
-rel::
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
- $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
- eval "io:format(\"~p~n\", [c:lm()])"
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Relx targets:" \
- " run Compile the project, build the release and run it"
-
-endif
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: shell
-
-# Configuration.
-
-SHELL_ERL ?= erl
-SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
-SHELL_OPTS ?=
-
-ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
-
-# Core targets
-
-help::
- $(verbose) printf "%s\n" "" \
- "Shell targets:" \
-		" shell Run an Erlang shell with SHELL_OPTS or a reasonable default"
-
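-# Example (hedged; the node name is illustrative): `make shell SHELL_OPTS="-sname dev"`
-# starts an Erlang shell with $(CURDIR)/ebin and $(TEST_DIR) in the code path.
-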
-# Plugin-specific targets.
-
-$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
-
-ifneq ($(SKIP_DEPS),)
-build-shell-deps:
-else
-build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
- $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
- if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
- :; \
- else \
- $(MAKE) -C $$dep IS_DEP=1; \
- if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
- fi \
- done
-endif
-
-shell:: build-shell-deps
- $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
-
-# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: distclean-sphinx sphinx
-
-# Configuration.
-
-SPHINX_BUILD ?= sphinx-build
-SPHINX_SOURCE ?= doc
-SPHINX_CONFDIR ?=
-SPHINX_FORMATS ?= html
-SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
-SPHINX_OPTS ?=
-
-#sphinx_html_opts =
-#sphinx_html_output = html
-#sphinx_man_opts =
-#sphinx_man_output = man
-#sphinx_latex_opts =
-#sphinx_latex_output = latex
-
-# Helpers.
-
-sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
-sphinx_build_1 = $(SPHINX_BUILD) -N
-sphinx_build_2 = set -x; $(SPHINX_BUILD)
-sphinx_build = $(sphinx_build_$(V))
-
-define sphinx.build
-$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
-
-endef
-
-define sphinx.output
-$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
-endef
-
-# Targets.
-
-ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
-docs:: sphinx
-distclean:: distclean-sphinx
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Sphinx targets:" \
- " sphinx Generate Sphinx documentation." \
- "" \
-		"ReST sources and the 'conf.py' file are expected in the directory pointed to" \
-		"by SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists the formats to build" \
-		"(only 'html' is generated by default); the target directory can be specified by" \
- 'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
- "Additional Sphinx options can be set in SPHINX_OPTS."
-
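-# Example (hedged; values are illustrative): SPHINX_FORMATS = html man combined
-# with sphinx_man_output = doc/man builds both formats from $(SPHINX_SOURCE).
-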
-# Plugin-specific targets.
-
-sphinx:
- $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
-
-distclean-sphinx:
- $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
-
-# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is contributed to erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
-
-show-ERL_LIBS:
- @echo $(ERL_LIBS)
-
-show-ERLC_OPTS:
- @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-show-TEST_ERLC_OPTS:
- @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
-
-# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
-.PHONY: triq
-
-# Targets.
-
-tests:: triq
-
-define triq_check.erl
- $(call cover.erl)
- code:add_pathsa([
- "$(call core_native_path,$(CURDIR)/ebin)",
- "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
- "$(call core_native_path,$(TEST_DIR))"]),
- try begin
- CoverSetup(),
- Res = case $(1) of
- all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
- module -> triq:check($(2));
- function -> triq:check($(2))
- end,
- CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
- Res
- end of
- true -> halt(0);
- _ -> halt(1)
- catch error:undef ->
- io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
- halt(0)
- end.
-endef
-
-ifdef t
-ifeq (,$(findstring :,$(t)))
-triq: test-build cover-data-dir
- $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
-else
-triq: test-build cover-data-dir
- $(verbose) echo Testing $(t)/0
- $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
-endif
-else
-triq: test-build cover-data-dir
- $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
- $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
- $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
-endif
-endif
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Erlang Solutions Ltd.
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: xref distclean-xref
-
-# Configuration.
-
-ifeq ($(XREF_CONFIG),)
- XREFR_ARGS :=
-else
- XREFR_ARGS := -c $(XREF_CONFIG)
-endif
-
-XREFR ?= $(CURDIR)/xrefr
-export XREFR
-
-XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
-
-# Core targets.
-
-help::
- $(verbose) printf '%s\n' '' \
- 'Xref targets:' \
-		' xref Run Xrefr using $$XREF_CONFIG as the config file, if defined'
-
-distclean:: distclean-xref
-
-# Plugin-specific targets.
-
-$(XREFR):
- $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
- $(verbose) chmod +x $(XREFR)
-
-xref: deps app $(XREFR)
- $(gen_verbose) $(XREFR) $(XREFR_ARGS)
-
-distclean-xref:
- $(gen_verbose) rm -rf $(XREFR)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-COVER_REPORT_DIR ?= cover
-COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
-
-ifdef COVER
-COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
-COVER_DEPS ?=
-endif
-
-# Code coverage for Common Test.
-
-ifdef COVER
-ifdef CT_RUN
-ifneq ($(wildcard $(TEST_DIR)),)
-test-build:: $(TEST_DIR)/ct.cover.spec
-
-$(TEST_DIR)/ct.cover.spec: cover-data-dir
- $(gen_verbose) printf "%s\n" \
- "{incl_app, '$(PROJECT)', details}." \
- "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
- $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
- $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
- '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
-
-CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
-endif
-endif
-endif
-
-# Code coverage for other tools.
-
-ifdef COVER
-define cover.erl
- CoverSetup = fun() ->
- Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
- $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
- $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
- [begin
- case filelib:is_dir(Dir) of
- false -> false;
- true ->
- case cover:compile_beam_directory(Dir) of
- {error, _} -> halt(1);
- _ -> true
- end
- end
- end || Dir <- Dirs]
- end,
- CoverExport = fun(Filename) -> cover:export(Filename) end,
-endef
-else
-define cover.erl
- CoverSetup = fun() -> ok end,
- CoverExport = fun(_) -> ok end,
-endef
-endif
-
-# Core targets
-
-ifdef COVER
-ifneq ($(COVER_REPORT_DIR),)
-tests::
- $(verbose) $(MAKE) --no-print-directory cover-report
-endif
-
-cover-data-dir: | $(COVER_DATA_DIR)
-
-$(COVER_DATA_DIR):
- $(verbose) mkdir -p $(COVER_DATA_DIR)
-else
-cover-data-dir:
-endif
-
-clean:: coverdata-clean
-
-ifneq ($(COVER_REPORT_DIR),)
-distclean:: cover-report-clean
-endif
-
-help::
- $(verbose) printf "%s\n" "" \
- "Cover targets:" \
-		" cover-report Generate an HTML coverage report from previously collected" \
- " cover data." \
- " all.coverdata Merge all coverdata files into all.coverdata." \
- "" \
- "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
-		"target tests additionally generates an HTML coverage report from the combined" \
- "coverdata files from each of these testing tools. HTML reports can be disabled" \
- "by setting COVER_REPORT_DIR to empty."
-
-# Plugin specific targets
-
-COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
-
-.PHONY: coverdata-clean
-coverdata-clean:
- $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
-
-# Merge all coverdata files into one.
-define cover_export.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
-endef
-
-all.coverdata: $(COVERDATA) cover-data-dir
- $(gen_verbose) $(call erlang,$(cover_export.erl))
-
-# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
-# empty if you want the coverdata files but not the HTML report.
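-#
-# Example (hedged; values are illustrative):
-#   make tests COVER=1 COVER_REPORT_DIR= COVER_DATA_DIR=cover
-# keeps the *.coverdata files but skips the HTML report.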
-ifneq ($(COVER_REPORT_DIR),)
-
-.PHONY: cover-report-clean cover-report
-
-cover-report-clean:
- $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
-ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
- $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
-endif
-
-ifeq ($(COVERDATA),)
-cover-report:
-else
-
-# Modules which include eunit.hrl always contain one line without coverage
-# because eunit defines test/0, which is never called. We compensate for this.
-EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
- grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
- | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
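-
-# Worked example (hedged; the module name is illustrative): a module m with the
-# cover result {m, {10, 3}} that appears in EunitHrlMods becomes {m, {10, 2}} in
-# Report1 below, so the auto-generated, never-called test/0 line is not counted
-# as uncovered.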
-
-define cover_report.erl
- $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
- Ms = cover:imported_modules(),
- [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
- ++ ".COVER.html", [html]) || M <- Ms],
- Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
- EunitHrlMods = [$(EUNIT_HRL_MODS)],
- Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
- true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
- TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
- TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
- Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
- TotalPerc = Perc(TotalY, TotalN),
- {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
- io:format(F, "<!DOCTYPE html><html>~n"
- "<head><meta charset=\"UTF-8\">~n"
- "<title>Coverage report</title></head>~n"
- "<body>~n", []),
- io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
- io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
- [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
- "<td>~p%</td></tr>~n",
- [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
- How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
- Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
- io:format(F, "</table>~n"
- "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
- "</body></html>", [How, Date]),
- halt().
-endef
-
-cover-report:
- $(verbose) mkdir -p $(COVER_REPORT_DIR)
- $(gen_verbose) $(call erlang,$(cover_report.erl))
-
-endif
-endif # ifneq ($(COVER_REPORT_DIR),)
-
-# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-.PHONY: sfx
-
-ifdef RELX_REL
-ifdef SFX
-
-# Configuration.
-
-SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
-SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
-
-# Core targets.
-
-rel:: sfx
-
-# Plugin-specific targets.
-
-define sfx_stub
-#!/bin/sh
-
-TMPDIR=`mktemp -d`
-ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
-FILENAME=$$(basename $$0)
-REL=$${FILENAME%.*}
-
-tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
-
-$$TMPDIR/bin/$$REL console
-RET=$$?
-
-rm -rf $$TMPDIR
-
-exit $$RET
-
-__ARCHIVE_BELOW__
-endef
-
-sfx:
- $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
- $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
- $(verbose) chmod +x $(SFX_OUTPUT_FILE)
-
-endif
-endif
-
-# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# External plugins.
-
-DEP_PLUGINS ?=
-
-$(foreach p,$(DEP_PLUGINS),\
- $(eval $(if $(findstring /,$p),\
- $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
- $(call core_dep_plugin,$p/plugins.mk,$p))))
-
-help:: help-plugins
-
-help-plugins::
- $(verbose) :
-
-# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
-# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-# This file is part of erlang.mk and subject to the terms of the ISC License.
-
-# Fetch dependencies recursively (without building them).
-
-.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
- fetch-shell-deps
-
-.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
- $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-ifneq ($(SKIP_DEPS),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
- $(verbose) :> $@
-else
-# By default, we fetch "normal" dependencies. They are also included no
-# matter the type of requested dependencies.
-#
-# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
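-
-# Example (hedged; dependency names are illustrative): with DEPS = cowlib and
-# TEST_DEPS = proper, `make fetch-test-deps` fetches both, because the test
-# list depends on $(ALL_DEPS_DIRS) as well as $(ALL_TEST_DEPS_DIRS).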
-
-# Allow fetch-deps to be combined with $(DEP_TYPES) to fetch multiple types of
-# dependencies with a single target.
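-# For example (hedged): `make fetch-deps DEP_TYPES="doc test"` also pulls in
-# the documentation and test dependencies through the conditionals below.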
-ifneq ($(filter doc,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
-endif
-ifneq ($(filter rel,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
-endif
-ifneq ($(filter test,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
-endif
-ifneq ($(filter shell,$(DEP_TYPES)),)
-$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
-endif
-
-ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
-
-$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
-$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
- $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
- $(verbose) set -e; for dep in $^ ; do \
- if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
- echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
- if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
- $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
- $(MAKE) -C $$dep fetch-deps \
- IS_DEP=1 \
- ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
- fi \
- fi \
- done
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
- uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
- || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
- $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
- $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
-endif
-endif # ifneq ($(SKIP_DEPS),)
-
-# List dependencies recursively.
-
-.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
- list-shell-deps
-
-list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
-list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
-list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
-list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
-list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
-
-list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
- $(verbose) cat $^
-
-# Query dependencies recursively.
-
-.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
- query-shell-deps
-
-QUERY ?= name fetch_method repo version
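-
-# Example (hedged; assumes the query_* helper macros defined elsewhere in
-# erlang.mk): `make query-deps QUERY="name repo"` prints one
-# "<project>: <name> <repo>" line per dependency, recursively.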
-
-define query_target
-$(1): $(2) clean-tmp-query.log
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) rm -f $(4)
-endif
- $(verbose) $(foreach dep,$(3),\
- echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
- $(if $(filter-out query-deps,$(1)),,\
- $(verbose) set -e; for dep in $(3) ; do \
- if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
- :; \
- else \
- echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
- $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
- fi \
- done)
-ifeq ($(IS_APP)$(IS_DEP),)
- $(verbose) touch $(4)
- $(verbose) cat $(4)
-endif
-endef
-
-clean-tmp-query.log:
-ifeq ($(IS_DEP),)
- $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
-endif
-
-$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
-$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
-$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
-$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
-$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
diff --git a/deps/rabbitmq_web_stomp_examples/rabbitmq-components.mk b/deps/rabbitmq_web_stomp_examples/rabbitmq-components.mk
deleted file mode 100644
index b2a3be8b35..0000000000
--- a/deps/rabbitmq_web_stomp_examples/rabbitmq-components.mk
+++ /dev/null
@@ -1,359 +0,0 @@
-ifeq ($(.DEFAULT_GOAL),)
-# Define the default goal as `all` because this file defines some targets
-# before the inclusion of erlang.mk, which could otherwise cause the wrong
-# target to become the default.
-.DEFAULT_GOAL = all
-endif
-
-# PROJECT_VERSION defaults to:
-# 1. the version exported by rabbitmq-server-release;
-# 2. the version stored in `git-revisions.txt`, if it exists;
-# 3. a version based on git-describe(1), if it is a Git clone;
-# 4. 0.0.0
-
-PROJECT_VERSION := $(RABBITMQ_VERSION)
-
-ifeq ($(PROJECT_VERSION),)
-PROJECT_VERSION := $(shell \
-if test -f git-revisions.txt; then \
- head -n1 git-revisions.txt | \
- awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
-else \
- (git describe --dirty --abbrev=7 --tags --always --first-parent \
- 2>/dev/null || echo rabbitmq_v0_0_0) | \
- sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
- -e 's/-/./g'; \
-fi)
-endif
-
-# --------------------------------------------------------------------
-# RabbitMQ components.
-# --------------------------------------------------------------------
-
-# For RabbitMQ repositories, we want to checkout branches which match
-# the parent project. For instance, if the parent project is on a
-# release tag, dependencies must be on the same release tag. If the
-# parent project is on a topic branch, dependencies must be on the same
-# topic branch, or fall back to `stable` or `master`, whichever was the
-# base of the topic branch.
-
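-# Example (hedged; the branch name is hypothetical): if the parent project is
-# on a topic branch named "my-feature", a dependency such as rabbit_common is
-# fetched from "my-feature" when that branch exists, then from $(base_rmq_ref)
-# (the branch the topic was based on), and finally from master, matching the
-# "$(current_rmq_ref) $(base_rmq_ref) master" arguments given to the git_rmq
-# fetch method below.
-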
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
-
-# Third-party dependencies version pinning.
-#
-# We do that in this file, which is copied in all projects, to ensure
-# all projects use the same versions. It avoids conflicts and makes it
-# possible to work with rabbitmq-public-umbrella.
-
-dep_accept = hex 0.3.5
-dep_cowboy = hex 2.8.0
-dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
-dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
-dep_stdout_formatter = hex 0.2.4
-dep_sysmon_handler = hex 1.3.0
-
-RABBITMQ_COMPONENTS = amqp_client \
- amqp10_common \
- amqp10_client \
- rabbit \
- rabbit_common \
- rabbitmq_amqp1_0 \
- rabbitmq_auth_backend_amqp \
- rabbitmq_auth_backend_cache \
- rabbitmq_auth_backend_http \
- rabbitmq_auth_backend_ldap \
- rabbitmq_auth_backend_oauth2 \
- rabbitmq_auth_mechanism_ssl \
- rabbitmq_aws \
- rabbitmq_boot_steps_visualiser \
- rabbitmq_cli \
- rabbitmq_codegen \
- rabbitmq_consistent_hash_exchange \
- rabbitmq_ct_client_helpers \
- rabbitmq_ct_helpers \
- rabbitmq_delayed_message_exchange \
- rabbitmq_dotnet_client \
- rabbitmq_event_exchange \
- rabbitmq_federation \
- rabbitmq_federation_management \
- rabbitmq_java_client \
- rabbitmq_jms_client \
- rabbitmq_jms_cts \
- rabbitmq_jms_topic_exchange \
- rabbitmq_lvc_exchange \
- rabbitmq_management \
- rabbitmq_management_agent \
- rabbitmq_management_exchange \
- rabbitmq_management_themes \
- rabbitmq_message_timestamp \
- rabbitmq_metronome \
- rabbitmq_mqtt \
- rabbitmq_objc_client \
- rabbitmq_peer_discovery_aws \
- rabbitmq_peer_discovery_common \
- rabbitmq_peer_discovery_consul \
- rabbitmq_peer_discovery_etcd \
- rabbitmq_peer_discovery_k8s \
- rabbitmq_prometheus \
- rabbitmq_random_exchange \
- rabbitmq_recent_history_exchange \
- rabbitmq_routing_node_stamp \
- rabbitmq_rtopic_exchange \
- rabbitmq_server_release \
- rabbitmq_sharding \
- rabbitmq_shovel \
- rabbitmq_shovel_management \
- rabbitmq_stomp \
- rabbitmq_stream \
- rabbitmq_toke \
- rabbitmq_top \
- rabbitmq_tracing \
- rabbitmq_trust_store \
- rabbitmq_web_dispatch \
- rabbitmq_web_mqtt \
- rabbitmq_web_mqtt_examples \
- rabbitmq_web_stomp \
- rabbitmq_web_stomp_examples \
- rabbitmq_website
-
-# Erlang.mk does not rebuild dependencies by default, once they were
-# compiled once, except for those listed in the `$(FORCE_REBUILD)`
-# variable.
-#
-# We want all RabbitMQ components to always be rebuilt: this eases
-# the work on several components at the same time.
-
-FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
-
-# Several components have a custom erlang.mk/build.config, mainly
-# to disable eunit. Therefore, we can't use the top-level project's
-# erlang.mk copy.
-NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
-
-ifeq ($(origin current_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-current_rmq_ref := $(shell (\
- ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
- if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
-else
-current_rmq_ref := master
-endif
-endif
-export current_rmq_ref
-
-ifeq ($(origin base_rmq_ref),undefined)
-ifneq ($(wildcard .git),)
-possible_base_rmq_ref := master
-ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
-base_rmq_ref := $(current_rmq_ref)
-else
-base_rmq_ref := $(shell \
- (git rev-parse --verify -q master >/dev/null && \
- git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
- git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
- echo $(possible_base_rmq_ref)) || \
- echo master)
-endif
-else
-base_rmq_ref := master
-endif
-endif
-export base_rmq_ref
-
-# Repository URL selection.
-#
-# First, we infer other components' location from the current project
-# repository URL, if it's a Git repository:
-# - We take the "origin" remote URL as the base
-# - The current project name and repository name is replaced by the
-# target's properties:
-# eg. rabbitmq-common is replaced by rabbitmq-codegen
-# eg. rabbit_common is replaced by rabbitmq_codegen
-#
-# If cloning from this computed location fails, we fallback to RabbitMQ
-# upstream which is GitHub.
-
-# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
-rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
-
-# Upstream URL for the current project.
-RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
-RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
-
-# Current URL for the current project. If this is not a Git clone,
-# default to the upstream Git repository.
-ifneq ($(wildcard .git),)
-git_origin_fetch_url := $(shell git config remote.origin.url)
-git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
-RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
-RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
-else
-RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
-RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
-endif
-
-# Macro to replace the following pattern:
-# 1. /foo.git -> /bar.git
-# 2. /foo -> /bar
-# 3. /foo/ -> /bar/
-subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
-
-# Macro to replace both the project's name (eg. "rabbit_common") and
-# repository name (eg. "rabbitmq-common") by the target's equivalent.
-#
-# This macro is kept on one line because we don't want whitespaces in
-# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
-# single-quoted string.
-dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
-
-dep_rmq_commits = $(if $(dep_$(1)), \
- $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
- $(pkg_$(1)_commit))
-
-define dep_fetch_git_rmq
- fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
- fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
- if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
- git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url1"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
- elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
- fetch_url="$$$$fetch_url2"; \
- push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
- fi; \
- cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
- $(foreach ref,$(call dep_rmq_commits,$(1)), \
- git checkout -q $(ref) >/dev/null 2>&1 || \
- ) \
- (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
- 1>&2 && false) ) && \
- (test "$$$$fetch_url" = "$$$$push_url" || \
- git remote set-url --push origin "$$$$push_url")
-endef
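The `git_rmq` fetch method defined above checks out the first ref that exists among the current ref, the base ref and `master`, and prefers a repository URL derived from the current origin before falling back to GitHub. A rough shell equivalent of the checkout fallback, with hypothetical ref names:

    #!/bin/sh
    # Hypothetical illustration of the ref fallback used by dep_fetch_git_rmq.
    dep=rabbitmq-common
    current_rmq_ref=my-topic-branch   # ref of the parent project checkout
    base_rmq_ref=master               # branch the topic branch was based on

    git clone -q -n -- "https://github.com/rabbitmq/$dep.git" "deps/$dep"
    cd "deps/$dep" || exit 1
    for ref in "$current_rmq_ref" "$base_rmq_ref" master; do
        if git checkout -q "$ref" 2>/dev/null; then
            echo "checked out $dep at $ref"
            exit 0
        fi
    done
    echo "error: no valid pathspec among: $current_rmq_ref $base_rmq_ref master" >&2
    exit 1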
-
-# --------------------------------------------------------------------
-# Component distribution.
-# --------------------------------------------------------------------
-
-list-dist-deps::
- @:
-
-prepare-dist::
- @:
-
-# --------------------------------------------------------------------
-# Umbrella-specific settings.
-# --------------------------------------------------------------------
-
-# If the top-level project is a RabbitMQ component, we override
-# $(DEPS_DIR) for this project to point to the top-level's one.
-#
-# We also verify that the guessed DEPS_DIR is actually named `deps`,
-# to rule out any situation where it is a coincidence that we found a
-# `rabbitmq-components.mk` in upper directories.
-
-possible_deps_dir_1 = $(abspath ..)
-possible_deps_dir_2 = $(abspath ../../..)
-
-ifeq ($(notdir $(possible_deps_dir_1)),deps)
-ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_1)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-
-ifeq ($(deps_dir_overriden),)
-ifeq ($(notdir $(possible_deps_dir_2)),deps)
-ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
-deps_dir_overriden = 1
-DEPS_DIR ?= $(possible_deps_dir_2)
-DISABLE_DISTCLEAN = 1
-endif
-endif
-endif
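The two candidate directories correspond to the layouts a component may be checked out in: directly under a top-level `deps/` directory, or one extra level down. A small shell check mirroring the first guess (paths are illustrative):

    #!/bin/sh
    # Run from a component directory, e.g. <umbrella>/deps/rabbitmq_web_stomp_examples.
    candidate=$(cd .. && pwd)
    if [ "$(basename "$candidate")" = deps ] && \
       [ -f "$candidate/../rabbitmq-components.mk" ]; then
        echo "umbrella detected; sharing DEPS_DIR=$candidate"
    else
        echo "standalone checkout; using a local deps/ directory"
    fi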
-
-ifneq ($(wildcard UMBRELLA.md),)
-DISABLE_DISTCLEAN = 1
-endif
-
-# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
-
-ifeq ($(DISABLE_DISTCLEAN),1)
-ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
-SKIP_DEPS = 1
-endif
-endif
diff --git a/deps/rabbitmq_web_stomp_examples/src/rabbit_web_stomp_examples_app.erl b/deps/rabbitmq_web_stomp_examples/src/rabbit_web_stomp_examples_app.erl
index 58011ed126..a08473cc6b 100644
--- a/deps/rabbitmq_web_stomp_examples/src/rabbit_web_stomp_examples_app.erl
+++ b/deps/rabbitmq_web_stomp_examples/src/rabbit_web_stomp_examples_app.erl
@@ -2,7 +2,7 @@
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
-%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_web_stomp_examples_app).
@@ -11,7 +11,7 @@
-export([start/2,stop/1]).
%% Dummy supervisor - see Ulf Wiger's comment at
-%% http://erlang.2086793.n4.nabble.com/initializing-library-applications-without-processes-td2094473.html
+%% http://erlang.org/pipermail/erlang-questions/2010-April/050508.html
-behaviour(supervisor).
-export([init/1]).
diff --git a/dist.bzl b/dist.bzl
new file mode 100644
index 0000000000..18c50b3c9b
--- /dev/null
+++ b/dist.bzl
@@ -0,0 +1,200 @@
+load("@bazel-erlang//:erlang_home.bzl", "ErlangHomeProvider")
+load("@bazel-erlang//:bazel_erlang_lib.bzl", "ErlangLibInfo", "flat_deps", "path_join")
+load("@bazel-erlang//:ct.bzl", "additional_file_dest_relative_path")
+load(
+ ":rabbitmq_home.bzl",
+ "RABBITMQ_HOME_ATTRS",
+ "RabbitmqHomeInfo",
+ "flatten",
+ "link_escript",
+ "unique_versions",
+)
+
+def _collect_licenses_impl(ctx):
+ srcs = ctx.files.srcs + flatten([
+ d[ErlangLibInfo].license_files
+ for d in flat_deps(ctx.attr.deps)
+ ])
+
+ outs = {}
+ for src in srcs:
+ name = src.basename
+ if name not in outs:
+ dest = ctx.actions.declare_file(name)
+ ctx.actions.run(
+ inputs = [src],
+ outputs = [dest],
+ executable = "cp",
+ arguments = [
+ src.path,
+ dest.path,
+ ],
+ )
+ outs[name] = dest
+
+ return [
+ DefaultInfo(
+ files = depset(sorted(outs.values())),
+ ),
+ ]
+
+collect_licenses = rule(
+ implementation = _collect_licenses_impl,
+ attrs = {
+ "srcs": attr.label_list(allow_files = True),
+ "deps": attr.label_list(providers = [ErlangLibInfo]),
+ },
+)
+
+def _copy_script(ctx, script):
+ dest = ctx.actions.declare_file(path_join(ctx.label.name, "sbin", script.basename))
+ ctx.actions.expand_template(
+ template = script,
+ output = dest,
+ substitutions = {
+ "SYS_PREFIX=": "SYS_PREFIX=${RABBITMQ_HOME}",
+ },
+ )
+ return dest
+
+def _extract_version(p):
+ return "erl -eval '{ok, [{application, _, AppInfo}]} = file:consult(\"" + p + "\"), Version = proplists:get_value(vsn, AppInfo), io:fwrite(Version), halt().' -noshell"
+
+def _app_file(plugin_lib_info):
+ for f in plugin_lib_info.beam:
+ if f.basename.endswith(".app"):
+ return f
+ fail(".app file not found in {}".format(plugin_lib_info))
+
+def _plugins_dir(ctx, plugins):
+ plugins_dir = ctx.actions.declare_directory(path_join(ctx.label.name, "plugins"))
+
+ erlang_home = ctx.attr._erlang_home[ErlangHomeProvider].path
+
+ inputs = []
+
+ commands = ["set -euo pipefail", ""]
+
+ for plugin in plugins:
+ lib_info = plugin[ErlangLibInfo]
+ app_file = _app_file(lib_info)
+ extract_version = _extract_version(app_file.path)
+ commands.append("PLUGIN_VERSION=$({erlang_home}/bin/{extract_version})".format(erlang_home = erlang_home, extract_version = extract_version))
+
+ commands.append(
+ "echo \"Assembling {lib_name}-$PLUGIN_VERSION...\"".format(
+ lib_name = lib_info.lib_name,
+ ),
+ )
+
+ commands.append(
+ "mkdir -p {plugins_dir}/{lib_name}-$PLUGIN_VERSION/include".format(
+ plugins_dir = plugins_dir.path,
+ lib_name = lib_info.lib_name,
+ ),
+ )
+ for f in lib_info.include:
+ commands.append(
+ "cp {src} {plugins_dir}/{lib_name}-$PLUGIN_VERSION/include/{dest}".format(
+ src = f.path,
+ plugins_dir = plugins_dir.path,
+ lib_name = lib_info.lib_name,
+ dest = f.basename,
+ ),
+ )
+ inputs.extend(lib_info.include)
+
+ commands.append(
+ "mkdir -p {plugins_dir}/{lib_name}-$PLUGIN_VERSION/ebin".format(
+ plugins_dir = plugins_dir.path,
+ lib_name = lib_info.lib_name,
+ ),
+ )
+ for f in lib_info.beam:
+ if f.is_directory:
+ if f.basename != "ebin":
+ fail("{} contains a directory in 'beam' that is not an ebin dir".format(lib_info.lib_name))
+ commands.append(
+ "cp -R {src} {plugins_dir}/{lib_name}-$PLUGIN_VERSION".format(
+ src = f.path,
+ plugins_dir = plugins_dir.path,
+ lib_name = lib_info.lib_name,
+ ),
+ )
+ else:
+ commands.append(
+ "cp {src} {plugins_dir}/{lib_name}-$PLUGIN_VERSION/ebin/{dest}".format(
+ src = f.path,
+ plugins_dir = plugins_dir.path,
+ lib_name = lib_info.lib_name,
+ dest = f.basename,
+ ),
+ )
+ inputs.extend(lib_info.beam)
+
+ for f in lib_info.priv:
+ p = additional_file_dest_relative_path(plugin.label, f)
+ commands.append(
+ "mkdir -p $(dirname {plugins_dir}/{lib_name}-$PLUGIN_VERSION/{dest}) && cp {src} {plugins_dir}/{lib_name}-$PLUGIN_VERSION/{dest}".format(
+ src = f.path,
+ plugins_dir = plugins_dir.path,
+ lib_name = lib_info.lib_name,
+ dest = p,
+ ),
+ )
+ inputs.extend(lib_info.priv)
+
+ commands.append("")
+
+ ctx.actions.run_shell(
+ inputs = inputs,
+ outputs = [plugins_dir],
+ command = "\n".join(commands),
+ )
+
+ return plugins_dir
+
+def _versioned_rabbitmq_home_impl(ctx):
+ plugins = flat_deps(ctx.attr.plugins)
+
+ erlang_versions = unique_versions(plugins)
+ if len(erlang_versions) > 1:
+ fail("plugins do not have a unified erlang version", erlang_versions)
+
+ scripts = [_copy_script(ctx, script) for script in ctx.files._scripts]
+
+ rabbitmq_ctl_copies = [
+ "rabbitmq-diagnostics",
+ "rabbitmq-plugins",
+ "rabbitmq-queues",
+ "rabbitmq-streams",
+ "rabbitmq-tanzu",
+ "rabbitmq-upgrade",
+ "rabbitmqctl",
+ ]
+ escripts = [link_escript(ctx, escript) for escript in rabbitmq_ctl_copies]
+
+ plugins_dir = _plugins_dir(ctx, plugins)
+
+ rabbitmqctl = None
+ for script in scripts:
+ if script.basename == "rabbitmqctl":
+ rabbitmqctl = script
+ if rabbitmqctl == None:
+        fail("could not find rabbitmqctl among", scripts)
+
+ return [
+ RabbitmqHomeInfo(
+ rabbitmqctl = rabbitmqctl,
+ ),
+ DefaultInfo(
+ files = depset(scripts + escripts + [plugins_dir]),
+ ),
+ ]
+
+versioned_rabbitmq_home = rule(
+ implementation = _versioned_rabbitmq_home_impl,
+ attrs = dict(RABBITMQ_HOME_ATTRS.items() + {
+ "_erlang_home": attr.label(default = "@bazel-erlang//:erlang_home"),
+ }.items()),
+)
diff --git a/elixir_home.bzl b/elixir_home.bzl
new file mode 100644
index 0000000000..8f35fc90ec
--- /dev/null
+++ b/elixir_home.bzl
@@ -0,0 +1,13 @@
+ElixirHomeProvider = provider(
+ fields = ["path"],
+)
+
+def _impl(ctx):
+ return ElixirHomeProvider(path = ctx.build_setting_value)
+
+elixir_home = rule(
+ implementation = _impl,
+ # The next line marks this as a special rule that we can
+ # configure when invoking the cli or via .bazelrc file
+ build_setting = config.string(flag = True),
+)
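Because `elixir_home` is declared as a string build setting, the path can be passed on the command line or pinned in a `.bazelrc`; the value below is only an example derived from the local `iex` binary:

    # One-off: point the build at the local Elixir installation.
    bazelisk build --//:elixir_home="$(dirname "$(dirname "$(which iex)")")/lib/elixir" //...

    # Or make it persistent (see mk/bazel.mk, which generates user.bazelrc):
    #   build --//:elixir_home=/usr/local/lib/elixir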
diff --git a/erlang.mk b/erlang.mk
index fce4be0b0a..7136106bdb 100644
--- a/erlang.mk
+++ b/erlang.mk
@@ -17,7 +17,7 @@
ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
export ERLANG_MK_FILENAME
-ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
+ERLANG_MK_VERSION = 2019.07.01-53-gd80984c
ERLANG_MK_WITHOUT =
# Make 3.81 and 3.82 are deprecated.
@@ -955,10 +955,10 @@ pkg_cr_commit = master
PACKAGES += cuttlefish
pkg_cuttlefish_name = cuttlefish
-pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
-pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
+pkg_cuttlefish_description = cuttlefish configuration abstraction
+pkg_cuttlefish_homepage = https://github.com/Kyorai/cuttlefish
pkg_cuttlefish_fetch = git
-pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
+pkg_cuttlefish_repo = https://github.com/Kyorai/cuttlefish
pkg_cuttlefish_commit = master
PACKAGES += damocles
@@ -2335,7 +2335,7 @@ pkg_jsx_description = An Erlang application for consuming, producing and manipul
pkg_jsx_homepage = https://github.com/talentdeficit/jsx
pkg_jsx_fetch = git
pkg_jsx_repo = https://github.com/talentdeficit/jsx
-pkg_jsx_commit = master
+pkg_jsx_commit = main
PACKAGES += kafka
pkg_kafka_name = kafka
@@ -2823,7 +2823,7 @@ pkg_mysql_description = MySQL client library for Erlang/OTP
pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
pkg_mysql_fetch = git
pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
-pkg_mysql_commit = 1.5.1
+pkg_mysql_commit = 1.7.0
PACKAGES += n2o
pkg_n2o_name = n2o
@@ -4692,8 +4692,19 @@ define dep_autopatch_rebar.erl
case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
{ok, Lock} ->
io:format("~p~n", [Lock]),
- case lists:keyfind("1.1.0", 1, Lock) of
- {_, LockPkgs} ->
+ LockPkgs = case lists:keyfind("1.2.0", 1, Lock) of
+ {_, LP} ->
+ LP;
+ _ ->
+ case lists:keyfind("1.1.0", 1, Lock) of
+ {_, LP} ->
+ LP;
+ _ ->
+ false
+ end
+ end,
+ if
+ is_list(LockPkgs) ->
io:format("~p~n", [LockPkgs]),
case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
{_, {pkg, _, Vsn}, _} ->
@@ -4702,7 +4713,7 @@ define dep_autopatch_rebar.erl
_ ->
false
end;
- _ ->
+ true ->
false
end;
_ ->
@@ -5634,6 +5645,7 @@ $(call comma_list,$(foreach d,$(DEPS),\
{$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
{$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
]}.
+{plugins, [rebar3_hex]}.
{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
endef
@@ -6987,6 +6999,343 @@ apps-eunit: test-build
endif
endif
+# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+HEX_CORE_GIT ?= https://github.com/hexpm/hex_core
+HEX_CORE_COMMIT ?= v0.7.0
+
+PACKAGES += hex_core
+pkg_hex_core_name = hex_core
+pkg_hex_core_description = Reference implementation of Hex specifications
+pkg_hex_core_homepage = $(HEX_CORE_GIT)
+pkg_hex_core_fetch = git
+pkg_hex_core_repo = $(HEX_CORE_GIT)
+pkg_hex_core_commit = $(HEX_CORE_COMMIT)
+
+# We automatically depend on hex_core when the project isn't already.
+$(if $(filter hex_core,$(DEPS) $(BUILD_DEPS) $(DOC_DEPS) $(REL_DEPS) $(TEST_DEPS)),,\
+ $(eval $(call dep_target,hex_core)))
+
+hex-core: $(DEPS_DIR)/hex_core
+ $(verbose) if [ ! -e $(DEPS_DIR)/hex_core/ebin/dep_built ]; then \
+ $(MAKE) -C $(DEPS_DIR)/hex_core IS_DEP=1; \
+ touch $(DEPS_DIR)/hex_core/ebin/dep_built; \
+ fi
+
+# @todo This must also apply to fetching.
+HEX_CONFIG ?=
+
+define hex_config.erl
+ begin
+ Config0 = hex_core:default_config(),
+ Config0$(HEX_CONFIG)
+ end
+endef
+
+define hex_user_create.erl
+ {ok, _} = application:ensure_all_started(ssl),
+ {ok, _} = application:ensure_all_started(inets),
+ Config = $(hex_config.erl),
+ case hex_api_user:create(Config, <<"$(strip $1)">>, <<"$(strip $2)">>, <<"$(strip $3)">>) of
+ {ok, {201, _, #{<<"email">> := Email, <<"url">> := URL, <<"username">> := Username}}} ->
+ io:format("User ~s (~s) created at ~s~n"
+ "Please check your inbox for a confirmation email.~n"
+ "You must confirm before you are allowed to publish packages.~n",
+ [Username, Email, URL]),
+ halt(0);
+ {ok, {Status, _, Errors}} ->
+ io:format("Error ~b: ~0p~n", [Status, Errors]),
+ halt(80)
+ end
+endef
+
+# The $(info ) call inserts a new line after the password prompt.
+hex-user-create: hex-core
+ $(if $(HEX_USERNAME),,$(eval HEX_USERNAME := $(shell read -p "Username: " username; echo $$username)))
+ $(if $(HEX_PASSWORD),,$(eval HEX_PASSWORD := $(shell stty -echo; read -p "Password: " password; stty echo; echo $$password) $(info )))
+ $(if $(HEX_EMAIL),,$(eval HEX_EMAIL := $(shell read -p "Email: " email; echo $$email)))
+ $(gen_verbose) $(call erlang,$(call hex_user_create.erl,$(HEX_USERNAME),$(HEX_PASSWORD),$(HEX_EMAIL)))
+
+define hex_key_add.erl
+ {ok, _} = application:ensure_all_started(ssl),
+ {ok, _} = application:ensure_all_started(inets),
+ Config = $(hex_config.erl),
+ ConfigF = Config#{api_key => iolist_to_binary([<<"Basic ">>, base64:encode(<<"$(strip $1):$(strip $2)">>)])},
+ Permissions = [
+ case string:split(P, <<":">>) of
+ [D] -> #{domain => D};
+ [D, R] -> #{domain => D, resource => R}
+ end
+ || P <- string:split(<<"$(strip $4)">>, <<",">>, all)],
+ case hex_api_key:add(ConfigF, <<"$(strip $3)">>, Permissions) of
+ {ok, {201, _, #{<<"secret">> := Secret}}} ->
+ io:format("Key ~s created for user ~s~nSecret: ~s~n"
+ "Please store the secret in a secure location, such as a password store.~n"
+ "The secret will be requested for most Hex-related operations.~n",
+ [<<"$(strip $3)">>, <<"$(strip $1)">>, Secret]),
+ halt(0);
+ {ok, {Status, _, Errors}} ->
+ io:format("Error ~b: ~0p~n", [Status, Errors]),
+ halt(81)
+ end
+endef
+
+hex-key-add: hex-core
+ $(if $(HEX_USERNAME),,$(eval HEX_USERNAME := $(shell read -p "Username: " username; echo $$username)))
+ $(if $(HEX_PASSWORD),,$(eval HEX_PASSWORD := $(shell stty -echo; read -p "Password: " password; stty echo; echo $$password) $(info )))
+ $(gen_verbose) $(call erlang,$(call hex_key_add.erl,$(HEX_USERNAME),$(HEX_PASSWORD),\
+ $(if $(name),$(name),$(shell hostname)-erlang-mk),\
+ $(if $(perm),$(perm),api)))
+
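Both targets prompt for any credential that is not already set, and every value can be supplied as a make variable instead. Typical invocations (the username, key name and permissions shown are examples):

    # Interactive: prompts for username, password and email.
    make hex-user-create

    # Create an API key named after this CI worker; the password is still prompted for.
    make hex-key-add HEX_USERNAME=alice name=ci-worker perm=api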
+HEX_TARBALL_EXTRA_METADATA ?=
+
+# @todo Check that we can += files
+HEX_TARBALL_FILES ?= \
+ $(wildcard early-plugins.mk) \
+ $(wildcard ebin/$(PROJECT).app) \
+ $(wildcard ebin/$(PROJECT).appup) \
+ $(wildcard $(notdir $(ERLANG_MK_FILENAME))) \
+ $(sort $(call core_find,include/,*.hrl)) \
+ $(wildcard LICENSE*) \
+ $(wildcard Makefile) \
+ $(wildcard plugins.mk) \
+ $(sort $(call core_find,priv/,*)) \
+ $(wildcard README*) \
+ $(wildcard rebar.config) \
+ $(sort $(call core_find,src/,*))
+
+HEX_TARBALL_OUTPUT_FILE ?= $(ERLANG_MK_TMP)/$(PROJECT).tar
+
+# @todo Need to check for rebar.config and/or the absence of DEPS to know
+# whether a project will work with Rebar.
+#
+# @todo contributors licenses links in HEX_TARBALL_EXTRA_METADATA
+
+# In order to build the requirements metadata we look into DEPS.
+# We do not require that the project use Hex dependencies; however,
+# Hex.pm does require that the package name and version numbers
+# correspond to a real Hex package.
+define hex_tarball_create.erl
+ Files0 = [$(call comma_list,$(patsubst %,"%",$(HEX_TARBALL_FILES)))],
+ Requirements0 = #{
+ $(foreach d,$(DEPS),
+ <<"$(if $(subst hex,,$(call query_fetch_method,$d)),$d,$(if $(word 3,$(dep_$d)),$(word 3,$(dep_$d)),$d))">> => #{
+ <<"app">> => <<"$d">>,
+ <<"optional">> => false,
+ <<"requirement">> => <<"$(call query_version,$d)">>
+ },)
+ $(if $(DEPS),dummy => dummy)
+ },
+ Requirements = maps:remove(dummy, Requirements0),
+ Metadata0 = #{
+ app => <<"$(strip $(PROJECT))">>,
+ build_tools => [<<"make">>, <<"rebar3">>],
+ description => <<"$(strip $(PROJECT_DESCRIPTION))">>,
+ files => [unicode:characters_to_binary(F) || F <- Files0],
+ name => <<"$(strip $(PROJECT))">>,
+ requirements => Requirements,
+ version => <<"$(strip $(PROJECT_VERSION))">>
+ },
+ Metadata = Metadata0$(HEX_TARBALL_EXTRA_METADATA),
+ Files = [case file:read_file(F) of
+ {ok, Bin} ->
+ {F, Bin};
+ {error, Reason} ->
+ io:format("Error trying to open file ~0p: ~0p~n", [F, Reason]),
+ halt(82)
+ end || F <- Files0],
+ case hex_tarball:create(Metadata, Files) of
+ {ok, #{tarball := Tarball}} ->
+ ok = file:write_file("$(strip $(HEX_TARBALL_OUTPUT_FILE))", Tarball),
+ halt(0);
+ {error, Reason} ->
+ io:format("Error ~0p~n", [Reason]),
+ halt(83)
+ end
+endef
+
+hex_tar_verbose_0 = @echo " TAR $(notdir $(ERLANG_MK_TMP))/$(@F)";
+hex_tar_verbose_2 = set -x;
+hex_tar_verbose = $(hex_tar_verbose_$(V))
+
+$(HEX_TARBALL_OUTPUT_FILE): hex-core app
+ $(hex_tar_verbose) $(call erlang,$(call hex_tarball_create.erl))
+
+hex-tarball-create: $(HEX_TARBALL_OUTPUT_FILE)
+
+define hex_release_publish_summary.erl
+ {ok, Tarball} = erl_tar:open("$(strip $(HEX_TARBALL_OUTPUT_FILE))", [read]),
+ ok = erl_tar:extract(Tarball, [{cwd, "$(ERLANG_MK_TMP)"}, {files, ["metadata.config"]}]),
+ {ok, Metadata} = file:consult("$(ERLANG_MK_TMP)/metadata.config"),
+ #{
+ <<"name">> := Name,
+ <<"version">> := Version,
+ <<"files">> := Files,
+ <<"requirements">> := Deps
+ } = maps:from_list(Metadata),
+ io:format("Publishing ~s ~s~n Dependencies:~n", [Name, Version]),
+ case Deps of
+ [] ->
+ io:format(" (none)~n");
+ _ ->
+ [begin
+ #{<<"app">> := DA, <<"requirement">> := DR} = maps:from_list(D),
+ io:format(" ~s ~s~n", [DA, DR])
+ end || {_, D} <- Deps]
+ end,
+ io:format(" Included files:~n"),
+ [io:format(" ~s~n", [F]) || F <- Files],
+ io:format("You may also review the contents of the tarball file.~n"
+ "Please enter your secret key to proceed.~n"),
+ halt(0)
+endef
+
+define hex_release_publish.erl
+ {ok, _} = application:ensure_all_started(ssl),
+ {ok, _} = application:ensure_all_started(inets),
+ Config = $(hex_config.erl),
+ ConfigF = Config#{api_key => <<"$(strip $1)">>},
+ {ok, Tarball} = file:read_file("$(strip $(HEX_TARBALL_OUTPUT_FILE))"),
+ case hex_api_release:publish(ConfigF, Tarball, [{replace, $2}]) of
+ {ok, {200, _, #{}}} ->
+ io:format("Release replaced~n"),
+ halt(0);
+ {ok, {201, _, #{}}} ->
+ io:format("Release published~n"),
+ halt(0);
+ {ok, {Status, _, Errors}} ->
+ io:format("Error ~b: ~0p~n", [Status, Errors]),
+ halt(84)
+ end
+endef
+
+hex-release-tarball: hex-core $(HEX_TARBALL_OUTPUT_FILE)
+ $(verbose) $(call erlang,$(call hex_release_publish_summary.erl))
+
+hex-release-publish: hex-core hex-release-tarball
+ $(if $(HEX_SECRET),,$(eval HEX_SECRET := $(shell stty -echo; read -p "Secret: " secret; stty echo; echo $$secret) $(info )))
+ $(gen_verbose) $(call erlang,$(call hex_release_publish.erl,$(HEX_SECRET),false))
+
+hex-release-replace: hex-core hex-release-tarball
+ $(if $(HEX_SECRET),,$(eval HEX_SECRET := $(shell stty -echo; read -p "Secret: " secret; stty echo; echo $$secret) $(info )))
+ $(gen_verbose) $(call erlang,$(call hex_release_publish.erl,$(HEX_SECRET),true))
+
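A typical publishing run chains these targets: build the tarball, review the generated summary, then publish (or replace an already published version). The Hex secret is prompted for when `HEX_SECRET` is unset:

    # Build the package tarball under $(ERLANG_MK_TMP)/<project>.tar.
    make hex-tarball-create

    # Show the release summary and publish it; prompts for the secret.
    make hex-release-publish

    # Overwrite an existing release; HEX_API_KEY is an example variable name.
    make hex-release-replace HEX_SECRET="$HEX_API_KEY"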
+define hex_release_delete.erl
+ {ok, _} = application:ensure_all_started(ssl),
+ {ok, _} = application:ensure_all_started(inets),
+ Config = $(hex_config.erl),
+ ConfigF = Config#{api_key => <<"$(strip $1)">>},
+ case hex_api_release:delete(ConfigF, <<"$(strip $(PROJECT))">>, <<"$(strip $(PROJECT_VERSION))">>) of
+ {ok, {204, _, _}} ->
+ io:format("Release $(strip $(PROJECT_VERSION)) deleted~n"),
+ halt(0);
+ {ok, {Status, _, Errors}} ->
+ io:format("Error ~b: ~0p~n", [Status, Errors]),
+ halt(85)
+ end
+endef
+
+hex-release-delete: hex-core
+ $(if $(HEX_SECRET),,$(eval HEX_SECRET := $(shell stty -echo; read -p "Secret: " secret; stty echo; echo $$secret) $(info )))
+ $(gen_verbose) $(call erlang,$(call hex_release_delete.erl,$(HEX_SECRET)))
+
+define hex_release_retire.erl
+ {ok, _} = application:ensure_all_started(ssl),
+ {ok, _} = application:ensure_all_started(inets),
+ Config = $(hex_config.erl),
+ ConfigF = Config#{api_key => <<"$(strip $1)">>},
+ Params = #{<<"reason">> => <<"$(strip $3)">>, <<"message">> => <<"$(strip $4)">>},
+ case hex_api_release:retire(ConfigF, <<"$(strip $(PROJECT))">>, <<"$(strip $2)">>, Params) of
+ {ok, {204, _, _}} ->
+ io:format("Release $(strip $2) has been retired~n"),
+ halt(0);
+ {ok, {Status, _, Errors}} ->
+ io:format("Error ~b: ~0p~n", [Status, Errors]),
+ halt(86)
+ end
+endef
+
+hex-release-retire: hex-core
+ $(if $(HEX_SECRET),,$(eval HEX_SECRET := $(shell stty -echo; read -p "Secret: " secret; stty echo; echo $$secret) $(info )))
+ $(gen_verbose) $(call erlang,$(call hex_release_retire.erl,$(HEX_SECRET),\
+ $(if $(HEX_VERSION),$(HEX_VERSION),$(PROJECT_VERSION)),\
+ $(if $(HEX_REASON),$(HEX_REASON),invalid),\
+ $(HEX_MESSAGE)))
+
+define hex_release_unretire.erl
+ {ok, _} = application:ensure_all_started(ssl),
+ {ok, _} = application:ensure_all_started(inets),
+ Config = $(hex_config.erl),
+ ConfigF = Config#{api_key => <<"$(strip $1)">>},
+ case hex_api_release:unretire(ConfigF, <<"$(strip $(PROJECT))">>, <<"$(strip $2)">>) of
+ {ok, {204, _, _}} ->
+ io:format("Release $(strip $2) is not retired anymore~n"),
+ halt(0);
+ {ok, {Status, _, Errors}} ->
+ io:format("Error ~b: ~0p~n", [Status, Errors]),
+ halt(87)
+ end
+endef
+
+hex-release-unretire: hex-core
+ $(if $(HEX_SECRET),,$(eval HEX_SECRET := $(shell stty -echo; read -p "Secret: " secret; stty echo; echo $$secret) $(info )))
+ $(gen_verbose) $(call erlang,$(call hex_release_unretire.erl,$(HEX_SECRET),\
+ $(if $(HEX_VERSION),$(HEX_VERSION),$(PROJECT_VERSION))))
+
+HEX_DOCS_DOC_DIR ?= doc/
+HEX_DOCS_TARBALL_FILES ?= $(sort $(call core_find,$(HEX_DOCS_DOC_DIR),*))
+HEX_DOCS_TARBALL_OUTPUT_FILE ?= $(ERLANG_MK_TMP)/$(PROJECT)-docs.tar.gz
+
+$(HEX_DOCS_TARBALL_OUTPUT_FILE): hex-core app docs
+ $(hex_tar_verbose) tar czf $(HEX_DOCS_TARBALL_OUTPUT_FILE) -C $(HEX_DOCS_DOC_DIR) \
+ $(HEX_DOCS_TARBALL_FILES:$(HEX_DOCS_DOC_DIR)%=%)
+
+hex-docs-tarball-create: $(HEX_DOCS_TARBALL_OUTPUT_FILE)
+
+define hex_docs_publish.erl
+ {ok, _} = application:ensure_all_started(ssl),
+ {ok, _} = application:ensure_all_started(inets),
+ Config = $(hex_config.erl),
+ ConfigF = Config#{api_key => <<"$(strip $1)">>},
+ {ok, Tarball} = file:read_file("$(strip $(HEX_DOCS_TARBALL_OUTPUT_FILE))"),
+ case hex_api:post(ConfigF,
+ ["packages", "$(strip $(PROJECT))", "releases", "$(strip $(PROJECT_VERSION))", "docs"],
+ {"application/octet-stream", Tarball}) of
+ {ok, {Status, _, _}} when Status >= 200, Status < 300 ->
+ io:format("Docs published~n"),
+ halt(0);
+ {ok, {Status, _, Errors}} ->
+ io:format("Error ~b: ~0p~n", [Status, Errors]),
+ halt(88)
+ end
+endef
+
+hex-docs-publish: hex-core hex-docs-tarball-create
+ $(if $(HEX_SECRET),,$(eval HEX_SECRET := $(shell stty -echo; read -p "Secret: " secret; stty echo; echo $$secret) $(info )))
+ $(gen_verbose) $(call erlang,$(call hex_docs_publish.erl,$(HEX_SECRET)))
+
+define hex_docs_delete.erl
+ {ok, _} = application:ensure_all_started(ssl),
+ {ok, _} = application:ensure_all_started(inets),
+ Config = $(hex_config.erl),
+ ConfigF = Config#{api_key => <<"$(strip $1)">>},
+ case hex_api:delete(ConfigF,
+ ["packages", "$(strip $(PROJECT))", "releases", "$(strip $2)", "docs"]) of
+ {ok, {Status, _, _}} when Status >= 200, Status < 300 ->
+ io:format("Docs removed~n"),
+ halt(0);
+ {ok, {Status, _, Errors}} ->
+ io:format("Error ~b: ~0p~n", [Status, Errors]),
+ halt(89)
+ end
+endef
+
+hex-docs-delete: hex-core
+ $(if $(HEX_SECRET),,$(eval HEX_SECRET := $(shell stty -echo; read -p "Secret: " secret; stty echo; echo $$secret) $(info )))
+ $(gen_verbose) $(call erlang,$(call hex_docs_delete.erl,$(HEX_SECRET),\
+ $(if $(HEX_VERSION),$(HEX_VERSION),$(PROJECT_VERSION))))
+
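Documentation follows the same pattern: a gzipped tarball is built from `HEX_DOCS_DOC_DIR` and pushed to, or removed from, Hex.pm for a given version:

    # Build doc/ (the docs target), tar it and publish it.
    make hex-docs-tarball-create
    make hex-docs-publish

    # Remove the docs of a specific, already published version (example version).
    make hex-docs-delete HEX_VERSION=1.2.3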
# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
# This file is part of erlang.mk and subject to the terms of the ISC License.
diff --git a/erlang_ls.config b/erlang_ls.config
new file mode 100644
index 0000000000..e1a0870802
--- /dev/null
+++ b/erlang_ls.config
@@ -0,0 +1,28 @@
+# vim: ft=yaml
+# https://erlang-ls.github.io/configuration/
+# otp_path: "/path/to/otp/lib/erlang"
+deps_dirs:
+ - "deps/*"
+ - "deps/rabbit/apps/*"
+diagnostics:
+ # disabled:
+ # - bound_var_in_pattern
+ enabled:
+ - crossref
+ - dialyzer
+ - compiler
+ # - elvis
+include_dirs:
+ - "deps"
+ - "deps/*/include"
+# lenses:
+# enabled:
+# - ct-run-test
+# - show-behaviour-usages
+# disabled: []
+# macros:
+# - name: DEFINED_WITH_VALUE
+# value: 42
+# code_reload:
+# node: rabbit@localhost
+plt_path: .rabbitmq_server_release.plt
diff --git a/mk/bazel.mk b/mk/bazel.mk
new file mode 100644
index 0000000000..816dca9f09
--- /dev/null
+++ b/mk/bazel.mk
@@ -0,0 +1,42 @@
+BAZELISK ?= /usr/local/bin/bazelisk
+ifeq (darwin,$(PLATFORM))
+$(BAZELISK):
+ brew install bazelisk
+else
+$(BAZELISK):
+ $(error Install bazelisk for your platform: https://github.com/bazelbuild/bazelisk)
+endif
+
+define USER_BAZELRC
+build --@bazel-erlang//:erlang_home=$(shell dirname $$(dirname $$(which erl)))
+build --@bazel-erlang//:erlang_version=$(shell erl -eval '{ok, Version} = file:read_file(filename:join([code:root_dir(), "releases", erlang:system_info(otp_release), "OTP_VERSION"])), io:fwrite(Version), halt().' -noshell)
+build --//:elixir_home=$(shell dirname $$(dirname $$(which iex)))/lib/elixir
+
+# rabbitmqctl wait shells out to 'ps', which is broken in the bazel macOS
+# sandbox (https://github.com/bazelbuild/bazel/issues/7448)
+# adding "--spawn_strategy=local" to the invocation is a workaround
+build --spawn_strategy=local
+
+build --incompatible_strict_action_env
+
+# run one test at a time on the local machine
+build --test_strategy=exclusive
+
+# don't re-run flakes automatically on the local machine
+build --flaky_test_attempts=1
+
+build:buildbuddy --remote_header=x-buildbuddy-api-key=YOUR_API_KEY
+endef
+
+user.bazelrc: export USER_BAZELRC
+user.bazelrc:
+ echo "$$USER_BAZELRC" > $@
+
+bazel-test: $(BAZELISK) | user.bazelrc
+ifeq ($(DEP),)
+ $(error DEP must be set to the dependency that this test is for, e.g. deps/rabbit)
+endif
+ifeq ($(SUITE),)
+ $(error SUITE must be set to the ct suite to run, e.g. queue_type if DEP=deps/rabbit)
+endif
+ $(BAZELISK) test //deps/$(notdir $(DEP)):$(SUITE)_SUITE
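The target is deliberately strict: it refuses to run unless both `DEP` and `SUITE` are set. The values below mirror the examples given in the error messages:

    # Generate user.bazelrc pointing at the local Erlang/Elixir installations.
    make user.bazelrc

    # Run a single common test suite under Bazel.
    make bazel-test DEP=deps/rabbit SUITE=queue_type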
diff --git a/mk/github-actions.mk b/mk/github-actions.mk
index 6a9989808b..58c1e76d63 100644
--- a/mk/github-actions.mk
+++ b/mk/github-actions.mk
@@ -1,8 +1,3 @@
-YTT ?= /usr/local/bin/ytt
-
-$(YTT):
- $(error Please install ytt from https://get-ytt.io/)
-
VENDORED_COMPONENTS = rabbit_common \
rabbit \
amqp_client \
@@ -47,112 +42,9 @@ VENDORED_COMPONENTS = rabbit_common \
rabbitmq_web_stomp \
rabbitmq_web_stomp_examples
-DEPS_YAML_FILE = workflow_sources/deps.yml
-
-define dep_yaml_chunk
-$(eval SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(wildcard deps/$(1)/test/*_SUITE.erl)))))
-echo -n "\n- name: $(1)\n suites:$(if $(SUITES),$(foreach suite,$(SUITES),\n - name: $(suite)\n time: 0), [])" >> $(DEPS_YAML_FILE);
-endef
-
-$(DEPS_YAML_FILE):
- @echo -n "#@data/values\n---\n#@overlay/match missing_ok=True\ndeps:" > $@
- @$(foreach dep,$(VENDORED_COMPONENTS),$(call dep_yaml_chunk,$(dep)))
- @cat $@ | git stripspace > $@.fixed && mv $@.fixed $@
-
-ERLANG_VERSIONS = 22.3 23.1
-ERLANG_VERSIONS_YAML1 = [$(foreach v,$(ERLANG_VERSIONS),,"$(v)")]
-UNFIXED := [,"
-ERLANG_VERSIONS_YAML2 = $(subst $(UNFIXED),[",$(ERLANG_VERSIONS_YAML1))
-
-.github/workflows/base-images.yaml: $(YTT) $(wildcard workflow_sources/base_image/*)
- ytt -f workflow_sources/base_image \
- -f workflow_sources/base_values.yml \
- --data-value-yaml erlang_versions='$(ERLANG_VERSIONS_YAML2)' \
- --output-files /tmp
- cat /tmp/workflow.yml | sed s/a_magic_string_that_we_will_sed_to_on/on/ \
- > $@
-
-.github/workflows/test-erlang-otp-%.yaml: $(YTT) $(DEPS_YAML_FILE) $(wildcard workflow_sources/test/*)
- ytt -f workflow_sources/test \
- -f workflow_sources/base_values.yml \
- -f $(DEPS_YAML_FILE) \
- --data-value-yaml erlang_versions='$(ERLANG_VERSIONS_YAML2)' \
- --data-value erlang_version=$* \
- --output-files /tmp
- cat /tmp/workflow.yml | sed s/a_magic_string_that_we_will_sed_to_on/on/ \
- > $@
-
-monorepo-actions: \
- .github/workflows/base-images.yaml \
- $(foreach v,$(ERLANG_VERSIONS), .github/workflows/test-erlang-otp-$(v).yaml)
-
-DOCKER_REPO ?= eu.gcr.io/cf-rabbitmq-core
-
-CI_BASE_IMAGES = $(foreach v,$(ERLANG_VERSIONS),ci-base-image-$(v))
-.PHONY: $(CI_BASE_IMAGES)
-$(CI_BASE_IMAGES):
- docker build . \
- -f ci/dockerfiles/$(subst ci-base-image-,,$@)/base \
- -t $(DOCKER_REPO)/ci-base:$(subst ci-base-image-,,$@)
-
-.PHONY: ci-base-images
-ci-base-images: $(CI_BASE_IMAGES)
-
-PUSHES = $(foreach v,$(ERLANG_VERSIONS),push-base-image-$(v))
-.PHONY: $(PUSHES)
-$(PUSHES):
- docker push $(DOCKER_REPO)/ci-base:$(subst push-base-image-,,$@)
-
-.PHONY: push-base-images
-push-base-images: $(PUSHES)
-
-LOCAL_CI_GOALS = $(foreach dep,$(filter-out rabbitmq_cli,$(VENDORED_COMPONENTS)),ci-$(dep))
-ERLANG_VERSION ?= 23.1
-SKIP_DIALYZE ?= False
-
-TAG = erlang-$(ERLANG_VERSION)-rabbitmq-$(shell git rev-parse HEAD)$(shell git diff-index --quiet HEAD -- || echo -dirty)
-LOCAL_IMAGE = $(DOCKER_REPO)/ci:$(TAG)
-
-.PHONY: local-ci-image
-local-ci-image:
- docker build . \
- -f ci/dockerfiles/ci \
- -t $(LOCAL_IMAGE) \
- --build-arg ERLANG_VERSION=$(ERLANG_VERSION) \
- --build-arg GITHUB_RUN_ID=none \
- --build-arg BUILDEVENT_APIKEY=$(BUILDEVENT_APIKEY) \
- --build-arg GITHUB_SHA=$$(git rev-parse HEAD) \
- --build-arg base_rmq_ref=master \
- --build-arg current_rmq_ref=$$(git rev-parse --abbrev-ref HEAD) \
- --build-arg RABBITMQ_VERSION=3.9.0
-
-.PHONY: $(LOCAL_CI_GOALS)
-$(LOCAL_CI_GOALS): local-ci-image
- docker run --rm \
- --env project=$(subst ci-,,$@) \
- --env SKIP_DIALYZE=$(SKIP_DIALYZE) \
- --env GITHUB_RUN_ID=none \
- --env BUILDEVENT_APIKEY=$(BUILDEVENT_APIKEY) \
- --env STEP_START=$$(date +%s) \
- --volume /tmp/ct-logs:/ct-logs \
- --oom-score-adj -500 \
- $(LOCAL_IMAGE) \
- /workspace/rabbitmq/ci/scripts/tests.sh
-
-ci-rabbitmq_cli: local-ci-image
- docker run --rm \
- --env project=$(subst ci-,,$@) \
- --env SKIP_DIALYZE=$(SKIP_DIALYZE) \
- --env GITHUB_RUN_ID=none \
- --env BUILDEVENT_APIKEY=$(BUILDEVENT_APIKEY) \
- --env STEP_START=$$(date +%s) \
- --volume /tmp/broker-logs:/broker-logs \
- $(LOCAL_IMAGE) \
- /workspace/rabbitmq/ci/scripts/rabbitmq_cli.sh
+.PHONY: distclean-%
+distclean-%:
+ $(MAKE) -C deps/$* distclean || echo "Failed to distclean $*"
-.PHONY: docker
-docker: local-ci-image
- docker run --rm -it \
- --oom-score-adj -500 \
- $(LOCAL_IMAGE) \
- /bin/bash
+.PHONY: monorepo-distclean
+monorepo-distclean: $(foreach dep,$(VENDORED_COMPONENTS),distclean-$(dep))
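With the ytt-generated workflow targets gone, this file now only carries per-component clean-up helpers:

    # Distclean a single vendored component (the name is the deps/ subdirectory).
    make distclean-rabbit

    # Distclean every component listed in VENDORED_COMPONENTS.
    make monorepo-distclean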
diff --git a/mk/rabbitmq-mix.mk b/mk/rabbitmq-mix.mk
index d946a819b6..c6f73163e0 100644
--- a/mk/rabbitmq-mix.mk
+++ b/mk/rabbitmq-mix.mk
@@ -13,9 +13,9 @@ HEX_OFFLINE := 1
override MIX_HOME := $(DEPS_DIR)/.mix
-# In addition to `$MIX_HOME`, we still have to set `$HOME` which is used
-# to find `~/.hex` where the Hex.pm cache and packages are stored.
+# In addition to `$MIX_HOME`, we still have to set `$HEX_HOME` which is used to
+# find `~/.hex` where the Hex.pm cache and packages are stored.
-override HOME := $(DEPS_DIR)
+override HEX_HOME := $(DEPS_DIR)/.hex
-export HEX_OFFLINE MIX_HOME HOME
+export HEX_OFFLINE MIX_HOME HEX_HOME
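The effect of the override can be reproduced from a shell; with `HEX_HOME` pointing inside the deps directory, Hex keeps its cache out of the real home directory (paths are illustrative):

    # Mirror the makefile's environment by hand.
    export HEX_OFFLINE=1
    export MIX_HOME="$PWD/deps/.mix"
    export HEX_HOME="$PWD/deps/.hex"
    mix deps.get   # Hex looks in $HEX_HOME instead of ~/.hex and stays offline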
diff --git a/mk/stats.mk b/mk/stats.mk
deleted file mode 100644
index 1f4b59214f..0000000000
--- a/mk/stats.mk
+++ /dev/null
@@ -1,17 +0,0 @@
-# Used & tested on macOS
-
-DOCKER := /usr/local/bin/docker
-
-define STATS_CONTAINER
-$(DOCKER) run \
- --interactive --tty --rm \
- --volume rabbitmq_releases:/home/node \
- node bash
-endef
-.PHONY: stats
-stats:
- $(verbose) $(STATS_CONTAINER) -c "cd /home/node && npm install rels && node_modules/.bin/rels --repo rabbitmq/rabbitmq-server --all"
-
-.PHONY: stats_interactive
-stats_interactive:
- $(verbose) $(STATS_CONTAINER)
diff --git a/packaging/Makefile b/packaging/Makefile
index ab656cfaab..619ad522e7 100644
--- a/packaging/Makefile
+++ b/packaging/Makefile
@@ -1,46 +1,13 @@
-# Platform detection.
-
-ifeq ($(PLATFORM),)
-UNAME_S := $(shell uname -s)
-
-ifeq ($(UNAME_S),Linux)
-PLATFORM = linux
-else ifeq ($(UNAME_S),Darwin)
-PLATFORM = darwin
-else ifeq ($(UNAME_S),SunOS)
-PLATFORM = solaris
-else ifeq ($(UNAME_S),GNU)
-PLATFORM = gnu
-else ifeq ($(UNAME_S),FreeBSD)
-PLATFORM = freebsd
-else ifeq ($(UNAME_S),NetBSD)
-PLATFORM = netbsd
-else ifeq ($(UNAME_S),OpenBSD)
-PLATFORM = openbsd
-else ifeq ($(UNAME_S),DragonFly)
-PLATFORM = dragonfly
-else ifeq ($(shell uname -o),Msys)
-PLATFORM = msys2
-else
-$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
-endif
-endif
-
-all: packages
+all: package-generic-unix
@:
# --------------------------------------------------------------------
# Packaging.
# --------------------------------------------------------------------
-.PHONY: packages package-deb \
- package-rpm \
- package-rpm-redhat package-rpm-fedora \
- package-rpm-rhel6 package-rpm-rhel7 package-rpm-rhel8 \
- package-rpm-suse package-rpm-opensuse package-rpm-sles11 \
- package-windows \
- package-generic-unix \
- docker-image
+.PHONY: package-generic-unix \
+ docker-image \
+ docker-image-push
PACKAGES_DIR ?= ../PACKAGES
SOURCE_DIST_FILE ?= $(wildcard $(PACKAGES_DIR)/rabbitmq-server-*.tar.xz)
@@ -68,53 +35,18 @@ VARS = SOURCE_DIST_FILE="$(abspath $(SOURCE_DIST_FILE))" \
PACKAGES_DIR="$(abspath $(PACKAGES_DIR))" \
SIGNING_KEY="$(SIGNING_KEY)"
-packages: package-deb package-rpm package-windows package-generic-unix
- @:
-
-package-deb: $(SOURCE_DIST_FILE)
- $(gen_verbose) $(MAKE) -C debs/Debian $(VARS) all $(DO_CLEAN)
-
-package-rpm: package-rpm-redhat package-rpm-suse
- @:
-
-# FIXME: Why not package-rpm-fedora?
-package-rpm-redhat: package-rpm-rhel6 package-rpm-rhel7 package-rpm-rhel8
- @:
-
-package-rpm-fedora: $(SOURCE_DIST_FILE)
- $(gen_verbose) $(MAKE) -C RPMS/Fedora $(VARS) all $(DO_CLEAN)
-
-package-rpm-rhel6: $(SOURCE_DIST_FILE)
- $(gen_verbose) $(MAKE) -C RPMS/Fedora $(VARS) RPM_OS=rhel6 all $(DO_CLEAN)
-
-package-rpm-rhel7: $(SOURCE_DIST_FILE)
- $(gen_verbose) $(MAKE) -C RPMS/Fedora $(VARS) RPM_OS=rhel7 all $(DO_CLEAN)
-
-package-rpm-rhel8: $(SOURCE_DIST_FILE)
- $(gen_verbose) $(MAKE) -C RPMS/Fedora $(VARS) RPM_OS=rhel8 all $(DO_CLEAN)
-
-package-rpm-suse: package-rpm-opensuse package-rpm-sles11
- @:
-
-package-rpm-opensuse: $(SOURCE_DIST_FILE)
- $(gen_verbose) $(MAKE) -C RPMS/Fedora $(VARS) RPM_OS=opensuse all $(DO_CLEAN)
-
-package-rpm-sles11: $(SOURCE_DIST_FILE)
- $(gen_verbose) $(MAKE) -C RPMS/Fedora $(VARS) RPM_OS=sles11 all $(DO_CLEAN)
-
-package-windows: $(SOURCE_DIST_FILE)
- $(gen_verbose) $(MAKE) -C windows $(VARS) all $(DO_CLEAN)
- $(verbose) $(MAKE) -C windows-exe $(VARS) all $(DO_CLEAN)
-
package-generic-unix: $(SOURCE_DIST_FILE)
$(gen_verbose) $(MAKE) -C generic-unix $(VARS) all $(DO_CLEAN)
-docker-image: package-generic-unix
+docker-image:
$(gen_verbose) $(MAKE) -C docker-image $(VARS) all $(DO_CLEAN)
+docker-image-push:
+ $(gen_verbose) $(MAKE) -C docker-image $(VARS) push $(DO_CLEAN)
+
.PHONY: clean
clean:
- for subdir in debs/Debian RPMS/Fedora windows windows-exe generic-unix docker-image; do \
+ for subdir in generic-unix docker-image; do \
$(MAKE) -C "$$subdir" clean; \
done
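The slimmed-down Makefile now covers only the generic-unix archive and the OCI image. A typical local run, with `SOURCE_DIST_FILE` picked up from `../PACKAGES` by default:

    # Build the generic-unix package from the source archive in PACKAGES/.
    make -C packaging package-generic-unix

    # Build the Docker image, then push it to the configured registry.
    make -C packaging docker-image
    make -C packaging docker-image-push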
diff --git a/packaging/RPMS/Fedora/Makefile b/packaging/RPMS/Fedora/Makefile
deleted file mode 100644
index 475ec63ad5..0000000000
--- a/packaging/RPMS/Fedora/Makefile
+++ /dev/null
@@ -1,171 +0,0 @@
-SOURCE_DIST_FILE ?= $(wildcard ../../../rabbitmq-server-*.tar.xz)
-
-ifneq ($(filter-out clean,$(MAKECMDGOALS)),)
-ifeq ($(SOURCE_DIST_FILE),)
-$(error Cannot find source archive; please specify SOURCE_DIST_FILE)
-endif
-ifneq ($(words $(SOURCE_DIST_FILE)),1)
-$(error Multiple source archives found; please specify SOURCE_DIST_FILE)
-endif
-endif
-
-VERSION ?= $(patsubst rabbitmq-server-%.tar.xz,%,$(notdir $(SOURCE_DIST_FILE)))
-ifeq ($(VERSION),)
-$(error Cannot determine version; please specify VERSION)
-endif
-
-# $(RPM_VERSION) doesn't include the package revision: this one is only
-# set in the .spec file.
-RPM_VERSION = $(subst -,~,$(VERSION))
-RPM_ORIG_TARBALL = rabbitmq-server-$(VERSION).tar.xz
-
-TOP_DIR = $(shell pwd)
-# Under debian we do not want to check build dependencies, since that
-# only checks build-dependencies using rpms, not debs
-DEFINES = --define 'upstream_version $(VERSION)' \
- --define '_topdir $(TOP_DIR)' --define '_tmppath $(TOP_DIR)/tmp' \
- --define '_sysconfdir /etc' --define '_localstatedir /var'
-
-RPM_OS ?= fedora
-DO_SIGN ?= yes
-
-ifeq "$(RPM_OS)" "sles11"
-FUNCTION_LIBRARY=
-REQUIRES=/sbin/chkconfig /sbin/service
-OS_DEFINES=--define '_initrddir /etc/init.d' --define 'dist .sles11' --define 'suse_version 1012'
-SPEC_DEFINES=--define 'group_tag Productivity/Networking/Other'
-START_PROG=startproc
-DO_SIGN=no
-else ifeq "$(RPM_OS)" "opensuse"
-FUNCTION_LIBRARY=
-REQUIRES=/sbin/chkconfig /sbin/service
-OS_DEFINES=--define '_unitdir /usr/lib/systemd/system' --define 'dist .suse' --define 'suse_version 1315'
-SPEC_DEFINES=--define 'group_tag Productivity/Networking/Other'
-START_PROG=startproc
-else
-FUNCTION_LIBRARY=\# Source function library.\n. /etc/init.d/functions
-REQUIRES=chkconfig initscripts
-OS_DEFINES=--define '_initrddir /etc/rc.d/init.d'
-ifeq "$(RPM_OS)" "rhel6"
-SPEC_DEFINES=--define 'group_tag Development/Libraries' --define 'dist .el6' --define 'rhel 6'
-else ifeq "$(RPM_OS)" "rhel7"
-SPEC_DEFINES=--define 'group_tag Development/Libraries' --define '_unitdir /usr/lib/systemd/system' --define 'dist .el7' --define 'rhel 7'
-else ifeq "$(RPM_OS)" "rhel8"
-SPEC_DEFINES=--define 'group_tag Development/Libraries' --define '_unitdir /usr/lib/systemd/system' --define 'dist .el8' --define 'rhel 8'
-else
-SPEC_DEFINES=--define 'group_tag Development/Libraries'
-endif
-START_PROG=daemon
-endif
-
-# Package signing.
-#
-# At least the key ID is mandatory ($(SIGNING_KEY)). If it's set, we
-# call `rpm --addsign`, otherwise we do nothing.
-#
-# To maintain backward compatibility, the caller can also specify
-# $(KEYSDIR) or $(GNUPG_PATH) and we set GNUPGHOME accordingly.
-
-ifneq ($(KEYSDIR),)
- GNUPGHOME = $(KEYSDIR)/keyring/.gnupg
- export GNUPGHOME
-endif
-ifneq ($(GNUPG_PATH),)
- GNUPGHOME = $(GNUPG_PATH)/.gnupg
- export GNUPGHOME
-endif
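Signing in this removed RPM pipeline was driven entirely by make variables; a hypothetical invocation that built and signed rhel8 packages with a local GnuPG keyring (file names and key ID are examples):

    make -C packaging/RPMS/Fedora all \
        SOURCE_DIST_FILE=../../../rabbitmq-server-3.8.9.tar.xz \
        RPM_OS=rhel8 \
        SIGNING_KEY=0x1234ABCD \
        GNUPG_PATH=/path/to/keys    # GNUPGHOME becomes $(GNUPG_PATH)/.gnupg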
-
-unexport DEPS_DIR
-unexport ERL_LIBS
-MAKEOVERRIDES =
-
-.PHONY: all package clean
-
-all: package
- @:
-
-package: clean
-# Log Erlang version.
- @echo
- @echo '--------------------------------------------------'
- @echo "Erlang and Elixir versions used to compile:"
- @elixir --version
- @echo '--------------------------------------------------'
- @echo
-# If a signing key ID was specified, verify that the key is available
-# before starting a possibly long build. At the same time, display some
-# useful information about the key so the caller can double-check if they
-# want.
-ifneq ($(SIGNING_KEY),)
-ifeq ($(DO_SIGN),yes)
- @echo
- @echo '--------------------------------------------------'
- @echo "The package will be signed with key $(SIGNING_KEY):"
- @gpg -K "$(SIGNING_KEY)"
- @echo '--------------------------------------------------'
- @echo
-else
- @echo
- @echo '--------------------------------------------------'
- @echo "Packages of type '$(RPM_OS)' are not signed because"
- @echo "the signature can't be verified on the target OS"
- @echo '--------------------------------------------------'
- @echo
-endif
-endif
-# Create the build tree and populate it.
- mkdir -p BUILD SOURCES SPECS SRPMS RPMS tmp
- cp $(SOURCE_DIST_FILE) SOURCES/$(RPM_ORIG_TARBALL)
- cp rabbitmq-server.spec SPECS/rabbitmq-server.spec.in
- cp rabbitmq-server.service SOURCES/rabbitmq-server.service
- cp rabbitmq-server.tmpfiles SOURCES/rabbitmq-server.tmpfiles
- sed \
- -e 's|^START_PROG=.*$$|START_PROG="$(START_PROG)"|' \
- -e 's|^@FUNCTION_LIBRARY@|$(FUNCTION_LIBRARY)|' \
- < rabbitmq-server.init \
- > SOURCES/rabbitmq-server.init
- cp rabbitmq-server.logrotate SOURCES/rabbitmq-server.logrotate
-# Possibly update the changelog inside the spec (in the created build
-# directory):
-# - if it contains an entry for the specified version, do nothing;
-# - otherwise, prepend a generated entry using "1" as the package
-# revision.
- scripts/update-changelog.sh SPECS/rabbitmq-server.spec.in "$(RPM_VERSION)"
- sed -e 's|%%VERSION%%|$(RPM_VERSION)|;s|%%REQUIRES%%|$(REQUIRES)|' \
- < SPECS/rabbitmq-server.spec.in \
- > SPECS/rabbitmq-server.spec
-# Finally build the package!
- rpmbuild -ba --nodeps SPECS/rabbitmq-server.spec $(DEFINES) \
- $(OS_DEFINES) $(SPEC_DEFINES)
-# Before we remove the build tree, save the possibly updated spec file
-# (with its regenerated changelog entry) back to its original location, if
-# the caller asks for it. They are then responsible for committing it.
-ifeq ($(SAVE_CHANGELOG),yes)
- mv SPECS/rabbitmq-server.spec.in rabbitmq-server.spec
-endif
- rm -rf SOURCES SPECS BUILD tmp
-# If a PGP key is specified, sign source and binary packages. We start
-# rpm(1) in a new session and redirect its stdin so that it doesn't
-# prompt for a passphrase.
-# https://ask.fedoraproject.org/en/question/56107/can-gpg-agent-be-used-when-signing-rpm-packages/?answer=76395#post-id-76395
-ifeq ($(DO_SIGN),yes)
-ifneq ($(SIGNING_KEY),)
- setsid \
- rpm --addsign \
- --define '_signature gpg' \
- --define '_gpg_name $(SIGNING_KEY)' \
- SRPMS/*-$(RPM_VERSION)*.rpm \
- RPMS/noarch/*-$(RPM_VERSION)*.rpm \
- < /dev/null
-endif
-endif
-# If $(PACKAGES_DIR) is specified, move all package files to that
-# location.
-ifneq ($(PACKAGES_DIR),)
- mkdir -p "$(PACKAGES_DIR)"
- mv SRPMS/*-$(RPM_VERSION)*.rpm RPMS/noarch/*-$(RPM_VERSION)*.rpm \
- "$(PACKAGES_DIR)"
-endif
-
-clean:
- rm -rf SOURCES SPECS RPMS SRPMS BUILD tmp
diff --git a/packaging/RPMS/Fedora/rabbitmq-server.init b/packaging/RPMS/Fedora/rabbitmq-server.init
deleted file mode 100644
index 0b848ff6d9..0000000000
--- a/packaging/RPMS/Fedora/rabbitmq-server.init
+++ /dev/null
@@ -1,192 +0,0 @@
-#!/bin/sh
-#
-# rabbitmq-server RabbitMQ broker
-#
-# chkconfig: - 80 05
-# description: RabbitMQ is a multi-protocol messaging broker
-#
-
-### BEGIN INIT INFO
-# Provides: rabbitmq-server
-# Required-Start: $remote_fs $network
-# Required-Stop: $remote_fs $network
-# Default-Start: 3 5
-# Default-Stop: 0 1 2 6
-# Description: RabbitMQ broker
-# Short-Description: RabbitMQ is a multi-protocol messaging broker
-### END INIT INFO
-
-@FUNCTION_LIBRARY@
-
-PATH=/sbin:/usr/sbin:/bin:/usr/bin
-NAME=rabbitmq-server
-DAEMON=/usr/sbin/${NAME}
-CONTROL=/usr/sbin/rabbitmqctl
-DESC=rabbitmq-server
-USER=rabbitmq
-PID_FILE=/var/run/rabbitmq/pid
-RABBITMQ_STARTUP_TIMEOUT=600
-
-START_PROG= # Set when building package
-LOCK_FILE=/var/lock/subsys/$NAME
-
-test -x $DAEMON || exit 0
-test -x $CONTROL || exit 0
-
-RETVAL=0
-set -e
-
-[ -f /etc/default/${NAME} ] && . /etc/default/${NAME}
-
-# $RABBITMQ_SERVER_CONSOLE_OUTPUT_DIR is a directory where rabbitmq-server(8)
-# console output (both stdout and stderr) is redirected. It defaults to
-# /var/log/rabbitmq which is configured by the package already (i.e. the
-# directory is created and its ownership is set).
-#
-# We still look at the value of $RABBITMQ_LOG_BASE, possibly set in
-# /etc/default/rabbitmq-server for backward compatibility. But we use a
-# specific variable name here (instead of $RABBITMQ_LOG_BASE) to avoid any
-# confusion with RabbitMQ's own logging configuration. Indeed, the console
-# output redirection is a responsibility of the package, not RabbitMQ itself.
-: ${RABBITMQ_SERVER_CONSOLE_OUTPUT_DIR:=${RABBITMQ_LOG_BASE:-/var/log/rabbitmq}}
-
-ensure_pid_dir () {
- PID_DIR=`dirname ${PID_FILE}`
- if [ ! -d ${PID_DIR} ] ; then
- mkdir -p ${PID_DIR}
- chown -R ${USER}:${USER} ${PID_DIR}
- chmod 755 ${PID_DIR}
- fi
-}
-
-remove_pid () {
- rm -f ${PID_FILE}
- rmdir `dirname ${PID_FILE}` || :
-}
-
-start_rabbitmq () {
- status_rabbitmq quiet
- if [ $RETVAL = 0 ] ; then
- echo RabbitMQ is currently running
- else
- RETVAL=0
- ensure_pid_dir
- set +e
- RABBITMQ_PID_FILE=$PID_FILE $START_PROG $DAEMON \
- > "${RABBITMQ_SERVER_CONSOLE_OUTPUT_DIR}/startup_log" \
- 2> "${RABBITMQ_SERVER_CONSOLE_OUTPUT_DIR}/startup_err" \
- 0<&- &
- $CONTROL wait --timeout $RABBITMQ_STARTUP_TIMEOUT $PID_FILE >/dev/null 2>&1
- RETVAL=$?
- set -e
- case "$RETVAL" in
- 0)
- echo SUCCESS
- if [ -n "$LOCK_FILE" ] ; then
- touch $LOCK_FILE
- fi
- ;;
- *)
- remove_pid
- echo FAILED - check ${RABBITMQ_SERVER_CONSOLE_OUTPUT_DIR}/startup_\{log, _err\}
- RETVAL=1
- ;;
- esac
- fi
-}
-
-stop_rabbitmq () {
- status_rabbitmq quiet
- if [ $RETVAL = 0 ] ; then
- set +e
- $CONTROL stop ${PID_FILE} \
- > ${RABBITMQ_SERVER_CONSOLE_OUTPUT_DIR}/shutdown_log \
- 2> ${RABBITMQ_SERVER_CONSOLE_OUTPUT_DIR}/shutdown_err
- RETVAL=$?
- set -e
- if [ $RETVAL = 0 ] ; then
- remove_pid
- if [ -n "$LOCK_FILE" ] ; then
- rm -f $LOCK_FILE
- fi
- else
- echo FAILED - check ${RABBITMQ_SERVER_CONSOLE_OUTPUT_DIR}/shutdown_\{log, _err\}
- fi
- else
- echo RabbitMQ is not running
- RETVAL=0
- fi
-}
-
-status_rabbitmq() {
- set +e
- if [ "$1" != "quiet" ] ; then
- $CONTROL status 2>&1
- else
- $CONTROL status > /dev/null 2>&1
- fi
- if [ $? != 0 ] ; then
- RETVAL=3
- fi
- set -e
-}
-
-rotate_logs_rabbitmq() {
- set +e
- $CONTROL rotate_logs
- if [ $? != 0 ] ; then
- RETVAL=1
- fi
- set -e
-}
-
-restart_running_rabbitmq () {
- status_rabbitmq quiet
- if [ $RETVAL = 0 ] ; then
- restart_rabbitmq
- else
-        echo RabbitMQ is not running
- RETVAL=0
- fi
-}
-
-restart_rabbitmq() {
- stop_rabbitmq
- start_rabbitmq
-}
-
-case "$1" in
- start)
- echo -n "Starting $DESC: "
- start_rabbitmq
- echo "$NAME."
- ;;
- stop)
- echo -n "Stopping $DESC: "
- stop_rabbitmq
- echo "$NAME."
- ;;
- status)
- status_rabbitmq
- ;;
- rotate-logs)
- echo -n "Rotating log files for $DESC: "
- rotate_logs_rabbitmq
- ;;
- force-reload|reload|restart)
- echo -n "Restarting $DESC: "
- restart_rabbitmq
- echo "$NAME."
- ;;
- try-restart)
- echo -n "Restarting $DESC: "
- restart_running_rabbitmq
- echo "$NAME."
- ;;
- *)
- echo "Usage: $0 {start|stop|status|rotate-logs|restart|condrestart|try-restart|reload|force-reload}" >&2
- RETVAL=1
- ;;
-esac
-
-exit $RETVAL
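Note how start_rabbitmq backgrounds the daemon and then blocks on "rabbitmqctl wait" until the node finishes booting or the 600-second timeout expires. The same call is useful in ad-hoc scripts that need to know when a freshly started node is actually usable; a sketch, run as root or as the rabbitmq user:

  #!/bin/sh
  # Block until the node behind the pid file has fully booted.
  PID_FILE=/var/run/rabbitmq/pid
  if rabbitmqctl wait --timeout 600 "$PID_FILE" >/dev/null 2>&1; then
      echo "RabbitMQ node is up"
  else
      echo "RabbitMQ node did not boot within 600s" >&2
      exit 1
  fi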
diff --git a/packaging/RPMS/Fedora/rabbitmq-server.logrotate b/packaging/RPMS/Fedora/rabbitmq-server.logrotate
deleted file mode 100644
index 21a9fc0b84..0000000000
--- a/packaging/RPMS/Fedora/rabbitmq-server.logrotate
+++ /dev/null
@@ -1,7 +0,0 @@
-/var/log/rabbitmq/*.log {
- weekly
- missingok
- rotate 20
- compress
- notifempty
-}
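This puts RabbitMQ's file logs on a weekly rotation with 20 compressed generations kept. Once the snippet is installed under /etc/logrotate.d, logrotate's debug mode shows what it would do without touching any files:

  # Dry run: print the actions logrotate would take for this config.
  logrotate -d /etc/logrotate.d/rabbitmq-server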
diff --git a/packaging/RPMS/Fedora/rabbitmq-server.service b/packaging/RPMS/Fedora/rabbitmq-server.service
deleted file mode 100644
index 4002a6517e..0000000000
--- a/packaging/RPMS/Fedora/rabbitmq-server.service
+++ /dev/null
@@ -1,41 +0,0 @@
-[Unit]
-Description=RabbitMQ broker
-After=syslog.target network.target
-
-[Service]
-Type=notify
-User=rabbitmq
-Group=rabbitmq
-UMask=0027
-NotifyAccess=all
-TimeoutStartSec=600
-
-# To override LimitNOFILE, create the following file:
-#
-# /etc/systemd/system/rabbitmq-server.service.d/limits.conf
-#
-# with the following content:
-#
-# [Service]
-# LimitNOFILE=65536
-
-LimitNOFILE=32768
-
-# Note: systemd on CentOS 7 complains about in-line comments,
-# so only append them here
-#
-# Restart:
-# The following setting will automatically restart RabbitMQ
-# in the event of a failure. systemd service restarts are not a
-# replacement for service monitoring. Please see
-# https://www.rabbitmq.com/monitoring.html
-Restart=on-failure
-RestartSec=10
-WorkingDirectory=/var/lib/rabbitmq
-ExecStart=/usr/sbin/rabbitmq-server
-ExecStop=/usr/sbin/rabbitmqctl shutdown
-# See rabbitmq/rabbitmq-server-release#51
-SuccessExitStatus=69
-
-[Install]
-WantedBy=multi-user.target
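The comment block in this unit points at a drop-in as the supported way to raise LimitNOFILE instead of editing the packaged file. A sketch of the full sequence, run as root, using the 65536 value from the comment purely as an example:

  # Create the drop-in described in the unit file, then make it effective.
  mkdir -p /etc/systemd/system/rabbitmq-server.service.d
  printf '[Service]\nLimitNOFILE=65536\n' \
      > /etc/systemd/system/rabbitmq-server.service.d/limits.conf
  systemctl daemon-reload
  systemctl restart rabbitmq-server
  # Confirm the override is picked up:
  systemctl show rabbitmq-server -p LimitNOFILE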
diff --git a/packaging/RPMS/Fedora/rabbitmq-server.spec b/packaging/RPMS/Fedora/rabbitmq-server.spec
deleted file mode 100644
index 48bf1b1f2b..0000000000
--- a/packaging/RPMS/Fedora/rabbitmq-server.spec
+++ /dev/null
@@ -1,520 +0,0 @@
-%define debug_package %{nil}
-%define erlang_minver 22.3
-
-Name: rabbitmq-server
-Version: %%VERSION%%
-Release: 1%{?dist}
-License: MPLv2.0 and MIT and ASL 2.0 and BSD
-Group: %{group_tag}
-Source: https://www.rabbitmq.com/releases/rabbitmq-server/v%{upstream_version}/%{name}-%{upstream_version}.tar.xz
-Source1: rabbitmq-server.init
-Source2: rabbitmq-server.logrotate
-Source3: rabbitmq-server.service
-Source4: rabbitmq-server.tmpfiles
-URL: https://www.rabbitmq.com/
-BuildArch: noarch
-BuildRequires: erlang >= %{erlang_minver}
-BuildRequires: elixir
-BuildRequires: gzip, sed, zip, rsync
-
-%if 0%{?fedora} || 0%{?rhel} >= 7 || 0%{?suse_version} >= 1315
-BuildRequires: systemd
-%endif
-
-Requires: erlang >= %{erlang_minver}
-Requires: logrotate, socat
-BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-%{_arch}-root
-Summary: The RabbitMQ server
-
-%if 0%{?fedora} || 0%{?rhel} >= 7 || 0%{?suse_version} >= 1315
-Requires(pre): systemd
-Requires(post): systemd
-Requires(preun): systemd
-%else
-Requires(post): %%REQUIRES%%
-Requires(pre): %%REQUIRES%%
-%endif
-
-%description
-RabbitMQ is an open source multi-protocol messaging broker.
-
-# We want to install into /usr/lib, even on 64-bit platforms
-%define _rabbit_libdir %{_exec_prefix}/lib/rabbitmq
-%define _rabbit_erllibdir %{_rabbit_libdir}/lib/rabbitmq_server-%{upstream_version}
-%define _rabbit_server_ocf scripts/rabbitmq-server.ocf
-%define _plugins_state_dir %{_localstatedir}/lib/rabbitmq/plugins
-%define _rabbit_server_ha_ocf scripts/rabbitmq-server-ha.ocf
-%define _rabbitmqctl_autocomplete scripts/rabbitmqctl-autocomplete.sh
-%define _rabbitmq_user rabbitmq
-%define _rabbitmq_group rabbitmq
-
-
-%define _maindir %{buildroot}%{_rabbit_erllibdir}
-
-
-%prep
-%setup -q -n %{name}-%{upstream_version}
-
-%build
-cp -a deps/rabbit/docs/README-for-packages %{_builddir}/rabbitmq-server-%{upstream_version}/README
-env -u DEPS_DIR make dist manpages
-
-%install
-rm -rf %{buildroot}
-
-env -u DEPS_DIR make install install-bin install-man DESTDIR=%{buildroot} PREFIX=%{_exec_prefix} RMQ_ROOTDIR=%{_rabbit_libdir} RMQ_ERLAPP_DIR=%{_rabbit_erllibdir} MANDIR=%{_mandir}
-
-mkdir -p %{buildroot}%{_localstatedir}/lib/rabbitmq/mnesia
-mkdir -p %{buildroot}%{_localstatedir}/log/rabbitmq
-
-#Copy all necessary lib files etc.
-
-%if 0%{?fedora} || 0%{?rhel} >= 7 || 0%{?suse_version} >= 1315
-install -p -D -m 0644 %{S:3} %{buildroot}%{_unitdir}/%{name}.service
-%else
-install -p -D -m 0755 %{S:1} %{buildroot}%{_initrddir}/rabbitmq-server
-%endif
-
-install -p -D -m 0755 %{_rabbit_server_ocf} %{buildroot}%{_exec_prefix}/lib/ocf/resource.d/rabbitmq/rabbitmq-server
-install -p -D -m 0755 %{_rabbit_server_ha_ocf} %{buildroot}%{_exec_prefix}/lib/ocf/resource.d/rabbitmq/rabbitmq-server-ha
-install -p -D -m 0644 %{S:2} %{buildroot}%{_sysconfdir}/logrotate.d/rabbitmq-server
-
-install -p -D -m 0755 %{_rabbitmqctl_autocomplete} %{buildroot}%{_sysconfdir}/profile.d/rabbitmqctl-autocomplete.sh
-install -p -D -m 0755 scripts/zsh_autocomplete.sh %{buildroot}%{_datarootdir}/zsh/vendor-functions/_enable_rabbitmqctl_completion
-
-
-mkdir -p %{buildroot}%{_sysconfdir}/rabbitmq
-
-mkdir -p %{buildroot}%{_sbindir}
-sed \
- -e 's|@RABBITMQ_USER@|%{_rabbitmq_user}|' -e 's|@RABBITMQ_GROUP@|%{_rabbitmq_group}|' \
- < scripts/rabbitmq-script-wrapper \
- > %{buildroot}%{_sbindir}/rabbitmqctl
-chmod 0755 %{buildroot}%{_sbindir}/rabbitmqctl
-for script in rabbitmq-server rabbitmq-plugins rabbitmq-diagnostics rabbitmq-queues rabbitmq-upgrade; do \
- cp -a %{buildroot}%{_sbindir}/rabbitmqctl \
- %{buildroot}%{_sbindir}/$script; \
-done
-
-%if 0%{?fedora} > 14 || 0%{?rhel} >= 7 || 0%{?suse_version} >= 1315
-install -D -p -m 0644 %{SOURCE4} %{buildroot}%{_prefix}/lib/tmpfiles.d/%{name}.conf
-%endif
-
-rm %{_maindir}/LICENSE* %{_maindir}/INSTALL
-
-#Build the list of files
-echo '%defattr(-,root,root, -)' >%{_builddir}/%{name}.files
-find %{buildroot} -path %{buildroot}%{_sysconfdir} -prune -o '!' -type d -printf "/%%P\n" >>%{_builddir}/%{name}.files
-find %{buildroot} -path "*%{_initrddir}*" -type f -printf "/%%P\n" >>%{_builddir}/%{name}.files
-
-%pre
-
-# If the log directory exists, record its permissions so we can restore
-# them after an upgrade. The goal is to set the permissions to 0750 on a
-# fresh install but to keep permissions set by the user or a different
-# default from a previous package.
-if test -d /var/log/rabbitmq; then
- stat --format '%a' /var/log/rabbitmq > /var/log/rabbitmq/permissions
-fi
-
-if [ "$1" = 2 ]; then
- # Upgrade:
- # Stop previous instance of rabbitmq-server. But before doing this, we
- # also record if the service is running: it is used again in %post to
- # restart the service.
- if %{_sbindir}/rabbitmqctl status >/dev/null 2>&1; then
- touch %{_localstatedir}/lib/rabbitmq/rabbitmq-running-before-upgrade
- else
- rm -f %{_localstatedir}/lib/rabbitmq/rabbitmq-running-before-upgrade
- fi
-
-%if 0%{?fedora} || 0%{?rhel} >= 7 || 0%{?suse_version} >= 1315
- systemctl stop rabbitmq-server
-%else
- /sbin/service rabbitmq-server stop
-%endif
-fi
-
-# create rabbitmq group
-if ! getent group rabbitmq >/dev/null; then
- groupadd -r rabbitmq
-fi
-
-# create rabbitmq user
-if ! getent passwd rabbitmq >/dev/null; then
- useradd -r -g rabbitmq -d %{_localstatedir}/lib/rabbitmq -s /sbin/nologin rabbitmq \
- -c "RabbitMQ messaging server"
-fi
-
-%post
-
-%if 0%{?fedora} || 0%{?rhel} >= 7 || 0%{?suse_version} >= 1315
-# %%systemd_post %%{name}.service
-# manual expansion of systemd_post as this doesn't appear to
-# expand correctly on debian machines
-if [ $1 -eq 1 ] ; then
- # Initial installation
- systemctl preset %{name}.service >/dev/null 2>&1 || :
-fi
-systemctl daemon-reload
-%else
-/sbin/chkconfig --add %{name}
-%endif
-
-chmod -R o-rwx,g-w %{_localstatedir}/lib/rabbitmq/mnesia
-chgrp rabbitmq %{_sysconfdir}/rabbitmq
-
-# Restore permissions saved during %pre. See comment in %pre for the
-# reason behind this.
-if test -f /var/log/rabbitmq/permissions; then
- chmod "$(cat /var/log/rabbitmq/permissions)" /var/log/rabbitmq
- rm -f /var/log/rabbitmq/permissions
-fi
-
-if [ "$1" = 2 ] ; then
- # Upgrade:
- # Restart the service if it was running before the upgrade.
- if [ -f %{_localstatedir}/lib/rabbitmq/rabbitmq-running-before-upgrade ]; then
-%if 0%{?fedora} || 0%{?rhel} >= 7 || 0%{?suse_version} >= 1315
- # %%systemd_postun_with_restart %%{name}.service
- # manual expansion of systemd_postun_with_restart as this doesn't appear to
- # expand correctly on debian machines
- systemctl restart %{name}.service >/dev/null 2>&1 || :
-%else
- /sbin/service %{name} restart
-%endif
- rm -f %{_localstatedir}/lib/rabbitmq/rabbitmq-running-before-upgrade
- fi
-fi
-
-if [ -n "$ZSH_VERSION" ]; then
- echo "Z Shell detected.
-to enable rabbitmqctl autocompletion add the following to your .zshrc file:
-autoload _enable_rabbitmqctl_completion; _enable_rabbitmqctl_completion"
-fi
-
-%preun
-
-if [ $1 = 0 ]; then
- #Complete uninstall
-%if 0%{?fedora} || 0%{?rhel} >= 7 || 0%{?suse_version} >= 1315
- systemctl stop rabbitmq-server
-%else
- /sbin/service rabbitmq-server stop
- /sbin/chkconfig --del rabbitmq-server
-%endif
-
- # We do not remove /var/log and /var/lib directories
- # Leave rabbitmq user and group
-fi
-
-# Clean out plugin activation state, both on uninstall and upgrade
-rm -rf %{_plugins_state_dir}
-for ext in rel script boot ; do
- rm -f %{_rabbit_erllibdir}/ebin/rabbit.$ext
-done
-
-%postun
-
-%if 0%{?fedora} > 17 || 0%{?rhel} >= 7 || 0%{?suse_version} >= 1315
-# For versions older than this, do a conversion
-# from sysv to systemd
-%triggerun -- %{name} < 3.6.5
-# Save the current service runlevel info
-# User must manually run systemd-sysv-convert --apply rabbitmq-server
-# to migrate them to systemd targets
-systemd-sysv-convert --save %{name} >/dev/null 2>&1 ||:
-
-# Run these because the SysV package being removed won't do them
-/sbin/chkconfig --del %{name} >/dev/null 2>&1 || :
-systemctl try-restart %{name}.service >/dev/null 2>&1 || :
-%endif
-
-%files -f ../%{name}.files
-%defattr(-,root,root,-)
-%attr(0755, rabbitmq, rabbitmq) %dir %{_localstatedir}/lib/rabbitmq
-%attr(0750, rabbitmq, rabbitmq) %dir %{_localstatedir}/lib/rabbitmq/mnesia
-%attr(0755, rabbitmq, rabbitmq) %dir %{_localstatedir}/log/rabbitmq
-%attr(2755, -, rabbitmq) %dir %{_sysconfdir}/rabbitmq
-
-%{_sysconfdir}/profile.d/rabbitmqctl-autocomplete.sh
-%{_datarootdir}/zsh/vendor-functions/_enable_rabbitmqctl_completion
-
-%config(noreplace) %{_sysconfdir}/logrotate.d/rabbitmq-server
-%doc LICENSE*
-%doc README
-%doc deps/rabbit/docs/set_rabbitmq_policy.sh.example
-
-%clean
-rm -rf %{buildroot}
-
-%changelog
-* Sun Sep 8 2019 info@rabbitmq.com 3.8.0~rc.1-1
-- New upstream release.
-
-* Tue Aug 20 2019 info@rabbitmq.com 3.8.0~beta.7-1
-- New upstream release.
-
-* Sun Aug 4 2019 info@rabbitmq.com 3.8.0~beta.6-1
-- New upstream release.
-
-* Tue Jul 9 2019 info@rabbitmq.com 3.8.0~beta.5-1
-- New upstream release.
-
-* Fri May 10 2019 info@rabbitmq.com 3.8.0~beta.4-1
-- New upstream release.
-
-* Wed Mar 6 2019 info@rabbitmq.com 3.8.0~beta.3-1
-- New upstream release.
-
-* Thu Jan 31 2019 info@rabbitmq.com 3.8.0~beta.2-1
-- New upstream release.
-
-* Thu Nov 8 2018 info@rabbitmq.com 3.8.0~beta.1-1
-- New upstream release.
-
-* Wed Dec 13 2017 info@rabbitmq.com 3.7.1~beta.1-1
-- New upstream release.
-
-* Wed Nov 29 2017 info@rabbitmq.com 3.7.0-1
-- New upstream release.
-
-* Thu Nov 16 2017 info@rabbitmq.com 3.7.0~rc.2-1
-- New upstream release.
-
-* Mon Sep 18 2017 info@rabbitmq.com 3.7.0~rc.1-1
-- New upstream release.
-
-* Tue Sep 12 2017 info@rabbitmq.com 3.7.0~beta.20-1
-- New upstream release.
-
-* Mon Sep 11 2017 michael@rabbitmq.com 3.6.12-1
-- New Upstream Release
-
-* Wed Aug 16 2017 michael@rabbitmq.com 3.6.11-1
-- New Upstream Release
-
-* Thu May 25 2017 michael@rabbitmq.com 3.6.10-1
-- New Upstream Release
-
-* Wed Mar 29 2017 michael@rabbitmq.com 3.6.9-1
-- New Upstream Release
-
-* Fri Mar 17 2017 michael@rabbitmq.com 3.6.8-1
-- New Upstream Release
-
-* Wed Mar 15 2017 michael@rabbitmq.com 3.6.7-1
-- New Upstream Release
-
-* Mon Nov 21 2016 michael@rabbitmq.com 3.6.6-1
-- New Upstream Release
-
-* Fri Aug 5 2016 michael@rabbitmq.com 3.6.5-1
-- New Upstream Release
-
-* Fri Jul 29 2016 michael@rabbitmq.com 3.6.4-1
-- New Upstream Release
-
-* Wed Jul 6 2016 michael@rabbitmq.com 3.6.3-1
-- New Upstream Release
-
-* Thu May 19 2016 michael@rabbitmq.com 3.6.2-1
-- New Upstream Release
-
-* Tue Mar 1 2016 michael@rabbitmq.com 3.6.1-1
-- New Upstream Release
-
-* Tue Dec 22 2015 michael@rabbitmq.com 3.6.0-1
-- New Upstream Release
-
-* Tue Dec 15 2015 michael@rabbitmq.com 3.5.7-1
-- New Upstream Release
-
-* Wed Oct 7 2015 michael@rabbitmq.com 3.5.6-1
-- New Upstream Release
-
-* Thu Sep 24 2015 jean-sebastien@rabbitmq.com 3.5.5-3
-- Fix bashism in rabbitmq-script-wrapper
-
-* Thu Sep 24 2015 jean-sebastien@rabbitmq.com 3.5.5-1
-- New Upstream Release
-
-* Tue Jul 21 2015 michael@rabbitmq.com 3.5.4-1
-- New Upstream Release
-
-* Fri May 22 2015 jean-sebastien@rabbitmq.com 3.5.3-1
-- New Upstream Release
-
-* Tue May 12 2015 jean-sebastien@rabbitmq.com 3.5.2-1
-- New Upstream Release
-
-* Thu Apr 2 2015 michael@rabbitmq.com 3.5.1-1
-- New Upstream Release
-
-* Wed Mar 11 2015 jean-sebastien@rabbitmq.com 3.5.0-1
-- New Upstream Release
-
-* Wed Feb 11 2015 michael@rabbitmq.com 3.4.4-1
-- New Upstream Release
-
-* Tue Jan 6 2015 jean-sebastien@rabbitmq.com 3.4.3-1
-- New Upstream Release
-
-* Wed Nov 26 2014 simon@rabbitmq.com 3.4.2-1
-- New Upstream Release
-
-* Wed Oct 29 2014 simon@rabbitmq.com 3.4.1-1
-- New Upstream Release
-
-* Tue Oct 21 2014 simon@rabbitmq.com 3.4.0-1
-- New Upstream Release
-
-* Mon Aug 11 2014 simon@rabbitmq.com 3.3.5-1
-- New Upstream Release
-
-* Tue Jun 24 2014 simon@rabbitmq.com 3.3.4-1
-- New Upstream Release
-
-* Mon Jun 16 2014 simon@rabbitmq.com 3.3.3-1
-- New Upstream Release
-
-* Mon Jun 9 2014 simon@rabbitmq.com 3.3.2-1
-- New Upstream Release
-
-* Tue Apr 29 2014 simon@rabbitmq.com 3.3.1-1
-- New Upstream Release
-
-* Wed Apr 2 2014 simon@rabbitmq.com 3.3.0-1
-- New Upstream Release
-
-* Mon Mar 3 2014 simon@rabbitmq.com 3.2.4-1
-- New Upstream Release
-
-* Thu Jan 23 2014 emile@rabbitmq.com 3.2.3-1
-- New Upstream Release
-
-* Tue Dec 10 2013 emile@rabbitmq.com 3.2.2-1
-- New Upstream Release
-
-* Wed Oct 23 2013 emile@rabbitmq.com 3.2.0-1
-- New Upstream Release
-
-* Thu Aug 15 2013 simon@rabbitmq.com 3.1.5-1
-- New Upstream Release
-
-* Tue Jun 25 2013 tim@rabbitmq.com 3.1.3-1
-- New Upstream Release
-
-* Mon Jun 24 2013 tim@rabbitmq.com 3.1.2-1
-- New Upstream Release
-
-* Mon May 20 2013 tim@rabbitmq.com 3.1.1-1
-- Test release
-
-* Wed May 1 2013 simon@rabbitmq.com 3.1.0-1
-- New Upstream Release
-
-* Tue Dec 11 2012 simon@rabbitmq.com 3.0.1-1
-- New Upstream Release
-
-* Fri Nov 16 2012 simon@rabbitmq.com 3.0.0-1
-- New Upstream Release
-
-* Fri Dec 16 2011 steve@rabbitmq.com 2.7.1-1
-- New Upstream Release
-
-* Tue Nov 8 2011 steve@rabbitmq.com 2.7.0-1
-- New Upstream Release
-
-* Fri Sep 9 2011 tim@rabbitmq.com 2.6.1-1
-- New Upstream Release
-
-* Fri Aug 26 2011 tim@rabbitmq.com 2.6.0-1
-- New Upstream Release
-
-* Mon Jun 27 2011 simon@rabbitmq.com 2.5.1-1
-- New Upstream Release
-
-* Thu Jun 9 2011 jerryk@vmware.com 2.5.0-1
-- New Upstream Release
-
-* Thu Apr 7 2011 Alexandru Scvortov <alexandru@rabbitmq.com> 2.4.1-1
-- New Upstream Release
-
-* Tue Mar 22 2011 Alexandru Scvortov <alexandru@rabbitmq.com> 2.4.0-1
-- New Upstream Release
-
-* Thu Feb 3 2011 simon@rabbitmq.com 2.3.1-1
-- New Upstream Release
-
-* Tue Feb 1 2011 simon@rabbitmq.com 2.3.0-1
-- New Upstream Release
-
-* Mon Nov 29 2010 rob@rabbitmq.com 2.2.0-1
-- New Upstream Release
-
-* Tue Oct 19 2010 vlad@rabbitmq.com 2.1.1-1
-- New Upstream Release
-
-* Tue Sep 14 2010 marek@rabbitmq.com 2.1.0-1
-- New Upstream Release
-
-* Mon Aug 23 2010 mikeb@rabbitmq.com 2.0.0-1
-- New Upstream Release
-
-* Wed Jul 14 2010 Emile Joubert <emile@rabbitmq.com> 1.8.1-1
-- New Upstream Release
-
-* Tue Jun 15 2010 Matthew Sackman <matthew@rabbitmq.com> 1.8.0-1
-- New Upstream Release
-
-* Mon Feb 15 2010 Matthew Sackman <matthew@lshift.net> 1.7.2-1
-- New Upstream Release
-
-* Fri Jan 22 2010 Matthew Sackman <matthew@lshift.net> 1.7.1-1
-- New Upstream Release
-
-* Mon Oct 5 2009 David Wragg <dpw@lshift.net> 1.7.0-1
-- New upstream release
-
-* Wed Jun 17 2009 Matthias Radestock <matthias@lshift.net> 1.6.0-1
-- New upstream release
-
-* Tue May 19 2009 Matthias Radestock <matthias@lshift.net> 1.5.5-1
-- Maintenance release for the 1.5.x series
-
-* Mon Apr 6 2009 Matthias Radestock <matthias@lshift.net> 1.5.4-1
-- Maintenance release for the 1.5.x series
-
-* Tue Feb 24 2009 Tony Garnock-Jones <tonyg@lshift.net> 1.5.3-1
-- Maintenance release for the 1.5.x series
-
-* Mon Feb 23 2009 Tony Garnock-Jones <tonyg@lshift.net> 1.5.2-1
-- Maintenance release for the 1.5.x series
-
-* Mon Jan 19 2009 Ben Hood <0x6e6562@gmail.com> 1.5.1-1
-- Maintenance release for the 1.5.x series
-
-* Wed Dec 17 2008 Matthias Radestock <matthias@lshift.net> 1.5.0-1
-- New upstream release
-
-* Thu Jul 24 2008 Tony Garnock-Jones <tonyg@lshift.net> 1.4.0-1
-- New upstream release
-
-* Mon Mar 3 2008 Adrien Pierard <adrian@lshift.net> 1.3.0-1
-- New upstream release
-
-* Wed Sep 26 2007 Simon MacMullen <simon@lshift.net> 1.2.0-1
-- New upstream release
-
-* Wed Aug 29 2007 Simon MacMullen <simon@lshift.net> 1.1.1-1
-- New upstream release
-
-* Mon Jul 30 2007 Simon MacMullen <simon@lshift.net> 1.1.0-1.alpha
-- New upstream release
-
-* Tue Jun 12 2007 Hubert Plociniczak <hubert@lshift.net> 1.0.0-1.20070607
-- Building from source tarball, added starting script, stopping
-
-* Mon May 21 2007 Hubert Plociniczak <hubert@lshift.net> 1.0.0-1.alpha
-- Initial build of server library of RabbitMQ package
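The scriptlets in this spec branch on the argument rpm passes in: in %pre/%post it is 1 for a fresh install and 2 for an upgrade, while in %preun/%postun 0 means the package is being removed entirely and 1 means an upgrade. The installed scriptlets can be inspected with "rpm -q --scripts rabbitmq-server". A tiny stand-in showing the same branching, useful when reading such scriptlets outside a spec file:

  #!/bin/sh
  # rpm invokes scriptlets with the remaining instance count as $1.
  case "$1" in
      0) echo "complete removal (%preun/%postun)" ;;
      1) echo "fresh install (%pre/%post), or upgrade step in %preun/%postun" ;;
      2) echo "upgrade (%pre/%post)" ;;
      *) echo "unexpected scriptlet argument: $1" >&2; exit 1 ;;
  esac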
diff --git a/packaging/RPMS/Fedora/rabbitmq-server.tmpfiles b/packaging/RPMS/Fedora/rabbitmq-server.tmpfiles
deleted file mode 100644
index c2681827e0..0000000000
--- a/packaging/RPMS/Fedora/rabbitmq-server.tmpfiles
+++ /dev/null
@@ -1 +0,0 @@
-D /var/run/rabbitmq 0755 rabbitmq rabbitmq -
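The single D line asks systemd-tmpfiles to create /var/run/rabbitmq with the listed owner, group and mode. After the spec installs it under /usr/lib/tmpfiles.d, it can be applied immediately, without a reboot, along these lines:

  # Process the installed snippet right away and check the result.
  systemd-tmpfiles --create /usr/lib/tmpfiles.d/rabbitmq-server.conf
  ls -ld /var/run/rabbitmq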
diff --git a/packaging/RPMS/Fedora/scripts/compare-rpm-versions.py b/packaging/RPMS/Fedora/scripts/compare-rpm-versions.py
deleted file mode 100755
index 874af0281b..0000000000
--- a/packaging/RPMS/Fedora/scripts/compare-rpm-versions.py
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/env python2
-
-import os.path
-import rpm
-import sys
-
-if len(sys.argv) != 3:
- print('Syntax: %s <version a> <version b>' % (os.path.basename(sys.argv[0])))
- sys.exit(64)
-
-a = sys.argv[1]
-b = sys.argv[2]
-
-def parse_rpm_version(v):
- splitted = v.split(':', 1)
- try:
- epoch = splitted[0]
- v = splitted[1]
- except IndexError:
- epoch = '0'
-
- splitted = v.split('-', 1)
- version = splitted[0]
- try:
- release = splitted[1]
- except IndexError:
- release = ''
-
- return (epoch, version, release)
-
-a_parsed = parse_rpm_version(a)
-b_parsed = parse_rpm_version(b)
-
-vc = rpm.labelCompare(a_parsed, b_parsed)
-
-if vc > 0:
- print('%s < %s' % (b, a))
-elif vc == 0:
- print('%s = %s' % (a, b))
-elif vc < 0:
- print('%s < %s' % (a, b))
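The script above splits an epoch:version-release string into the (epoch, version, release) tuple that rpm.labelCompare() expects and prints the resulting ordering; it needs the Python 2 rpm bindings it imports. An example invocation from the packaging directory, using two versions that appear in the changelog above; rpm treats '~' as sorting lower, so the release candidate should be reported as the older of the two:

  ./scripts/compare-rpm-versions.py 3.8.0~rc.1-1 3.8.0-1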
diff --git a/packaging/RPMS/Fedora/scripts/format-package-version b/packaging/RPMS/Fedora/scripts/format-package-version
deleted file mode 100755
index 9239b53af9..0000000000
--- a/packaging/RPMS/Fedora/scripts/format-package-version
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/bin/sh
-# vim:sw=2:et:
-
-set -e
-
-VERSION=$1
-
-echo "$VERSION" | sed -E -e 's/-/~/g'
diff --git a/packaging/RPMS/Fedora/scripts/parse-changelog.sh b/packaging/RPMS/Fedora/scripts/parse-changelog.sh
deleted file mode 100755
index 35eeddcee2..0000000000
--- a/packaging/RPMS/Fedora/scripts/parse-changelog.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/sh
-
-set -e
-
-awk '
-/^%changelog/ {
- in_changelog = 1;
- next;
-}
-{
- if (in_changelog) {
- print;
- }
-}' "$@"
diff --git a/packaging/RPMS/Fedora/scripts/update-changelog.sh b/packaging/RPMS/Fedora/scripts/update-changelog.sh
deleted file mode 100755
index 52043dc69a..0000000000
--- a/packaging/RPMS/Fedora/scripts/update-changelog.sh
+++ /dev/null
@@ -1,56 +0,0 @@
-#!/bin/sh
-# vim:sw=2:et:
-
-set -e
-
-case $# in
- 1)
- SPEC=$(dirname "$0")/../rabbitmq-server.spec
- PACKAGE_VERSION=$1
- ;;
- 2)
- SPEC=$1
- PACKAGE_VERSION=$2
- ;;
-esac
-
-SCRIPT=$(basename "$0")
-SCRIPTS_DIR=$(dirname "$0")
-
-if test -z "$SPEC" -o ! -f "$SPEC" -o -z "$PACKAGE_VERSION"; then
- echo "Syntax: $SCRIPT [<spec file>] <rpm version>" 1>&2
- exit 64
-fi
-
-if "$SCRIPTS_DIR/parse-changelog.sh" "$SPEC" | \
- grep -E -q "^\*.+ ${PACKAGE_VERSION}-[^ ]+$"; then
- exit 0
-fi
-
-CHANGELOG_PKG_REV=1
-CHANGELOG_EMAIL='info@rabbitmq.com'
-CHANGELOG_COMMENT='New upstream release.'
-
-awk "
-/^Release:/ {
- if (!release_modified) {
- release = \$0;
- sub(/[0-9]+/, \"${CHANGELOG_PKG_REV}\", release);
- print release;
- release_modified = 1;
- next;
- }
-}
-/^%changelog/ {
- print;
- print \"* $(date +'%a %b %-d %Y') ${CHANGELOG_EMAIL} ${PACKAGE_VERSION}-${CHANGELOG_PKG_REV}\";
- print \"- ${CHANGELOG_COMMENT}\";
- print \"\";
- next;
-}
-{
- print;
-}
-" < "$SPEC" > "$SPEC.updated"
-
-mv "$SPEC.updated" "$SPEC"
diff --git a/packaging/debs/Debian/.gitignore b/packaging/debs/Debian/.gitignore
deleted file mode 100644
index 6a4aec11b5..0000000000
--- a/packaging/debs/Debian/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-/debian/postrm
-/debian/stamp-makefile-build
-/rabbitmq-server_*
diff --git a/packaging/debs/Debian/Makefile b/packaging/debs/Debian/Makefile
deleted file mode 100644
index 010293fc65..0000000000
--- a/packaging/debs/Debian/Makefile
+++ /dev/null
@@ -1,128 +0,0 @@
-SOURCE_DIST_FILE ?= $(wildcard ../../../rabbitmq-server-*.tar.xz)
-
-ifneq ($(filter-out clean,$(MAKECMDGOALS)),)
-ifeq ($(SOURCE_DIST_FILE),)
-$(error Cannot find source archive; please specify SOURCE_DIST_FILE)
-endif
-ifneq ($(words $(SOURCE_DIST_FILE)),1)
-$(error Multiple source archives found; please specify SOURCE_DIST_FILE)
-endif
-endif
-
-VERSION ?= $(patsubst rabbitmq-server-%.tar.xz,%,$(notdir $(SOURCE_DIST_FILE)))
-ifeq ($(VERSION),)
-$(error Cannot determine version; please specify VERSION)
-endif
-
-# $(DEBIAN_VERSION) doesn't include the package revision: this one is
-# only set in debian/changelog.
-DEBIAN_VERSION = $(subst -,~,$(VERSION))
-DEBIAN_ORIG_TARBALL = rabbitmq-server_$(DEBIAN_VERSION).orig.tar.xz
-UNPACKED_DIR = $(patsubst %.tar.xz,%,$(notdir $(SOURCE_DIST_FILE)))
-
-DEB_HOST_ARCH = $(shell dpkg-architecture -qDEB_HOST_ARCH)
-CHANGES_FILE = rabbitmq-server_$(DEBIAN_VERSION)-*_$(DEB_HOST_ARCH).changes
-
-# Package signing.
-#
-# At least the key ID is mandatory ($(SIGNING_KEY)). If it's set, we
-# enable signing in dpkg-build-package(1), otherwise we ask for an
-# unsigned package.
-#
-# To maintain backward compatibility, the caller can also specify
-# $(KEYSDIR) or $(GNUPG_PATH) and we set GNUPGHOME accordingly.
-
-ifneq ($(SIGNING_KEY),)
- SIGNING_FLAG = -k$(SIGNING_KEY)
-ifneq ($(KEYSDIR),)
- GNUPGHOME = $(KEYSDIR)/keyring/.gnupg
- export GNUPGHOME
-endif
-ifneq ($(GNUPG_PATH),)
- GNUPGHOME = $(GNUPG_PATH)/.gnupg
- export GNUPGHOME
-endif
-else
- SIGNING_FLAG = -us -uc
-endif
-
-unexport DEPS_DIR
-unexport ERL_LIBS
-MAKEOVERRIDES =
-
-.PHONY: all package clean
-
-all: package
- @:
-
-package: clean
-# If a signing key ID was specified, verify that the key is available
-# before starting a possibly long build. At the same time, display some
-# useful information about the key so the caller can double-check it if
-# they want.
-ifneq ($(SIGNING_KEY),)
- @echo
- @echo '--------------------------------------------------'
- @echo "The package will be signed with key $(SIGNING_KEY):"
- @gpg -K "$(SIGNING_KEY)"
- @echo '--------------------------------------------------'
- @echo
-endif
-# Because we are creating a source package as well, Debian expects the
-# source archive to have a specially formatted name. Copy the original
-# archive to a correctly named file.
- cp -a "$(SOURCE_DIST_FILE)" "$(DEBIAN_ORIG_TARBALL)"
-# Prepare the source directory: we extract the source archive and copy
-# the debian/ subdirectory.
- xzcat "$(DEBIAN_ORIG_TARBALL)" | tar -xf -
- cp -a debian "$(UNPACKED_DIR)/debian"
-ifeq ($(INSTALL_BUILD_DEPS),yes)
-# Install build dependencies. To help us, we use mk-build-deps(1) from
-# the devscripts package.
-#
-# We ignore errors from the first `dpkg -i` because the command will
-# fail: dependencies are missing and dpkg(8) doesn't install them.
-# That's why we have `apt-get install -f` following. To double-check
-# everything went fine, we have the second `dpkg -i`.
- apt-get install -y --no-install-recommends devscripts equivs
- mk-build-deps debian/control
- -dpkg -i ./rabbitmq-server-build-deps_*_all.deb
- apt-get install -y -f -V --no-install-recommends
- dpkg -i ./rabbitmq-server-build-deps_*_all.deb
-endif
-# Log Erlang version.
- @echo
- @echo '--------------------------------------------------'
- @echo "Erlang and Elixir versions used to compile:"
- @elixir --version
- @echo '--------------------------------------------------'
- @echo
-# Possibly update debian/changelog (in the created source directory):
-# - if it contains an entry for the specified version, do nothing;
-# - otherwise, prepend a generated entry using "1" as the package
-# revision.
- cd "$(UNPACKED_DIR)"; \
- ../scripts/update-changelog.sh "$(DEBIAN_VERSION)"
-# Finally build the package! We ask for both the source package and one
-# or more binary packages.
- cd "$(UNPACKED_DIR)"; \
- dpkg-buildpackage -sa $(SIGNING_FLAG)
-# Before we remove the source directory, copy the possibly updated
-# debian/changelog to the original debian subdirectory, if the caller
-# asks for it. The caller is then responsible for committing it.
-ifeq ($(SAVE_CHANGELOG),yes)
- cp -a "$(UNPACKED_DIR)/debian/changelog" debian/changelog
-endif
- rm -rf "$(UNPACKED_DIR)"
-# If $(PACKAGES_DIR) is specified, move all package files to that
-# location.
-ifneq ($(PACKAGES_DIR),)
- mkdir -p "$(PACKAGES_DIR)"
- mv $$(./scripts/get-debian-package-files-list.sh $(CHANGES_FILE)) \
- "$(PACKAGES_DIR)"
-endif
-
-clean:
- rm -rf $(UNPACKED_DIR)
- rm -f $(DEBIAN_ORIG_TARBALL)
- ./scripts/get-debian-package-files-list.sh $(CHANGES_FILE) | xargs rm -f
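The INSTALL_BUILD_DEPS branch leans on mk-build-deps from devscripts: it turns debian/control's Build-Depends into a throw-away rabbitmq-server-build-deps metapackage, lets the first dpkg -i fail on missing dependencies, pulls them in with apt-get -f, then re-installs the metapackage to prove everything resolved. The same sequence outside make, run as root in the build environment, looks roughly like this:

  # Satisfy the Build-Depends of debian/control via an equivs metapackage.
  apt-get install -y --no-install-recommends devscripts equivs
  mk-build-deps debian/control                              # emits rabbitmq-server-build-deps_*_all.deb
  dpkg -i ./rabbitmq-server-build-deps_*_all.deb || true    # expected to fail: deps missing
  apt-get install -y -f --no-install-recommends             # pull in the missing deps
  dpkg -i ./rabbitmq-server-build-deps_*_all.deb            # should now succeed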
diff --git a/packaging/debs/Debian/debian/changelog b/packaging/debs/Debian/debian/changelog
deleted file mode 100644
index f3ba52425d..0000000000
--- a/packaging/debs/Debian/debian/changelog
+++ /dev/null
@@ -1,547 +0,0 @@
-rabbitmq-server (3.8.0~rc.1-1) unstable; urgency=low
-
- * New Upstream Release.
-
- -- RabbitMQ Team <info@rabbitmq.com> Sun, 08 Sep 2019 20:26:33 +0000
-
-rabbitmq-server (3.8.0~beta.7-1) unstable; urgency=low
-
- * New Upstream Release.
-
- -- RabbitMQ Team <info@rabbitmq.com> Tue, 20 Aug 2019 19:44:41 +0000
-
-rabbitmq-server (3.8.0~beta.6-1) unstable; urgency=low
-
- * New Upstream Release.
-
- -- RabbitMQ Team <info@rabbitmq.com> Sun, 04 Aug 2019 00:18:13 +0000
-
-rabbitmq-server (3.8.0~beta.5-1) unstable; urgency=low
-
- * New Upstream Release.
-
- -- RabbitMQ Team <info@rabbitmq.com> Tue, 09 Jul 2019 16:11:20 +0000
-
-rabbitmq-server (3.8.0~beta.4-1) unstable; urgency=low
-
- * New Upstream Release.
-
- -- RabbitMQ Team <info@rabbitmq.com> Fri, 10 May 2019 14:13:38 +0000
-
-rabbitmq-server (3.8.0~beta.3-1) unstable; urgency=low
-
- * New Upstream Release.
-
- -- RabbitMQ Team <info@rabbitmq.com> Wed, 06 Mar 2019 05:08:03 +0000
-
-rabbitmq-server (3.8.0~beta.2-1) unstable; urgency=low
-
- * New Upstream Release.
-
- -- RabbitMQ Team <info@rabbitmq.com> Thu, 31 Jan 2019 10:16:50 +0000
-
-rabbitmq-server (3.8.0~beta.1-1) unstable; urgency=low
-
- * New Upstream Release.
-
- -- RabbitMQ Team <info@rabbitmq.com> Thu, 08 Nov 2018 14:41:04 +0000
-
-rabbitmq-server (3.7.1~beta.1-1) unstable; urgency=low
-
- * New Upstream Release.
-
- -- RabbitMQ Team <info@rabbitmq.com> Wed, 13 Dec 2017 18:07:55 +0000
-
-rabbitmq-server (3.7.0-1) unstable; urgency=low
-
- * New Upstream Release.
-
- -- RabbitMQ Team <info@rabbitmq.com> Wed, 29 Nov 2017 16:52:39 +0000
-
-rabbitmq-server (3.7.0~rc.2-1) unstable; urgency=low
-
- * New Upstream Release.
-
- -- RabbitMQ Team <info@rabbitmq.com> Thu, 16 Nov 2017 17:50:24 +0000
-
-rabbitmq-server (3.7.0~rc.1-1) unstable; urgency=low
-
- * New Upstream Release.
-
- -- RabbitMQ Team <info@rabbitmq.com> Mon, 18 Sep 2017 14:43:34 +0000
-
-rabbitmq-server (3.7.0~beta.20-1) unstable; urgency=low
-
- * New Upstream Release.
-
- -- RabbitMQ Team <info@rabbitmq.com> Tue, 12 Sep 2017 07:49:21 +0000
-
-rabbitmq-server (3.6.12-1) unstable; urgency=medium
-
- * New Upstream Release
-
- -- Michael Klishin <michael@rabbitmq.com> Mon, 11 Sep 2017 16:19:10 +0100
-
-rabbitmq-server (3.6.11-1) unstable; urgency=medium
-
- * New Upstream Release
-
- -- Michael Klishin <michael@rabbitmq.com> Wed, 16 Aug 2017 13:06:42 +0100
-
-rabbitmq-server (3.6.10-1) unstable; urgency=low
-
- * New Upstream Release
-
- -- Michael Klishin <michael@rabbitmq.com> Thu, 25 May 2017 11:46:23 +0100
-
-rabbitmq-server (3.6.9-1) unstable; urgency=low
-
- * New Upstream Release
-
- -- Michael Klishin <michael@rabbitmq.com> Wed, 29 Mar 2017 10:13:44 +0100
-
-rabbitmq-server (3.6.8-1) unstable; urgency=low
-
- * New Upstream Release
-
- -- Michael Klishin <michael@rabbitmq.com> Fri, 17 Mar 2017 12:04:45 +0000
-
-rabbitmq-server (3.6.7-1) unstable; urgency=low
-
- * New Upstream Release
-
- -- Michael Klishin <michael@rabbitmq.com> Wed, 15 Mar 2017 09:11:30 +0000
-
-rabbitmq-server (3.6.6-1) unstable; urgency=low
-
- * New Upstream Release
-
- -- Michael Klishin <michael@rabbitmq.com> Mon, 21 Nov 2016 10:36:28 +0000
-
-rabbitmq-server (3.6.5-1) unstable; urgency=low
-
- * New Upstream Release
-
- -- Michael Klishin <michael@rabbitmq.com> Fri, 05 Aug 2016 14:20:47 +0100
-
-rabbitmq-server (3.6.4-1) unstable; urgency=low
-
- * New Upstream Release
-
- -- Michael Klishin <michael@rabbitmq.com> Fri, 29 Jul 2016 11:40:53 +0100
-
-rabbitmq-server (3.6.3-1) unstable; urgency=low
-
- * New Upstream Release
-
- -- Michael Klishin <michael@rabbitmq.com> Wed, 06 Jul 2016 19:19:21 +0100
-
-rabbitmq-server (3.6.2-1) unstable; urgency=low
-
- * New Upstream Release
-
- -- Michael Klishin <michael@rabbitmq.com> Thu, 19 May 2016 09:20:06 +0100
-
-rabbitmq-server (3.6.1-1) unstable; urgency=low
-
- * New Upstream Release
-
- -- Michael Klishin <michael@rabbitmq.com> Tue, 01 Mar 2016 13:19:57 +0000
-
-rabbitmq-server (3.6.0-1) unstable; urgency=low
-
- * New Upstream Release
-
- -- Michael Klishin <michael@rabbitmq.com> Tue, 22 Dec 2015 13:21:56 +0000
-
-rabbitmq-server (3.5.7-1) unstable; urgency=low
-
- * New Upstream Release
-
- -- Michael Klishin <michael@rabbitmq.com> Tue, 15 Dec 2015 10:10:46 +0000
-
-rabbitmq-server (3.5.6-1) unstable; urgency=low
-
- * New Upstream Release
-
- -- Michael Klishin <michael@rabbitmq.com> Wed, 07 Oct 2015 13:31:24 +0100
-
-rabbitmq-server (3.5.5-3) unstable; urgency=low
-
- * Fix bashism in rabbitmq-script-wrapper
-
- -- Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com> Thu, 24 Sep 2015 19:18:17 +0100
-
-rabbitmq-server (3.5.5-1) unstable; urgency=low
-
- * New Upstream Release
-
- -- Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com> Thu, 24 Sep 2015 10:57:25 +0100
-
-rabbitmq-server (3.5.4-1) unstable; urgency=low
-
- * New Upstream Release
-
- -- Michael Klishin <michael@rabbitmq.com> Tue, 21 Jul 2015 20:25:48 +0100
-
-rabbitmq-server (3.5.3-1) unstable; urgency=low
-
- * New Upstream Release
-
- -- Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com> Fri, 22 May 2015 11:04:17 +0100
-
-rabbitmq-server (3.5.2-1) unstable; urgency=low
-
- * New Upstream Release
-
- -- Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com> Tue, 12 May 2015 16:21:44 +0100
-
-rabbitmq-server (3.5.1-1) unstable; urgency=low
-
- * New Upstream Release
-
- -- Michael Klishin <michael@rabbitmq.com> Thu, 02 Apr 2015 10:17:30 +0100
-
-rabbitmq-server (3.5.0-1) unstable; urgency=low
-
- * New Upstream Release
-
- -- Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com> Wed, 11 Mar 2015 13:56:19 +0000
-
-rabbitmq-server (3.4.4-1) unstable; urgency=low
-
- * New Upstream Release
-
- -- Michael Klishin <michael@rabbitmq.com> Wed, 11 Feb 2015 12:05:01 +0000
-
-rabbitmq-server (3.4.3-1) unstable; urgency=low
-
- * New Upstream Release
-
- -- Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com> Tue, 06 Jan 2015 15:58:45 +0000
-
-rabbitmq-server (3.4.2-1) unstable; urgency=low
-
- * New Upstream Release
-
- -- Simon MacMullen <simon@rabbitmq.com> Wed, 26 Nov 2014 12:11:12 +0000
-
-rabbitmq-server (3.4.1-1) unstable; urgency=low
-
- * New Upstream Release
-
- -- Simon MacMullen <simon@rabbitmq.com> Wed, 29 Oct 2014 13:31:10 +0000
-
-rabbitmq-server (3.4.0-1) unstable; urgency=low
-
- * New Upstream Release
-
- -- Simon MacMullen <simon@rabbitmq.com> Tue, 21 Oct 2014 14:21:36 +0100
-
-rabbitmq-server (3.3.5-1) unstable; urgency=low
-
- * New Upstream Release
- * Changed Uploaders from Emile Joubert to Blair Hester
-
- -- Simon MacMullen <simon@rabbitmq.com> Mon, 11 Aug 2014 12:23:31 +0100
-
-rabbitmq-server (3.3.4-1) unstable; urgency=low
-
- * New Upstream Release
-
- -- Simon MacMullen <simon@rabbitmq.com> Tue, 24 Jun 2014 12:50:29 +0100
-
-rabbitmq-server (3.3.3-1) unstable; urgency=low
-
- * New Upstream Release
-
- -- Simon MacMullen <simon@rabbitmq.com> Mon, 16 Jun 2014 13:00:00 +0100
-
-rabbitmq-server (3.3.2-1) unstable; urgency=low
-
- * New Upstream Release
-
- -- Simon MacMullen <simon@rabbitmq.com> Mon, 09 Jun 2014 10:25:22 +0100
-
-rabbitmq-server (3.3.1-1) unstable; urgency=low
-
- * New Upstream Release
-
- -- Simon MacMullen <simon@rabbitmq.com> Tue, 29 Apr 2014 11:49:23 +0100
-
-rabbitmq-server (3.3.0-1) unstable; urgency=low
-
- * New Upstream Release
-
- -- Simon MacMullen <simon@rabbitmq.com> Wed, 02 Apr 2014 14:23:14 +0100
-
-rabbitmq-server (3.2.4-1) unstable; urgency=low
-
- * New Upstream Release
-
- -- Simon MacMullen <simon@rabbitmq.com> Mon, 03 Mar 2014 14:50:18 +0000
-
-rabbitmq-server (3.2.3-1) unstable; urgency=low
-
- * New Upstream Release
-
- -- Emile Joubert <emile@rabbitmq.com> Thu, 23 Jan 2014 14:46:37 +0000
-
-rabbitmq-server (3.2.2-1) unstable; urgency=low
-
- * New Upstream Release
-
- -- Emile Joubert <emile@rabbitmq.com> Tue, 10 Dec 2013 16:08:08 +0000
-
-rabbitmq-server (3.2.0-1) unstable; urgency=low
-
- * New Upstream Release
-
- -- Emile Joubert <emile@rabbitmq.com> Wed, 23 Oct 2013 12:44:10 +0100
-
-rabbitmq-server (3.1.5-1) unstable; urgency=low
-
- * New Upstream Release
-
- -- Simon MacMullen <simon@rabbitmq.com> Thu, 15 Aug 2013 11:03:13 +0100
-
-rabbitmq-server (3.1.3-1) unstable; urgency=low
-
- * New Upstream Release
-
- -- Tim Watson <tim@rabbitmq.com> Tue, 25 Jun 2013 15:01:12 +0100
-
-rabbitmq-server (3.1.2-1) unstable; urgency=low
-
- * New Upstream Release
-
- -- Tim Watson <tim@rabbitmq.com> Mon, 24 Jun 2013 11:16:41 +0100
-
-rabbitmq-server (3.1.1-1) unstable; urgency=low
-
- * Test release
-
- -- Tim Watson <tim@rabbitmq.com> Mon, 20 May 2013 16:21:20 +0100
-
-rabbitmq-server (3.1.0-1) unstable; urgency=low
-
- * New Upstream Release
-
- -- Simon MacMullen <simon@rabbitmq.com> Wed, 01 May 2013 11:57:58 +0100
-
-rabbitmq-server (3.0.1-1) unstable; urgency=low
-
- * New Upstream Release
-
- -- Simon MacMullen <simon@rabbitmq.com> Tue, 11 Dec 2012 11:29:55 +0000
-
-rabbitmq-server (3.0.0-1) unstable; urgency=low
-
- * New Upstream Release
-
- -- Simon MacMullen <simon@rabbitmq.com> Fri, 16 Nov 2012 14:15:29 +0000
-
-rabbitmq-server (2.7.1-1) natty; urgency=low
-
- * New Upstream Release
-
- -- Steve Powell <steve@rabbitmq.com> Fri, 16 Dec 2011 12:12:36 +0000
-
-rabbitmq-server (2.7.0-1) natty; urgency=low
-
- * New Upstream Release
-
- -- Steve Powell <steve@rabbitmq.com> Tue, 08 Nov 2011 16:47:50 +0000
-
-rabbitmq-server (2.6.1-1) natty; urgency=low
-
- * New Upstream Release
-
- -- Tim <tim@rabbitmq.com> Fri, 09 Sep 2011 14:38:45 +0100
-
-rabbitmq-server (2.6.0-1) natty; urgency=low
-
- * New Upstream Release
-
- -- Tim <tim@rabbitmq.com> Fri, 26 Aug 2011 16:29:40 +0100
-
-rabbitmq-server (2.5.1-1) lucid; urgency=low
-
- * New Upstream Release
-
- -- Simon MacMullen <simon@rabbitmq.com> Mon, 27 Jun 2011 11:21:49 +0100
-
-rabbitmq-server (2.5.0-1) lucid; urgency=low
-
- * New Upstream Release
-
- -- <jerryk@vmware.com> Thu, 09 Jun 2011 07:20:29 -0700
-
-rabbitmq-server (2.4.1-1) lucid; urgency=low
-
- * New Upstream Release
-
- -- Alexandru Scvortov <alexandru@rabbitmq.com> Thu, 07 Apr 2011 16:49:22 +0100
-
-rabbitmq-server (2.4.0-1) lucid; urgency=low
-
- * New Upstream Release
-
- -- Alexandru Scvortov <alexandru@rabbitmq.com> Tue, 22 Mar 2011 17:34:31 +0000
-
-rabbitmq-server (2.3.1-1) lucid; urgency=low
-
- * New Upstream Release
-
- -- Simon MacMullen <simon@rabbitmq.com> Thu, 03 Feb 2011 12:43:56 +0000
-
-rabbitmq-server (2.3.0-1) lucid; urgency=low
-
- * New Upstream Release
-
- -- Simon MacMullen <simon@rabbitmq.com> Tue, 01 Feb 2011 12:52:16 +0000
-
-rabbitmq-server (2.2.0-1) lucid; urgency=low
-
- * New Upstream Release
-
- -- Rob Harrop <rob@rabbitmq.com> Mon, 29 Nov 2010 12:24:48 +0000
-
-rabbitmq-server (2.1.1-1) lucid; urgency=low
-
- * New Upstream Release
-
- -- Vlad Alexandru Ionescu <vlad@rabbitmq.com> Tue, 19 Oct 2010 17:20:10 +0100
-
-rabbitmq-server (2.1.0-1) lucid; urgency=low
-
- * New Upstream Release
-
- -- Marek Majkowski <marek@rabbitmq.com> Tue, 14 Sep 2010 14:20:17 +0100
-
-rabbitmq-server (2.0.0-1) karmic; urgency=low
-
- * New Upstream Release
-
- -- Michael Bridgen <mikeb@rabbitmq.com> Mon, 23 Aug 2010 14:55:39 +0100
-
-rabbitmq-server (1.8.1-1) lucid; urgency=low
-
- * New Upstream Release
-
- -- Emile Joubert <emile@rabbitmq.com> Wed, 14 Jul 2010 15:05:24 +0100
-
-rabbitmq-server (1.8.0-1) intrepid; urgency=low
-
- * New Upstream Release
-
- -- Matthew Sackman <matthew@rabbitmq.com> Tue, 15 Jun 2010 12:48:48 +0100
-
-rabbitmq-server (1.7.2-1) intrepid; urgency=low
-
- * New Upstream Release
-
- -- Matthew Sackman <matthew@lshift.net> Mon, 15 Feb 2010 15:54:47 +0000
-
-rabbitmq-server (1.7.1-1) intrepid; urgency=low
-
- * New Upstream Release
-
- -- Matthew Sackman <matthew@lshift.net> Fri, 22 Jan 2010 14:14:29 +0000
-
-rabbitmq-server (1.7.0-1) intrepid; urgency=low
-
- * New Upstream Release
-
- -- David Wragg <dpw@lshift.net> Mon, 05 Oct 2009 13:44:41 +0100
-
-rabbitmq-server (1.6.0-1) hardy; urgency=low
-
- * New Upstream Release
-
- -- Matthias Radestock <matthias@lshift.net> Tue, 16 Jun 2009 15:02:58 +0100
-
-rabbitmq-server (1.5.5-1) hardy; urgency=low
-
- * New Upstream Release
-
- -- Matthias Radestock <matthias@lshift.net> Tue, 19 May 2009 09:57:54 +0100
-
-rabbitmq-server (1.5.4-1) hardy; urgency=low
-
- * New Upstream Release
-
- -- Matthias Radestock <matthias@lshift.net> Mon, 06 Apr 2009 09:19:32 +0100
-
-rabbitmq-server (1.5.3-1) hardy; urgency=low
-
- * New Upstream Release
-
- -- Tony Garnock-Jones <tonyg@lshift.net> Tue, 24 Feb 2009 18:23:33 +0000
-
-rabbitmq-server (1.5.2-1) hardy; urgency=low
-
- * New Upstream Release
-
- -- Tony Garnock-Jones <tonyg@lshift.net> Mon, 23 Feb 2009 16:03:38 +0000
-
-rabbitmq-server (1.5.1-1) hardy; urgency=low
-
- * New Upstream Release
-
- -- Simon MacMullen <simon@lshift.net> Mon, 19 Jan 2009 15:46:13 +0000
-
-rabbitmq-server (1.5.0-1) testing; urgency=low
-
- * New Upstream Release
-
- -- Matthias Radestock <matthias@lshift.net> Wed, 17 Dec 2008 18:23:47 +0000
-
-rabbitmq-server (1.4.0-1) testing; urgency=low
-
- * New Upstream Release
-
- -- Tony Garnock-Jones <tonyg@lshift.net> Thu, 24 Jul 2008 13:21:48 +0100
-
-rabbitmq-server (1.3.0-1) testing; urgency=low
-
- * New Upstream Release
-
- -- Adrien Pierard <adrien@lshift.net> Mon, 03 Mar 2008 15:34:38 +0000
-
-rabbitmq-server (1.2.0-2) testing; urgency=low
-
- * Fixed rabbitmqctl wrapper script
-
- -- Simon MacMullen <simon@lshift.net> Fri, 05 Oct 2007 11:55:00 +0100
-
-rabbitmq-server (1.2.0-1) testing; urgency=low
-
- * New upstream release
-
- -- Simon MacMullen <simon@lshift.net> Wed, 26 Sep 2007 11:49:26 +0100
-
-rabbitmq-server (1.1.1-1) testing; urgency=low
-
- * New upstream release
-
- -- Simon MacMullen <simon@lshift.net> Wed, 29 Aug 2007 12:03:15 +0100
-
-rabbitmq-server (1.1.0-alpha-2) testing; urgency=low
-
- * Fixed erlang-nox dependency
-
- -- Simon MacMullen <simon@lshift.net> Thu, 02 Aug 2007 11:27:13 +0100
-
-rabbitmq-server (1.1.0-alpha-1) testing; urgency=low
-
- * New upstream release
-
- -- Simon MacMullen <simon@lshift.net> Fri, 20 Jul 2007 18:17:33 +0100
-
-rabbitmq-server (1.0.0-alpha-1) unstable; urgency=low
-
- * Initial release
-
- -- Tony Garnock-Jones <tonyg@shortstop.lshift.net> Wed, 31 Jan 2007 19:06:33 +0000
-
diff --git a/packaging/debs/Debian/debian/compat b/packaging/debs/Debian/debian/compat
deleted file mode 100644
index ec635144f6..0000000000
--- a/packaging/debs/Debian/debian/compat
+++ /dev/null
@@ -1 +0,0 @@
-9
diff --git a/packaging/debs/Debian/debian/control b/packaging/debs/Debian/debian/control
deleted file mode 100644
index 31d1429293..0000000000
--- a/packaging/debs/Debian/debian/control
+++ /dev/null
@@ -1,89 +0,0 @@
-Source: rabbitmq-server
-Section: net
-Priority: extra
-Maintainer: RabbitMQ Team <info@rabbitmq.com>
-Uploaders: Michael Klishin <michael@rabbitmq.com>,
- Karl Nilsson <knilsson@rabbitmq.com>,
- Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
-Build-Depends: debhelper (>= 9),
- dh-systemd (>= 1.5),
- erlang-base (>= 1:22.3) | erlang-base-hipe (>= 1:22.3) | esl-erlang (>= 1:22.3),
- erlang-base (<< 1:24.0) | erlang-base-hipe (<< 1:24.0) | esl-erlang (<< 1:24.0),
- erlang-crypto (>= 1:22.3) | esl-erlang (>= 1:22.3),
- erlang-crypto (<< 1:24.0) | esl-erlang (<< 1:24.0),
- erlang-edoc (>= 1:22.3) | esl-erlang (>= 1:22.3),
- erlang-edoc (<< 1:24.0) | esl-erlang (<< 1:24.0),
- erlang-eldap (>= 1:22.3) | esl-erlang (>= 1:22.3),
- erlang-eldap (<< 1:24.0) | esl-erlang (<< 1:24.0),
- erlang-erl-docgen (>= 1:22.3) | esl-erlang (>= 1:22.3),
- erlang-erl-docgen (<< 1:24.0) | esl-erlang (<< 1:24.0),
- erlang-eunit (>= 1:22.3) | esl-erlang (>= 1:22.3),
- erlang-eunit (<< 1:24.0) | esl-erlang (<< 1:24.0),
- erlang-inets (>= 1:22.3) | esl-erlang (>= 1:22.3),
- erlang-inets (<< 1:24.0) | esl-erlang (<< 1:24.0),
- erlang-mnesia (>= 1:22.3) | esl-erlang (>= 1:22.3),
- erlang-mnesia (<< 1:24.0) | esl-erlang (<< 1:24.0),
- erlang-os-mon (>= 1:22.3) | esl-erlang (>= 1:22.3),
- erlang-os-mon (<< 1:24.0) | esl-erlang (<< 1:24.0),
- erlang-parsetools (>= 1:22.3) | esl-erlang (>= 1:22.3),
- erlang-parsetools (<< 1:24.0) | esl-erlang (<< 1:24.0),
- erlang-public-key (>= 1:22.3) | esl-erlang (>= 1:22.3),
- erlang-public-key (<< 1:24.0) | esl-erlang (<< 1:24.0),
- erlang-runtime-tools (>= 1:22.3) | esl-erlang (>= 1:22.3),
- erlang-runtime-tools (<< 1:24.0) | esl-erlang (<< 1:24.0),
- erlang-ssl (>= 1:22.3) | esl-erlang (>= 1:22.3),
- erlang-ssl (<< 1:24.0) | esl-erlang (<< 1:24.0),
- erlang-syntax-tools (>= 1:22.3) | esl-erlang (>= 1:22.3),
- erlang-syntax-tools (<< 1:24.0) | esl-erlang (<< 1:24.0),
- erlang-tools (>= 1:22.3) | esl-erlang (>= 1:22.3),
- erlang-tools (<< 1:24.0) | esl-erlang (<< 1:24.0),
- erlang-xmerl (>= 1:22.3) | esl-erlang (>= 1:22.3),
- erlang-xmerl (<< 1:24.0) | esl-erlang (<< 1:24.0),
- erlang-dev (>= 1:22.3) | esl-erlang (>= 1:22.3),
- erlang-dev (<< 1:24.0) | esl-erlang (<< 1:24.0),
- erlang-src (>= 1:22.3) | esl-erlang (>= 1:22.3),
- erlang-src (<< 1:24.0) | esl-erlang (<< 1:24.0),
- elixir (>= 1.10.4),
- zip,
- rsync
-Standards-Version: 3.9.6
-
-Package: rabbitmq-server
-Architecture: all
-# FIXME: We remove ${misc:Depends} and hard-code the dependency on
-# init-system-helpers. we need this because we build the package on
-# Debian, where dh-systemd/init-system-helpers is at version 1.18, but
-# Ubuntu 14.04 only has init-system-helpers 1.14.
-Depends: init-system-helpers (>= 1.13~),
- erlang-base (>= 1:22.3) | erlang-base-hipe (>= 1:22.3) | esl-erlang (>= 1:22.3),
- erlang-base (<< 1:24.0) | erlang-base-hipe (<< 1:24.0) | esl-erlang (<< 1:24.0),
- erlang-crypto (>= 1:22.3) | esl-erlang (>= 1:22.3),
- erlang-crypto (<< 1:24.0) | esl-erlang (<< 1:24.0),
- erlang-eldap (>= 1:22.3) | esl-erlang (>= 1:22.3),
- erlang-eldap (<< 1:24.0) | esl-erlang (<< 1:24.0),
- erlang-inets (>= 1:22.3) | esl-erlang (>= 1:22.3),
- erlang-inets (<< 1:24.0) | esl-erlang (<< 1:24.0),
- erlang-mnesia (>= 1:22.3) | esl-erlang (>= 1:22.3),
- erlang-mnesia (<< 1:24.0) | esl-erlang (<< 1:24.0),
- erlang-os-mon (>= 1:22.3) | esl-erlang (>= 1:22.3),
- erlang-os-mon (<< 1:24.0) | esl-erlang (<< 1:24.0),
- erlang-parsetools (>= 1:22.3) | esl-erlang (>= 1:22.3),
- erlang-parsetools (<< 1:24.0) | esl-erlang (<< 1:24.0),
- erlang-public-key (>= 1:22.3) | esl-erlang (>= 1:22.3),
- erlang-public-key (<< 1:24.0) | esl-erlang (<< 1:24.0),
- erlang-runtime-tools (>= 1:22.3) | esl-erlang (>= 1:22.3),
- erlang-runtime-tools (<< 1:24.0) | esl-erlang (<< 1:24.0),
- erlang-ssl (>= 1:22.3) | esl-erlang (>= 1:22.3),
- erlang-ssl (<< 1:24.0) | esl-erlang (<< 1:24.0),
- erlang-syntax-tools (>= 1:22.3) | esl-erlang (>= 1:22.3),
- erlang-syntax-tools (<< 1:24.0) | esl-erlang (<< 1:24.0),
- erlang-tools (>= 1:22.3) | esl-erlang (>= 1:22.3),
- erlang-tools (<< 1:24.0) | esl-erlang (<< 1:24.0),
- erlang-xmerl (>= 1:22.3) | esl-erlang (>= 1:22.3),
- erlang-xmerl (<< 1:24.0) | esl-erlang (<< 1:24.0),
- adduser,
- logrotate,
- socat
-Description: Multi-protocol messaging broker
- RabbitMQ is an open source multi-protocol messaging broker.
-Homepage: https://www.rabbitmq.com/
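Every Erlang dependency above is pinned to the [22.3, 24.0) window with a pair of >=/<< constraints. To check whether a host's installed erlang-base falls inside the same window, dpkg's own comparison can be reused; a sketch that assumes the stock Debian Erlang packaging:

  #!/bin/sh
  # Does the installed erlang-base satisfy 1:22.3 <= version < 1:24.0?
  v="$(dpkg-query -W -f '${Version}' erlang-base)"
  if dpkg --compare-versions "$v" ge 1:22.3 && \
     dpkg --compare-versions "$v" lt 1:24.0; then
      echo "erlang-base $v is inside the supported range"
  else
      echo "erlang-base $v is outside the supported range" >&2
      exit 1
  fi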
diff --git a/packaging/debs/Debian/debian/dirs b/packaging/debs/Debian/debian/dirs
deleted file mode 100644
index 6df23a5f85..0000000000
--- a/packaging/debs/Debian/debian/dirs
+++ /dev/null
@@ -1,9 +0,0 @@
-usr/lib/rabbitmq/bin
-usr/lib/erlang/lib
-usr/sbin
-usr/share/man
-var/lib/rabbitmq/mnesia
-var/log/rabbitmq
-etc/logrotate.d
-etc/profile.d
-etc/rabbitmq
diff --git a/packaging/debs/Debian/debian/postinst b/packaging/debs/Debian/debian/postinst
deleted file mode 100644
index 379608aa8b..0000000000
--- a/packaging/debs/Debian/debian/postinst
+++ /dev/null
@@ -1,79 +0,0 @@
-#!/bin/sh
-# postinst script for rabbitmq
-#
-# see: dh_installdeb(1)
-
-set -e
-
-# summary of how this script can be called:
-# * <postinst> `configure' <most-recently-configured-version>
-# * <old-postinst> `abort-upgrade' <new version>
-# * <conflictor's-postinst> `abort-remove' `in-favour' <package>
-# <new-version>
-# * <postinst> `abort-remove'
-# * <deconfigured's-postinst> `abort-deconfigure' `in-favour'
-# <failed-install-package> <version> `removing'
-# <conflicting-package> <version>
-# for details, see https://www.debian.org/doc/debian-policy/ or
-# the debian-policy package
-
-
-# create rabbitmq group
-if ! getent group rabbitmq >/dev/null; then
- addgroup --system rabbitmq
-fi
-
-# create rabbitmq user
-if ! getent passwd rabbitmq >/dev/null; then
- adduser --system --ingroup rabbitmq --home /var/lib/rabbitmq \
- --no-create-home --gecos "RabbitMQ messaging server" \
- --disabled-login rabbitmq
-fi
-
-chown -R rabbitmq:rabbitmq /var/lib/rabbitmq
-chown -R rabbitmq:adm /var/log/rabbitmq
-chgrp rabbitmq /etc/rabbitmq
-chmod g+s /etc/rabbitmq
-chmod 750 /var/lib/rabbitmq/mnesia
-chmod -R o-rwx,g-w /var/lib/rabbitmq/mnesia
-
-case "$1" in
- configure)
- if test -z "$2"; then
- # This is a fresh install of the package.
-
- # On a fresh install, we want to limit permissions on the
- # log directory to the owner and the group. Others won't
- # have any access to log files: this is in case sensitive
- # data are accidentally logged (like process crash data).
- chmod 2750 /var/log/rabbitmq
- else
- # The package was already configured: it's an upgrade over
- # a previously installed version, or it's an install over
- # a non-purged version (i.e. deinstalled but configuration
- # files and data are still there).
- true
- fi
-
- if [ -n "$ZSH_VERSION" ]; then
- echo "Z Shell detected.
-to enable rabbitmqctl autocompletion add the following to your .zshrc file:
-autoload _enable_rabbitmqctl_completion; _enable_rabbitmqctl_completion"
- fi
- ;;
-
- abort-upgrade|abort-remove|abort-deconfigure)
- ;;
-
- *)
- echo "postinst called with unknown argument \`$1'" >&2
- exit 1
- ;;
-esac
-
-# dh_installdeb will replace this with shell code automatically
-# generated by other debhelper scripts.
-
-#DEBHELPER#
-
-exit 0
diff --git a/packaging/debs/Debian/debian/postrm b/packaging/debs/Debian/debian/postrm
deleted file mode 100644
index e5f97ba86e..0000000000
--- a/packaging/debs/Debian/debian/postrm
+++ /dev/null
@@ -1,75 +0,0 @@
-#!/bin/sh
-# postrm script for rabbitmq
-#
-# see: dh_installdeb(1)
-
-set -e
-
-# summary of how this script can be called:
-# * <postrm> `remove'
-# * <postrm> `purge'
-# * <old-postrm> `upgrade' <new-version>
-# * <new-postrm> `failed-upgrade' <old-version>
-# * <new-postrm> `abort-install'
-# * <new-postrm> `abort-install' <old-version>
-# * <new-postrm> `abort-upgrade' <old-version>
-# * <disappearer's-postrm> `disappear' <overwriter>
-# <overwriter-version>
-# for details, see https://www.debian.org/doc/debian-policy/ or
-# the debian-policy package
-
-remove_plugin_traces() {
- # Remove traces of plugins
- rm -rf /var/lib/rabbitmq/plugins-scratch
-}
-
-remove_autocomplete_script() {
- if [ -f /etc/profile.d/rabbitmqctl-autocomplete.sh ]; then
- rm -rf /etc/profile.d/rabbitmqctl-autocomplete.sh
- rm -rf /usr/share/zsh/vendor-functions/_enable_rabbitmqctl_completion
- fi
-}
-
-case "$1" in
- purge)
- rm -f /etc/default/rabbitmq
- if [ -d /var/lib/rabbitmq ]; then
- rm -r /var/lib/rabbitmq
- fi
- if [ -d /var/log/rabbitmq ]; then
- rm -r /var/log/rabbitmq
- fi
- if [ -d /etc/rabbitmq ]; then
- rm -r /etc/rabbitmq
- fi
- remove_autocomplete_script
- remove_plugin_traces
- if getent passwd rabbitmq >/dev/null; then
- # Stop epmd if run by the rabbitmq user
- pkill -u rabbitmq epmd || :
- fi
- ;;
-
- remove)
- remove_autocomplete_script
- remove_plugin_traces
- ;;
- upgrade)
- remove_plugin_traces
- ;;
-
- failed-upgrade|abort-install|abort-upgrade|disappear)
- ;;
-
- *)
- echo "postrm called with unknown argument \`$1'" >&2
- exit 1
- ;;
-esac
-
-# dh_installdeb will replace this with shell code automatically
-# generated by other debhelper scripts.
-
-#DEBHELPER#
-
-exit 0
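Note that this postrm deletes data, logs and configuration (and stops the rabbitmq user's epmd) only on purge; plain removal leaves them in place for a later reinstall. In apt terms that is the difference between:

  # Keep /var/lib/rabbitmq, /var/log/rabbitmq and /etc/rabbitmq around:
  apt-get remove rabbitmq-server
  # Delete them too, and stop epmd if the rabbitmq user started it:
  apt-get purge rabbitmq-server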
diff --git a/packaging/debs/Debian/debian/rabbitmq-server.default b/packaging/debs/Debian/debian/rabbitmq-server.default
deleted file mode 100644
index bde5e30895..0000000000
--- a/packaging/debs/Debian/debian/rabbitmq-server.default
+++ /dev/null
@@ -1,9 +0,0 @@
-# This file is sourced by /etc/init.d/rabbitmq-server. Its primary
-# reason for existing is to allow adjustment of system limits for the
-# rabbitmq-server process.
-#
-# Maximum number of open file handles. This will need to be increased
-# to handle many simultaneous connections. Refer to the system
-# documentation for ulimit (in man bash) for more information.
-#
-#ulimit -n 1024
diff --git a/packaging/debs/Debian/debian/rabbitmq-server.docs b/packaging/debs/Debian/debian/rabbitmq-server.docs
deleted file mode 100644
index 89e6828d70..0000000000
--- a/packaging/debs/Debian/debian/rabbitmq-server.docs
+++ /dev/null
@@ -1 +0,0 @@
-deps/rabbit/docs/set_rabbitmq_policy.sh.example
diff --git a/packaging/debs/Debian/debian/rabbitmq-server.init b/packaging/debs/Debian/debian/rabbitmq-server.init
deleted file mode 100644
index 80c792bba8..0000000000
--- a/packaging/debs/Debian/debian/rabbitmq-server.init
+++ /dev/null
@@ -1,200 +0,0 @@
-#!/bin/sh
-#
-# rabbitmq-server RabbitMQ broker
-#
-# chkconfig: - 80 05
-# description: Manages RabbitMQ server
-#
-
-### BEGIN INIT INFO
-# Provides: rabbitmq-server
-# Required-Start: $remote_fs $network
-# Required-Stop: $remote_fs $network
-# Default-Start: 2 3 4 5
-# Default-Stop: 0 1 6
-# Description: RabbitMQ broker
-# Short-Description: Manages RabbitMQ server
-### END INIT INFO
-
-PATH=/sbin:/usr/sbin:/bin:/usr/bin
-NAME=rabbitmq-server
-DAEMON=/usr/sbin/${NAME}
-CONTROL=/usr/sbin/rabbitmqctl
-DESC="message broker"
-USER=rabbitmq
-PID_FILE=/var/run/rabbitmq/pid
-RABBITMQ_STARTUP_TIMEOUT=600
-
-test -x $DAEMON || exit 0
-test -x $CONTROL || exit 0
-
-RETVAL=0
-set -e
-
-[ -f /etc/default/${NAME} ] && . /etc/default/${NAME}
-
-# $RABBITMQ_SERVER_CONSOLE_OUTPUT_DIR is a directory where rabbitmq-server(8)
-# console output (both stdout and stderr) is redirected. It defaults to
-# /var/log/rabbitmq which is configured by the package already (i.e. the
-# directory is created and its ownership is set).
-#
-# We still look at the value of $RABBITMQ_LOG_BASE, possibly set in
-# /etc/default/rabbitmq-server for backward compatibility. But we use a
-# specific variable name here (instead of $RABBITMQ_LOG_BASE) to avoid any
-# confusion with RabbitMQ's own logging configuration. Indeed, the console
-# output redirection is a responsibility of the package, not RabbitMQ itself.
-: ${RABBITMQ_SERVER_CONSOLE_OUTPUT_DIR:=${RABBITMQ_LOG_BASE:-/var/log/rabbitmq}}
-
-. /lib/lsb/init-functions
-. /lib/init/vars.sh
-
-ensure_pid_dir () {
- PID_DIR=`dirname ${PID_FILE}`
- if [ ! -d ${PID_DIR} ] ; then
- mkdir -p ${PID_DIR}
- chown -R ${USER}:${USER} ${PID_DIR}
- chmod 755 ${PID_DIR}
- fi
-}
-
-remove_pid () {
- rm -f ${PID_FILE}
- rmdir `dirname ${PID_FILE}` || :
-}
-
-start_rabbitmq () {
- status_rabbitmq quiet
- if [ $RETVAL != 0 ] ; then
- RETVAL=0
- ensure_pid_dir
- set +e
- RABBITMQ_PID_FILE=$PID_FILE start-stop-daemon --quiet \
- --chuid rabbitmq --start \
- --pidfile "$PID_FILE" --background \
- --startas /bin/sh -- -c "exec $DAEMON >'${RABBITMQ_SERVER_CONSOLE_OUTPUT_DIR}/startup_log' 2>'${RABBITMQ_SERVER_CONSOLE_OUTPUT_DIR}/startup_err'"
- $CONTROL wait --timeout $RABBITMQ_STARTUP_TIMEOUT $PID_FILE >/dev/null 2>&1
- RETVAL=$?
- set -e
- if [ $RETVAL != 0 ] ; then
- remove_pid
- fi
- else
- RETVAL=3
- fi
-}
-
-stop_rabbitmq () {
- status_rabbitmq quiet
- if [ $RETVAL = 0 ] ; then
- set +e
- $CONTROL stop ${PID_FILE} \
- > ${RABBITMQ_SERVER_CONSOLE_OUTPUT_DIR}/shutdown_log \
- 2> ${RABBITMQ_SERVER_CONSOLE_OUTPUT_DIR}/shutdown_err
- RETVAL=$?
- set -e
- if [ $RETVAL = 0 ] ; then
- remove_pid
- fi
- else
- RETVAL=3
- fi
-}
-
-status_rabbitmq() {
- set +e
- if [ "$1" != "quiet" ] ; then
- $CONTROL status 2>&1
- else
- $CONTROL status > /dev/null 2>&1
- fi
- if [ $? != 0 ] ; then
- RETVAL=3
- fi
- set -e
-}
-
-rotate_logs_rabbitmq() {
- set +e
- $CONTROL -q rotate_logs
- if [ $? != 0 ] ; then
- RETVAL=1
- fi
- set -e
-}
-
-restart_running_rabbitmq () {
- status_rabbitmq quiet
- if [ $RETVAL = 0 ] ; then
- restart_rabbitmq
- else
- log_warning_msg "${DESC} not running"
- fi
-}
-
-restart_rabbitmq() {
- stop_rabbitmq
- start_rabbitmq
-}
-
-restart_end() {
- if [ $RETVAL = 0 ] ; then
- log_end_msg 0
- else
- log_end_msg 1
- fi
-}
-
-start_stop_end() {
- case "$RETVAL" in
- 0)
- [ -x /sbin/initctl ] && /sbin/initctl emit --no-wait "${NAME}-${1}"
- log_end_msg 0
- ;;
- 3)
- log_warning_msg "${DESC} already ${1}"
- log_end_msg 0
- RETVAL=0
- ;;
- *)
- log_warning_msg "FAILED - check ${RABBITMQ_SERVER_CONSOLE_OUTPUT_DIR}/startup_\{log, _err\}"
- log_end_msg 1
- ;;
- esac
-}
-
-case "$1" in
- start)
- log_daemon_msg "Starting ${DESC}" $NAME
- start_rabbitmq
- start_stop_end "running"
- ;;
- stop)
- log_daemon_msg "Stopping ${DESC}" $NAME
- stop_rabbitmq
- start_stop_end "stopped"
- ;;
- status)
- status_rabbitmq
- ;;
- rotate-logs)
- log_action_begin_msg "Rotating log files for ${DESC}: ${NAME}"
- rotate_logs_rabbitmq
- log_action_end_msg $RETVAL
- ;;
- force-reload|reload|restart)
- log_daemon_msg "Restarting ${DESC}" $NAME
- restart_rabbitmq
- restart_end
- ;;
- try-restart)
- log_daemon_msg "Restarting ${DESC}" $NAME
- restart_running_rabbitmq
- restart_end
- ;;
- *)
- echo "Usage: $0 {start|stop|status|rotate-logs|restart|condrestart|try-restart|reload|force-reload}" >&2
- RETVAL=1
- ;;
-esac
-
-exit $RETVAL
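
The init script deleted above reads overrides from /etc/default/rabbitmq-server before it starts the broker, so the console output location can be changed without editing the script itself. A minimal sketch of such an override file (the target directory is only an example and must exist and be writable by the rabbitmq user):

    # /etc/default/rabbitmq-server
    # Redirect startup_log/startup_err and shutdown_log/shutdown_err away from
    # the packaged default of /var/log/rabbitmq.
    RABBITMQ_SERVER_CONSOLE_OUTPUT_DIR=/srv/rabbitmq/console-logs
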
diff --git a/packaging/debs/Debian/debian/rabbitmq-server.logrotate b/packaging/debs/Debian/debian/rabbitmq-server.logrotate
deleted file mode 100644
index 21a9fc0b84..0000000000
--- a/packaging/debs/Debian/debian/rabbitmq-server.logrotate
+++ /dev/null
@@ -1,7 +0,0 @@
-/var/log/rabbitmq/*.log {
- weekly
- missingok
- rotate 20
- compress
- notifempty
-}
diff --git a/packaging/debs/Debian/debian/rabbitmq-server.manpages b/packaging/debs/Debian/debian/rabbitmq-server.manpages
deleted file mode 100644
index 2ef2376ee2..0000000000
--- a/packaging/debs/Debian/debian/rabbitmq-server.manpages
+++ /dev/null
@@ -1,4 +0,0 @@
-deps/rabbit/docs/rabbitmq-env.conf.5
-deps/rabbit/docs/rabbitmq-plugins.8
-deps/rabbit/docs/rabbitmq-server.8
-deps/rabbit/docs/rabbitmqctl.8
diff --git a/packaging/debs/Debian/debian/rabbitmq-server.service b/packaging/debs/Debian/debian/rabbitmq-server.service
deleted file mode 100644
index 3339a2f5f0..0000000000
--- a/packaging/debs/Debian/debian/rabbitmq-server.service
+++ /dev/null
@@ -1,39 +0,0 @@
-# systemd unit example
-[Unit]
-Description=RabbitMQ broker
-After=network.target epmd@0.0.0.0.socket
-Wants=network.target epmd@0.0.0.0.socket
-
-[Service]
-Type=notify
-User=rabbitmq
-Group=rabbitmq
-UMask=0027
-NotifyAccess=all
-TimeoutStartSec=600
-
-# To override LimitNOFILE, create the following file:
-#
-# /etc/systemd/system/rabbitmq-server.service.d/limits.conf
-#
-# with the following content:
-#
-# [Service]
-# LimitNOFILE=65536
-
-LimitNOFILE=32768
-
-# The following setting will automatically restart RabbitMQ
-# in the event of a failure. systemd service restarts are not a
-# replacement for service monitoring. Please see
-# https://www.rabbitmq.com/monitoring.html
-Restart=on-failure
-RestartSec=10
-WorkingDirectory=/var/lib/rabbitmq
-ExecStart=/usr/lib/rabbitmq/bin/rabbitmq-server
-ExecStop=/usr/lib/rabbitmq/bin/rabbitmqctl shutdown
-# See rabbitmq/rabbitmq-server-release#51
-SuccessExitStatus=69
-
-[Install]
-WantedBy=multi-user.target
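
The comment block in the unit file above describes raising LimitNOFILE through a systemd drop-in. For reference, applying that override on a systemd host looks roughly like this, using the 65536 value from the comment:

    sudo mkdir -p /etc/systemd/system/rabbitmq-server.service.d
    printf '[Service]\nLimitNOFILE=65536\n' | \
        sudo tee /etc/systemd/system/rabbitmq-server.service.d/limits.conf
    sudo systemctl daemon-reload
    sudo systemctl restart rabbitmq-server
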
diff --git a/packaging/debs/Debian/debian/rules b/packaging/debs/Debian/debian/rules
deleted file mode 100755
index 1a98c2831e..0000000000
--- a/packaging/debs/Debian/debian/rules
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/usr/bin/make -f
-# -*- makefile -*-
-
-# Uncomment this to turn on verbose mode.
-#export DH_VERBOSE=1
-
-DEB_DESTDIR = debian/rabbitmq-server
-RABBITMQ_USER ?= rabbitmq
-RABBITMQ_GROUP ?= rabbitmq
-
-unexport DEPS_DIR
-
-%:
- dh $@ --parallel --with systemd
-
-override_dh_auto_clean:
- $(MAKE) clean clean-unpacked-source-dist distclean-manpages
- rm -rf .erlang.mk
-
-override_dh_auto_build:
- $(MAKE) dist manpages
-
-override_dh_auto_test:
- @:
-
-export PREFIX RMQ_ROOTDIR
-
-override_dh_auto_install: PREFIX = /usr
-override_dh_auto_install: RMQ_ROOTDIR = $(PREFIX)/lib/rabbitmq
-override_dh_auto_install: RMQ_ERLAPP_DIR = $(RMQ_ROOTDIR)/lib/rabbitmq_server-*
-override_dh_auto_install:
- dh_auto_install
-
- $(MAKE) install-bin DESTDIR=$(DEB_DESTDIR)
-
- sed \
- -e 's|@RABBITMQ_USER@|$(RABBITMQ_USER)|' -e 's|@RABBITMQ_GROUP@|$(RABBITMQ_GROUP)|' \
- < scripts/rabbitmq-script-wrapper \
- > $(DEB_DESTDIR)$(PREFIX)/sbin/rabbitmqctl
- chmod 0755 $(DEB_DESTDIR)$(PREFIX)/sbin/rabbitmqctl
- for script in rabbitmq-server rabbitmq-plugins rabbitmq-diagnostics rabbitmq-queues rabbitmq-upgrade; do \
- cp -a $(DEB_DESTDIR)$(PREFIX)/sbin/rabbitmqctl \
- $(DEB_DESTDIR)$(PREFIX)/sbin/$$script; \
- done
- install -p -D -m 0644 debian/rabbitmq-server.default \
- $(DEB_DESTDIR)/etc/default/rabbitmq-server
-
- install -p -D -m 0755 scripts/rabbitmq-server.ocf \
- $(DEB_DESTDIR)$(PREFIX)/lib/ocf/resource.d/rabbitmq/rabbitmq-server
- install -p -D -m 0755 scripts/rabbitmq-server-ha.ocf \
- $(DEB_DESTDIR)$(PREFIX)/lib/ocf/resource.d/rabbitmq/rabbitmq-server-ha
-
- install -p -D -m 0755 scripts/rabbitmqctl-autocomplete.sh \
- $(DEB_DESTDIR)/etc/profile.d/rabbitmqctl-autocomplete.sh
-
- install -p -D -m 0755 scripts/zsh_autocomplete.sh \
- $(DEB_DESTDIR)/usr/share/zsh/vendor-functions/_enable_rabbitmqctl_completion
-
- rm $(DEB_DESTDIR)$(RMQ_ERLAPP_DIR)/LICENSE* \
- $(DEB_DESTDIR)$(RMQ_ERLAPP_DIR)/INSTALL
-
- rmdir $(DEB_DESTDIR)$(PREFIX)/lib/erlang/lib \
- $(DEB_DESTDIR)$(PREFIX)/lib/erlang
diff --git a/packaging/debs/Debian/debian/source/format b/packaging/debs/Debian/debian/source/format
deleted file mode 100644
index 163aaf8d82..0000000000
--- a/packaging/debs/Debian/debian/source/format
+++ /dev/null
@@ -1 +0,0 @@
-3.0 (quilt)
diff --git a/packaging/debs/Debian/debian/watch b/packaging/debs/Debian/debian/watch
deleted file mode 100644
index cbf9b949db..0000000000
--- a/packaging/debs/Debian/debian/watch
+++ /dev/null
@@ -1,4 +0,0 @@
-version=4
-opts="filenamemangle=s%(?:.*?)?v?@PACKAGE@-(\d[\d.]*)\.tar\.xz%@PACKAGE@-$1.tar.xz%" \
- https://github.com/rabbitmq/@PACKAGE@/releases \
- (?:.*?/)?v?@PACKAGE@-(\d[\d.]*)\.tar\.xz debian uupdate
diff --git a/packaging/debs/Debian/scripts/compare-debian-versions.py b/packaging/debs/Debian/scripts/compare-debian-versions.py
deleted file mode 100755
index ec138fa623..0000000000
--- a/packaging/debs/Debian/scripts/compare-debian-versions.py
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env python2
-
-import apt_pkg
-import os.path
-import sys
-
-if len(sys.argv) != 3:
- print('Syntax: %s <version a> <version b>' % (os.path.basename(sys.argv[0])))
- sys.exit(64)
-
-a = sys.argv[1]
-b = sys.argv[2]
-
-apt_pkg.init_system()
-vc = apt_pkg.version_compare(a,b)
-
-if vc > 0:
- print('%s < %s' % (b, a))
-elif vc == 0:
- print('%s = %s' % (a, b))
-elif vc < 0:
- print('%s < %s' % (a, b))
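
The helper above compares two Debian version strings using python-apt's ordering rules; a quick sanity check of the kind it was used for (the version arguments are arbitrary examples):

    ./compare-debian-versions.py 3.8.9-1 3.9.0-1
    # prints: 3.8.9-1 < 3.9.0-1
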
diff --git a/packaging/debs/Debian/scripts/format-package-version b/packaging/debs/Debian/scripts/format-package-version
deleted file mode 100755
index 9239b53af9..0000000000
--- a/packaging/debs/Debian/scripts/format-package-version
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/bin/sh
-# vim:sw=2:et:
-
-set -e
-
-VERSION=$1
-
-echo "$VERSION" | sed -E -e 's/-/~/g'
diff --git a/packaging/debs/Debian/scripts/get-debian-package-files-list.sh b/packaging/debs/Debian/scripts/get-debian-package-files-list.sh
deleted file mode 100755
index 5865ede557..0000000000
--- a/packaging/debs/Debian/scripts/get-debian-package-files-list.sh
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/bin/sh
-
-set -e
-
-changes_file=$1
-if [ -z "$changes_file" ]; then
- echo "Syntax: $(basename $0) <changes file>" 1>&2
- exit 64
-fi
-
-if [ ! -f "$changes_file" ]; then
- exit 0
-fi
-
-changes_file_dir=$(dirname "$changes_file")
-if test "$changes_file_dir" = '.'; then
- changes_file_dir=
-else
- changes_file_dir="$changes_file_dir/"
-fi
-
-# Include the .changes file itself in the list.
-echo "$changes_file_dir$(basename "$changes_file")"
-
-# We want to match lines with the following format (it starts
-# with a single space):
-# f752d307528f0ca87d57995c217344ec 5184732 net extra rabbitmq-(...)
-awk '
-/^ [a-fA-F0-9]+ / {
- if (length($1) == 32) {
- print "'$changes_file_dir'" $5;
- }
-}' < "$changes_file"
diff --git a/packaging/debs/Debian/scripts/update-changelog.sh b/packaging/debs/Debian/scripts/update-changelog.sh
deleted file mode 100755
index 70076bfaf6..0000000000
--- a/packaging/debs/Debian/scripts/update-changelog.sh
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/bin/sh
-
-set -e
-
-PACKAGE_VERSION=$1
-
-if test -z "$PACKAGE_VERSION"; then
- echo "Syntax: $(basename "$0") <debian version>" 1>&2
- exit 64
-fi
-
-PACKAGE_NAME=$(awk '/^Source:/ { print $2; }' < debian/control)
-CHANGELOG_DATE=$(date -R)
-
-if dpkg-parsechangelog --all | \
- grep -E -q "^ $PACKAGE_NAME \(${PACKAGE_VERSION}-[^)]\) "; then
- exit 0
-fi
-
-cat > debian/changelog.tmp <<EOF
-${PACKAGE_NAME} (${PACKAGE_VERSION}-1) unstable; urgency=low
-
- * New Upstream Release.
-
- -- RabbitMQ Team <info@rabbitmq.com> ${CHANGELOG_DATE}
-
-EOF
-
-cat debian/changelog >> debian/changelog.tmp
-mv -f debian/changelog.tmp debian/changelog
-
-echo
-echo '--------------------------------------------------'
-dpkg-parsechangelog
-echo '--------------------------------------------------'
-echo
diff --git a/packaging/debs/apt-repository/Makefile b/packaging/debs/apt-repository/Makefile
deleted file mode 100644
index dbf8871a51..0000000000
--- a/packaging/debs/apt-repository/Makefile
+++ /dev/null
@@ -1,31 +0,0 @@
-PACKAGES_DIR ?= ../../../PACKAGES
-REPO_DIR ?= debian
-
-SIGNING_KEY ?= default
-
-ifeq "$(UNOFFICIAL_RELEASE)" ""
-HOME_ARG = HOME=$(GNUPG_PATH)
-endif
-
-all: debian_apt_repository
-
-clean:
- rm -rf $(REPO_DIR)
-
-CAN_HAS_REPREPRO=$(shell [ -f /usr/bin/reprepro ] && echo true)
-ifeq ($(CAN_HAS_REPREPRO), true)
-debian_apt_repository: clean
- mkdir -p $(REPO_DIR)/conf
- cp -a distributions $(REPO_DIR)/conf
-ifeq "$(UNOFFICIAL_RELEASE)" ""
- echo SignWith: $(SIGNING_KEY) >> $(REPO_DIR)/conf/distributions
-endif
- for FILE in $(PACKAGES_DIR)/*.changes ; do \
- $(HOME_ARG) reprepro --ignore=wrongdistribution \
- -Vb $(REPO_DIR) include kitten $${FILE} ; \
- done
- reprepro -Vb $(REPO_DIR) createsymlinks
-else
-debian_apt_repository:
- @echo Not building APT repository as reprepro could not be found
-endif
diff --git a/packaging/debs/apt-repository/README b/packaging/debs/apt-repository/README
deleted file mode 100644
index 514a37f33e..0000000000
--- a/packaging/debs/apt-repository/README
+++ /dev/null
@@ -1,17 +0,0 @@
-APT repository for RabbitMQ
-
-Previously we've attempted to run a repository in the same way that
-Debian would: have repository management software installed on the
-server, and upload new packages to the repository as and when they're
-ready.
-
-This turned out to be both fiddly and annoying to do (and more
-particularly to automate) so since our repository is always going to be
-small it's easier just to create the entire repository as part of the
-build process, just like a package. It can then be moved into place as a
-single unit. The make target "debian_apt_repository" (invoked by "dist")
-will create it, and it can get moved onto the server with the rest of
-the packages.
-
-Read "README-real-repository" for information on how we used to do
-this.
diff --git a/packaging/debs/apt-repository/README-real-repository b/packaging/debs/apt-repository/README-real-repository
deleted file mode 100644
index faa7b62425..0000000000
--- a/packaging/debs/apt-repository/README-real-repository
+++ /dev/null
@@ -1,130 +0,0 @@
-APT Repository for RabbitMQ in Debian
-=====================================
-
-First, a note on what we're trying to do. We want a single "testing"
-repository. When RabbitMQ is more stable we will also want a
-"stable" repository. It is very important to understand that these refer
-to the state of the rabbit code, *NOT* which Debian distribution they go
-with. At the moment our dependencies are very simple so our packages can
-be used with any current Debian version (etch, lenny, sid) as well as
-with Ubuntu. So although we have a "testing" distribution, this is not
-codenamed "lenny". Instead it's currently codenamed "kitten" since
-that's a baby rabbit.
-
-Secondly, a note on software. We need a tool to manage the repository,
-and a tool to perform uploads to the repository. Debian being Debian
-there are quite a few of each. We will use "reprepro" to manage the
-repository since it's modern, maintained, and fairly simple. We will use
-"dupload" to perform the uploads since it gives us the ability to run
-arbitrary commands after the upload, which means we don't need to run a
-cron job on the web server to process uploads.
-
-Creating a repository
-=====================
-
-Much of this was cribbed from:
-https://www.debian-administration.org/articles/286
-
-The repository is fundamentally just some files in a folder, served over
-HTTP (or FTP etc). So let's make it "debian" in the root of
-www.rabbitmq.com.
-
-This means the repository will be at https://www.rabbitmq.com/debian/ and
-can be added to a sources.list as:
-
-deb https://www.rabbitmq.com/debian/ testing main
-deb-src https://www.rabbitmq.com/debian/ testing main
-
-Inside this folder we need a "conf" folder, and in
-that we need a "distributions" configuration file - see the file in this
-folder. Note that:
-
-* We list all architectures so that people can install rabbitmq-server
- on to anything.
-* We don't list the "all" architecture even though we use it; it's
- implied.
-* We only have a "main" component, we could have non-free and contrib
- here if it was relevant.
-* We list the email address associated with the key we want to use to
- sign the repository. Yes, even after signing packages we still want to
- sign the repository.
-
-We're now ready to go. Assuming the path to our repository is /path,
-(and hence configuration is in /path/conf) we can upload a file to the
-repository (creating it in the process) by doing something like this on
-the repository host:
-
-$ reprepro --ignore=wrongdistribution -Vb /path include kitten \
- rabbitmq-server_1.0.0-alpha-1_i386.changes
-
-Note that we upload to the distribution "kitten" rather than "testing".
-We also pass --ignore=wrongdistribution since the current packages are
-built to go in "unstable" (this will be changed obviously).
-
-Note also that the .changes file claims to be for i386 even though the
-package is for architecture "all". This is a bug in debhelper.
-
-Finally, if you've just created a repository, you want to run:
-
-$ reprepro -Vb /path createsymlinks
-
-since this will create "kitten" -> "testing" symlinks. You only need to
-do this once.
-
-Removing packages
-=================
-
-Fairly simple:
-
-$ reprepro --ignore=wrongdistribution -Vb /path remove kitten \
- rabbitmq-server
-
-Subsequent updates and "dupload"
-================================
-
-You can run the "reprepro" command above again to update the versions of
-software in the repository. Since we probably don't want to have to log
-into the machine in question to do this, we can use "dupload". This is a
-tool which uploads Debian packages. The supplied file "dupload.conf" can
-be renamed to ~/.dupload.conf. If you then run:
-
-$ dupload -to rabbit --nomail .
-
-in the folder with the .changes file, dupload will:
-
-* create an incoming folder in your home directory on the repository
-machine
-* upload everything there
-* run reprepro to move the packages into the repository
-* "rm -rf" the uploads folder
-
-This is a bit cheesy but should be enough for our purposes. The
-dupload.conf uses scp and ssh so you need a public-key login (or type
-your password lots).
-
-There's still an open question as to whether dupload is really needed
-for our case.
-
-Keys and signing
-================
-
-We currently sign the package as we build it; but we also need to sign
-the repository. The key is currently on my machine (mrforgetful) and has
-ID 056E8E56. We should put it on CDs though.
-
-reprepro will automatically sign the repository if we have the right
-SignWith line in the configuration, AND the secret key is installed on
-the repository server. This is obviously not ideal; not sure what the
-solution is right now.
-
-You can export the public key with:
-
-$ gpg --export --armor 056E8E56 > rabbit.pub
-
-(Open question: do we want to get our key on subkeys.pgp.net?)
-
-We can then add this key to the website and tell our users to import the
-key into apt with:
-
-# apt-key add rabbit.pub
-
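
For reference, the consumer-side steps scattered through the removed README amount to roughly the following. The URL and the "testing" suite are the ones quoted above, and apt-key is deprecated on current Debian/Ubuntu releases, so treat this as a historical sketch rather than installation guidance:

    # import the signing key exported with gpg --export --armor
    sudo apt-key add rabbit.pub
    # register the repository and install the package
    echo 'deb https://www.rabbitmq.com/debian/ testing main' | \
        sudo tee /etc/apt/sources.list.d/rabbitmq.list
    sudo apt-get update
    sudo apt-get install rabbitmq-server
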
diff --git a/packaging/debs/apt-repository/distributions b/packaging/debs/apt-repository/distributions
deleted file mode 100644
index 00d6b219f1..0000000000
--- a/packaging/debs/apt-repository/distributions
+++ /dev/null
@@ -1,7 +0,0 @@
-Origin: RabbitMQ
-Label: RabbitMQ Repository for Debian / Ubuntu etc
-Suite: testing
-Codename: kitten
-Architectures: alpha amd64 arm arm64 armeb armel armhf avr32 hppa hurd-i386 i386 ia64 kfreebsd-amd64 kfreebsd-i386 m32 m32r m68k mips mips64 mips64el mips64r6 mips64r6el mipsel mipsr6 mipsr6el netbsd-alpha netbsd-i386 nios2 or1k powerpc powerpcel ppc64 ppc64el s390 s390x sh sh3 sh3eb sh4 sh4eb sparc sparc64 tilegx source
-Components: main
-Description: RabbitMQ Repository for Debian / Ubuntu etc
diff --git a/packaging/debs/apt-repository/dupload.conf b/packaging/debs/apt-repository/dupload.conf
deleted file mode 100644
index 9ceed76000..0000000000
--- a/packaging/debs/apt-repository/dupload.conf
+++ /dev/null
@@ -1,16 +0,0 @@
-package config;
-
-$rabbit_user = "simon";
-$rabbit_host = "mrforgetful.lshift.net";
-$rabbit_repo_path = "/srv/debian";
-$rabbit_reprepro_extra_args = "--ignore=wrongdistribution";
-
-$cfg{'rabbit'} = {
- fqdn => "$rabbit_host",
- login => "$rabbit_user",
- method => "scp",
- incoming => "incoming",
-};
-
-$preupload{'deb'} = "ssh ${rabbit_host} mkdir incoming";
-$postupload{'deb'} = "ssh ${rabbit_host} \"cd incoming && reprepro ${$rabbit_reprepro_extra_args} -Vb ${rabbit_repo_path} include kitten *.changes && cd .. && rm -r incoming\"";
diff --git a/packaging/docker-image/10-default-guest-user.conf b/packaging/docker-image/10-default-guest-user.conf
new file mode 100644
index 0000000000..3d905739f3
--- /dev/null
+++ b/packaging/docker-image/10-default-guest-user.conf
@@ -0,0 +1,8 @@
+## DEFAULT SETTINGS ARE NOT MEANT TO BE TAKEN STRAIGHT INTO PRODUCTION
+## see https://www.rabbitmq.com/configure.html for further information
+## on configuring RabbitMQ
+
+## allow access to the guest user from anywhere on the network
+## https://www.rabbitmq.com/access-control.html#loopback-users
+## https://www.rabbitmq.com/production-checklist.html#users
+loopback_users.guest = false
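
Because this snippet deliberately relaxes the guest user's loopback restriction, the production-checklist advice it links to still applies. A common follow-up, sketched here with placeholder credentials, is to create a dedicated administrative user and delete guest once the broker is up:

    # run inside the container, e.g. via docker exec; name and password are placeholders
    rabbitmqctl add_user admin 'use-a-strong-password'
    rabbitmqctl set_user_tags admin administrator
    rabbitmqctl set_permissions -p / admin '.*' '.*' '.*'
    rabbitmqctl delete_user guest
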
diff --git a/packaging/docker-image/Dockerfile b/packaging/docker-image/Dockerfile
index 1089bb1ec9..48d33875fc 100644
--- a/packaging/docker-image/Dockerfile
+++ b/packaging/docker-image/Dockerfile
@@ -1,6 +1,6 @@
# The official Canonical Ubuntu Bionic image is ideal from a security perspective,
# especially for the enterprises that we, the RabbitMQ team, have to deal with
-FROM ubuntu:18.04
+FROM ubuntu:20.04
RUN set -eux; \
apt-get update; \
@@ -12,6 +12,9 @@ RUN set -eux; \
# verify that the "gosu" binary works
gosu nobody true
+# PGP key servers are too flaky for us to verify during every CI triggered build
+# https://github.com/docker-library/official-images/issues/4252
+ARG SKIP_PGP_VERIFY=false
# Default to a PGP keyserver that pgp-happy-eyeballs recognizes, but allow for substitutions locally
ARG PGP_KEYSERVER=ha.pool.sks-keyservers.net
# If you are building this image locally and are getting `gpg: keyserver receive failed: No data` errors,
@@ -33,7 +36,7 @@ ARG OTP_SHA256
ENV OTP_SOURCE_SHA256=${OTP_SHA256}
# Install dependencies required to build Erlang/OTP from source
-# http://erlang.org/doc/installation_guide/INSTALL.html
+# https://erlang.org/doc/installation_guide/INSTALL.html
# autoconf: Required to configure Erlang/OTP before compiling
# dpkg-dev: Required to set up host & build type when compiling Erlang/OTP
# gnupg: Required to verify OpenSSL artefacts
@@ -47,6 +50,7 @@ RUN set -eux; \
ca-certificates \
dpkg-dev \
gcc \
+ g++ \
gnupg \
libncurses5-dev \
make \
@@ -65,7 +69,7 @@ RUN set -eux; \
for key in $OPENSSL_PGP_KEY_IDS; do \
gpg --batch --keyserver "$PGP_KEYSERVER" --recv-keys "$key" || true; \
done; \
- gpg --batch --verify "$OPENSSL_PATH.tar.gz.asc" "$OPENSSL_PATH.tar.gz"; \
+ test "$SKIP_PGP_VERIFY" == "true" || gpg --batch --verify "$OPENSSL_PATH.tar.gz.asc" "$OPENSSL_PATH.tar.gz"; \
gpgconf --kill all; \
rm -rf "$GNUPGHOME"; \
echo "$OPENSSL_SOURCE_SHA256 *$OPENSSL_PATH.tar.gz" | sha256sum --check --strict -; \
@@ -106,7 +110,7 @@ RUN set -eux; \
tar --extract --file "$OTP_PATH.tar.gz" --directory "$OTP_PATH" --strip-components 1; \
\
# Configure Erlang/OTP for compilation, disable unused features & applications
-# http://erlang.org/doc/applications.html
+# https://erlang.org/doc/applications.html
# ERL_TOP is required for Erlang/OTP makefiles to find the absolute path for the installation
cd "$OTP_PATH"; \
export ERL_TOP="$OTP_PATH"; \
@@ -124,6 +128,7 @@ RUN set -eux; \
--disable-hipe \
--disable-sctp \
--disable-silent-rules \
+ --enable-jit \
--enable-clock-gettime \
--enable-hybrid-heap \
--enable-kernel-poll \
@@ -137,7 +142,6 @@ RUN set -eux; \
--without-diameter \
--without-edoc \
--without-erl_docgen \
- --without-erl_interface \
--without-et \
--without-eunit \
--without-ftp \
@@ -184,14 +188,11 @@ ENV RABBITMQ_DATA_DIR=/var/lib/rabbitmq
RUN set -eux; \
groupadd --gid 999 --system rabbitmq; \
useradd --uid 999 --system --home-dir "$RABBITMQ_DATA_DIR" --gid rabbitmq rabbitmq; \
- mkdir -p "$RABBITMQ_DATA_DIR" /etc/rabbitmq /tmp/rabbitmq-ssl /var/log/rabbitmq; \
- chown -fR rabbitmq:rabbitmq "$RABBITMQ_DATA_DIR" /etc/rabbitmq /tmp/rabbitmq-ssl /var/log/rabbitmq; \
- chmod 777 "$RABBITMQ_DATA_DIR" /etc/rabbitmq /tmp/rabbitmq-ssl /var/log/rabbitmq; \
+ mkdir -p "$RABBITMQ_DATA_DIR" /etc/rabbitmq /etc/rabbitmq/conf.d /tmp/rabbitmq-ssl /var/log/rabbitmq; \
+ chown -fR rabbitmq:rabbitmq "$RABBITMQ_DATA_DIR" /etc/rabbitmq /etc/rabbitmq/conf.d /tmp/rabbitmq-ssl /var/log/rabbitmq; \
+ chmod 777 "$RABBITMQ_DATA_DIR" /etc/rabbitmq /etc/rabbitmq/conf.d /tmp/rabbitmq-ssl /var/log/rabbitmq; \
ln -sf "$RABBITMQ_DATA_DIR/.erlang.cookie" /root/.erlang.cookie
-# Use the latest alpha RabbitMQ 3.8 release - https://dl.bintray.com/rabbitmq/all-dev/rabbitmq-server/
-ARG RABBITMQ_VERSION
-ENV RABBITMQ_VERSION=${RABBITMQ_VERSION}
# https://www.rabbitmq.com/signatures.html#importing-gpg
# ENV RABBITMQ_PGP_KEY_ID="0x0A9AF2115F4687BD29803A206B73A36E6026DFCA"
ENV RABBITMQ_HOME=/opt/rabbitmq
@@ -216,21 +217,6 @@ RUN set -eux; \
; \
rm -rf /var/lib/apt/lists/*; \
\
- # RABBITMQ_SOURCE_URL="https://dl.bintray.com/rabbitmq/all-dev/rabbitmq-server/$RABBITMQ_VERSION/rabbitmq-server-generic-unix-latest-toolchain-${RABBITMQ_VERSION}.tar.xz"; \
- # RABBITMQ_PATH="/usr/local/src/rabbitmq-$RABBITMQ_VERSION"; \
- \
- # wget --progress dot:giga --output-document "$RABBITMQ_PATH.tar.xz.asc" "$RABBITMQ_SOURCE_URL.asc"; \
- # wget --progress dot:giga --output-document "$RABBITMQ_PATH.tar.xz" "$RABBITMQ_SOURCE_URL"; \
- \
- # export GNUPGHOME="$(mktemp -d)"; \
- # gpg --batch --keyserver "$PGP_KEYSERVER" --recv-keys "$RABBITMQ_PGP_KEY_ID"; \
- # gpg --batch --verify "$RABBITMQ_PATH.tar.xz.asc" "$RABBITMQ_PATH.tar.xz"; \
- # gpgconf --kill all; \
- # rm -rf "$GNUPGHOME"; \
- \
- # mkdir -p "$RABBITMQ_HOME"; \
- # tar --extract --file "$RABBITMQ_PATH.tar.xz" --directory "$RABBITMQ_HOME" --strip-components 1; \
- # rm -rf "$RABBITMQ_PATH"*; \
# Do not default SYS_PREFIX to RABBITMQ_HOME, leave it empty
grep -qE '^SYS_PREFIX=\$\{RABBITMQ_HOME\}$' "$RABBITMQ_HOME/sbin/rabbitmq-defaults"; \
sed -i 's/^SYS_PREFIX=.*$/SYS_PREFIX=/' "$RABBITMQ_HOME/sbin/rabbitmq-defaults"; \
@@ -264,9 +250,11 @@ VOLUME $RABBITMQ_DATA_DIR
# https://docs.docker.com/samples/library/ubuntu/#locales
ENV LANG=C.UTF-8 LANGUAGE=C.UTF-8 LC_ALL=C.UTF-8
+COPY --chown=rabbitmq:rabbitmq 10-default-guest-user.conf /etc/rabbitmq/conf.d/
COPY docker-entrypoint.sh /usr/local/bin/
ENTRYPOINT ["docker-entrypoint.sh"]
+# EPMD AMQP-TLS AMQP ERLANG
EXPOSE 4369 5671 5672 25672
CMD ["rabbitmq-server"]
@@ -275,27 +263,49 @@ RUN rabbitmq-plugins enable --offline rabbitmq_management && \
rabbitmq-plugins is_enabled rabbitmq_management --offline
# extract "rabbitmqadmin" from inside the "rabbitmq_management-X.Y.Z.ez" plugin zipfile
# see https://github.com/docker-library/rabbitmq/issues/207
+# RabbitMQ 3.9 onwards uses uncompressed plugins by default, in which case extraction is
+# unnecessary
RUN set -eux; \
- erl -noinput -eval ' \
- { ok, AdminBin } = zip:foldl(fun(FileInArchive, GetInfo, GetBin, Acc) -> \
- case Acc of \
- "" -> \
- case lists:suffix("/rabbitmqadmin", FileInArchive) of \
- true -> GetBin(); \
- false -> Acc \
- end; \
- _ -> Acc \
- end \
- end, "", init:get_plain_arguments()), \
- io:format("~s", [ AdminBin ]), \
- init:stop(). \
- ' -- /plugins/rabbitmq_management-*.ez > /usr/local/bin/rabbitmqadmin; \
+ if [ -s /plugins/rabbitmq_management-*.ez ]; then \
+ erl -noinput -eval ' \
+ { ok, AdminBin } = zip:foldl(fun(FileInArchive, GetInfo, GetBin, Acc) -> \
+ case Acc of \
+ "" -> \
+ case lists:suffix("/rabbitmqadmin", FileInArchive) of \
+ true -> GetBin(); \
+ false -> Acc \
+ end; \
+ _ -> Acc \
+ end \
+ end, "", init:get_plain_arguments()), \
+ io:format("~s", [ AdminBin ]), \
+ init:stop(). \
+ ' -- /plugins/rabbitmq_management-*.ez > /usr/local/bin/rabbitmqadmin; \
+ else \
+ cp /plugins/rabbitmq_management-*/priv/www/cli/rabbitmqadmin /usr/local/bin/rabbitmqadmin; \
+ fi; \
[ -s /usr/local/bin/rabbitmqadmin ]; \
chmod +x /usr/local/bin/rabbitmqadmin; \
- apt-get update; apt-get install -y --no-install-recommends python3; rm -rf /var/lib/apt/lists/*; \
+ apt-get update; apt-get install -y --no-install-recommends python3 dstat sysstat htop nmon tmux neovim; rm -rf /var/lib/apt/lists/*; \
rabbitmqadmin --version
+# MANAGEMENT-TLS MANAGEMENT
EXPOSE 15671 15672
RUN rabbitmq-plugins enable --offline rabbitmq_prometheus && \
rabbitmq-plugins is_enabled rabbitmq_prometheus --offline
-EXPOSE 15692
+# PROMETHEUS-TLS PROMETHEUS
+EXPOSE 15691 15692
+
+RUN rabbitmq-plugins enable --all
+# STREAM-TLS STREAM
+EXPOSE 5551 5552
+# MQTT-TLS MQTT
+EXPOSE 8883 1883
+# WEB-MQTT-TLS WEB-MQTT
+EXPOSE 15676 15675
+# STOMP-TLS STOMP
+EXPOSE 61614 61613
+# WEB-STOMP-TLS WEB-STOMP
+EXPOSE 15673 15674
+# EXAMPLES
+EXPOSE 15670
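
The SKIP_PGP_VERIFY and PGP_KEYSERVER build arguments introduced above are meant to be supplied at build time; the Makefile change that follows wires them in, but a hand-rolled build from packaging/docker-image (after extracting a generic-unix archive there, as the Makefile's dist target does) would look roughly like this. The tag and the <version> placeholder are arbitrary; the OTP values are the ones pinned in that Makefile:

    docker build --pull \
        --build-arg SKIP_PGP_VERIFY=true \
        --build-arg PGP_KEYSERVER=keyserver.ubuntu.com \
        --build-arg OTP_VERSION=24.0.2 \
        --build-arg OTP_SHA256=4abca2cda7fc962ad65575e5ed834dd69c745e7e637f92cfd49f384a281d0f18 \
        --build-arg RABBITMQ_BUILD=rabbitmq_server-<version> \
        --tag rabbitmq-local:dev \
        packaging/docker-image
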
diff --git a/packaging/docker-image/Makefile b/packaging/docker-image/Makefile
index 2a39c7bade..31c74b970e 100644
--- a/packaging/docker-image/Makefile
+++ b/packaging/docker-image/Makefile
@@ -1,35 +1,81 @@
-GENERIC_UNIX_ARCHIVE ?= $(wildcard $(PACKAGES_DIR)/rabbitmq-server-generic-unix-*.tar.xz)
+# PROJECT_VERSION defaults to:
+# 1. the version exported by rabbitmq-server-release;
+# 2. the version stored in `git-revisions.txt`, if it exists;
+# 3. a version based on git-describe(1), if it is a Git clone;
+# 4. 0.0.0
+
+PROJECT_VERSION := $(RABBITMQ_VERSION)
+
+ifeq ($(PROJECT_VERSION),)
+PROJECT_VERSION := $(shell \
+if test -f git-revisions.txt; then \
+ head -n1 git-revisions.txt | \
+ awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
+else \
+ (git describe --dirty --abbrev=7 --tags --always --first-parent \
+ 2>/dev/null || echo rabbitmq_v0_0_0) | \
+ sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
+ -e 's/-/./g'; \
+fi)
+endif
+
+ifeq ($(filter-out all dist push clean,$(MAKECMDGOALS)),)
+GENERIC_UNIX_ARCHIVE ?= $(wildcard $(PACKAGES_DIR)/rabbitmq-server-generic-unix-$(PROJECT_VERSION).tar.xz)
-ifneq ($(filter-out clean,$(MAKECMDGOALS)),)
ifeq ($(GENERIC_UNIX_ARCHIVE),)
$(error Cannot find generic-unix archive; please specify GENERIC_UNIX_ARCHIVE)
endif
-ifneq ($(words $(SOURCE_DIST_FILE)),1)
-$(error Multiple generic-unix archives found; please specify GENERIC_UNIX_ARCHIVE)
-endif
VERSION ?= $(patsubst rabbitmq-server-generic-unix-%.tar.xz,%,$(notdir $(GENERIC_UNIX_ARCHIVE)))
ifeq ($(VERSION),)
$(error Cannot determine version; please specify VERSION)
endif
+
+IMAGE_TAG_1 ?= $(subst +,-,$(VERSION))
endif
-OTP_VERSION ?= 22.3
-OTP_SHA256 ?= 886e6dbe1e4823c7e8d9c9c1ba8315075a1a9f7717f5a1eaf3b98345ca6c798e
+OTP_VERSION ?= 24.0.2
+OTP_SHA256 ?= 4abca2cda7fc962ad65575e5ed834dd69c745e7e637f92cfd49f384a281d0f18
+REPO ?= pivotalrabbitmq/rabbitmq
+SKIP_PGP_VERIFY ?= false
+PGP_KEYSERVER ?= pgpkeys.eu
+ALT1_PGP_KEYSERVER ?= keyserver.ubuntu.com
+ALT2_PGP_KEYSERVER ?= pgpkeys.uk
all: dist
dist:
xzcat $(GENERIC_UNIX_ARCHIVE) | tar xvf -
- echo docker build --pull \
- --build-arg PGP_KEYSERVER=pgpkeys.uk \
+ docker build --pull \
+ --build-arg SKIP_PGP_VERIFY=$(SKIP_PGP_VERIFY) \
+ --build-arg PGP_KEYSERVER=$(PGP_KEYSERVER) \
--build-arg OTP_VERSION=$(OTP_VERSION) \
--build-arg OTP_SHA256=$(OTP_SHA256) \
- --build-arg RABBITMQ_VERSION=$(VERSION) \
--build-arg RABBITMQ_BUILD=rabbitmq_server-$(VERSION) \
- --tag pivotalrabbitmq/rabbitmq:$(VERSION) \
- . \
- && docker push pivotalrabbitmq/rabbitmq:$(VERSION)
+ --tag $(REPO):$(IMAGE_TAG_1) \
+ .
+
+push:
+ docker push $(REPO):$(IMAGE_TAG_1)
+ifdef IMAGE_TAG_2
+ docker tag $(REPO):$(IMAGE_TAG_1) $(REPO):$(IMAGE_TAG_2)
+ docker push $(REPO):$(IMAGE_TAG_2)
+endif
clean:
rm -rf rabbitmq_server-*
+
+OTP_VERSION_MATCH ?= 24[0-9.]+
+define LATEST_STABLE_OTP_VERSION
+curl --silent --fail https://api.github.com/repos/erlang/otp/git/refs/tags | \
+ jq -r '.[].ref | sub("refs/tags/OTP.{1}";"") | match("^$(OTP_VERSION_MATCH)$$") | .string' | \
+ tail -n 1
+endef
+.PHONY: find-otp-sha256
+find-otp-sha256:
+ @printf "Version: " && \
+ export VERSION="$$($(LATEST_STABLE_OTP_VERSION))" && \
+ echo "$$VERSION" && \
+ printf "Checksum: " && \
+ wget --continue --quiet --output-document="/tmp/OTP-$$VERSION.tar.gz" "https://github.com/erlang/otp/archive/OTP-$$VERSION.tar.gz" && \
+ shasum -a 256 "/tmp/OTP-$$VERSION.tar.gz"
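
The find-otp-sha256 helper added above produces the otp/otp_sha256 pairs recorded in the otp-versions files further down; typical invocations, assuming curl, jq, wget and shasum are available locally (the archive path, REPO and IMAGE_TAG_2 values are placeholders):

    # print the newest OTP release matching OTP_VERSION_MATCH (a full-match regex,
    # default 24[0-9.]+) together with the checksum of its source tarball
    make -C packaging/docker-image find-otp-sha256 OTP_VERSION_MATCH='24.1[0-9.]+'

    # build and push the image once a matching generic-unix archive exists
    make -C packaging/docker-image dist push \
        GENERIC_UNIX_ARCHIVE=PACKAGES/rabbitmq-server-generic-unix-<version>.tar.xz \
        REPO=myorg/rabbitmq IMAGE_TAG_2=latest
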
diff --git a/packaging/docker-image/docker-entrypoint.sh b/packaging/docker-image/docker-entrypoint.sh
index 55577a9e6e..722dc1e235 100755
--- a/packaging/docker-image/docker-entrypoint.sh
+++ b/packaging/docker-image/docker-entrypoint.sh
@@ -1,108 +1,8 @@
-#!/bin/bash
-set -eu
-
-# usage: file_env VAR [DEFAULT]
-# ie: file_env 'XYZ_DB_PASSWORD' 'example'
-# (will allow for "$XYZ_DB_PASSWORD_FILE" to fill in the value of
-# "$XYZ_DB_PASSWORD" from a file, especially for Docker's secrets feature)
-file_env() {
- local var="$1"
- local fileVar="${var}_FILE"
- local def="${2:-}"
- if [ "${!var:-}" ] && [ "${!fileVar:-}" ]; then
- echo >&2 "error: both $var and $fileVar are set (but are exclusive)"
- exit 1
- fi
- local val="$def"
- if [ "${!var:-}" ]; then
- val="${!var}"
- elif [ "${!fileVar:-}" ]; then
- val="$(< "${!fileVar}")"
- fi
- export "$var"="$val"
- unset "$fileVar"
-}
-
-# backwards compatibility for old environment variables
-: "${RABBITMQ_SSL_CERTFILE:=${RABBITMQ_SSL_CERT_FILE:-}}"
-: "${RABBITMQ_SSL_KEYFILE:=${RABBITMQ_SSL_KEY_FILE:-}}"
-: "${RABBITMQ_SSL_CACERTFILE:=${RABBITMQ_SSL_CA_FILE:-}}"
-
-# "management" SSL config should default to using the same certs
-: "${RABBITMQ_MANAGEMENT_SSL_CACERTFILE:=$RABBITMQ_SSL_CACERTFILE}"
-: "${RABBITMQ_MANAGEMENT_SSL_CERTFILE:=$RABBITMQ_SSL_CERTFILE}"
-: "${RABBITMQ_MANAGEMENT_SSL_KEYFILE:=$RABBITMQ_SSL_KEYFILE}"
-
-# Allowed env vars that will be read from mounted files (i.e. Docker Secrets):
-fileEnvKeys=(
- default_user
- default_pass
-)
-
-# https://www.rabbitmq.com/configure.html
-sslConfigKeys=(
- cacertfile
- certfile
- depth
- fail_if_no_peer_cert
- keyfile
- verify
-)
-managementConfigKeys=(
- "${sslConfigKeys[@]/#/ssl_}"
-)
-rabbitConfigKeys=(
- default_pass
- default_user
- default_vhost
- vm_memory_high_watermark
-)
-fileConfigKeys=(
- management_ssl_cacertfile
- management_ssl_certfile
- management_ssl_keyfile
- ssl_cacertfile
- ssl_certfile
- ssl_keyfile
-)
-allConfigKeys=(
- "${managementConfigKeys[@]/#/management_}"
- "${rabbitConfigKeys[@]}"
- "${sslConfigKeys[@]/#/ssl_}"
-)
-
-declare -A configDefaults=(
- [management_ssl_fail_if_no_peer_cert]='false'
- [management_ssl_verify]='verify_none'
-
- [ssl_fail_if_no_peer_cert]='true'
- [ssl_verify]='verify_peer'
-)
+#!/usr/bin/env bash
+set -euo pipefail
# allow the container to be started with `--user`
if [[ "$1" == rabbitmq* ]] && [ "$(id -u)" = '0' ]; then
- # this needs to happen late enough that we have the SSL config
- # https://github.com/docker-library/rabbitmq/issues/283
- for conf in "${allConfigKeys[@]}"; do
- var="RABBITMQ_${conf^^}"
- val="${!var:-}"
- [ -n "$val" ] || continue
- case "$conf" in
- *_ssl_*file | ssl_*file )
- if [ -f "$val" ] && ! gosu rabbitmq test -r "$val"; then
- newFile="/tmp/rabbitmq-ssl/$conf.pem"
- echo >&2
- echo >&2 "WARNING: '$val' ($var) is not readable by rabbitmq ($(id rabbitmq)); copying to '$newFile'"
- echo >&2
- cat "$val" > "$newFile"
- chown rabbitmq "$newFile"
- chmod 0400 "$newFile"
- eval 'export '$var'="$newFile"'
- fi
- ;;
- esac
- done
-
if [ "$1" = 'rabbitmq-server' ]; then
find /var/lib/rabbitmq \! -user rabbitmq -exec chown rabbitmq '{}' +
fi
@@ -110,298 +10,44 @@ if [[ "$1" == rabbitmq* ]] && [ "$(id -u)" = '0' ]; then
exec gosu rabbitmq "$BASH_SOURCE" "$@"
fi
-haveConfig=
-haveSslConfig=
-haveManagementSslConfig=
-for fileEnvKey in "${fileEnvKeys[@]}"; do file_env "RABBITMQ_${fileEnvKey^^}"; done
-for conf in "${allConfigKeys[@]}"; do
- var="RABBITMQ_${conf^^}"
- val="${!var:-}"
- if [ "$val" ]; then
- if [ "${configDefaults[$conf]:-}" ] && [ "${configDefaults[$conf]}" = "$val" ]; then
- # if the value set is the same as the default, treat it as if it isn't set
- continue
- fi
- haveConfig=1
- case "$conf" in
- ssl_*) haveSslConfig=1 ;;
- management_ssl_*) haveManagementSslConfig=1 ;;
- esac
- fi
-done
-if [ "$haveSslConfig" ]; then
- missing=()
- for sslConf in cacertfile certfile keyfile; do
- var="RABBITMQ_SSL_${sslConf^^}"
- val="${!var}"
- if [ -z "$val" ]; then
- missing+=( "$var" )
- fi
- done
- if [ "${#missing[@]}" -gt 0 ]; then
- {
- echo
- echo 'error: SSL requested, but missing required configuration'
- for miss in "${missing[@]}"; do
- echo " - $miss"
- done
- echo
- } >&2
- exit 1
- fi
-fi
-missingFiles=()
-for conf in "${fileConfigKeys[@]}"; do
- var="RABBITMQ_${conf^^}"
- val="${!var}"
- if [ "$val" ] && [ ! -f "$val" ]; then
- missingFiles+=( "$val ($var)" )
+deprecatedEnvVars=(
+ RABBITMQ_DEFAULT_PASS
+ RABBITMQ_DEFAULT_PASS_FILE
+ RABBITMQ_DEFAULT_USER
+ RABBITMQ_DEFAULT_USER_FILE
+ RABBITMQ_DEFAULT_VHOST
+ RABBITMQ_MANAGEMENT_SSL_CACERTFILE
+ RABBITMQ_MANAGEMENT_SSL_CERTFILE
+ RABBITMQ_MANAGEMENT_SSL_DEPTH
+ RABBITMQ_MANAGEMENT_SSL_FAIL_IF_NO_PEER_CERT
+ RABBITMQ_MANAGEMENT_SSL_KEYFILE
+ RABBITMQ_MANAGEMENT_SSL_VERIFY
+ RABBITMQ_SSL_CACERTFILE
+ RABBITMQ_SSL_CERTFILE
+ RABBITMQ_SSL_DEPTH
+ RABBITMQ_SSL_FAIL_IF_NO_PEER_CERT
+ RABBITMQ_SSL_KEYFILE
+ RABBITMQ_SSL_VERIFY
+ RABBITMQ_VM_MEMORY_HIGH_WATERMARK
+)
+hasOldEnv=
+for old in "${deprecatedEnvVars[@]}"; do
+ if [ -n "${!old:-}" ]; then
+ echo >&2 "error: $old is set but deprecated"
+ hasOldEnv=1
fi
done
-if [ "${#missingFiles[@]}" -gt 0 ]; then
- {
- echo
- echo 'error: files specified, but missing'
- for miss in "${missingFiles[@]}"; do
- echo " - $miss"
- done
- echo
- } >&2
+if [ -n "$hasOldEnv" ]; then
+ echo >&2 'error: deprecated environment variables detected'
+ echo >&2
+ echo >&2 'Please use a configuration file instead; visit https://www.rabbitmq.com/configure.html to learn more'
+ echo >&2
exit 1
fi
-# set defaults for missing values (but only after we're done with all our checking so we don't throw any of that off)
-for conf in "${!configDefaults[@]}"; do
- default="${configDefaults[$conf]}"
- var="RABBITMQ_${conf^^}"
- [ -z "${!var:-}" ] || continue
- eval "export $var=\"\$default\""
-done
-
# if long and short hostnames are not the same, use long hostnames
-if [ "$(hostname)" != "$(hostname -s)" ]; then
+if [ -z "${RABBITMQ_USE_LONGNAME:-}" ] && [ "$(hostname)" != "$(hostname -s)" ]; then
: "${RABBITMQ_USE_LONGNAME:=true}"
fi
-if [ "${RABBITMQ_ERLANG_COOKIE:-}" ]; then
- cookieFile='/var/lib/rabbitmq/.erlang.cookie'
- if [ -e "$cookieFile" ]; then
- if [ "$(cat "$cookieFile" 2>/dev/null)" != "$RABBITMQ_ERLANG_COOKIE" ]; then
- echo >&2
- echo >&2 "warning: $cookieFile contents do not match RABBITMQ_ERLANG_COOKIE"
- echo >&2
- fi
- else
- echo "$RABBITMQ_ERLANG_COOKIE" > "$cookieFile"
- fi
- chmod 600 "$cookieFile"
-fi
-
-configBase="${RABBITMQ_CONFIG_FILE:-/etc/rabbitmq/rabbitmq}"
-oldConfigFile="$configBase.config"
-newConfigFile="$configBase.conf"
-
-shouldWriteConfig="$haveConfig"
-if [ -n "$shouldWriteConfig" ] && [ -f "$oldConfigFile" ]; then
- {
- echo "error: Docker configuration environment variables specified, but old-style (Erlang syntax) configuration file '$oldConfigFile' exists"
- echo " Suggested fixes: (choose one)"
- echo " - remove '$oldConfigFile'"
- echo " - remove any Docker-specific 'RABBITMQ_...' environment variables"
- echo " - convert '$oldConfigFile' to the newer sysctl format ('$newConfigFile'); see https://www.rabbitmq.com/configure.html#config-file"
- } >&2
- exit 1
-fi
-if [ -z "$shouldWriteConfig" ] && [ ! -f "$oldConfigFile" ] && [ ! -f "$newConfigFile" ]; then
- # no config files, we should write one
- shouldWriteConfig=1
-fi
-
-# http://stackoverflow.com/a/2705678/433558
-sed_escape_lhs() {
- echo "$@" | sed -e 's/[]\/$*.^|[]/\\&/g'
-}
-sed_escape_rhs() {
- echo "$@" | sed -e 's/[\/&]/\\&/g'
-}
-rabbit_set_config() {
- local key="$1"; shift
- local val="$1"; shift
-
- [ -e "$newConfigFile" ] || touch "$newConfigFile"
-
- local sedKey="$(sed_escape_lhs "$key")"
- local sedVal="$(sed_escape_rhs "$val")"
- sed -ri \
- "s/^[[:space:]]*(${sedKey}[[:space:]]*=[[:space:]]*)\S.*\$/\1${sedVal}/" \
- "$newConfigFile"
- if ! grep -qE "^${sedKey}[[:space:]]*=" "$newConfigFile"; then
- echo "$key = $val" >> "$newConfigFile"
- fi
-}
-rabbit_comment_config() {
- local key="$1"; shift
-
- [ -e "$newConfigFile" ] || touch "$newConfigFile"
-
- local sedKey="$(sed_escape_lhs "$key")"
- sed -ri \
- "s/^[[:space:]]*#?[[:space:]]*(${sedKey}[[:space:]]*=[[:space:]]*\S.*)\$/# \1/" \
- "$newConfigFile"
-}
-rabbit_env_config() {
- local prefix="$1"; shift
-
- local conf
- for conf; do
- local var="rabbitmq${prefix:+_$prefix}_$conf"
- var="${var^^}"
-
- local key="$conf"
- case "$prefix" in
- ssl) key="ssl_options.$key" ;;
- management_ssl) key="management.ssl.$key" ;;
- esac
-
- local val="${!var:-}"
- local rawVal="$val"
- case "$conf" in
- fail_if_no_peer_cert)
- case "${val,,}" in
- false|no|0|'') rawVal='false' ;;
- true|yes|1|*) rawVal='true' ;;
- esac
- ;;
-
- vm_memory_high_watermark) continue ;; # handled separately
- esac
-
- if [ -n "$rawVal" ]; then
- rabbit_set_config "$key" "$rawVal"
- else
- rabbit_comment_config "$key"
- fi
- done
-}
-
-if [ "$1" = 'rabbitmq-server' ] && [ "$shouldWriteConfig" ]; then
- rabbit_set_config 'loopback_users.guest' 'false'
-
- # determine whether to set "vm_memory_high_watermark" (based on cgroups)
- memTotalKb=
- if [ -r /proc/meminfo ]; then
- memTotalKb="$(awk -F ':? +' '$1 == "MemTotal" { print $2; exit }' /proc/meminfo)"
- fi
- memLimitB=
- if [ -r /sys/fs/cgroup/memory/memory.limit_in_bytes ]; then
- # "18446744073709551615" is a valid value for "memory.limit_in_bytes", which is too big for Bash math to handle
- # "$(( 18446744073709551615 / 1024 ))" = 0; "$(( 18446744073709551615 * 40 / 100 ))" = 0
- memLimitB="$(awk -v totKb="$memTotalKb" '{
- limB = $0;
- limKb = limB / 1024;
- if (!totKb || limKb < totKb) {
- printf "%.0f\n", limB;
- }
- }' /sys/fs/cgroup/memory/memory.limit_in_bytes)"
- fi
- if [ -n "$memLimitB" ]; then
- # if we have a cgroup memory limit, let's inform RabbitMQ of what it is (so it can calculate vm_memory_high_watermark properly)
- # https://github.com/rabbitmq/rabbitmq-server/pull/1234
- rabbit_set_config 'total_memory_available_override_value' "$memLimitB"
- fi
- # https://www.rabbitmq.com/memory.html#memsup-usage
- if [ "${RABBITMQ_VM_MEMORY_HIGH_WATERMARK:-}" ]; then
- # https://github.com/docker-library/rabbitmq/pull/105#issuecomment-242165822
- vmMemoryHighWatermark="$(
- echo "$RABBITMQ_VM_MEMORY_HIGH_WATERMARK" | awk '
- /^[0-9]*[.][0-9]+$|^[0-9]+([.][0-9]+)?%$/ {
- perc = $0;
- if (perc ~ /%$/) {
- gsub(/%$/, "", perc);
- perc = perc / 100;
- }
- if (perc > 1.0 || perc < 0.0) {
- printf "error: invalid percentage for vm_memory_high_watermark: %s (must be >= 0%%, <= 100%%)\n", $0 > "/dev/stderr";
- exit 1;
- }
- printf "vm_memory_high_watermark.relative %0.03f\n", perc;
- next;
- }
- /^[0-9]+$/ {
- printf "vm_memory_high_watermark.absolute %s\n", $0;
- next;
- }
- /^[0-9]+([.][0-9]+)?[a-zA-Z]+$/ {
- printf "vm_memory_high_watermark.absolute %s\n", $0;
- next;
- }
- {
- printf "error: unexpected input for vm_memory_high_watermark: %s\n", $0;
- exit 1;
- }
- '
- )"
- if [ "$vmMemoryHighWatermark" ]; then
- vmMemoryHighWatermarkKey="${vmMemoryHighWatermark%% *}"
- vmMemoryHighWatermarkVal="${vmMemoryHighWatermark#$vmMemoryHighWatermarkKey }"
- rabbit_set_config "$vmMemoryHighWatermarkKey" "$vmMemoryHighWatermarkVal"
- case "$vmMemoryHighWatermarkKey" in
- # make sure we only set one or the other
- 'vm_memory_high_watermark.absolute') rabbit_comment_config 'vm_memory_high_watermark.relative' ;;
- 'vm_memory_high_watermark.relative') rabbit_comment_config 'vm_memory_high_watermark.absolute' ;;
- esac
- fi
- fi
-
- if [ "$haveSslConfig" ]; then
- rabbit_set_config 'listeners.ssl.default' 5671
- rabbit_env_config 'ssl' "${sslConfigKeys[@]}"
- else
- rabbit_set_config 'listeners.tcp.default' 5672
- fi
-
- rabbit_env_config '' "${rabbitConfigKeys[@]}"
-
- # if management plugin is installed, generate config for it
- # https://www.rabbitmq.com/management.html#configuration
- if [ "$(rabbitmq-plugins list -q -m -e 'rabbitmq_management$')" ]; then
- if [ "$haveManagementSslConfig" ]; then
- rabbit_set_config 'management.listener.port' 15671
- rabbit_set_config 'management.listener.ssl' 'true'
- rabbit_env_config 'management_ssl' "${sslConfigKeys[@]}"
- else
- rabbit_set_config 'management.listener.port' 15672
- rabbit_set_config 'management.listener.ssl' 'false'
- fi
-
- # if definitions file exists, then load it
- # https://www.rabbitmq.com/management.html#load-definitions
- managementDefinitionsFile='/etc/rabbitmq/definitions.json'
- if [ -f "$managementDefinitionsFile" ]; then
- # see also https://github.com/docker-library/rabbitmq/pull/112#issuecomment-271485550
- rabbit_set_config 'management.load_definitions' "$managementDefinitionsFile"
- fi
- fi
-fi
-
-combinedSsl='/tmp/rabbitmq-ssl/combined.pem'
-if [ "$haveSslConfig" ] && [[ "$1" == rabbitmq* ]] && [ ! -f "$combinedSsl" ]; then
- # Create combined cert
- {
- cat "$RABBITMQ_SSL_CERTFILE"
- echo # https://github.com/docker-library/rabbitmq/issues/357#issuecomment-517755647
- cat "$RABBITMQ_SSL_KEYFILE"
- } > "$combinedSsl"
- chmod 0400 "$combinedSsl"
-fi
-if [ "$haveSslConfig" ] && [ -f "$combinedSsl" ]; then
- # More ENV vars for make clustering happiness
- # we don't handle clustering in this script, but these args should ensure
- # clustered SSL-enabled members will talk nicely
- export ERL_SSL_PATH="$(erl -eval 'io:format("~p", [code:lib_dir(ssl, ebin)]),halt().' -noshell)"
- sslErlArgs="-pa $ERL_SSL_PATH -proto_dist inet_tls -ssl_dist_opt server_certfile $combinedSsl -ssl_dist_opt server_secure_renegotiate true client_secure_renegotiate true"
- export RABBITMQ_SERVER_ADDITIONAL_ERL_ARGS="${RABBITMQ_SERVER_ADDITIONAL_ERL_ARGS:-} $sslErlArgs"
- export RABBITMQ_CTL_ERL_ARGS="${RABBITMQ_CTL_ERL_ARGS:-} $sslErlArgs"
-fi
-
-exec "$@"
+exec "$@"
\ No newline at end of file
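
With the configuration-generation logic gone, the entrypoint now rejects the old RABBITMQ_* variables and defers to RabbitMQ's own configuration files. A rough equivalent of the removed RABBITMQ_DEFAULT_USER / RABBITMQ_DEFAULT_PASS / RABBITMQ_DEFAULT_VHOST handling, expressed as a conf.d snippet mounted into the container (file name and credentials are placeholders; this assumes the image reads /etc/rabbitmq/conf.d, as the shipped 10-default-guest-user.conf suggests):

    cat > 20-defaults.conf <<'EOF'
    default_user = admin
    default_pass = use-a-strong-password
    default_vhost = /
    EOF
    docker run -d --name rabbitmq \
        -v "$PWD/20-defaults.conf:/etc/rabbitmq/conf.d/20-defaults.conf:ro" \
        <image built from packaging/docker-image>
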
diff --git a/packaging/docker-image/otp-versions/otp-max.yaml b/packaging/docker-image/otp-versions/otp-max.yaml
new file mode 100644
index 0000000000..20d53d80cc
--- /dev/null
+++ b/packaging/docker-image/otp-versions/otp-max.yaml
@@ -0,0 +1,8 @@
+otp: '24.1.5'
+# make -C packaging/docker-image find-otp-sha256 OTP_VERSION_MATCH=24.0
+otp_sha256: 3ca6768147653bca1b429a767377dee3351004b719099d34004633833dbfbc23
+# Which is the max supported Elixir?
+# https://github.com/rabbitmq/rabbitmq-server/blob/master/deps/rabbitmq_cli/mix.exs#L14
+# Which is the latest Elixir release?
+# https://github.com/elixir-lang/elixir/tags
+elixir: '1.12.2'
\ No newline at end of file
diff --git a/packaging/docker-image/otp-versions/otp-min.yaml b/packaging/docker-image/otp-versions/otp-min.yaml
new file mode 100644
index 0000000000..8c63172fab
--- /dev/null
+++ b/packaging/docker-image/otp-versions/otp-min.yaml
@@ -0,0 +1,6 @@
+otp: '23.2'
+# make -C packaging/docker-image find-otp-sha256 OTP_VERSION_MATCH=23.2
+otp_sha256: 79f2233a960cc427607d52a7b7e9e5b08afba96a4d87ced4efb64e902b44160c
+# Which is the min supported Elixir?
+# https://github.com/rabbitmq/rabbitmq-server/blob/master/deps/rabbitmq_cli/mix.exs#L14
+elixir: '1.10.4'
\ No newline at end of file
diff --git a/packaging/windows-exe/Makefile b/packaging/windows-exe/Makefile
deleted file mode 100644
index 2d2adeefdb..0000000000
--- a/packaging/windows-exe/Makefile
+++ /dev/null
@@ -1,36 +0,0 @@
-ifeq ($(PACKAGES_DIR),)
-ZIP_DIR = ../windows
-else
-ZIP_DIR = $(PACKAGES_DIR)
-endif
-ZIP = $(notdir $(wildcard $(ZIP_DIR)/rabbitmq-server-windows-*.zip))
-
-VERSION = $(patsubst rabbitmq-server-windows-%.zip,%,$(ZIP))
-PRODUCT_VERSION ?= $(shell echo "$(VERSION)" | sed -E 's/([0-9]+\.[0-9]+\.[0-9]+).*/\1.0/')
-
-unexport DEPS_DIR
-unexport ERL_LIBS
-MAKEOVERRIDES =
-
-all: dist
- @:
-
-dist: rabbitmq-$(VERSION).nsi rabbitmq_server-$(VERSION)
- makensis -V2 rabbitmq-$(VERSION).nsi
-
- if test "$(PACKAGES_DIR)"; then \
- mkdir -p "$(PACKAGES_DIR)"; \
- mv rabbitmq-server-$(VERSION).exe "$(PACKAGES_DIR)"; \
- fi
-
-rabbitmq-$(VERSION).nsi: rabbitmq_nsi.in
- sed \
- -e 's|%%VERSION%%|$(VERSION)|' \
- -e 's|%%PRODUCT_VERSION%%|$(PRODUCT_VERSION)|' \
- $< > $@
-
-rabbitmq_server-$(VERSION):
- unzip -q $(ZIP_DIR)/$(ZIP)
-
-clean:
- rm -rf rabbitmq-*.nsi rabbitmq_server-* rabbitmq-server-*.exe
diff --git a/packaging/windows-exe/plugins/ExecDos.dll b/packaging/windows-exe/plugins/ExecDos.dll
deleted file mode 100644
index 0d8a871a9d..0000000000
--- a/packaging/windows-exe/plugins/ExecDos.dll
+++ /dev/null
Binary files differ
diff --git a/packaging/windows-exe/plugins/ShellLink.dll b/packaging/windows-exe/plugins/ShellLink.dll
deleted file mode 100755
index f57ded34da..0000000000
--- a/packaging/windows-exe/plugins/ShellLink.dll
+++ /dev/null
Binary files differ
diff --git a/packaging/windows-exe/rabbitmq.ico b/packaging/windows-exe/rabbitmq.ico
deleted file mode 100644
index 5e169a7996..0000000000
--- a/packaging/windows-exe/rabbitmq.ico
+++ /dev/null
Binary files differ
diff --git a/packaging/windows-exe/rabbitmq_nsi.in b/packaging/windows-exe/rabbitmq_nsi.in
deleted file mode 100644
index f1f4d678fa..0000000000
--- a/packaging/windows-exe/rabbitmq_nsi.in
+++ /dev/null
@@ -1,299 +0,0 @@
-; Use the "Modern" UI
-!include MUI2.nsh
-!include LogicLib.nsh
-!include WinMessages.nsh
-!include FileFunc.nsh
-!include WordFunc.nsh
-!include x64.nsh
-
-!addplugindir plugins
-
-!define env_hklm 'HKLM "SYSTEM\CurrentControlSet\Control\Session Manager\Environment"'
-!define uninstall "Software\Microsoft\Windows\CurrentVersion\Uninstall\RabbitMQ"
-!define MUI_FINISHPAGE_NOAUTOCLOSE
-!define MUI_UNFINISHPAGE_NOAUTOCLOSE
-
-;--------------------------------
-
-; The name of the installer
-Name "RabbitMQ Server %%VERSION%%"
-
-; The file to write
-OutFile "rabbitmq-server-%%VERSION%%.exe"
-
-; Icons
-!define MUI_ICON "rabbitmq.ico"
-
-; The default installation directory is empty. The .onInit function
-; below takes care of selecting the appropriate (32-bit vs. 64-bit)
-; "Program Files".
-InstallDir ""
-
-; Registry key to check for directory (so if you install again, it will
-; overwrite the old one automatically)
-InstallDirRegKey HKLM "Software\VMware, Inc.\RabbitMQ Server" "Install_Dir"
-
-; Request application privileges for Windows Vista
-RequestExecutionLevel admin
-
-SetCompressor /solid lzma
-
-;--------------------------------
-
-; Pages
-
-
-; !insertmacro MUI_PAGE_LICENSE "..\..\LICENSE-MPL-RabbitMQ"
- !insertmacro MUI_PAGE_COMPONENTS
- !insertmacro MUI_PAGE_DIRECTORY
- !insertmacro MUI_PAGE_INSTFILES
- !insertmacro MUI_PAGE_FINISH
-
- !insertmacro MUI_UNPAGE_CONFIRM
- !insertmacro MUI_UNPAGE_INSTFILES
- !define MUI_FINISHPAGE_TEXT "RabbitMQ Server %%VERSION%% has been uninstalled from your computer.$\n$\nPlease note that the log and database directories located at $APPDATA\RabbitMQ have not been removed. You can remove them manually if desired."
- !insertmacro MUI_UNPAGE_FINISH
-
-;--------------------------------
-;Languages
-
- !insertmacro MUI_LANGUAGE "English"
-
-;--------------------------------
-
-VIProductVersion "%%PRODUCT_VERSION%%"
-VIAddVersionKey /LANG=${LANG_ENGLISH} "ProductVersion" "%%VERSION%%"
-VIAddVersionKey /LANG=${LANG_ENGLISH} "ProductName" "RabbitMQ Server"
-;VIAddVersionKey /LANG=${LANG_ENGLISH} "Comments" ""
-VIAddVersionKey /LANG=${LANG_ENGLISH} "CompanyName" "VMware, Inc."
-;VIAddVersionKey /LANG=${LANG_ENGLISH} "LegalTrademarks" "" ; TODO ?
-VIAddVersionKey /LANG=${LANG_ENGLISH} "LegalCopyright" "Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved."
-VIAddVersionKey /LANG=${LANG_ENGLISH} "FileDescription" "RabbitMQ Server"
-VIAddVersionKey /LANG=${LANG_ENGLISH} "FileVersion" "%%VERSION%%"
-
-; The stuff to install
-Section "RabbitMQ Server (required)" Rabbit
-
- SectionIn RO
-
- ; Set output path to the installation directory.
- SetOutPath $INSTDIR
-
- ; Put files there
- File /r "rabbitmq_server-%%VERSION%%"
- File "rabbitmq.ico"
-
- ; Set output path to the user's data directory
- SetOutPath $APPDATA\RabbitMQ
-
- IfFileExists advanced.config 0 +2
- Goto config_written
- IfFileExists rabbitmq.config 0 +3
- Rename rabbitmq.config advanced.config
- Goto config_written
- ClearErrors
- FileOpen $0 advanced.config w
- IfErrors config_written
- FileWrite $0 "[]."
- FileClose $0
- config_written:
-
- ; Write the installation path into the registry
- WriteRegStr HKLM "SOFTWARE\VMware, Inc.\RabbitMQ Server" "Install_Dir" "$INSTDIR"
-
- ; Write the uninstall keys for Windows
- WriteRegStr HKLM ${uninstall} "DisplayName" "RabbitMQ Server %%VERSION%%"
- WriteRegStr HKLM ${uninstall} "UninstallString" "$INSTDIR\uninstall.exe"
- WriteRegStr HKLM ${uninstall} "DisplayIcon" "$INSTDIR\rabbitmq.ico"
- WriteRegStr HKLM ${uninstall} "Publisher" "VMware, Inc."
- WriteRegStr HKLM ${uninstall} "DisplayVersion" "%%VERSION%%"
- WriteRegDWORD HKLM ${uninstall} "NoModify" 1
- WriteRegDWORD HKLM ${uninstall} "NoRepair" 1
-
- ${GetSize} "$INSTDIR" "/S=0K" $0 $1 $2
- IntFmt $0 "0x%08X" $0
- WriteRegDWORD HKLM "${uninstall}" "EstimatedSize" "$0"
-
- WriteUninstaller "uninstall.exe"
-SectionEnd
-
-;--------------------------------
-
-Section "RabbitMQ Service" RabbitService
- DetailPrint "Installing RabbitMQ service..."
- ExecDos::exec /DETAILED '"$INSTDIR\rabbitmq_server-%%VERSION%%\sbin\rabbitmq-service.bat" install' ""
- DetailPrint "Starting RabbitMQ service..."
- ExecDos::exec /DETAILED '"$INSTDIR\rabbitmq_server-%%VERSION%%\sbin\rabbitmq-service.bat" start' ""
- ReadEnvStr $1 "HOMEDRIVE"
- ReadEnvStr $2 "HOMEPATH"
- Delete "$1$2\.erlang.cookie"
- ${If} ${RunningX64}
- ${DisableX64FSRedirection}
- ${EndIf}
- IfFileExists "$SYSDIR\config\systemprofile\.erlang.cookie" 0 cookie_check_windir
- CopyFiles "$SYSDIR\config\systemprofile\.erlang.cookie" "$1$2\.erlang.cookie"
- Goto cookie_done
- cookie_check_windir:
- IfFileExists "$WINDIR\.erlang.cookie" 0 cookie_done
- CopyFiles "$WINDIR\.erlang.cookie" "$1$2\.erlang.cookie"
- cookie_done:
- ${If} ${RunningX64}
- ${EnableX64FSRedirection}
- ${EndIf}
-SectionEnd
-
-;--------------------------------
-
-Section "Start Menu" RabbitStartMenu
- ; In case the service is not installed, or the service installation fails,
- ; make sure these exist or Explorer will get confused.
- CreateDirectory "$APPDATA\RabbitMQ\log"
- CreateDirectory "$APPDATA\RabbitMQ\db"
-
- CreateDirectory "$SMPROGRAMS\RabbitMQ Server"
- CreateShortCut "$SMPROGRAMS\RabbitMQ Server\Uninstall RabbitMQ.lnk" "$INSTDIR\uninstall.exe" "" "$INSTDIR\uninstall.exe" 0
- CreateShortCut "$SMPROGRAMS\RabbitMQ Server\RabbitMQ Plugins.lnk" "$INSTDIR\rabbitmq_server-%%VERSION%%\plugins"
- CreateShortCut "$SMPROGRAMS\RabbitMQ Server\RabbitMQ Logs.lnk" "$APPDATA\RabbitMQ\log"
- CreateShortCut "$SMPROGRAMS\RabbitMQ Server\RabbitMQ Database Directory.lnk" "$APPDATA\RabbitMQ\db"
-
- CreateShortCut "$SMPROGRAMS\RabbitMQ Server\RabbitMQ Service - (re)install.lnk" "%comspec%" '/k "$INSTDIR\rabbitmq_server-%%VERSION%%\sbin\rabbitmq-service.bat" install & if not errorlevel 1 exit /b 0' "$INSTDIR\rabbitmq.ico"
- ShellLink::SetRunAsAdministrator "$SMPROGRAMS\RabbitMQ Server\RabbitMQ Service - (re)install.lnk"
-
- CreateShortCut "$SMPROGRAMS\RabbitMQ Server\RabbitMQ Service - remove.lnk" "%comspec%" '/k "$INSTDIR\rabbitmq_server-%%VERSION%%\sbin\rabbitmq-service.bat" remove & if not errorlevel 1 exit /b 0' "$INSTDIR\rabbitmq.ico"
- ShellLink::SetRunAsAdministrator "$SMPROGRAMS\RabbitMQ Server\RabbitMQ Service - remove.lnk"
-
- CreateShortCut "$SMPROGRAMS\RabbitMQ Server\RabbitMQ Service - start.lnk" "%comspec%" '/k "$INSTDIR\rabbitmq_server-%%VERSION%%\sbin\rabbitmq-service.bat" start & if not errorlevel 1 exit /b 0' "$INSTDIR\rabbitmq.ico"
- ShellLink::SetRunAsAdministrator "$SMPROGRAMS\RabbitMQ Server\RabbitMQ Service - start.lnk"
-
- CreateShortCut "$SMPROGRAMS\RabbitMQ Server\RabbitMQ Service - stop.lnk" "%comspec%" '/k "$INSTDIR\rabbitmq_server-%%VERSION%%\sbin\rabbitmq-service.bat" stop & if not errorlevel 1 exit /b 0' "$INSTDIR\rabbitmq.ico"
- ShellLink::SetRunAsAdministrator "$SMPROGRAMS\RabbitMQ Server\RabbitMQ Service - stop.lnk"
-
- SetOutPath "$INSTDIR\rabbitmq_server-%%VERSION%%\sbin"
- CreateShortCut "$SMPROGRAMS\RabbitMQ Server\RabbitMQ Command Prompt (sbin dir).lnk" "$SYSDIR\cmd.exe" "/k cd $INSTDIR\rabbitmq_server-%%VERSION%%\sbin" "$SYSDIR\cmd.exe"
- ShellLink::SetRunAsAdministrator "$SMPROGRAMS\RabbitMQ Server\RabbitMQ Command Prompt (sbin dir).lnk"
-
- SetOutPath $INSTDIR
-SectionEnd
-
-;--------------------------------
-
-; Section descriptions
-
-LangString DESC_Rabbit ${LANG_ENGLISH} "The RabbitMQ Server."
-LangString DESC_RabbitService ${LANG_ENGLISH} "Set up RabbitMQ as a Windows Service."
-LangString DESC_RabbitStartMenu ${LANG_ENGLISH} "Add some useful links to the start menu."
-
-!insertmacro MUI_FUNCTION_DESCRIPTION_BEGIN
- !insertmacro MUI_DESCRIPTION_TEXT ${Rabbit} $(DESC_Rabbit)
- !insertmacro MUI_DESCRIPTION_TEXT ${RabbitService} $(DESC_RabbitService)
- !insertmacro MUI_DESCRIPTION_TEXT ${RabbitStartMenu} $(DESC_RabbitStartMenu)
-!insertmacro MUI_FUNCTION_DESCRIPTION_END
-
-;--------------------------------
-
-; Uninstaller
-
-Section "Uninstall"
-
- ; Remove registry keys
- DeleteRegKey HKLM ${uninstall}
- DeleteRegKey HKLM "SOFTWARE\VMware, Inc.\RabbitMQ Server"
-
- ; TODO these will fail if the service is not installed - do we care?
- DetailPrint "Stopping RabbitMQ service..."
- ExecDos::exec /DETAILED '"$INSTDIR\rabbitmq_server-%%VERSION%%\sbin\rabbitmq-service.bat" stop' ""
- DetailPrint "Removing RabbitMQ service..."
- ExecDos::exec /DETAILED '"$INSTDIR\rabbitmq_server-%%VERSION%%\sbin\rabbitmq-service.bat" remove' ""
-
- ; Remove files and uninstaller
- RMDir /r "$INSTDIR\rabbitmq_server-%%VERSION%%"
- Delete "$INSTDIR\rabbitmq.ico"
- Delete "$INSTDIR\uninstall.exe"
- RMDir "$INSTDIR"
-
- ; Remove start menu items
- RMDir /r "$SMPROGRAMS\RabbitMQ Server"
-
- DeleteRegValue ${env_hklm} ERLANG_HOME
- SendMessage ${HWND_BROADCAST} ${WM_WININICHANGE} 0 "STR:Environment" /TIMEOUT=5000
-
-SectionEnd
-
-;--------------------------------
-
-; Functions
-
-Function .onInit
-  ; By default, always install in "\Program Files", no matter if we run
- ; on a 32-bit or 64-bit Windows.
- ${If} $INSTDIR == "";
- ${If} ${RunningX64}
- StrCpy $INSTDIR "$PROGRAMFILES64\RabbitMQ Server"
- ${Else}
- StrCpy $INSTDIR "$PROGRAMFILES\RabbitMQ Server"
- ${EndIf}
- ${EndIf}
-
- Call findErlang
-
- ReadRegStr $0 HKLM ${uninstall} "UninstallString"
- ${If} $0 != ""
- MessageBox MB_OKCANCEL|MB_ICONEXCLAMATION "RabbitMQ is already installed. $\n$\nClick 'OK' to remove the previous version or 'Cancel' to cancel this installation." /SD IDOK IDOK rununinstall IDCANCEL norun
-
- norun:
- Abort
-
- rununinstall:
- ;Run the uninstaller
- ClearErrors
- ExecWait '"$INSTDIR\uninstall.exe" /S _?=$INSTDIR'
- Delete "$INSTDIR\uninstall.exe"
- RMDir "$INSTDIR"
-    ; the uninstaller removes the ERLANG_HOME.
- ; called again since this is an update
- Call findErlang
-
- ${EndIf}
-FunctionEnd
-
-Function findErlang
-
- StrCpy $0 0
- StrCpy $2 "not-found"
- ${Do}
- EnumRegKey $1 HKLM Software\Ericsson\Erlang $0
- ${If} $1 = ""
- ${Break}
- ${EndIf}
- ${If} $1 <> "ErlSrv"
- StrCpy $2 $1
- ${EndIf}
-
- IntOp $0 $0 + 1
- ${Loop}
-
- ${If} $2 = "not-found"
- MessageBox MB_YESNO|MB_ICONEXCLAMATION "Erlang could not be detected.$\nYou must install Erlang before installing RabbitMQ. Would you like the installer to open a browser window to the Erlang download site?" IDNO abort
- ExecShell "open" "https://www.erlang.org/download.html"
- abort:
- Abort
- ${Else}
- ${VersionCompare} $2 "8.3" $0
-
- ${If} $0 = 2
- MessageBox MB_OK|MB_ICONEXCLAMATION "Your installed version of Erlang ($2) is too old. Please install a more recent version."
- Abort
- ${EndIf}
-
- ReadRegStr $0 HKLM "Software\Ericsson\Erlang\$2" ""
-
- ; See https://nsis.sourceforge.io/Setting_Environment_Variables
- WriteRegExpandStr ${env_hklm} ERLANG_HOME $0
- SendMessage ${HWND_BROADCAST} ${WM_WININICHANGE} 0 "STR:Environment" /TIMEOUT=5000
-
- ; On Windows XP changing the permanent environment does not change *our*
- ; environment, so do that as well.
- System::Call 'Kernel32::SetEnvironmentVariableA(t, t) i("ERLANG_HOME", "$0").r0'
- ${EndIf}
-
-FunctionEnd
diff --git a/packaging/windows/Makefile b/packaging/windows/Makefile
deleted file mode 100644
index 1ff655aeaa..0000000000
--- a/packaging/windows/Makefile
+++ /dev/null
@@ -1,66 +0,0 @@
-SOURCE_DIST_FILE ?= $(wildcard ../../../rabbitmq-server-*.tar.xz)
-
-ifneq ($(filter-out clean,$(MAKECMDGOALS)),)
-ifeq ($(SOURCE_DIST_FILE),)
-$(error Cannot find source archive; please specify SOURCE_DIST_FILE)
-endif
-ifneq ($(words $(SOURCE_DIST_FILE)),1)
-$(error Multiple source archives found; please specify SOURCE_DIST_FILE)
-endif
-
-VERSION ?= $(patsubst rabbitmq-server-%.tar.xz,%,$(notdir $(SOURCE_DIST_FILE)))
-ifeq ($(VERSION),)
-$(error Cannot determine version; please specify VERSION)
-endif
-endif
-
-SOURCE_DIR = rabbitmq-server-$(VERSION)
-TARGET_DIR = rabbitmq_server-$(VERSION)
-TARGET_ZIP = rabbitmq-server-windows-$(VERSION)
-
-unexport DEPS_DIR
-unexport ERL_LIBS
-MAKEOVERRIDES =
-
-# See comment below about `$(HOME)` and Freedesktop.org variables.
-XDG_DATA_HOME ?= $(HOME)/.local/share
-XDG_CONFIG_HOME ?= $(HOME)/.config
-
-all: dist
- @:
-
-dist:
-# Log Erlang version.
- @echo
- @echo '--------------------------------------------------'
- @echo "Erlang and Elixir versions used to compile:"
- @elixir --version
- @echo '--------------------------------------------------'
- @echo
- xzcat $(SOURCE_DIST_FILE) | tar -xf -
-
-# We explicitly set $HOME as a Make variable below because some package
-# builders do that, as part of cleaning the build environment. It
-# exercises our hack to convince mix(1) to work offline because that
-# hack depends on `$HOME`. A Make variable on the command line takes
-# precedence over variables declared in Makefiles, so our hack needs
-# to consider this. We do the same with the Freedesktop.org-specified
-# variables ($XDG_*_HOME).
- $(MAKE) -C $(SOURCE_DIR) install-windows \
- DESTDIR=$(abspath $(TARGET_DIR)) \
- WINDOWS_PREFIX= \
- HOME="$(HOME)" \
- XDG_DATA_HOME="$(XDG_DATA_HOME)" \
- XDG_CONFIG_HOME="$(XDG_CONFIG_HOME)"
-
- cp -a README-etc $(TARGET_DIR)/etc/README.txt
-
- zip -q -r $(TARGET_ZIP).zip $(TARGET_DIR)
-
- if test "$(PACKAGES_DIR)"; then \
- mkdir -p "$(PACKAGES_DIR)"; \
- mv $(TARGET_ZIP).zip "$(PACKAGES_DIR)"; \
- fi
-
-clean:
- rm -rf rabbitmq-server-* rabbitmq_server-*
diff --git a/packaging/windows/README-etc b/packaging/windows/README-etc
deleted file mode 100644
index b431247c6b..0000000000
--- a/packaging/windows/README-etc
+++ /dev/null
@@ -1,7 +0,0 @@
-In this directory you can find an example configuration file for RabbitMQ.
-
-Note that this directory is *not* where the real RabbitMQ
-configuration lives. The default location for the real configuration
-file is %APPDATA%\RabbitMQ\rabbitmq.config.
-
-%APPDATA% usually expands to C:\Users\%USERNAME%\AppData\Roaming or similar.
diff --git a/plugins.mk b/plugins.mk
index aa9863cfbf..17739f535f 100644
--- a/plugins.mk
+++ b/plugins.mk
@@ -32,6 +32,7 @@ PLUGINS := rabbitmq_amqp1_0 \
rabbitmq_shovel_management \
rabbitmq_stomp \
rabbitmq_stream \
+ rabbitmq_stream_management \
rabbitmq_top \
rabbitmq_tracing \
rabbitmq_trust_store \
diff --git a/rabbitmq-components.mk b/rabbitmq-components.mk
index b2a3be8b35..7488f22f01 100644
--- a/rabbitmq-components.mk
+++ b/rabbitmq-components.mk
@@ -37,73 +37,72 @@ endif
# topic branch or fallback to `stable` or `master` whichever was the
# base of the topic branch.
-dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
-dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
-dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
-
-dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
+dep_amqp_client = git_rmq-subfolder rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_amqp10_client = git_rmq-subfolder rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_amqp10_common = git_rmq-subfolder rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit = git_rmq-subfolder rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit_common = git_rmq-subfolder rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_amqp1_0 = git_rmq-subfolder rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_cache = git_rmq-subfolder rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_http = git_rmq-subfolder rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_ldap = git_rmq-subfolder rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_oauth2 = git_rmq-subfolder rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_mechanism_ssl = git_rmq-subfolder rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_aws = git_rmq-subfolder rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_cli = git_rmq-subfolder rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_codegen = git_rmq-subfolder rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_consistent_hash_exchange = git_rmq-subfolder rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_ct_client_helpers = git_rmq-subfolder rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_ct_helpers = git_rmq-subfolder rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_event_exchange = git_rmq-subfolder rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation = git_rmq-subfolder rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation_management = git_rmq-subfolder rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_topic_exchange = git_rmq-subfolder rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management = git_rmq-subfolder rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_agent = git_rmq-subfolder rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_mqtt = git_rmq-subfolder rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_peer_discovery_aws = git_rmq-subfolder rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_peer_discovery_common = git_rmq-subfolder rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_peer_discovery_consul = git_rmq-subfolder rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_peer_discovery_etcd = git_rmq-subfolder rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_peer_discovery_k8s = git_rmq-subfolder rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_prometheus = git_rmq-subfolder rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_random_exchange = git_rmq-subfolder rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_recent_history_exchange = git_rmq-subfolder rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_sharding = git_rmq-subfolder rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel = git_rmq-subfolder rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel_management = git_rmq-subfolder rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_stomp = git_rmq-subfolder rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_stream = git_rmq-subfolder rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_stream_common = git_rmq-subfolder rabbitmq-stream-common $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_stream_management = git_rmq-subfolder rabbitmq-stream-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_top = git_rmq-subfolder rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_tracing = git_rmq-subfolder rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_trust_store = git_rmq-subfolder rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_dispatch = git_rmq-subfolder rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp = git_rmq-subfolder rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp_examples = git_rmq-subfolder rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt = git_rmq-subfolder rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt_examples = git_rmq-subfolder rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
+dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
# Third-party dependencies version pinning.
#
@@ -114,13 +113,13 @@ dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(curre
dep_accept = hex 0.3.5
dep_cowboy = hex 2.8.0
dep_cowlib = hex 2.9.1
-dep_jsx = hex 2.11.0
-dep_lager = hex 3.8.0
-dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
-dep_ra = git https://github.com/rabbitmq/ra.git master
-dep_ranch = hex 1.7.1
+dep_jsx = hex 3.1.0
+dep_looking_glass = git https://github.com/rabbitmq/looking_glass master
+dep_prometheus = git https://github.com/deadtrickster/prometheus.erl 06425c21a39c1564164f1cc3fe5bdfa8b23b1f78
+dep_ra = git https://github.com/rabbitmq/ra.git main
+dep_ranch = hex 2.1.0
dep_recon = hex 2.5.1
-dep_observer_cli = hex 1.5.4
+dep_observer_cli = hex 1.7.1
dep_stdout_formatter = hex 0.2.4
dep_sysmon_handler = hex 1.3.0
@@ -177,6 +176,8 @@ RABBITMQ_COMPONENTS = amqp_client \
rabbitmq_shovel_management \
rabbitmq_stomp \
rabbitmq_stream \
+ rabbitmq_stream_common \
+ rabbitmq_stream_management \
rabbitmq_toke \
rabbitmq_top \
rabbitmq_tracing \
@@ -304,6 +305,31 @@ define dep_fetch_git_rmq
git remote set-url --push origin "$$$$push_url")
endef
+define dep_fetch_git_rmq-subfolder
+ fetch_url1='https://github.com/rabbitmq/rabbitmq-server.git'; \
+ fetch_url2='git@github.com:rabbitmq/rabbitmq-server.git'; \
+ if [ ! -d $(ERLANG_MK_TMP)/rabbitmq-server ]; then \
+ if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
+ git clone -q -n -- "$$$$fetch_url1" $(ERLANG_MK_TMP)/rabbitmq-server; then \
+ fetch_url="$$$$fetch_url1"; \
+ push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),rabbitmq-server)'; \
+ elif git clone -q -n -- "$$$$fetch_url2" $(ERLANG_MK_TMP)/rabbitmq-server; then \
+ fetch_url="$$$$fetch_url2"; \
+ push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),rabbitmq-server)'; \
+ fi; \
+ fi; \
+ cd $(ERLANG_MK_TMP)/rabbitmq-server && ( \
+ $(foreach ref,$(call dep_rmq_commits,$(1)), \
+ git checkout -q $(ref) >/dev/null 2>&1 || \
+ ) \
+ (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
+ 1>&2 && false) ) && \
+ (test "$$$$fetch_url" = "$$$$push_url" || \
+ git remote set-url --push origin "$$$$push_url")
+ ln -s $(ERLANG_MK_TMP)/rabbitmq-server/deps/$(call dep_name,$(1)) \
+ $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
# --------------------------------------------------------------------
# Component distribution.
# --------------------------------------------------------------------
diff --git a/rabbitmq.bzl b/rabbitmq.bzl
new file mode 100644
index 0000000000..8e73b5922e
--- /dev/null
+++ b/rabbitmq.bzl
@@ -0,0 +1,235 @@
+load(
+ "@bazel-erlang//:bazel_erlang_lib.bzl",
+ "DEFAULT_ERLC_OPTS",
+ "DEFAULT_TEST_ERLC_OPTS",
+ "erlang_lib",
+ "test_erlang_lib",
+)
+load("@bazel-erlang//:ct_sharded.bzl", "ct_suite", "ct_suite_variant")
+load("//:rabbitmq_home.bzl", "rabbitmq_home")
+load("//:rabbitmq_run.bzl", "rabbitmq_run")
+
+RABBITMQ_ERLC_OPTS = DEFAULT_ERLC_OPTS
+
+RABBITMQ_TEST_ERLC_OPTS = DEFAULT_TEST_ERLC_OPTS + [
+ "+nowarn_export_all",
+]
+
+RABBITMQ_DIALYZER_OPTS = [
+ "-Werror_handling",
+ "-Wunmatched_returns",
+]
+
+APP_VERSION = "3.10.0"
+
+ALL_PLUGINS = [
+ "//deps/rabbit:bazel_erlang_lib",
+ "//deps/rabbitmq_amqp1_0:bazel_erlang_lib",
+ "//deps/rabbitmq_auth_backend_cache:bazel_erlang_lib",
+ "//deps/rabbitmq_auth_backend_http:bazel_erlang_lib",
+ "//deps/rabbitmq_auth_backend_ldap:bazel_erlang_lib",
+ "//deps/rabbitmq_auth_backend_oauth2:bazel_erlang_lib",
+ "//deps/rabbitmq_auth_mechanism_ssl:bazel_erlang_lib",
+ "//deps/rabbitmq_consistent_hash_exchange:bazel_erlang_lib",
+ "//deps/rabbitmq_event_exchange:bazel_erlang_lib",
+ "//deps/rabbitmq_federation:bazel_erlang_lib",
+ "//deps/rabbitmq_federation_management:bazel_erlang_lib",
+ "//deps/rabbitmq_jms_topic_exchange:bazel_erlang_lib",
+ "//deps/rabbitmq_management:bazel_erlang_lib",
+ "//deps/rabbitmq_mqtt:bazel_erlang_lib",
+ "//deps/rabbitmq_peer_discovery_aws:bazel_erlang_lib",
+ "//deps/rabbitmq_peer_discovery_consul:bazel_erlang_lib",
+ "//deps/rabbitmq_peer_discovery_etcd:bazel_erlang_lib",
+ "//deps/rabbitmq_peer_discovery_k8s:bazel_erlang_lib",
+ "//deps/rabbitmq_prometheus:bazel_erlang_lib",
+ "//deps/rabbitmq_random_exchange:bazel_erlang_lib",
+ "//deps/rabbitmq_recent_history_exchange:bazel_erlang_lib",
+ "//deps/rabbitmq_sharding:bazel_erlang_lib",
+ "//deps/rabbitmq_shovel:bazel_erlang_lib",
+ "//deps/rabbitmq_shovel_management:bazel_erlang_lib",
+ "//deps/rabbitmq_stomp:bazel_erlang_lib",
+ "//deps/rabbitmq_stream:bazel_erlang_lib",
+ "//deps/rabbitmq_stream_management:bazel_erlang_lib",
+ "//deps/rabbitmq_top:bazel_erlang_lib",
+ "//deps/rabbitmq_tracing:bazel_erlang_lib",
+ "//deps/rabbitmq_trust_store:bazel_erlang_lib",
+ "//deps/rabbitmq_web_dispatch:bazel_erlang_lib",
+ "//deps/rabbitmq_web_mqtt:bazel_erlang_lib",
+ "//deps/rabbitmq_web_stomp:bazel_erlang_lib",
+]
+
+LABELS_WITH_TEST_VERSIONS = [
+ "//deps/amqp10_common:bazel_erlang_lib",
+ "//deps/rabbit_common:bazel_erlang_lib",
+ "//deps/rabbit:bazel_erlang_lib",
+ "//deps/rabbit/apps/rabbitmq_prelaunch:bazel_erlang_lib",
+]
+
+def all_plugins(rabbitmq_workspace = "@rabbitmq-server"):
+ return [rabbitmq_workspace + p for p in ALL_PLUGINS]
+
+def with_test_versions(deps):
+ r = []
+ for d in deps:
+ if d in LABELS_WITH_TEST_VERSIONS:
+ r.append(d.replace(":bazel_erlang_lib", ":test_bazel_erlang_lib"))
+ else:
+ r.append(d)
+ return r
+
+def rabbitmq_lib(
+ app_name = "",
+ app_version = APP_VERSION,
+ app_description = "",
+ app_module = "",
+ app_registered = [],
+ app_env = "[]",
+ extra_apps = [],
+ erlc_opts = RABBITMQ_ERLC_OPTS,
+ test_erlc_opts = RABBITMQ_TEST_ERLC_OPTS,
+ first_srcs = [],
+ extra_priv = [],
+ build_deps = [],
+ deps = [],
+ runtime_deps = []):
+ erlang_lib(
+ app_name = app_name,
+ app_version = app_version,
+ app_description = app_description,
+ app_module = app_module,
+ app_registered = app_registered,
+ app_env = app_env,
+ extra_apps = extra_apps,
+ extra_priv = extra_priv,
+ erlc_opts = erlc_opts,
+ first_srcs = first_srcs,
+ build_deps = build_deps,
+ deps = deps,
+ runtime_deps = runtime_deps,
+ )
+
+ test_erlang_lib(
+ app_name = app_name,
+ app_version = app_version,
+ app_description = app_description,
+ app_module = app_module,
+ app_registered = app_registered,
+ app_env = app_env,
+ extra_apps = extra_apps,
+ extra_priv = extra_priv,
+ erlc_opts = test_erlc_opts,
+ first_srcs = first_srcs,
+ build_deps = with_test_versions(build_deps),
+ deps = with_test_versions(deps),
+ runtime_deps = with_test_versions(runtime_deps),
+ )
+
+def rabbitmq_suite(erlc_opts = [], test_env = {}, **kwargs):
+ ct_suite(
+ erlc_opts = RABBITMQ_TEST_ERLC_OPTS + erlc_opts,
+ test_env = dict({
+ "RABBITMQ_CT_SKIP_AS_ERROR": "true",
+ }.items() + test_env.items()),
+ **kwargs
+ )
+ return kwargs["name"]
+
+def broker_for_integration_suites():
+ rabbitmq_home(
+ name = "broker-for-tests-home",
+ plugins = [
+ "//deps/rabbit:bazel_erlang_lib",
+ ":bazel_erlang_lib",
+ ],
+ testonly = True,
+ )
+
+ rabbitmq_run(
+ name = "rabbitmq-for-tests-run",
+ home = ":broker-for-tests-home",
+ testonly = True,
+ )
+
+def rabbitmq_integration_suite(
+ package,
+ name = None,
+ tags = [],
+ data = [],
+ erlc_opts = [],
+ additional_hdrs = [],
+ additional_srcs = [],
+ test_env = {},
+ tools = [],
+ deps = [],
+ runtime_deps = [],
+ **kwargs):
+ ct_suite(
+ name = name,
+ suite_name = name,
+ tags = tags,
+ erlc_opts = RABBITMQ_TEST_ERLC_OPTS + erlc_opts,
+ additional_hdrs = additional_hdrs,
+ additional_srcs = additional_srcs,
+ data = data,
+ test_env = dict({
+ "SKIP_MAKE_TEST_DIST": "true",
+ "RABBITMQ_CT_SKIP_AS_ERROR": "true",
+ "RABBITMQ_RUN": "$TEST_SRCDIR/$TEST_WORKSPACE/{}/rabbitmq-for-tests-run".format(package),
+ "RABBITMQCTL": "$TEST_SRCDIR/$TEST_WORKSPACE/{}/broker-for-tests-home/sbin/rabbitmqctl".format(package),
+ "RABBITMQ_PLUGINS": "$TEST_SRCDIR/$TEST_WORKSPACE/{}/broker-for-tests-home/sbin/rabbitmq-plugins".format(package),
+ "RABBITMQ_QUEUES": "$TEST_SRCDIR/$TEST_WORKSPACE/{}/broker-for-tests-home/sbin/rabbitmq-queues".format(package),
+ }.items() + test_env.items()),
+ tools = [
+ ":rabbitmq-for-tests-run",
+ ] + tools,
+ runtime_deps = [
+ "//deps/rabbitmq_cli:elixir_as_bazel_erlang_lib",
+ "//deps/rabbitmq_cli:rabbitmqctl",
+ "//deps/rabbitmq_ct_client_helpers:bazel_erlang_lib",
+ ] + runtime_deps,
+ deps = [
+ "//deps/amqp_client:bazel_erlang_lib",
+ "//deps/rabbit_common:bazel_erlang_lib",
+ "//deps/rabbitmq_ct_helpers:bazel_erlang_lib",
+ ] + deps,
+ **kwargs
+ )
+
+ ct_suite_variant(
+ name = name + "-mixed",
+ suite_name = name,
+ tags = tags + ["mixed-version-cluster"],
+ data = data,
+ test_env = dict({
+ "SKIP_MAKE_TEST_DIST": "true",
+ "RABBITMQ_FEATURE_FLAGS": "",
+ "RABBITMQ_RUN": "$TEST_SRCDIR/$TEST_WORKSPACE/{}/rabbitmq-for-tests-run".format(package),
+ "RABBITMQCTL": "$TEST_SRCDIR/$TEST_WORKSPACE/{}/broker-for-tests-home/sbin/rabbitmqctl".format(package),
+ "RABBITMQ_PLUGINS": "$TEST_SRCDIR/$TEST_WORKSPACE/{}/broker-for-tests-home/sbin/rabbitmq-plugins".format(package),
+ "RABBITMQ_QUEUES": "$TEST_SRCDIR/$TEST_WORKSPACE/{}/broker-for-tests-home/sbin/rabbitmq-queues".format(package),
+ "RABBITMQ_RUN_SECONDARY": "$TEST_SRCDIR/rabbitmq-server-generic-unix-3.8.22/rabbitmq-run",
+ }.items() + test_env.items()),
+ tools = [
+ ":rabbitmq-for-tests-run",
+ "@rabbitmq-server-generic-unix-3.8.22//:rabbitmq-run",
+ ] + tools,
+ runtime_deps = [
+ "//deps/rabbitmq_cli:elixir_as_bazel_erlang_lib",
+ "//deps/rabbitmq_cli:rabbitmqctl",
+ "//deps/rabbitmq_ct_client_helpers:bazel_erlang_lib",
+ ] + runtime_deps,
+ deps = [
+ "//deps/amqp_client:bazel_erlang_lib",
+ "//deps/rabbit_common:bazel_erlang_lib",
+ "//deps/rabbitmq_ct_helpers:bazel_erlang_lib",
+ ] + deps,
+ **kwargs
+ )
+
+ return name
+
+def assert_suites(suite_names, suite_files):
+ for f in suite_files:
+ sn = f.rpartition("/")[-1].replace(".erl", "")
+ if not sn in suite_names:
+ fail("A bazel rule has not been defined for {} (expected {} in {}".format(f, sn, suite_names))
diff --git a/rabbitmq_home.bzl b/rabbitmq_home.bzl
new file mode 100644
index 0000000000..6d5f284aab
--- /dev/null
+++ b/rabbitmq_home.bzl
@@ -0,0 +1,151 @@
+load("@bazel-erlang//:bazel_erlang_lib.bzl", "ErlangLibInfo", "flat_deps", "path_join")
+load("@bazel-erlang//:ct.bzl", "additional_file_dest_relative_path")
+
+RabbitmqHomeInfo = provider(
+ doc = "An assembled RABBITMQ_HOME dir",
+ fields = {
+ "rabbitmqctl": "rabbitmqctl script from the sbin directory",
+ },
+)
+
+def _copy_script(ctx, script):
+ dest = ctx.actions.declare_file(path_join(ctx.label.name, "sbin", script.basename))
+ args = ctx.actions.args()
+ args.add_all([script, dest])
+ ctx.actions.run(
+ inputs = [script],
+ outputs = [dest],
+ executable = "cp",
+ arguments = [args],
+ )
+ return dest
+
+def link_escript(ctx, escript):
+ e = ctx.attr._rabbitmqctl_escript.files_to_run.executable
+ s = ctx.actions.declare_file(path_join(ctx.label.name, "escript", escript))
+ ctx.actions.symlink(
+ output = s,
+ target_file = e,
+ )
+ return s
+
+def _plugins_dir_links(ctx, plugin):
+ lib_info = plugin[ErlangLibInfo]
+ plugin_path = path_join(
+ ctx.label.name,
+ "plugins",
+ lib_info.lib_name,
+ )
+
+ links = []
+ for f in lib_info.include:
+ o = ctx.actions.declare_file(path_join(plugin_path, "include", f.basename))
+ ctx.actions.symlink(
+ output = o,
+ target_file = f,
+ )
+ links.append(o)
+
+ for f in lib_info.beam:
+ if f.is_directory:
+ if f.basename != "ebin":
+ fail("{} contains a directory in 'beam' that is not an ebin dir".format(lib_info.lib_name))
+ o = ctx.actions.declare_file(path_join(plugin_path, "ebin"))
+ else:
+ o = ctx.actions.declare_file(path_join(plugin_path, "ebin", f.basename))
+ ctx.actions.symlink(
+ output = o,
+ target_file = f,
+ )
+ links.append(o)
+
+ for f in lib_info.priv:
+ p = additional_file_dest_relative_path(plugin.label, f)
+ o = ctx.actions.declare_file(path_join(plugin_path, p))
+ ctx.actions.symlink(
+ output = o,
+ target_file = f,
+ )
+ links.append(o)
+
+ return links
+
+def unique_versions(plugins):
+ erlang_versions = []
+ for plugin in plugins:
+ erlang_version = plugin[ErlangLibInfo].erlang_version
+ if not erlang_version in erlang_versions:
+ erlang_versions.append(erlang_version)
+ return erlang_versions
+
+def flatten(list_of_lists):
+ return [item for sublist in list_of_lists for item in sublist]
+
+def _impl(ctx):
+ plugins = flat_deps(ctx.attr.plugins)
+
+ erlang_versions = unique_versions(plugins)
+ if len(erlang_versions) > 1:
+ fail("plugins do not have a unified erlang version", erlang_versions)
+
+ scripts = [_copy_script(ctx, script) for script in ctx.files._scripts]
+
+ rabbitmq_ctl_copies = [
+ "rabbitmq-diagnostics",
+ "rabbitmq-plugins",
+ "rabbitmq-queues",
+ "rabbitmq-streams",
+ "rabbitmq-tanzu",
+ "rabbitmq-upgrade",
+ "rabbitmqctl",
+ ]
+ escripts = [link_escript(ctx, escript) for escript in rabbitmq_ctl_copies]
+
+ plugins = flatten([_plugins_dir_links(ctx, plugin) for plugin in plugins])
+
+ rabbitmqctl = None
+ for script in scripts:
+ if script.basename == "rabbitmqctl":
+ rabbitmqctl = script
+ if rabbitmqctl == None:
+ fail("could not find rabbitmqct among", scripts)
+
+ return [
+ RabbitmqHomeInfo(
+ rabbitmqctl = rabbitmqctl,
+ ),
+ DefaultInfo(
+ files = depset(scripts + escripts + plugins),
+ ),
+ ]
+
+RABBITMQ_HOME_ATTRS = {
+ "_scripts": attr.label_list(
+ default = [
+ "//deps/rabbit:scripts/rabbitmq-defaults",
+ "//deps/rabbit:scripts/rabbitmq-diagnostics",
+ "//deps/rabbit:scripts/rabbitmq-env",
+ "//deps/rabbit:scripts/rabbitmq-plugins",
+ "//deps/rabbit:scripts/rabbitmq-queues",
+ "//deps/rabbit:scripts/rabbitmq-server",
+ "//deps/rabbit:scripts/rabbitmqctl",
+ ],
+ allow_files = True,
+ ),
+ "_rabbitmqctl_escript": attr.label(default = "//deps/rabbitmq_cli:rabbitmqctl"),
+ "plugins": attr.label_list(providers = [ErlangLibInfo]),
+}
+
+rabbitmq_home = rule(
+ implementation = _impl,
+ attrs = RABBITMQ_HOME_ATTRS,
+)
+
+def _dirname(p):
+ return p.rpartition("/")[0]
+
+def rabbitmq_home_short_path(rabbitmq_home):
+ short_path = rabbitmq_home[RabbitmqHomeInfo].rabbitmqctl.short_path
+ if rabbitmq_home.label.workspace_root != "":
+ short_path = path_join(rabbitmq_home.label.workspace_root, short_path)
+ return _dirname(_dirname(short_path))
diff --git a/rabbitmq_package_generic_unix.bzl b/rabbitmq_package_generic_unix.bzl
new file mode 100644
index 0000000000..b589a06529
--- /dev/null
+++ b/rabbitmq_package_generic_unix.bzl
@@ -0,0 +1,19 @@
+load("@//:rabbitmq_home.bzl", "RabbitmqHomeInfo")
+
+def _impl(ctx):
+ return [
+ RabbitmqHomeInfo(
+ rabbitmqctl = ctx.file.rabbitmqctl,
+ ),
+ DefaultInfo(
+ files = depset(ctx.files.rabbitmqctl + ctx.files.additional_files),
+ ),
+ ]
+
+rabbitmq_package_generic_unix = rule(
+ implementation = _impl,
+ attrs = {
+ "rabbitmqctl": attr.label(allow_single_file = True),
+ "additional_files": attr.label_list(allow_files = True),
+ },
+)
diff --git a/rabbitmq_run.bzl b/rabbitmq_run.bzl
new file mode 100644
index 0000000000..e690451e13
--- /dev/null
+++ b/rabbitmq_run.bzl
@@ -0,0 +1,68 @@
+load("@bazel-erlang//:erlang_home.bzl", "ErlangHomeProvider", "ErlangVersionProvider")
+load("@bazel-erlang//:bazel_erlang_lib.bzl", "path_join")
+load("@bazel-erlang//:ct.bzl", "sanitize_sname")
+load(":rabbitmq_home.bzl", "RabbitmqHomeInfo", "rabbitmq_home_short_path")
+
+def _impl(ctx):
+ rabbitmq_home_path = rabbitmq_home_short_path(ctx.attr.home)
+
+ # the rabbitmq-run.sh template only allows a single erl_libs currently
+ erl_libs = [path_join(rabbitmq_home_path, "plugins")]
+
+ ctx.actions.expand_template(
+ template = ctx.file._template,
+ output = ctx.outputs.executable,
+ substitutions = {
+ "{RABBITMQ_HOME}": rabbitmq_home_path,
+ "{ERL_LIBS}": ":".join(erl_libs),
+ "{ERLANG_HOME}": ctx.attr._erlang_home[ErlangHomeProvider].path,
+ "{SNAME}": sanitize_sname("sbb-" + ctx.attr.name),
+ },
+ is_executable = True,
+ )
+
+ runfiles = ctx.runfiles(ctx.attr.home[DefaultInfo].files.to_list())
+
+ return [DefaultInfo(runfiles = runfiles)]
+
+rabbitmq_run = rule(
+ implementation = _impl,
+ attrs = {
+ "_template": attr.label(
+ default = Label("//:scripts/bazel/rabbitmq-run.sh"),
+ allow_single_file = True,
+ ),
+ "_erlang_home": attr.label(default = "@bazel-erlang//:erlang_home"),
+ "home": attr.label(providers = [RabbitmqHomeInfo]),
+ },
+ executable = True,
+)
+
+def _run_command_impl(ctx):
+ ctx.actions.write(
+ output = ctx.outputs.executable,
+ content = "exec ./{} {} $@".format(
+ ctx.attr.rabbitmq_run[DefaultInfo].files_to_run.executable.short_path,
+ ctx.attr.subcommand,
+ ),
+ )
+
+ return [DefaultInfo(
+ runfiles = ctx.attr.rabbitmq_run[DefaultInfo].default_runfiles,
+ )]
+
+rabbitmq_run_command = rule(
+ implementation = _run_command_impl,
+ attrs = {
+ "rabbitmq_run": attr.label(
+ executable = True,
+ cfg = "target",
+ ),
+ "subcommand": attr.string(values = [
+ "run-broker",
+ "start-background-broker",
+ "stop-node",
+ ]),
+ },
+ executable = True,
+)
diff --git a/rabbitmqctl.bzl b/rabbitmqctl.bzl
new file mode 100644
index 0000000000..19287521c0
--- /dev/null
+++ b/rabbitmqctl.bzl
@@ -0,0 +1,32 @@
+load("@bazel-erlang//:erlang_home.bzl", "ErlangVersionProvider")
+load(":rabbitmq_home.bzl", "RabbitmqHomeInfo", "rabbitmq_home_short_path")
+
+def _impl(ctx):
+ erlang_version = ctx.attr._erlang_version[ErlangVersionProvider].version
+
+ rabbitmq_home_path = rabbitmq_home_short_path(ctx.attr.home)
+
+ script = """
+ exec ./{home}/sbin/{cmd} $@
+ """.format(
+ home = rabbitmq_home_path,
+ cmd = ctx.label.name,
+ )
+
+ ctx.actions.write(
+ output = ctx.outputs.executable,
+ content = script,
+ )
+
+ return [DefaultInfo(
+ runfiles = ctx.runfiles(ctx.attr.home[DefaultInfo].files.to_list()),
+ )]
+
+rabbitmqctl = rule(
+ implementation = _impl,
+ attrs = {
+ "_erlang_version": attr.label(default = "@bazel-erlang//:erlang_version"),
+ "home": attr.label(providers = [RabbitmqHomeInfo]),
+ },
+ executable = True,
+)
diff --git a/release-notes/3.5.7.md b/release-notes/3.5.7.md
new file mode 100644
index 0000000000..a1a298e146
--- /dev/null
+++ b/release-notes/3.5.7.md
@@ -0,0 +1,169 @@
+## RabbitMQ 3.5.7
+
+RabbitMQ `3.5.7` is primarily a bug fix release.
+
+### Server
+
+#### Bug Fixes
+
+ * Fix a race condition that could prevent nodes from stopping.
+
+ GitHub issue: [rabbitmq-server#465](https://github.com/rabbitmq/rabbitmq-server/issues/465)
+
+ * `file_handle_cache:clear_read_cache/0` no longer silently fails.
+
+ The function is meant to be used with `rabbitmqctl eval` in environments where
+ in-process buffering of file data is enabled.
+
+ GitHub issue: [rabbitmq-server#436](https://github.com/rabbitmq/rabbitmq-server/issues/436)
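+
+ As a minimal sketch, the call can be evaluated on a running node (for example via `rabbitmqctl eval`):
+
+ ```
+ %% run on the target node, e.g. via `rabbitmqctl eval`
+ file_handle_cache:clear_read_cache().
+ ```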
+
+ * Default `RABBITMQ_PLUGINS_DIR` value on Windows should be calculated as relative to RabbitMQ
+ server installation directory, not `RABBITMQ_BASE`.
+
+ If you override `RABBITMQ_BASE`, you may need to copy plugins from `%RABBITMQ_BASE%\plugins`
+ to the `plugins` directory under RabbitMQ installation.
+
+ GitHub issue: [rabbitmq-server#433](https://github.com/rabbitmq/rabbitmq-server/issues/433)
+
+ * When rabbitmq-server failed to start due to an unreadable config file, the exit code was 0.
+
+ GitHub issue: [rabbitmq-server#464](https://github.com/rabbitmq/rabbitmq-server/issues/464)
+
+ * Overriding `RABBITMQ_LOGS` and `RABBITMQ_SASL_LOGS` on Windows prevented node from starting.
+
+ GitHub issue: [rabbitmq-server#375](https://github.com/rabbitmq/rabbitmq-server/issues/375)
+
+ * Some startup code bits assumed IPv4 is enabled in the environment
+
+ GitHub issue: [rabbitmq-server#117](https://github.com/rabbitmq/rabbitmq-server/issues/117)
+
+ * More robust disk monitor, fewer errors logged.
+
+ GitHub issue: [rabbitmq-server#91](https://github.com/rabbitmq/rabbitmq-server/issues/91)
+
+ * `rabbitmqctl forget_cluster_node` no longer attempts to impersonate live
+ (reachable) nodes.
+
+ GitHub issue: [rabbitmq-server#470](https://github.com/rabbitmq/rabbitmq-server/issues/470)
+
+ * `rabbitmq-plugins` was using an incorrect env variable.
+
+ GitHub issue: [rabbitmq-server#451](https://github.com/rabbitmq/rabbitmq-server/issues/451)
+
+#### Enhancements
+
+ * RAM watermark can be configured as an absolute value in bytes (just like with disk space monitoring):
+
+ ```
+ [{rabbit, [{vm_memory_high_watermark, {absolute, 1073741824}}]}].
+ ```
+
+ GitHub issue: [rabbitmq-server#207](https://github.com/rabbitmq/rabbitmq-server/issues/207)
+
+ * `rabbitmqctl authenticate_user` is a new command that can be used to test user authentication.
+
+ GitHub issue: [rabbitmq-server#119](https://github.com/rabbitmq/rabbitmq-server/issues/119)
+
+ * `rabbitmqctl` now supports `exclusive` as a queue info item.
+
+ Contributed by Alexey Lebedeff (@binarin).
+
+ GitHub issue: [rabbitmq-server#371](https://github.com/rabbitmq/rabbitmq-server/issues/371)
+
+
+### Java client
+
+#### Enhancements
+
+ * Heartbeats are now enabled by default.
+
+ GitHub issue: [rabbitmq-java-client#109](https://github.com/rabbitmq/rabbitmq-java-client/issues/109).
+
+#### Bug Fixes
+
+ * `AutorecoveringChannel#basicCancel` could throw a `NullPointerException`.
+
+ GitHub issue: [rabbitmq-java-client#105](https://github.com/rabbitmq/rabbitmq-java-client/issues/105)
+
+
+### .NET client
+
+#### Enhancements
+
+ * Heartbeats are now enabled by default.
+
+ GitHub issue: [rabbitmq-dotnet-client#142](https://github.com/rabbitmq/rabbitmq-dotnet-client/issues/142).
+
+
+#### Bug Fixes
+
+ * `IConnection.Dispose` could throw an exception.
+
+ GitHub issue: [rabbitmq-dotnet-client#133](https://github.com/rabbitmq/rabbitmq-dotnet-client/issues/133)
+
+
+### MQTT plugin
+
+#### Bug Fixes
+
+ * Auto-deletion of queues that back QoS 1 subscriptions is again controlled
+ by the clean session flag.
+
+ GitHub issues: [rabbitmq-mqtt#30](https://github.com/rabbitmq/rabbitmq-mqtt/issues/30) (original report which introduced the issue), [rabbitmq-mqtt#37](https://github.com/rabbitmq/rabbitmq-mqtt/issues/37) (clarification and fix)
+
+
+
+### STOMP plugin
+
+#### Enhancements
+
+ * Queue names can now be set explicitly using the `x-queue-name` header, for all
+ destination types.
+
+ GitHub issue: [rabbitmq-stomp#43](https://github.com/rabbitmq/rabbitmq-stomp/issues/43)
+
+
+
+### Management plugin
+
+#### Bug Fixes
+
+ * Management plugin could prevent a node from starting when it recovered after a network split.
+
+ GitHub issue: [rabbitmq-management#81](https://github.com/rabbitmq/rabbitmq-management/issues/81)
+
+ * Sorting arrow direction in the UI is now more conventional.
+
+ Contributed by Philippe Serhal.
+
+ GitHub issue: [rabbitmq-management#88](https://github.com/rabbitmq/rabbitmq-management/pull/88).
+
+ * 401 responses correctly set content type to `application/json`
+
+ GitHub issue: [rabbitmq-management#67](https://github.com/rabbitmq/rabbitmq-management/issues/67)
+
+ * Consumer utilisation is correctly reported as a number instead of `""` when it equals 0.
+
+ GitHub issue: [rabbitmq-management#26](https://github.com/rabbitmq/rabbitmq-management/issues/26)
+
+ * `rabbitmqadmin` misinterpreted boolean settings in config as strings
+
+ GitHub issue: [rabbitmq-management#20](https://github.com/rabbitmq/rabbitmq-management/issues/20)
+
+
+### AMQP 1.0 plugin
+
+#### Bug Fixes
+
+ * Fixed an edge case in serialisation of collections.
+
+ GitHub issue: [rabbitmq-amqp1.0#21](https://github.com/rabbitmq/rabbitmq-amqp1.0/issues/21)
+
+
+### Erlang client
+
+#### Enhancements
+
+ * Heartbeats are now enabled by default.
+
+ GitHub issue: [rabbitmq-erlang-client#25](https://github.com/rabbitmq/rabbitmq-erlang-client/issues/25).
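+
+ For applications that need a different interval, heartbeats can still be tuned
+ (or disabled with `0`) per connection. A minimal sketch with the Erlang client:
+
+ ```
+ %% assumes amqp_client.hrl is included for #amqp_params_network{}
+ {ok, Connection} = amqp_connection:start(#amqp_params_network{host = "localhost",
+                                                               heartbeat = 30}).
+ ```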
diff --git a/release-notes/3.5.8.md b/release-notes/3.5.8.md
new file mode 100644
index 0000000000..c93803a0c8
--- /dev/null
+++ b/release-notes/3.5.8.md
@@ -0,0 +1,30 @@
+## RabbitMQ 3.5.8
+
+RabbitMQ `3.5.8` fixes a security vulnerability ([CVE-2016-9877](https://pivotal.io/security/cve-2016-9877)) in the MQTT plugin.
+
+### Server
+
+#### Security
+
+ * `rabbit_diagnostics:maybe_stuck/0` no longer prints process dictionaries
+ because it may contain PRNG seed values and other sensitive information.
+
+### MQTT Plugin
+
+#### Security
+
+ * Authentication with a correct username but an omitted password succeeded when a TLS/x509 certificate
+ wasn't provided by the client. CVE allocation for this vulnerability is pending.
+
+ GitHub issue: [rabbitmq-mqtt#96](https://github.com/rabbitmq/rabbitmq-mqtt/issues/96)
+
+## Upgrading
+
+To upgrade a non-clustered RabbitMQ node, simply install the new version. All configuration and persistent message data are retained.
+
+To upgrade a RabbitMQ cluster, follow the instructions [in RabbitMQ documentation](https://www.rabbitmq.com/clustering.html#upgrading).
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker, not the plugins or the client libraries.
+Please download the archive named `rabbitmq-3.5.8.tar.gz`.
diff --git a/release-notes/3.6.0.md b/release-notes/3.6.0.md
new file mode 100644
index 0000000000..2783b3bee6
--- /dev/null
+++ b/release-notes/3.6.0.md
@@ -0,0 +1,462 @@
+## Release Highlights
+
+These are the release notes for RabbitMQ 3.6.0 RC3, a preview of
+a feature release.
+
+Key improvements in this release are:
+
+ * Lazy queues
+ * Much better queue synchronisation throughput
+ * Lower RAM use, tunable flow control
+ * Stronger password encryption with pluggable algorithms
+ * Development moved to GitHub; build system now uses erlang.mk
+ * Significant improvements to Web STOMP
+ * Experimental WinRT-compatible .NET client, SQL CLR compatibility in the "regular" one
+ * Pagination in management UI
+
+
+### Server
+
+#### Bug Fixes
+
+ * Cluster-wide alarm state was not updated when an alarmed node went down.
+
+ Contributed by Alexey Lebedeff (@binarin).
+
+ GitHub issue: [rabbitmq-server#362](https://github.com/rabbitmq/rabbitmq-server/issues/362)
+
+ * Blocked connections could be unblocked too early.
+
+ When multiple resource alarms were in effect, clearing just one of them would
+ unblock all publishers prematurely.
+
+ Contributed by Alexey Lebedeff (@binarin).
+
+ GitHub issue: [rabbitmq-server#379](https://github.com/rabbitmq/rabbitmq-server/issues/379)
+
+ * Blocked connections that are closed by clients are now expired
+ and cleaned up by the heartbeat mechanism.
+
+ Contributed by Alexey Lebedeff (@binarin).
+
+ GitHub issue: [rabbitmq-server#384](https://github.com/rabbitmq/rabbitmq-server/issues/384)
+
+ * If `RABBITMQ_MNESIA_DIR` was set to a value with a trailing slash, it could prevent
+ automatic clustering (and other operations that assume pristine node state) from starting.
+
+ GitHub issue: [rabbitmq-server#118](https://github.com/rabbitmq/rabbitmq-server/issues/118)
+
+ * Standard output and error redirection in package scripts did not respect `RABBITMQ_LOG_BASE`.
+
+ GitHub issue: [rabbitmq-server#88](https://github.com/rabbitmq/rabbitmq-server/issues/88)
+
+ * Properly seed pseudo-random number generator when picking client port
+
+ Contributed by Alexey Lebedeff (@binarin).
+
+ GitHub issue: [rabbitmq-common#3](https://github.com/rabbitmq/rabbitmq-common/issues/3)
+
+ * `RABBITMQ_ALLOW_INPUT` was ignored on Windows
+
+ GitHub issue: [rabbitmq-server#490](https://github.com/rabbitmq/rabbitmq-server/issues/490)
+
+
+#### Enhancements
+
+ * Lazy queues.
+
+ Lazy queues are a new feature that can significantly reduce the RAM use of queues
+ that can grow very long (e.g. when consumers are offline for hours or days).
+ Such queues page messages to disk as they enter the queue.
+
+ This feature is opt-in and has virtually no overhead for consumers in most cases,
+ making throughput for publishers much more even and reducing RAM use.
+
+ GitHub issue: [rabbitmq-server#351](https://github.com/rabbitmq/rabbitmq-server/issues/351)
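+
+ As a minimal sketch (assuming an already-open channel `Channel` and the record
+ definitions from `amqp_client.hrl`), a queue can opt into lazy mode at declaration
+ time via the `x-queue-mode` optional argument; a policy with `"queue-mode": "lazy"`
+ achieves the same without client changes:
+
+ ```
+ Declare = #'queue.declare'{queue     = <<"orders">>,
+                            durable   = true,
+                            arguments = [{<<"x-queue-mode">>, longstr, <<"lazy">>}]},
+ #'queue.declare_ok'{} = amqp_channel:call(Channel, Declare).
+ ```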
+
+ * Queue synchronisation to mirrors now happens in batches of configurable size,
+ 4096 messages by default. This can improve sync throughput by an order of magnitude.
+
+ GitHub issue: [rabbitmq-server#336](https://github.com/rabbitmq/rabbitmq-server/issues/336)
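+
+ The batch size can be tuned via the `rabbit` application environment, for example
+ (a sketch; larger batches speed up synchronisation at the cost of more RAM per batch):
+
+ ```
+ [{rabbit, [{mirroring_sync_batch_size, 8192}]}].
+ ```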
+
+ * Queue leader-to-node distribution is now more flexible: there are several
+ strategies to choose from.
+
+ GitHub issue: [rabbitmq-server#121](https://github.com/rabbitmq/rabbitmq-server/issues/121)
+
+ * Stronger password encryption algorithm.
+
+ SHA-256 is the hashing function used by default now, with SHA-512 being an option provided
+ out of the box. Support for more options (Scrypt, bcrypt, etc) can be added with plugins.
+
+ Existing user records will continue using MD5. To force re-hashing/encryption, change
+ user password using `rabbitmqctl` or management UI/HTTP API.
+
+ This should improve FIPS-180 compatibility.
+
+ GitHub issue: [rabbitmq-server#270](https://github.com/rabbitmq/rabbitmq-server/issues/270)
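+
+ As a sketch, the SHA-512 variant shipped with the broker can be selected in the
+ `rabbit` application config:
+
+ ```
+ [{rabbit, [{password_hashing_module, rabbit_password_hashing_sha512}]}].
+ ```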
+
+ * Lower RAM use thanks to disabled in-process caching.
+
+ RabbitMQ's in-process read buffers are now disabled by default. This often significantly
+ reduces RAM usage at effectively no throughput or latency cost because the OS and file
+ system do the same job fairly well.
+
+ GitHub issue: [rabbitmq-server#228](https://github.com/rabbitmq/rabbitmq-server/issues/228)
+
+ * [Configurable flow control settings](https://blog.rabbitmq.com/posts/2015/10/new-credit-flow-settings-on-rabbitmq-3-5-5/)
+
+ Note that larger values will not necessarily lead to higher throughput or lower latency.
+ They can also result in larger RAM use and therefore can be dangerous.
+
+ GitHub issue: [rabbitmq-server#143](https://github.com/rabbitmq/rabbitmq-server/issues/143)
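+
+ As a sketch, the per-process credit can be adjusted in the `rabbit` application
+ config (again, larger values can increase RAM use):
+
+ ```
+ [{rabbit, [{credit_flow_default_credit, {800, 400}}]}].
+ ```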
+
+ * Build system switched to erlang.mk.
+
+ [erlang.mk](https://github.com/ninenines/erlang.mk) is a modern build system for Erlang
+ projects. RabbitMQ and all of its sub-projects have been switched to it.
+
+ This yields several benefits for RabbitMQ maintainers and plugin developers,
+ for example, dependency management for plugins is much more straightforward, build
+ times are significantly improved, and there is no need to depend on `rabbitmq-server` for most
+ plugins.
+
+ Various 3rd party plugins need to migrate to erlang.mk in order to be built against
+ RabbitMQ 3.6.0 (`rabbit_common`).
+
+ GitHub issue: [rabbitmq-server#388](https://github.com/rabbitmq/rabbitmq-server/issues/388)
+
+ * Streaming `rabbitmqctl`: `rabbitmqctl` list operations now stream results
+ instead of waiting for the entire result set to arrive. This both improves
+ responsiveness and makes it possible to list individual items, including those
+ which did not respond in time.
+
+ GitHub issue: [rabbitmq-server#62](https://github.com/rabbitmq/rabbitmq-server/issues/62)
+
+ * (More) standard exit codes for command line tools
+
+ `rabbitmqctl` and `rabbitmq-plugins` now use more standard, or at least distinctive,
+ error codes for various failures.
+
+ Contributed by Alexey Lebedeff (@binarin).
+
+ GitHub issue: [rabbitmq-server#396](https://github.com/rabbitmq/rabbitmq-server/issues/396)
+
+ * Improved log message for channel exceptions.
+
+ GitHub issue: [rabbitmq-server#416](https://github.com/rabbitmq/rabbitmq-server/issues/416)
+
+ * Improved log message when a client's TCP connection is closed unexpectedly.
+
+ GitHub issue: [rabbitmq-server#60](https://github.com/rabbitmq/rabbitmq-server/issues/60)
+
+ * Improved log message when a client connects to a non-existent vhost
+
+ GitHub issue: [rabbitmq-server#320](https://github.com/rabbitmq/rabbitmq-server/issues/320)
+
+ * Improved log message for `access_refused` connection errors.
+
+ GitHub issue: [rabbitmq-server#418](https://github.com/rabbitmq/rabbitmq-server/issues/418)
+
+ * Improved log message for `command_invalid` connection errors.
+
+ GitHub issue: [rabbitmq-server#419](https://github.com/rabbitmq/rabbitmq-server/issues/419)
+
+ * More sensible error code when a client connects to a non-existent vhost (or a vhost it has
+ no permissions for). The code returned is now `530`.
+
+ GitHub issue: [rabbitmq-server#237](https://github.com/rabbitmq/rabbitmq-server/issues/237)
+
+ * Memory monitoring interval is now configurable.
+
+ GitHub issue: [rabbitmq-server#112](https://github.com/rabbitmq/rabbitmq-server/issues/112)
+
+ * Prevent background GC interval from becoming too large due to backoff.
+
+ Contributed by Dallas Marlow (@dallasmarlow).
+
+ GitHub issue: [rabbitmq-server#100](https://github.com/rabbitmq/rabbitmq-server/issues/100)
+
+ * Windows installer now supports "unattended" NSIS installs.
+
+ Contributed by Ryan Zink (@ryanzink).
+
+ GitHub issue: [rabbitmq-server#264](https://github.com/rabbitmq/rabbitmq-server/issues/264)
+
+ * Windows package name now includes RabbitMQ version it provides.
+
+ GitHub issue: [rabbitmq-server#80](https://github.com/rabbitmq/rabbitmq-server/issues/80)
+
+ * `kernel.net_ticktime` is now included in `rabbitmqctl status` output.
+
+ GitHub issue: [rabbitmq-server#63](https://github.com/rabbitmq/rabbitmq-server/issues/63)
+
+ * `rabbitmqctl` now has a command for setting the free disk space limit at runtime.
+
+ GitHub issue: [rabbitmq-server#461](https://github.com/rabbitmq/rabbitmq-server/issues/461)
+
+ * Backup directory location is now mentioned in failed upgrade messages.
+
+ GitHub issue: [rabbitmq-server#169](https://github.com/rabbitmq/rabbitmq-server/issues/169)
+
+ * `rabbit_diagnostics:maybe_stuck/0` now includes date and time in its output.
+
+ GitHub issue: [rabbitmq-server#506](https://github.com/rabbitmq/rabbitmq-server/issues/506)
+
+#### Other
+
+ * Erlang R16B03 is now required.
+
+ 17.5 or 18.x are recommended.
+
+ GitHub issue: [rabbitmq-server#250](https://github.com/rabbitmq/rabbitmq-server/issues/250)
+
+ * Use/support new Erlang/OTP 18 date and time API.
+
+ Only when it is available, of course.
+
+ GitHub issue: [rabbitmq-server#233](https://github.com/rabbitmq/rabbitmq-server/issues/233)
+
+ * RabbitMQ server now supports tracing of credit flow events (e.g. when a process is blocked
+ by credit flow). This is currently a compile-time flag intended for development
+ environments.
+
+ GitHub issue: [rabbitmq-server#137](https://github.com/rabbitmq/rabbitmq-server/issues/137)
+
+
+### Java Client
+
+#### Enhancements
+
+ * Shutdown threads can use an executor.
+
+ In environments with very constrained per-process thread resources, e.g. basic PaaS plans,
+ temporary threads created during connection shutdown may make the app run over the limit.
+
+ It is now possible to use a user-provided executor for shutdown operations instead.
+
+ GitHub issue: [rabbitmq-java-client#87](https://github.com/rabbitmq/rabbitmq-java-client/issues/87)
+
+ * ChannelManager now can use an executor.
+
+ Similar to the above but for `ChannelManager` operations.
+
+ GitHub issue: [rabbitmq-java-client#94](https://github.com/rabbitmq/rabbitmq-java-client/issues/94)
+
+ * `Channel#consumerCount(string)` is a new convenience method for retrieving number of consumers on a queue.
+
+ GitHub issue: [rabbitmq-java-client#49](https://github.com/rabbitmq/rabbitmq-java-client/issues/49)
+
+ * `Channel#messageCount(string)` is a new convenience method for retrieving number of messages in a queue.
+
+ GitHub issue: [rabbitmq-java-client#41](https://github.com/rabbitmq/rabbitmq-java-client/issues/41)
+
+ * `LongString#toString` is part of the API
+
+ GitHub issue: [rabbitmq-java-client#84](https://github.com/rabbitmq/rabbitmq-java-client/issues/84)
+
+
+### .NET Client
+
+#### Enhancements
+
+ * Host/address list support is back. Host selection strategy (when recovering a connection) is now pluggable.
+
+ GitHub issue: [rabbitmq-dotnet-client#81](https://github.com/rabbitmq/rabbitmq-dotnet-client/issues/81)
+
+ * Experimental .NET client with Windows RT support is now available.
+
+ GitHub issue: [rabbitmq-dotnet-client#16](https://github.com/rabbitmq/rabbitmq-dotnet-client/issues/16)
+
+ * SQL CLR compatibility.
+
+ GitHub issue: [rabbitmq-dotnet-client#57](https://github.com/rabbitmq/rabbitmq-dotnet-client/issues/57)
+
+ * `IModel#ConsumerCount(string)` is a new convenience method for retrieving number of consumers on a queue.
+
+ GitHub issue: [rabbitmq-dotnet-client#73](https://github.com/rabbitmq/rabbitmq-dotnet-client/issues/73)
+
+ * `IModel#MessageCount(string)` is a new convenience method for retrieving number of messages in a queue.
+
+ GitHub issue: [rabbitmq-dotnet-client#74](https://github.com/rabbitmq/rabbitmq-dotnet-client/issues/74)
+
+ * Continuation timeout is now configurable.
+
+ GitHub issue: [rabbitmq-dotnet-client#80](https://github.com/rabbitmq/rabbitmq-dotnet-client/issues/80)
+
+ * `RabbitMQ.Client.Headers` is a new class that contains constants for commonly used headers.
+
+ Contributed by Yury Pliner (@Pliner).
+
+ GitHub issue: [rabbitmq-dotnet-client#52](https://github.com/rabbitmq/rabbitmq-dotnet-client/issues/52)
+
+#### Other
+
+ * .NET 4.5 is now required by the library.
+
+ GitHub issue: [rabbitmq-dotnet-client#134](https://github.com/rabbitmq/rabbitmq-dotnet-client/issues/134)
+
+ * Code examples have been removed from the .NET client repository. All examples belong
+ to `rabbitmq-tutorials` now.
+
+ GitHub issue: [rabbitmq-dotnet-client#39](https://github.com/rabbitmq/rabbitmq-dotnet-client/issues/39)
+
+
+
+### Management plugin
+
+#### Enhancements
+
+ * Pagination for connections, channels, exchanges, and queues.
+
+ Management UI now supports pagination, making the UI much more responsive
+ in environments with many connections, queues, etc.
+
+ GitHub issue: [rabbitmq-management#50](https://github.com/rabbitmq/rabbitmq-management/issues/50)
+
+ * Queue information objects in responses now have a boolean property for queue
+ exclusivity instead of a confusing `owner_pid`.
+
+ GitHub issue: [rabbitmq-management#40](https://github.com/rabbitmq/rabbitmq-management/issues/40)
+
+#### Bug Fixes
+
+ * Connection and channel filtering now respects the active vhost.
+
+ GitHub issue: [rabbitmq-management#34](https://github.com/rabbitmq/rabbitmq-management/issues/34)
+
+ * GET requests to `/api/queues/{vhost}/{name}/get` now accept/provide `application/json`
+
+ GitHub issue: [rabbitmq-management#46](https://github.com/rabbitmq/rabbitmq-management/issues/46)
+
+
+### MQTT plugin
+
+#### Enhancements
+
+ * Pluggable retained message stores. Since the MQTT 3.1.1 spec doesn't dictate any consistency
+ or availability from the store in clustered environments, we only provide an in-memory implementation
+ suitable for development environments with a single node. For production environments, plugins that
+ provide stores with specific consistency guarantees are expected to be developed.
+
+ GitHub issue: [rabbitmq-mqtt#16](https://github.com/rabbitmq/rabbitmq-mqtt/issues/16)
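+
+ The store is selected via the plugin's application config; a sketch, with the
+ module name of the bundled in-memory (ETS) implementation assumed:
+
+ ```
+ [{rabbitmq_mqtt, [{retained_message_store, rabbit_mqtt_retained_msg_store_ets}]}].
+ ```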
+
+ * Default subscription (queue) TTL for non-clean sessions is now 24 hours
+
+ GitHub issue: [rabbitmq-mqtt#49](https://github.com/rabbitmq/rabbitmq-mqtt/issues/49)
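+
+ The TTL can be overridden in the plugin's application config; a sketch, with the
+ value given in milliseconds:
+
+ ```
+ [{rabbitmq_mqtt, [{subscription_ttl, 1800000}]}]. %% 30 minutes
+ ```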
+
+ * Socket-based authentication plugins can now work with plugins such as MQTT and STOMP
+   (those using the direct Erlang client).
+
+ Contributed by Petr Gotthard (@gotthardp).
+
+ GitHub issue: [rabbitmq-server#111](https://github.com/rabbitmq/rabbitmq-server/issues/111)
+
+
+### STOMP plugin
+
+#### Enhancements
+
+ * Control over queue properties.
+
+ It is now possible to control queue properties (durable, auto-delete, exclusive plus several
+ supported x-arguments) for all STOMP destinations.
+
+ When using durable topics, it is now necessary to configure queue auto-deletion to `false`
+ explicitly via a header.
+
+ GitHub issue: [rabbitmq-stomp#24](https://github.com/rabbitmq/rabbitmq-stomp/issues/24)
+
+ * Socket-based authentication plugins can now work with plugins such as MQTT and STOMP
+   (those using the direct Erlang client).
+
+ Contributed by Petr Gotthard (@gotthardp).
+
+ GitHub issue: [rabbitmq-server#111](https://github.com/rabbitmq/rabbitmq-server/issues/111)
+
+
+### Web STOMP plugin
+
+#### Enhancements
+
+ * Raw WebSocket endpoint.
+
+   The plugin now provides a new endpoint, `/ws`, which accepts WebSocket connections directly
+   without the need to use SockJS.
+
+ GitHub issue: [rabbitmq-web-stomp#17](https://github.com/rabbitmq/rabbitmq-web-stomp/issues/17).
+
+ * Support for binary data streams.
+
+   Thanks to the new direct WebSocket support, binary data can now be transferred over
+   Web STOMP connections.
+
+ GitHub issue: [rabbitmq-web-stomp#19](https://github.com/rabbitmq/rabbitmq-web-stomp/issues/19)
+
+ * STOMP heartbeat support.
+
+ GitHub issue: [rabbitmq-web-stomp#15](https://github.com/rabbitmq/rabbitmq-web-stomp/issues/15)
+
+ * Heartbeats are disabled for SockJS.
+
+ GitHub issue: [rabbitmq-web-stomp#28](https://github.com/rabbitmq/rabbitmq-web-stomp/issues/28)
+
+ * CORS headers support
+
+ GitHub issue: [rabbitmq-web-stomp#12](https://github.com/rabbitmq/rabbitmq-web-stomp/issues/12)
+
+ * Cowboy configuration.
+
+   Key settings of Cowboy, the HTTP/WebSocket server powering the plugin, can now be fine-tuned
+   via this plugin.
+
+ GitHub issue: [rabbitmq-web-stomp#22](https://github.com/rabbitmq/rabbitmq-web-stomp/issues/22)
+
+ * Cowboy is upgraded to `1.0.3`
+
+ GitHub issue: [rabbitmq-web-stomp#13](https://github.com/rabbitmq/rabbitmq-web-stomp/issues/13)
+
+ * SockJS is upgraded to `1.0.3`.
+
+ GitHub issue: [rabbitmq-web-stomp#4](https://github.com/rabbitmq/rabbitmq-web-stomp/issues/4)
+
+ * Queue parameters can now be configured via headers (just like in the regular STOMP plugin).
+
+ GitHub issue: [rabbitmq-web-stomp#7](https://github.com/rabbitmq/rabbitmq-web-stomp/issues/7)
+
+ * The address the Web STOMP listener binds to can now be configured.
+   Previously only the port was configurable.
+
+ GitHub issue: [rabbitmq-web-stomp#2](https://github.com/rabbitmq/rabbitmq-web-stomp/issues/2)
+
+
+### LDAP plugin
+
+#### Enhancements
+
+ * LDAP server connections are now pooled. This significantly reduces
+ the load on LDAP servers.
+
+ Contributed by [LShift](http://www.lshift.net).
+
+ GitHub issue: [rabbitmq-auth-backend-ldap#1](https://github.com/rabbitmq/rabbitmq-auth-backend-ldap/issues/1).
+
+
+
+### Sharding plugin
+
+#### Enhancements
+
+ * Routing key policy is now optional.
+
+ GitHub issue: [rabbitmq-sharding#6](https://github.com/rabbitmq/rabbitmq-sharding/issues/6)
+
+
+
+### Delayed Message Exchange
+
+#### Enhancement
+
+ * Initial public release.
+
+ GitHub issue: [rabbitmq-delayed-message-exchange#1](https://github.com/rabbitmq/rabbitmq-delayed-message-exchange/issues/1)
diff --git a/release-notes/3.6.1.md b/release-notes/3.6.1.md
new file mode 100644
index 0000000000..6316dd1a1a
--- /dev/null
+++ b/release-notes/3.6.1.md
@@ -0,0 +1,280 @@
+## RabbitMQ 3.6.1
+
+RabbitMQ `3.6.1` is a maintenance release that includes a fix for CVE-2015-8786,
+a vulnerability in RabbitMQ management plugin.
+
+### Server
+
+#### Bug Fixes
+
+ * Purging a lazy queue could result in an exception
+
+ GitHub issue: [rabbitmq-server#514](https://github.com/rabbitmq/rabbitmq-server/issues/514)
+
+ * Ensure `epmd` is running before starting RabbitMQ node on Windows
+
+ GitHub issue: [rabbitmq-server#625](https://github.com/rabbitmq/rabbitmq-server/issues/625)
+
+ * Channel error could make broker unreachable
+
+   Those errors were misleadingly logged as `channel_termination_timeout`, while
+   the issue really was unhandled messages from a concurrently closed TCP socket process.
+
+ GitHub issue: [rabbitmq-server#530](https://github.com/rabbitmq/rabbitmq-server/issues/530)
+
+ * (Automatic) deletion of an auto-delete queue could lead
+ to blocked channels
+
+ GitHub issue: [rabbitmq-server#581](https://github.com/rabbitmq/rabbitmq-server/issues/581)
+
+ * During (from scratch) queue sync, the queue leader node didn't respect the mirror's alarm state.
+   With large data sets this could drive the mirror node out of memory.
+
+ GitHub issue: [rabbitmq-server#616](https://github.com/rabbitmq/rabbitmq-server/issues/616)
+
+ * Changing the password for users with a non-standard (in terms of broker configuration) password
+   hashing function, for example those migrated from `3.5.x` releases, didn't update
+   the effective hashing function.
+
+ GitHub issue: [rabbitmq-server#623](https://github.com/rabbitmq/rabbitmq-server/issues/623)
+
+ * Heavy and/or prolonged `rabbitmqctl` use could exhaust Erlang VM atom table
+
+ GitHub issue: [rabbitmq-server#549](https://github.com/rabbitmq/rabbitmq-server/issues/549)
+
+ * "Min masters" queue leader location strategy could result
+ in an error.
+
+ GitHub issue: [rabbitmq-server#521](https://github.com/rabbitmq/rabbitmq-server/issues/521)
+
+ * Fixed a race condition in `pause_minority` handling mode.
+
+ GitHub issue: [rabbitmq-server#307](https://github.com/rabbitmq/rabbitmq-server/issues/307)
+
+ * Significantly reduce possibility of a race condition when
+ an exchange is deleted and immediately re-declared, e.g. by a federation
+ link.
+
+ This could result in a link operation being blocked, preventing
+ nodes from stopping.
+
+ GitHub issue: [rabbitmq-federation#7](https://github.com/rabbitmq/rabbitmq-federation/issues/7)
+
+ * `amq.rabbitmq.log` messages now have information about originating
+ node in message headers
+
+ GitHub issue: [rabbitmq-server#595](https://github.com/rabbitmq/rabbitmq-server/issues/595)
+
+ * `scripts/rabbitmq-env` now works with GNU sed 4.2.2
+
+ GitHub issue: [rabbitmq-server#592](https://github.com/rabbitmq/rabbitmq-server/issues/592)
+
+ * Exceptions in VM memory use calculator no longer affect broker startup
+
+ GitHub issue: [rabbitmq-server#328](https://github.com/rabbitmq/rabbitmq-server/issues/328)
+
+ * [Direct Reply-to](https://www.rabbitmq.com/direct-reply-to.html) capability is now advertised to clients
+
+ GitHub issue: [rabbitmq-server#520](https://github.com/rabbitmq/rabbitmq-server/issues/520)
+
+#### Enhancements
+
+ * Paths with non-ASCII characters on Windows are now handled
+
+   RabbitMQ can now be installed into a location with non-ASCII characters,
+   e.g. when the username contains them.
+
+ GitHub issues: [rabbitmq-server#493](https://github.com/rabbitmq/rabbitmq-server/issues/493)
+
+ * Configurable number of TCP connection acceptors
+
+   Plus a 10x increase of the default. This helps with workloads where connection
+   churn is very high (e.g. all clients are PHP Web apps that cannot maintain
+   long-lived connections).
+
+ GitHub issues: [rabbitmq-server#528](https://github.com/rabbitmq/rabbitmq-server/issues/528)
+
+ * `rabbitmqctl cluster_status` now includes cluster-wide resource alarm status
+
+ GitHub issue: [rabbitmq-server#392](https://github.com/rabbitmq/rabbitmq-server/issues/392)
+
+ * Windows installer no longer jumps over installation log
+
+ GitHub issue: [rabbitmq-server#634](https://github.com/rabbitmq/rabbitmq-server/issues/634)
+
+ * Improved `rabbitmqctl reset` error messages
+
+ GitHub issue: [rabbitmq-server#167](https://github.com/rabbitmq/rabbitmq-server/issues/167)
+
+ * More unsigned field data types are supported.
+
+ GitHub issue: [rabbitmq-server#20](https://github.com/rabbitmq/rabbitmq-server/issues/20)
+
+
+
+### Java client
+
+#### Enhancements
+
+ * Endpoints (hostnames) are now also accepted as a `java.util.List`
+
+ GitHub issue: [rabbitmq-java-client#125](https://github.com/rabbitmq/rabbitmq-java-client/issues/125)
+
+ * Autorecovering connections now shuffle hosts in a more
+ reliable way
+
+ GitHub issue: [rabbitmq-java-client#124](https://github.com/rabbitmq/rabbitmq-java-client/issues/124)
+
+#### Bug Fixes
+
+ * Binding recovery could fail
+
+ GitHub issue: [rabbitmq-java-client#129](https://github.com/rabbitmq/rabbitmq-java-client/issues/129)
+
+ * `Channel.queueDelete` could throw a `NullPointerException`
+
+ GitHub issue: [rabbitmq-java-client#120](https://github.com/rabbitmq/rabbitmq-java-client/issues/120)
+
+
+
+### .NET client
+
+#### Bug Fixes
+
+ * Autorecovering connections now use full list of provided hostnames
+ during recovery
+
+ GitHub issues: [rabbitmq-dotnet-client#153](https://github.com/rabbitmq/rabbitmq-dotnet-client/issues/153)
+
+
+
+### Federation Plugin
+
+#### Bug Fixes
+
+ * Significantly reduce possibility of a race condition when
+ an exchange is deleted and immediately re-declared, e.g. by a federation
+ link
+
+ This rendered federation links dysfunctional.
+
+ GitHub issue: [rabbitmq-federation#7](https://github.com/rabbitmq/rabbitmq-federation/issues/7)
+
+
+
+### Management plugin
+
+#### Vulnerability Fixes
+
+ * CVE-2015-8786: user-provided query parameters `lengths_age` and `lengths_incr` had no validation
+ and could be used to exhaust server resources.
+
+ The attacker needs to have access to HTTP API (authenticate successfully and have sufficient
+ tags to pass authorisation) in order to carry out the attack.
+
+ There is no workaround for earlier releases.
+
+ Kudos to Vladimir Ivanov (Positive Technologies) for the responsible disclosure.
+
+ GitHub issue: [rabbitmq-management#97](https://github.com/rabbitmq/rabbitmq-management/issues/97)
+
+
+#### Enhancements
+
+ * Password hashing function is now included in exported definitions
+
+ Those upgrading from versions earlier than `3.6.0` via definitions export
+ won't have to temporarily set hashing function to MD5 to ensure export succeeds.
+
+ GitHub issue: [rabbitmq-management#117](https://github.com/rabbitmq/rabbitmq-management/issues/117)
+
+#### Bug Fixes
+
+ * Internet Explorer (9+) compatibility restored
+
+ GitHub issue: [rabbitmq-management#98](https://github.com/rabbitmq/rabbitmq-management/issues/98)
+
+ * Internet Explorer 11 compatibility fixes
+
+ GitHub issues: [rabbitmq-management#112](https://github.com/rabbitmq/rabbitmq-management/issues/112),
+ [rabbitmq-management#114](https://github.com/rabbitmq/rabbitmq-management/issues/114)
+
+ * When a policy fails to be created due to invalid parameters, a sensible
+   error message is now displayed.
+
+ GitHub issue: [rabbitmq-management#110](https://github.com/rabbitmq/rabbitmq-management/issues/110)
+
+
+### Federation Management plugin
+
+#### Enhancements
+
+ * Federation link form now includes more settings (that are exchange- and queue-federation specific)
+
+ GitHub issue: [rabbitmq-federation-management#5](https://github.com/rabbitmq/rabbitmq-federation-management/issues/5)
+
+
+### Erlang client
+
+#### Bug Fixes
+
+ * `password` and `depth` query parameters are now propagated to TLS options
+
+ GitHub issue: [rabbitmq-erlang-client#36](https://github.com/rabbitmq/rabbitmq-erlang-client/issues/36)
+
+
+### STOMP plugin
+
+#### Bug Fixes
+
+ * `durable` and `persistent` headers weren't always used interchangeably,
+ leading to non-durable subscriptions
+
+ GitHub issue: [rabbitmq-stomp#58](https://github.com/rabbitmq/rabbitmq-stomp/issues/58)
+
+ * Client heartbeat timeouts resulted in confusing error messages
+ in broker log.
+
+ GitHub issues: [rabbitmq-stomp#63](https://github.com/rabbitmq/rabbitmq-stomp/issues/63)
+
+
+### Web STOMP plugin
+
+#### Bug Fixes
+
+ * Cowboy options are now supported for TLS listeners.
+
+ GitHub issue: [rabbitmq-web-stomp#36](https://github.com/rabbitmq/rabbitmq-web-stomp/issues/36)
+
+
+### Web STOMP Examples plugin
+
+#### Bug Fixes
+
+ * Multi-byte UTF-8 characters are now handled
+ by the bundled version of stomp.js.
+
+ GitHub issue: [rabbitmq-web-stomp-examples#2](https://github.com/rabbitmq/rabbitmq-web-stomp-examples/issues/2)
+
+
+### Event Exchange plugin
+
+#### Bug Fixes
+
+ * Event timestamps are now in seconds, not milliseconds
+
+ Per AMQP 0-9-1 spec. This is not a particularly great choice for events,
+ so we will add an optional header with millisecond precision in a future release.
+
+ GitHub issue: [rabbitmq-event-exchange#8](https://github.com/rabbitmq/rabbitmq-event-exchange/issues/8)
+
+
+### JSON RPC plugin
+
+Note: this plugin is deprecated and its use is highly discouraged.
+
+#### Enhancements
+
+ * RabbitMQ `3.6.x` support.
+
+ GitHub issue: [rabbitmq-jsonrpc#3](https://github.com/rabbitmq/rabbitmq-jsonrpc/issues/3)
diff --git a/release-notes/3.6.10.md b/release-notes/3.6.10.md
new file mode 100644
index 0000000000..67f8d0339d
--- /dev/null
+++ b/release-notes/3.6.10.md
@@ -0,0 +1,183 @@
+## RabbitMQ 3.6.10
+
+RabbitMQ `3.6.10` is a maintenance release.
+
+### Upgrades and Compatibility
+
+See the ["Upgrading clusters" section of the documentation](https://www.rabbitmq.com/clustering.html#upgrading)
+for general documentation on upgrades.
+
+:warning: This release introduces stricter validation for `rabbitmq_management_agent.sample_retention_policies` values.
+If your installation overrides the default sample retention policies, please read
+the discussion in [rabbitmq-management-agent#41](https://github.com/rabbitmq/rabbitmq-management-agent/issues/41) before
+upgrading to make sure your configuration passes validation.
+
+This release has no other known incompatibilities with versions 3.6.7 through 3.6.9. See the [3.6.7 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/rabbitmq_v3_6_7)
+for upgrade and compatibility notes if upgrading from an earlier release.
+
+
+### Core Server
+
+#### Bug Fixes
+
+ * `rabbitmqctl wait` exited with a status code of 0 when the node stopped because it could
+   not contact any cluster peers to [re-]join.
+
+ GitHub issue: [rabbitmq-server#1214](https://github.com/rabbitmq/rabbitmq-server/issues/1214)
+
+ * `rabbitmqctl forget_cluster_node` used in offline mode could result in promotion of a node that's no longer a cluster member.
+
+ GitHub issue: [rabbitmq-server#1213](https://github.com/rabbitmq/rabbitmq-server/issues/1213)
+
+ * Queue status (e.g. `running` vs. `idle`) was reported incorrectly in some cases.
+
+ GitHub issue: [rabbitmq-common#196](https://github.com/rabbitmq/rabbitmq-common/issues/196)
+
+ * Queue leader locator could not be set using optional queue arguments (`x-arguments`).
+
+ GitHub issue: [rabbitmq-server#1172](https://github.com/rabbitmq/rabbitmq-server/issues/1172)
+
+ * CLI tool (e.g. `rabbitmqctl`) man pages were not rendered correctly.
+
+ GitHub issue: [rabbitmq-server#1180](https://github.com/rabbitmq/rabbitmq-server/issues/1180)
+
+#### Enhancements
+
+ * Disk space monitor will periodically retry (every 2 minutes by default, up to 10 times)
+ before going into disabled state as external tools used to monitor available disk space
+ can fail or produce unexpected output temporarily.
+
+ GitHub issue: [rabbitmq-server#1178](https://github.com/rabbitmq/rabbitmq-server/issues/1178)
+
+ * Memory-relative free disk space limits now support integer values as well as floats.
+
+ GitHub issue: [rabbitmq-server#1194](https://github.com/rabbitmq/rabbitmq-server/issues/1194)
+
+
+### Management and Management Agent Plugins
+
+#### Bug Fixes
+
+ * TLS-related settings in HTTP API listeners could break JSON serialisation for the `GET /api/overview` endpoint.
+
+ GitHub issue: [rabbitmq-management#393](https://github.com/rabbitmq/rabbitmq-management/issues/393)
+
+ * Non-numerical values for numerical stats are now handled safely by stats aggregation. For example, if
+   the free disk space monitor had to stop reporting data because it could not parse external tool output,
+   that would previously break HTTP API `GET /api/overview` responses.
+
+ GitHub issue: [rabbitmq-management#375](https://github.com/rabbitmq/rabbitmq-management/issues/375).
+
+ * Stats are no longer emitted for connections that are not considered to be in the fully initialised
+ state.
+
+ GitHub issue: [rabbitmq-management-agent#42](https://github.com/rabbitmq/rabbitmq-management-agent/issues/42)
+
+ * `POST` requests now instruct clients to close TCP connections.
+
+   In some popular browsers (Chrome, Internet Explorer) a `POST` request followed by an immediate `GET` request
+   would result in a 400 response. Other browsers do not exhibit this behaviour.
+
+ GitHub issue: [rabbitmq-management#377](https://github.com/rabbitmq/rabbitmq-management/issues/377)
+
+ * I/O average time per operation graph didn't match the legend.
+
+ GitHub issue: [rabbitmq-management#384](https://github.com/rabbitmq/rabbitmq-management/issues/384)
+
+ * Definitions import and export forms are now separate to reduce the chance of user confusion.
+
+ GitHub issue: [rabbitmq-management#389](https://github.com/rabbitmq/rabbitmq-management/issues/389)
+
+ * Sample retention policies are now validated more strictly to avoid configurations that
+ are not supported and will lead to exceptions.
+
+ GitHub issue: [rabbitmq-management-agent#41](https://github.com/rabbitmq/rabbitmq-management-agent/issues/41)
+
+ * Certain stats for connections were not initialised as numerical values, which resulted in log noise.
+
+ GitHub issue: [rabbitmq-management-agent#45](https://github.com/rabbitmq/rabbitmq-management-agent/issues/45)
+
+ * UI operation for binding deletion did not respect optional (extra) binding arguments.
+
+ GitHub issue: [rabbitmq-management#243](https://github.com/rabbitmq/rabbitmq-management/issues/243)
+
+#### Enhancements
+
+ * Current virtual host is pre-selected on the "Add/update policy" form.
+
+ GitHub issue: [rabbitmq-management#382](https://github.com/rabbitmq/rabbitmq-management/issues/382)
+
+
+### MQTT Plugin
+
+#### Bug Fixes
+
+ * A non-initialized connection (e.g. one that failed early because client-provided
+ payload wasn't a valid MQTT payload) produced a crash report log entry during termination.
+
+ GitHub issue: [rabbitmq-mqtt#134](https://github.com/rabbitmq/rabbitmq-mqtt/issues/134)
+
+
+### LDAP Plugin
+
+#### Bug Fixes
+
+ * Stale connection purging in LDAP connection pool could fail
+ with a `badmatch`.
+
+ GitHub issue: [rabbitmq-auth-backend-ldap#66](https://github.com/rabbitmq/rabbitmq-auth-backend-ldap/issues/66)
+
+
+### Trust Store Plugin
+
+#### Enhancements
+
+ * Certificate change detection algorithm no longer uses `stat(2)` on certificate directory because
+ of its limitations that could lead to undetected changes in certain scenarios.
+
+ GitHub issue: [rabbitmq-trust-store#58](https://github.com/rabbitmq/rabbitmq-trust-store/issues/58)
+
+
+### Web STOMP Plugin
+
+#### Bug Fixes
+
+ * The plugin failed to start after being stopped and re-enabled.
+
+ GitHub issue: [rabbitmq-web-stomp#72](https://github.com/rabbitmq/rabbitmq-web-stomp/issues/72)
+
+ * Server-initiated [consumer cancellation](https://www.rabbitmq.com/consumer-cancel.html) failed with
+ an exception.
+
+ GitHub issue: [rabbitmq-web-stomp#75](https://github.com/rabbitmq/rabbitmq-web-stomp/issues/75)
+
+
+### Top Plugin
+
+#### Bug Fixes
+
+ * Node selection control used the wrong HTTP API path.
+
+ GitHub issue: [rabbitmq-top#15](https://github.com/rabbitmq/rabbitmq-top/issues/15)
+
+
+### Management Visualiser Plugin
+
+#### Bug Fixes
+
+ * The plugin wasn't compatible with recent `3.6.x` releases.
+
+ GitHub issue: [rabbitmq-management-visualiser#8](https://github.com/rabbitmq/rabbitmq-management-visualiser/issues/8)
+
+
+
+## Upgrading
+
+To upgrade a non-clustered RabbitMQ simply install the new version. All configuration and persistent message data are retained. When upgrading using definitions export/import from versions earlier than 3.6.0, see http://rabbitmq.com/passwords.html.
+
+To upgrade a RabbitMQ cluster, follow the instructions [in RabbitMQ documentation](https://www.rabbitmq.com/clustering.html#upgrading).
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins or the client libraries. Please download the archive named `rabbitmq-3.6.10.tar.gz`.
diff --git a/release-notes/3.6.11.md b/release-notes/3.6.11.md
new file mode 100644
index 0000000000..27562355a1
--- /dev/null
+++ b/release-notes/3.6.11.md
@@ -0,0 +1,236 @@
+## RabbitMQ 3.6.11
+
+RabbitMQ `3.6.11` is a maintenance release.
+
+### Upgrades and Compatibility
+
+See the ["Upgrading clusters" section of the documentation](https://www.rabbitmq.com/clustering.html#upgrading)
+for general documentation on upgrades.
+
+⚠️ This release [changes how nodes compute the amount of RAM they use](https://groups.google.com/forum/#!topic/rabbitmq-users/TVZt45O3WzU) (and report). Previously the value was underreported.
+We recommend monitoring (or at least manually sampling) the [RSS of
+the Erlang VM](http://www.linfo.org/ps.html) running RabbitMQ and comparing it to the value reported in the management UI
+to compute the delta. Then adjust the configured [VM high memory watermark](https://www.rabbitmq.com/alarms.html) and
+possibly provision more RAM as needed.
+
+This release has no other known incompatibilities with versions 3.6.7 through 3.6.10.
+See the upgrade and compatibility sections in the [3.6.7 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/rabbitmq_v3_6_7) if upgrading from an earlier release.
+
+⚠️ This is the first release that [supports Erlang/OTP 20](https://groups.google.com/forum/#!searchin/rabbitmq-users/OTP$2020%7Csort:relevance/rabbitmq-users/_imbAavBYjY/ninEKhMYAgAJ).
+
+
+### Core Server
+
+#### Bug Fixes
+
+ * Node RAM consumption calculation strategy [has changed](https://groups.google.com/forum/#!topic/rabbitmq-users/TVZt45O3WzU)
+ to a more precise one. Previously the value **was underreported**. It is possible to switch to the behavior in earlier versions
+ if desired. This will **affect monitoring charts** and possibly **require bumping VM memory high watermark**
+ or provisioning more RAM.
+
+ GitHub issue: [rabbitmq-server#1223](https://github.com/rabbitmq/rabbitmq-server/issues/1223), [rabbitmq-server#1270](https://github.com/rabbitmq/rabbitmq-server/pull/1270)
+
+
+ * Erlang/OTP 20 is [now supported](https://groups.google.com/forum/#!searchin/rabbitmq-users/OTP$2020%7Csort:relevance/rabbitmq-users/_imbAavBYjY/ninEKhMYAgAJ).
+
+ GitHub issues: [rabbitmq-server#1243](https://github.com/rabbitmq/rabbitmq-server/issues/1243), [rabbitmq-server#1246](https://github.com/rabbitmq/rabbitmq-server/pull/1246), [rabbitmq-federation#58](https://github.com/rabbitmq/rabbitmq-federation/pull/58), [rabbitmq-management-agent#47](https://github.com/rabbitmq/rabbitmq-management-agent/pull/47), [rabbitmq-management#415](https://github.com/rabbitmq/rabbitmq-management/pull/415), [rabbitmq-stomp#115](https://github.com/rabbitmq/rabbitmq-stomp/issues/115)
+
+ * `supervisor2`: supervisor could fail to restart failed child processes in some cases.
+ This could affect multiple plugins, e.g. federation.
+
+ Contributed by Aliaksey Artamonau (Couchbase).
+
+ GitHub issues: [rabbitmq-common#201](https://github.com/rabbitmq/rabbitmq-common/pull/201),
+ [rabbitmq-server#1238](https://github.com/rabbitmq/rabbitmq-server/pull/1238)
+
+ * Memory used by binary heap ("binaries") was reduced for some scenarios that involve mirrored queues.
+
+ GitHub issue: [rabbitmq-common#208](https://github.com/rabbitmq/rabbitmq-common/issues/208)
+
+ * Exclusive queues were not cleaned up when node failed or was considered unavailable by one of its
+ peers.
+
+ GitHub issue: [rabbitmq-server#1323](https://github.com/rabbitmq/rabbitmq-server/issues/1323)
+
+ * `rabbitmq-service.bat start` and `rabbitmq-service.bat stop` did not report access violation
+ errors.
+
+ GitHub issue: [rabbitmq-server#1324](https://github.com/rabbitmq/rabbitmq-server/issues/1324)
+
+ * When the VM high memory watermark is set to a value greater than 1.0,
+   `1.0` will now be used as the effective value instead of the default (`0.4`).
+
+ GitHub issue: [rabbitmq-server#1285](https://github.com/rabbitmq/rabbitmq-server/issues/1285)
+
+#### Enhancements
+
+ * Total amount of RAM as seen by a node now can be overridden via config file.
+
+ GitHub issue: [rabbitmq-server#1224](https://github.com/rabbitmq/rabbitmq-server/issues/1224)
+
+ * Default value of `RABBITMQ_DISTRIBUTION_BUFFER_SIZE` is now `128000`. This makes inter-node communication
+   throughput more stable on 1 Gbit/s (or greater) network links.
+
+ GitHub issue: [rabbitmq-server#1306](https://github.com/rabbitmq/rabbitmq-server/issues/1306)
+
+ * Plugins that are already expanded (unarchived from `.ez` archives on node boot) won't be
+ expanded again.
+
+ Contributed by Alex Lebedeff (Mirantis).
+
+ GitHub issue: [rabbitmq-server#1226](https://github.com/rabbitmq/rabbitmq-server/pull/1226)
+
+
+### Management plugin
+
+#### Enhancements
+
+ * Listing queues, exchanges, vhosts via HTTP API is now more efficient for common cases
+ (e.g. when pagination is not used or only a subset of fields is requested).
+
+ GitHub issue: [rabbitmq-management#402](https://github.com/rabbitmq/rabbitmq-management/issues/402)
+
+ * Inter-node cluster link traffic information was missing.
+
+ GitHub issue: [rabbitmq-management#434](https://github.com/rabbitmq/rabbitmq-management/issues/434)
+
+ * Exclusive queues didn't have feature markers in the UI.
+
+ GitHub issue: [rabbitmq-management#459](https://github.com/rabbitmq/rabbitmq-management/issues/459)
+
+ * Queue leader locator now has a feature marker in the UI.
+
+ GitHub issue: [rabbitmq-management#468](https://github.com/rabbitmq/rabbitmq-management/pull/468)
+
+ * It is now possible to pre-configure user permissions at the time of user creation.
+
+ GitHub issue: [rabbitmq-management#441](https://github.com/rabbitmq/rabbitmq-management/issues/441)
+
+ * `rabbitmqadmin` now supports `--ssl-insecure` (`-k`), serving the same purpose as `-k` in curl
+
+ GitHub issue: [rabbitmq-management#452](https://github.com/rabbitmq/rabbitmq-management/issues/452)
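+
+   A usage sketch (host and port are illustrative):
+
+``` sh
+# connect over HTTPS without verifying the server certificate, similar to curl -k
+rabbitmqadmin --ssl --ssl-insecure --host broker.example.com --port 15671 list queues
+```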
+
+ * `rabbitmqadmin` now supports setting HTTP API endpoint hostname, port, username, and password
+ using a URI.
+
+ GitHub issue: [rabbitmq-management#437](https://github.com/rabbitmq/rabbitmq-management/issues/437)
+
+ * HTTP API requests are now less likely to produce log noise.
+
+ GitHub issue: [rabbitmq-web-dispatch#27](https://github.com/rabbitmq/rabbitmq-web-dispatch/issues/27)
+
+#### Bug Fixes
+
+ * Queue leader migration no longer leaves statistics database records behind.
+
+ GitHub issue: [rabbitmq-management#427](https://github.com/rabbitmq/rabbitmq-management/issues/427)
+
+ * Consumer argument serialisation to JSON was fixed.
+
+ GitHub issue: [rabbitmq-management#424](https://github.com/rabbitmq/rabbitmq-management/issues/424)
+
+ * It is now again possible to create a user without a password (or password hash).
+ Such users won't be able to sign in with [internal authentication backend](https://www.rabbitmq.com/access-control.html)
+ but can be used with external authentication mechanisms (x509 certificates) and backends
+ (LDAP, HTTP, etc).
+
+   GitHub issue: [rabbitmq-management#383](https://github.com/rabbitmq/rabbitmq-management/issues/383)
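+
+   For example, a passwordless user could be created via the HTTP API roughly like this (credentials, host, and user name are illustrative):
+
+``` sh
+# create a user with no password; it can only authenticate via external mechanisms/backends
+curl -u guest:guest -H "content-type: application/json" -X PUT \
+  -d '{"tags": ""}' http://localhost:15672/api/users/cert-only-user
+```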
+
+ * Pagination combined with a user-provided set of columns returned no results.
+
+ GitHub issue: [rabbitmq-management#404](https://github.com/rabbitmq/rabbitmq-management/issues/404)
+
+ * Regular expressions used for filtering were not correctly escaped.
+
+ GitHub issue: [rabbitmq-management#419](https://github.com/rabbitmq/rabbitmq-management/issues/419)
+
+ * `rabbitmqadmin` now handles float rates.
+
+ GitHub issue: [rabbitmq-management#457](https://github.com/rabbitmq/rabbitmq-management/issues/457)
+
+ * `rabbitmqadmin` now ignores `SIGPIPE` signals and is much less likely to terminate when its output is piped to
+ another process.
+
+ GitHub issue: [rabbitmq-management#438](https://github.com/rabbitmq/rabbitmq-management/issues/438)
+
+ * HTTP requests with uninitialised fields are less likely to crash the log event handler.
+
+ GitHub issue: [rabbitmq-web-dispatch#27](https://github.com/rabbitmq/rabbitmq-web-dispatch/issues/27)
+
+
+### Federation Plugin
+
+#### Bug Fixes
+
+ * Federation links now recover better in certain scenarios, e.g. when one side of a link is reset.
+
+ GitHub issue: [rabbitmq-federation#59](https://github.com/rabbitmq/rabbitmq-federation/issues/59)
+
+
+### STOMP Plugin
+
+#### Bug Fixes
+
+ * Erlang/OTP 20 is [now supported](https://groups.google.com/forum/#!searchin/rabbitmq-users/OTP$2020%7Csort:relevance/rabbitmq-users/_imbAavBYjY/ninEKhMYAgAJ).
+
+ GitHub issue: [rabbitmq-stomp#115](https://github.com/rabbitmq/rabbitmq-stomp/issues/115)
+
+
+### MQTT Plugin
+
+#### Enhancements
+
+ * No-op MQTT message retainer.
+
+ GitHub issue: [rabbitmq-mqtt#136](https://github.com/rabbitmq/rabbitmq-mqtt/issues/136)
+
+#### Bug Fixes
+
+ * MQTT connection processes now use connection log level.
+
+ GitHub issue: [rabbitmq-mqtt#142](https://github.com/rabbitmq/rabbitmq-mqtt/issues/142)
+
+
+### Consistent Hashing Exchange
+
+#### Bug Fixes
+
+ * Bucket distribution is now consistent (doesn't change) between plugin and node restarts
+ (assuming bindings haven't changed).
+
+ GitHub issue: [rabbitmq-consistent-hash-exchange#32](https://github.com/rabbitmq/rabbitmq-consistent-hash-exchange/issues/32)
+
+
+### Top Plugin
+
+#### Bug Fixes
+
+ * `rabbitmq-top` now gracefully handles scenarios where it is enabled only on a subset of cluster nodes.
+
+ GitHub issue: [rabbitmq-top#23](https://github.com/rabbitmq/rabbitmq-top/issues/23)
+
+ * Node drop-down on the ETS table tab redirected to the processes tab.
+
+ GitHub issue: [rabbitmq-top#24](https://github.com/rabbitmq/rabbitmq-top/issues/24)
+
+
+### Message Timestamp Plugin
+
+#### Enhancements
+
+ * A timestamp in milliseconds is also injected as a header (unless the header is already present).
+
+ GitHub issue: [rabbitmq-message-timestamp#16](https://github.com/rabbitmq/rabbitmq-message-timestamp/issues/16)
+
+
+## Upgrading
+
+To upgrade a non-clustered RabbitMQ simply install the new version. All configuration and persistent message data are retained.
+When upgrading using definitions export/import from versions earlier than 3.6.0, see http://rabbitmq.com/passwords.html.
+
+To upgrade a RabbitMQ cluster, follow the instructions [in RabbitMQ documentation](https://www.rabbitmq.com/clustering.html#upgrading).
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins or the client libraries. Please download the archive named `rabbitmq-3.6.11.tar.gz`.
diff --git a/release-notes/3.6.12.md b/release-notes/3.6.12.md
new file mode 100644
index 0000000000..c49d0da19f
--- /dev/null
+++ b/release-notes/3.6.12.md
@@ -0,0 +1,67 @@
+## RabbitMQ 3.6.12
+
+RabbitMQ `3.6.12` is a maintenance release.
+
+### Upgrades and Compatibility
+
+See the ["Upgrading clusters" section of the documentation](https://www.rabbitmq.com/clustering.html#upgrading)
+for general documentation on upgrades.
+
+This release has no other known incompatibilities with versions 3.6.7 through 3.6.11.
+See the upgrade and compatibility sections in the [3.6.7 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/rabbitmq_v3_6_7) if upgrading from an earlier release.
+
+
+### Core Server
+
+#### Bug Fixes
+
+ * Process responsible for running the autoheal partition handling strategy
+ could run into a deadlock with its peers, preventing autoheal from completing.
+
+ GitHub issue: [rabbitmq-server#1346](https://github.com/rabbitmq/rabbitmq-server/issues/1346)
+
+ * Garbage collection of mirrored queue metrics on nodes that did not
+ host a leader or mirror for a queue affected delivery and acknowledgement rates.
+ This could result in rates being 0 or negative when they should not be.
+
+ GitHub issue: [rabbitmq-server#1340](https://github.com/rabbitmq/rabbitmq-server/issues/1340)
+
+ * Stats emission could prevent queue mirrors from performing garbage collection
+ and consume memory even when they were empty.
+
+ GitHub issue: [rabbitmq-common#220](https://github.com/rabbitmq/rabbitmq-common/pull/220) (continuation to [rabbitmq-common#196](https://github.com/rabbitmq/rabbitmq-common/issues/196))
+
+ * `RABBITMQ_SCHEDULER_BIND_TYPE` and `RABBITMQ_DISTRIBUTION_BUFFER_SIZE` now can be set via `rabbitmq-env.conf`.
+
+ GitHub issue: [rabbitmq-server#1338](https://github.com/rabbitmq/rabbitmq-server/issues/1338)
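+
+   A minimal `rabbitmq-env.conf` sketch (the values shown are illustrative; `128000` matches the default mentioned in the 3.6.11 notes):
+
+``` sh
+# rabbitmq-env.conf
+RABBITMQ_SCHEDULER_BIND_TYPE=db
+RABBITMQ_DISTRIBUTION_BUFFER_SIZE=128000
+```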
+
+
+### Shovel Management Plugin
+
+#### Bug Fixes
+
+ * Passwords in source and destination URIs are now redacted.
+
+ GitHub issue: [rabbitmq-federation-management#15](https://github.com/rabbitmq/rabbitmq-federation-management/issues/15)
+
+
+### Federation Management Plugin
+
+#### Bug Fixes
+
+ * Passwords in upstream URIs are now redacted.
+
+ GitHub issue: [rabbitmq-federation-management#15](https://github.com/rabbitmq/rabbitmq-federation-management/issues/15)
+
+
+## Upgrading
+
+To upgrade a non-clustered RabbitMQ simply install the new version. All configuration and persistent message data are retained.
+When upgrading using definitions export/import from versions earlier than 3.6.0, see http://rabbitmq.com/passwords.html.
+
+To upgrade a RabbitMQ cluster, follow the instructions [in RabbitMQ documentation](https://www.rabbitmq.com/clustering.html#upgrading).
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins or the client libraries. Please download the archive named `rabbitmq-3.6.12.tar.gz`.
diff --git a/release-notes/3.6.13.md b/release-notes/3.6.13.md
new file mode 100644
index 0000000000..14dcb66e3f
--- /dev/null
+++ b/release-notes/3.6.13.md
@@ -0,0 +1,119 @@
+## RabbitMQ 3.6.13
+
+RabbitMQ `3.6.13` is a maintenance release.
+
+### Upgrades and Compatibility
+
+See the ["Upgrading clusters" section of the documentation](https://www.rabbitmq.com/clustering.html#upgrading)
+for general documentation on upgrades.
+
+This release has no known incompatibilities with versions 3.6.7 through 3.6.12.
+See the upgrade and compatibility sections in the [3.6.7 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/rabbitmq_v3_6_7) if upgrading from an earlier release.
+
+### 3.6.x Series Support Timeline
+
+Please read this note on [RabbitMQ 3.6.x series support timeline](https://groups.google.com/forum/#!msg/rabbitmq-users/kXkI-f3pgEw/UFowJIK4BQAJ).
+
+
+### Core Server
+
+#### Bug Fixes
+
+ * Memory usage monitor uses subprocesses a lot more sparingly. They are started
+ roughly once a second or not started at all, depending on the strategy (e.g. on Windows
+ Erlang VM allocator information is used instead).
+
+ GitHub issues: [rabbitmq-server#1343](https://github.com/rabbitmq/rabbitmq-server/issues/1343), [rabbitmq-common#224](https://github.com/rabbitmq/rabbitmq-common/issues/224)
+
+ * Very busy queues are now more efficient at prioritizing consumers, avoiding
+ consumer delivery blocking when relatively long running operations (e.g. bulk flushing
+ of transient messages to disk) happen.
+
+ GitHub issues: [rabbitmq-server#1388](https://github.com/rabbitmq/rabbitmq-server/pull/1388), [rabbitmq-server#1407](https://github.com/rabbitmq/rabbitmq-server/pull/1407)
+
+ * Queue leader strategies now take additional queue HA arguments into account.
+
+ GitHub issue: [rabbitmq-server#1371](https://github.com/rabbitmq/rabbitmq-server/issues/1371)
+
+ * Logging to standard output using `RABBITMQ_LOGS=-` wasn't possible on Windows.
+
+ GitHub issue: [rabbitmq-server#1348](https://github.com/rabbitmq/rabbitmq-server/issues/1348)
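+
+   A minimal sketch of the setting (on Windows the same variable can be set via the environment or the Windows equivalent of `rabbitmq-env.conf`):
+
+``` sh
+# rabbitmq-env.conf: send the node's log output to standard output
+RABBITMQ_LOGS=-
+```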
+
+ * Queue leader locator name can now be specified as an Erlang string.
+
+ GitHub issue: [rabbitmq-server#1411](https://github.com/rabbitmq/rabbitmq-server/issues/1411)
+
+ * Direct Erlang client connections could fail with obscure messages when
+ target node was still booting.
+
+ GitHub issue: [rabbitmq-server#1356](https://github.com/rabbitmq/rabbitmq-server/issues/1356)
+
+#### Enhancements
+
+ * `supervisor2` now can be used in environments that perform hot code upgrades.
+
+ GitHub issue: [rabbitmq-common#230](https://github.com/rabbitmq/rabbitmq-common/pull/230)
+
+ * systemd service file now whitelists exit code 69.
+
+ GitHub issue: [rabbitmq-server-release#51](https://github.com/rabbitmq/rabbitmq-server-release/issues/51)
+
+ * Example systemd service file now includes service restart settings.
+
+ GitHub issue: [rabbitmq-server#1359](https://github.com/rabbitmq/rabbitmq-server/issues/1359)
+
+ * Queue leader locator strategy can now be configured using strings.
+
+ GitHub issue: [rabbitmq-server#1411](https://github.com/rabbitmq/rabbitmq-server/issues/1411)
+
+ * Plugin activation avoids logging messages that can be confusing.
+
+ GitHub issue: [rabbitmq-server#1364](https://github.com/rabbitmq/rabbitmq-server/issues/1364)
+
+
+### Management Plugin
+
+#### Enhancements
+
+ * Minor efficiency improvements around stats collection.
+
+ GitHub issues: [rabbitmq-common#235](https://github.com/rabbitmq/rabbitmq-common/pull/235)
+
+ * jQuery upgraded to 1.12.4.
+
+ GitHub issue: [rabbitmq-management#502](https://github.com/rabbitmq/rabbitmq-management/pull/502)
+
+ * UI improvements.
+
+ * More detailed memory use breakdown on the node page.
+
+ GitHub issue: [rabbitmq-management#500](https://github.com/rabbitmq/rabbitmq-management/pull/500)
+
+ * It is now possible to configure an HTTP API path prefix for the plugin.
+
+ GitHub issue: [rabbitmq-management#481](https://github.com/rabbitmq/rabbitmq-management/issues/481)
+
+ * Queue details page no longer truncates node name.
+
+ GitHub issue: [rabbitmq-management#473](https://github.com/rabbitmq/rabbitmq-management/issues/473)
+
+ * Policy name in queue and exchange info is now a link.
+
+ GitHub issue: [rabbitmq-management#493](https://github.com/rabbitmq/rabbitmq-management/issues/493)
+
+ * `DELETE /api/connections/{name}` is now more defensive.
+
+ GitHub issue: [rabbitmq-management#497](https://github.com/rabbitmq/rabbitmq-management/issues/497)
+
+
+## Upgrading
+
+To upgrade a non-clustered RabbitMQ simply install the new version. All configuration and persistent message data are retained.
+When upgrading using definitions export/import from versions earlier than 3.6.0, see http://rabbitmq.com/passwords.html.
+
+To upgrade a RabbitMQ cluster, follow the instructions [in RabbitMQ documentation](https://www.rabbitmq.com/clustering.html#upgrading).
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins or the client libraries. Please download the archive named `rabbitmq-3.6.13.tar.gz`.
diff --git a/release-notes/3.6.14.md b/release-notes/3.6.14.md
new file mode 100644
index 0000000000..594bf07094
--- /dev/null
+++ b/release-notes/3.6.14.md
@@ -0,0 +1,48 @@
+## RabbitMQ 3.6.14
+
+RabbitMQ `3.6.14` is a maintenance release that primarily fixes an issue in systemd
+unit files that affected some systems. It is virtually identical to [3.6.13](https://github.com/rabbitmq/rabbitmq-server/releases/tag/rabbitmq_v3_6_13)
+otherwise.
+
+### Upgrades and Compatibility
+
+See the ["Upgrading clusters" section of the documentation](https://www.rabbitmq.com/clustering.html#upgrading)
+for general documentation on upgrades.
+
+This release has no known incompatibilities with versions 3.6.7 through 3.6.13.
+See the upgrade and compatibility sections in the [3.6.7 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/rabbitmq_v3_6_7) if upgrading from an earlier release.
+
+### 3.6.x Series Support Timeline
+
+Please read this note on [RabbitMQ 3.6.x series support timeline](https://groups.google.com/forum/#!msg/rabbitmq-users/kXkI-f3pgEw/UFowJIK4BQAJ).
+
+
+### Debian and RPM packages
+
+#### Bug Fixes
+
+ * systemd service unit no longer has trailing comments that could lead nodes
+ into restart loops on some systems.
+
+ GitHub issues: [rabbitmq-server#1422](https://github.com/rabbitmq/rabbitmq-server/issues/1422)
+
+### Management Plugin
+
+#### Bug Fix
+
+ * Expandable sections on node metrics page failed to expand.
+
+ GitHub issues: [rabbitmq-management#507](https://github.com/rabbitmq/rabbitmq-management/issues/507)
+
+
+## Upgrading
+
+To upgrade a non-clustered RabbitMQ simply install the new version. All configuration and persistent message data are retained.
+When upgrading using definitions export/import from versions earlier than 3.6.0, see http://rabbitmq.com/passwords.html.
+
+To upgrade a RabbitMQ cluster, follow the instructions [in RabbitMQ documentation](https://www.rabbitmq.com/clustering.html#upgrading).
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins or the client libraries. Please download the archive named `rabbitmq-3.6.14.tar.gz`.
diff --git a/release-notes/3.6.15.md b/release-notes/3.6.15.md
new file mode 100644
index 0000000000..e7ec97b016
--- /dev/null
+++ b/release-notes/3.6.15.md
@@ -0,0 +1,142 @@
+## RabbitMQ 3.6.15
+
+RabbitMQ 3.6.15 is a maintenance release.
+
+### Upgrades and Compatibility
+
+See the ["Upgrading clusters" section of the
+documentation](https://www.rabbitmq.com/upgrade.html) for general
+documentation on upgrades.
+
+This release has no known incompatibilities with versions 3.6.7 through
+3.6.14. See the upgrade and compatibility sections in the [3.6.7 release
+notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/rabbitmq_v3_6_7)
+if upgrading from an earlier release.
+
+### 3.6.x Series Support Timeline
+
+Please read this note on [RabbitMQ 3.6.x series support
+timeline](https://groups.google.com/forum/#!msg/rabbitmq-users/kXkI-f3pgEw/UFowJIK4BQAJ).
+
+### Core Server
+
+#### Bug Fixes
+
+* Avoid infinite loop when dropping entries in the GM.
+
+ GitHub issue: [rabbitmq-server#1431](https://github.com/rabbitmq/rabbitmq-server/pull/1431)
+
+* Various changes to the OCF scripts.
+
+ GitHub issues: [rabbitmq-server-release#66](https://github.com/rabbitmq/rabbitmq-server-release/pull/66),
+ [rabbitmq-server-release#67](https://github.com/rabbitmq/rabbitmq-server-release/pull/67),
+ [rabbitmq-server-release#73](https://github.com/rabbitmq/rabbitmq-server-release/pull/73)
+
+#### Enhancements
+
+ * [Internal authN backend](https://www.rabbitmq.com/access-control.html) will now prohibit logins with a blank password. Such attempts
+ are usually made by mistake because a passwordless user that was meant to authenticate
+ using [x509 (TLS) certificates](https://github.com/rabbitmq/rabbitmq-auth-mechanism-ssl/) was not configured to use the correct [authentication
+ mechanism](https://www.rabbitmq.com/authentication.html).
+
+ Note that the same behavior can already be achieved using a [credential validator](https://www.rabbitmq.com/passwords.html),
+ so this is just an extra safety measure.
+
+ GitHub issue: [rabbitmq-server#1466](https://github.com/rabbitmq/rabbitmq-server/pull/1466)
+
+
+### Management Plugin
+
+#### Bug Fixes
+
+* Multi-input forms (such as message properties) again submit all fields.
+
+ GitHub issues: [rabbitmq-management#511](https://github.com/rabbitmq/rabbitmq-management/issues/511)
+
+* User update form now successfully updates tags for passwordless users (or if the password is cleared at the same time).
+
+ GitHub issue: [rabbitmq-management#533](https://github.com/rabbitmq/rabbitmq-management/issues/533)
+
+* `Content-Type` header was returned twice in API responses.
+
+ GitHub issue: [rabbitmq-management#527](https://github.com/rabbitmq/rabbitmq-management/issues/527)
+
+* Avoid duplicated event handlers in the UI.
+
+ GitHub issue: [rabbitmq-management#516](https://github.com/rabbitmq/rabbitmq-management/pull/516)
+
+* Use `auto` width to make option values fully visible.
+
+ GitHub issue: [rabbitmq-management#519](https://github.com/rabbitmq/rabbitmq-management/issues/519)
+
+### MQTT Plugin
+
+#### Bug Fixes
+
+* Concurrently registering consumers could fail with a "Key exists" exception in the log.
+
+ GitHub issue: [rabbitmq-mqtt#132](https://github.com/rabbitmq/rabbitmq-mqtt/issues/132)
+
+### Federation Plugin
+
+#### Bug Fixes
+
+* Disabling policy (or plugin) now cleans up all internal exchanges and queues used by federation links.
+
+ GitHub issue: [rabbitmq-federation#63](https://github.com/rabbitmq/rabbitmq-federation/issues/63)
+
+### Standalone MacOS Package
+
+#### Bug Fixes
+
+* Standalone MacOS package now ships with Erlang/OTP 20.1 which is compatible with High Sierra.
+
+ GitHub issue: [rabbitmq-server-release#68](https://github.com/rabbitmq/rabbitmq-server-release/issues/68)
+
+### Debian Package
+
+#### Bug Fixes
+
+* Make `adm` the group owner for RabbitMQ log directories.
+
+ GitHub issue: [rabbitmq-server-release#70](https://github.com/rabbitmq/rabbitmq-server-release/pull/70)
+
+### Web MQTT Plugin
+
+#### Bug Fixes
+
+* `https/web-mqtt` listener displayed incorrect port in management UI.
+
+ GitHub issue: [rabbitmq-web-mqtt#25](https://github.com/rabbitmq/rabbitmq-web-mqtt/issues/25)
+
+### Top Plugin
+
+#### Bug Fixes
+
+* Fix node and row selectors after jQuery upgrade to 1.12.
+
+ GitHub issue: [rabbitmq-top#29](https://github.com/rabbitmq/rabbitmq-top/pull/29)
+
+### Erlang Client
+
+#### Bug Fixes
+
+* Enable hostname verification for OTP 19.
+
+ GitHub issue: [rabbitmq-erlang-client#95](https://github.com/rabbitmq/rabbitmq-erlang-client/pull/95)
+
+## Upgrading
+
+To upgrade a non-clustered RabbitMQ simply install the new version. All
+configuration and persistent message data are retained. When upgrading
+using definitions export/import from versions earlier than 3.6.0, see
+http://rabbitmq.com/passwords.html.
+
+To upgrade a RabbitMQ cluster, follow the instructions [in RabbitMQ
+documentation](https://www.rabbitmq.com/clustering.html#upgrading).
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains
+the source of the broker, not the plugins or the client libraries.
+Please download the archive named `rabbitmq-server-3.6.15.tar.xz`.
diff --git a/release-notes/3.6.16.md b/release-notes/3.6.16.md
new file mode 100644
index 0000000000..c3f8b3c583
--- /dev/null
+++ b/release-notes/3.6.16.md
@@ -0,0 +1,143 @@
+## RabbitMQ 3.6.16
+
+RabbitMQ 3.6.16 is a maintenance release that primarily includes selected backports from
+the [3.7.x series](https://www.rabbitmq.com/changelog.html). Users of earlier 3.6.x releases
+are recommended to [upgrade](https://www.rabbitmq.com/upgrade.html) to a 3.7.x release, e.g. [3.7.6](https://github.com/rabbitmq/rabbitmq-server/releases).
+
+### Upgrades and Compatibility
+
+See the [Upgrades documentation guide](https://www.rabbitmq.com/upgrade.html) for general
+documentation on upgrades.
+
+This release nearly exclusively contains backports of bug fixes, optimisations and small enhancements
+(e.g. safer defaults) from the 3.7.x series. Some of them are **potentially breaking** but
+should only affect a very small percentage of users. They are:
+
+ * The max 255 priority cap is now enforced at all levels
+ * Default `rabbit.channel_max` value is now about 2K, a much safer value than 65K used previously
+ * Default idle LDAP connection timeout is now 300 seconds instead of infinity
+
+This release has no other known incompatibilities with versions 3.6.7 through
+3.6.15. See the upgrade and compatibility sections in the [3.6.7 release
+notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/rabbitmq_v3_6_7)
+if upgrading from an earlier release.
+
+### 3.6.x Series Support Timeline
+
+Please read this note on [RabbitMQ 3.6.x series support
+timeline](https://groups.google.com/forum/#!msg/rabbitmq-users/kXkI-f3pgEw/UFowJIK4BQAJ).
+
+
+### Core Server
+
+#### Bug Fixes
+
+* Queue leader locator `min-masters` incorrectly calculated the number of masters.
+
+ GitHub issue: [rabbitmq-server#1519](https://github.com/rabbitmq/rabbitmq-server/issues/1519)
+
+* Maximum supported number of queue priorities (255) is now enforced.
+
+ GitHub issue: [rabbitmq-server#1590](https://github.com/rabbitmq/rabbitmq-server/issues/1590)
+
+#### Enhancements
+
+ * On Erlang 20.2.3 or later, more optimal memory allocators will be used. On some workloads this leads
+   to a [significant reduction in node's RAM consumption](https://groups.google.com/d/msg/rabbitmq-users/LSYaac9frYw/LNZDZUlrBAAJ). The change is workload-specific; however, on the workloads
+   tested that did show a negative effect (more RAM used), the difference was very small (about 1%).
+
+ It is possible to go back to the previous settings, `+MBas aoffcbf +MHas aoffcbf +MBlmbcs 5120 +MHlmbcs 5120 +MMmcs 10`,
+ by specifying them in the `RABBITMQ_SERVER_ADDITIONAL_ERL_ARGS` [environment variable](https://www.rabbitmq.com/configure.html#customise-environment).
+
+ On Erlang/OTP releases older than 20.2.3 there will be no default changes.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1604](https://github.com/rabbitmq/rabbitmq-server/pull/1604), [rabbitmq/rabbitmq-server#1612](https://github.com/rabbitmq/rabbitmq-server/pull/1612)
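+
+  A minimal `rabbitmq-env.conf` sketch that restores the previous allocator settings (file location varies by platform):
+
+``` sh
+# rabbitmq-env.conf
+RABBITMQ_SERVER_ADDITIONAL_ERL_ARGS="+MBas aoffcbf +MHas aoffcbf +MBlmbcs 5120 +MHlmbcs 5120 +MMmcs 10"
+```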
+
+ * `queue.delete` operations will now force delete queues that don't have a [promotable
+ master](https://www.rabbitmq.com/ha.html#unsynchronised-mirrors).
+
+ GitHub issue: [rabbitmq-server#1501](https://github.com/rabbitmq/rabbitmq-server/issues/1501)
+
+ * `ha-promote-on-failure` is a new mirrored queue setting that's similar to [`ha-promote-on-shutdown`](https://www.rabbitmq.com/ha.html#unsynchronised-mirrors)
+ but for unexpected leader replica termination.
+
+ GitHub issue: [rabbitmq-server#1578](https://github.com/rabbitmq/rabbitmq-server/pull/1578)
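+
+  A hypothetical policy sketch combining the new key with the usual mirroring keys (policy name, pattern, and values are illustrative):
+
+``` sh
+# mirror matching queues; only promote a new leader on failure if a synchronised mirror exists
+rabbitmqctl set_policy --apply-to queues ha-failure-example "^important\." \
+  '{"ha-mode":"exactly","ha-params":2,"ha-promote-on-failure":"when-synced"}'
+```
+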
+ * Lock contention in internal database is now much lower when a node with a lot of exclusive queues
+ shuts down or is otherwise considered to be unavailable by peers. This means fewer CPU cycles
+ spent cleaning up the exclusive queues.
+
+ GitHub issue: [rabbitmq-server#1570](https://github.com/rabbitmq/rabbitmq-server/pull/1570)
+
+ * Default [max number of channels allowed on a connection](https://www.rabbitmq.com/networking.html#tuning-for-large-number-of-connections-channel-max) (a.k.a. `channel_max`) has been lowered from `65535` to `2047`.
+ The new default is much safer and will reduce the effect application channel leaks have on node resource consumption. **This is a potentially breaking change**.
+
+ Systems where a lot (at least hundreds) of channels *per connection* are used should explicitly override `channel_max` in the [config file](https://www.rabbitmq.com/configure.html) to a higher suitable value, e.g.:
+
+``` erlang
+[
+ {rabbit, [
+ {channel_max, 4000}
+ ]}
+].
+```
+
+ GitHub issue: [rabbitmq-server#1593](https://github.com/rabbitmq/rabbitmq-server/issues/1593)
+
+
+### Management Plugin
+
+#### Bug Fixes
+
+ * Eliminated unnecessary encoding conversions that could lead to increased memory consumption.
+
+ GitHub issue: [rabbitmq-management#550](https://github.com/rabbitmq/rabbitmq-management/pull/550)
+
+ * `rabbitmqadmin` could run into an exception with certain formatting settings on Python 2.x.
+
+ GitHub issue: [rabbitmq-management#568](https://github.com/rabbitmq/rabbitmq-management/issues/568)
+
+ * `rabbitmqadmin` incorrectly encoded spaces in connection name values
+
+ GitHub issue: [rabbitmq-management#579](https://github.com/rabbitmq/rabbitmq-management/pull/579)
+
+ * HTTP API stats documentation used incorrect field names.
+
+ GitHub issue: [rabbitmq-management#561](https://github.com/rabbitmq/rabbitmq-management/pull/561)
+
+#### Enhancements
+
+ * Reduced padding usage in tables in the management UI.
+
+ GitHub issue: [rabbitmq-management#541](https://github.com/rabbitmq/rabbitmq-management/pull/541)
+
+
+### LDAP Plugin
+
+#### Bug Fixes
+
+ * LDAP server-initiated connection closure was not handled gracefully by connection pool.
+ Kudos to Adam Gardner for [doing all the hard work](https://groups.google.com/forum/#!topic/rabbitmq-users/4Gva3h-yJzM) investigating this issue.
+
+ GitHub issues: [rabbitmq-auth-backend-ldap#82](https://github.com/rabbitmq/rabbitmq-auth-backend-ldap/issues/82), [rabbitmq-auth-backend-ldap#90](https://github.com/rabbitmq/rabbitmq-auth-backend-ldap/issues/90)
+
+ * `rabbit.idle_timeout` now defaults to 300 seconds instead of `infinity`.
+
+ GitHub issue: [rabbitmq-auth-backend-ldap#81](https://github.com/rabbitmq/rabbitmq-auth-backend-ldap/issues/81)
+
+
+## Upgrading
+
+To upgrade a non-clustered RabbitMQ simply install the new version. All
+configuration and persistent message data are retained. When upgrading
+using definitions export/import from versions earlier than 3.6.0, see
+http://rabbitmq.com/passwords.html.
+
+To upgrade a RabbitMQ cluster, follow the instructions [in RabbitMQ
+documentation](https://www.rabbitmq.com/clustering.html#upgrading).
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains
+the source of the broker, not the plugins or the client libraries.
+Please download the archive named `rabbitmq-server-3.6.16.tar.xz`.
diff --git a/release-notes/3.6.2.md b/release-notes/3.6.2.md
new file mode 100644
index 0000000000..c5e1558465
--- /dev/null
+++ b/release-notes/3.6.2.md
@@ -0,0 +1,375 @@
+## RabbitMQ 3.6.2
+
+RabbitMQ `3.6.2` is a maintenance release that includes a [new statistics
+collector in the management plugin](https://github.com/rabbitmq/rabbitmq-management/issues/41).
+
+### Management Plugin Incompatibility with Earlier 3.6.x Releases
+
+Because this version includes significant changes to the management plugin,
+it should not be deployed into clusters with mixed `3.6.x` node versions
+unless **all nodes that have the management plugin enabled** run `3.6.2`.
+Otherwise HTTP API requests will fail with exceptions in some scenarios and parts of
+management UI may not be updated.
+
+There are no other known incompatibilities with earlier `3.6.x` releases.
+
+
+### OpenSSL Requirement for Standalone Mac Release
+
+Standalone Mac OS X package now requires OpenSSL 1.0.x to be provided by the system, for example,
+from Homebrew.
+
+### Optional `socat` Package Dependency for Distributions that Use `systemd`
+
+Starting with `3.6.2`, RabbitMQ Debian and RPM packages have an optional
+[dependency on socat](https://groups.google.com/forum/#!searchin/rabbitmq-users/socat/rabbitmq-users/ktzOsgNEBkY/cOlF_eP6AAAJ). When
+installing using `dpkg`, this dependency won't be automatically installed. To install it manually, use
+
+ sudo apt-get install socat
+
+
+### Server
+
+#### Bug Fixes
+
+ * Channel operations that time out no longer produce noisy crash reports in
+ RabbitMQ log files
+
+ GitHub issue: [rabbitmq-common#63](https://github.com/rabbitmq/rabbitmq-common/issues/63)
+
+ * Channel operation timeout default is bumped to 15 seconds
+
+   GitHub issue: [rabbitmq-server#667](https://github.com/rabbitmq/rabbitmq-server/issues/667)
+
+ * Windows service will pick up RabbitMQ config file regardless of whether
+ it was present during service installation
+
+ GitHub issue: [rabbitmq-server#659](https://github.com/rabbitmq/rabbitmq-server/issues/659)
+
+ * Erlang distribution failures for `rabbitmqctl` and cross-node links will
+ provide more detail when running on Erlang 19 (scheduled for release in Q3 2016).
+
+ GitHub issue: [rabbitmq-server#401](https://github.com/rabbitmq/rabbitmq-server/issues/401)
+
+ * `rabbitmqctl list_consumers` now lists all consumers on a given queue
+
+ Previously it would only list one.
+
+ Contributed by Alexey Lebedeff (Mirantis).
+
+ GitHub issue: [rabbitmq-server#701](https://github.com/rabbitmq/rabbitmq-server/issues/701)
+
+ * `rabbitmqctl list_queues` now correctly outputs rows for unavailable queues
+
+ Contributed by Alexey Lebedeff (Mirantis).
+
+ GitHub issue: [rabbitmq-server#696](https://github.com/rabbitmq/rabbitmq-server/issues/696)
+
+ * `rabbitmqctl set_disk_free_limit mem_relative` erroneously rejected values greater than `1.0`
+
+ GitHub issue: [rabbitmq-server#717](https://github.com/rabbitmq/rabbitmq-server/issues/717)
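+
+   For example, the following (the value is illustrative) now works as expected:
+
+``` sh
+# set the free disk space limit to 1.5 times the total amount of RAM
+rabbitmqctl set_disk_free_limit mem_relative 1.5
+```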
+
+ * Messages with an invalid client-provided `x-death` header value caused queue process termination
+
+ GitHub issue: [rabbitmq-server#767](https://github.com/rabbitmq/rabbitmq-server/issues/767)
+
+ * Messages with priorities higher than the queue's maximum are now assigned the maximum configured priority
+   and no longer cause an unhandled exception that results in a queue process restart
+
+ GitHub issue: [rabbitmq-server#795](https://github.com/rabbitmq/rabbitmq-server/issues/795)
+
+ * Policy-related `rabbitmqctl` commands could return unformatted messages
+
+ GitHub issue: [rabbitmq-server#742](https://github.com/rabbitmq/rabbitmq-server/issues/742)
+
+ * `RABBITMQ_IO_THREAD_POOL_SIZE` is no longer ignored by `rabbitmq-server.bat`
+
+ GitHub issue: [rabbitmq-server#705](https://github.com/rabbitmq/rabbitmq-server/issues/705)
+
+ * Deleting a vhost in parallel with updating a policy in it resulted
+ in unhandled exceptions
+
+ GitHub issues: [rabbitmq-server#755](https://github.com/rabbitmq/rabbitmq-server/issues/755),
+ [rabbitmq-server#759](https://github.com/rabbitmq/rabbitmq-server/issues/759),
+ [rabbitmq-server#744](https://github.com/rabbitmq/rabbitmq-server/issues/744)
+
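+To illustrate the `set_disk_free_limit` fix above, relative limits greater than `1.0` are now accepted, for example:
+
+    rabbitmqctl set_disk_free_limit mem_relative 1.5
+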
+#### Enhancements
+
+ * Connections now emit stats unconditionally when they are blocked and unblocked
+   by resource alarms. This keeps management UI and HTTP API-reported data more up-to-date.
+
+ GitHub issue: [rabbitmq-server#679](https://github.com/rabbitmq/rabbitmq-server/issues/679)
+
+ * New (node-local) health check command
+
+   `rabbitmqctl node_health_check` is a new command that performs a basic health check of a node
+
+ GitHub issue: [rabbitmq-server#398](https://github.com/rabbitmq/rabbitmq-server/issues/398)
+
+ * Automatic restart policy enabled for Windows service
+
+ GitHub issue: [rabbitmq-server#645](https://github.com/rabbitmq/rabbitmq-server/issues/645)
+
+ * Default number of async I/O VM threads is now calculated based on the
+ number of available CPU cores
+
+ GitHub issue: [rabbitmq-server#151](https://github.com/rabbitmq/rabbitmq-server/issues/151)
+
+ * `rabbitmqctl list_queues` now supports new flags, `--offline` and `--online`, that limit
+   the result to only unavailable or available queues (queue leaders, to be more precise).
+   See the example at the end of this section.
+
+ Contributed by Alexey Lebedeff (Mirantis).
+
+ GitHub issue: [rabbitmq-server#688](https://github.com/rabbitmq/rabbitmq-server/issues/688)
+
+ * RabbitMQ will no longer log a warning about disabled kernel polling on Windows
+
+ The runtime does not support kernel polling (I/O completion ports) on Windows,
+ so there is nothing the user can do about it.
+
+ GitHub issue: [rabbitmq-server#695](https://github.com/rabbitmq/rabbitmq-server/issues/695)
+
+ * Queue index is now updated in batches when messages are requeued
+
+ GitHub issue: [rabbitmq-server#343](https://github.com/rabbitmq/rabbitmq-server/issues/343)
+
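+To illustrate the new `list_queues` flags above, the following commands list only the names of unavailable and available queues, respectively:
+
+    rabbitmqctl list_queues --offline name
+    rabbitmqctl list_queues --online name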
+
+### Management plugin
+
+#### Bug Fixes
+
+ * Samples (stats) for abnormally terminated connections and channels will now be cleaned up more aggressively
+
+ GitHub issue: [rabbitmq-management#198](https://github.com/rabbitmq/rabbitmq-management/issues/198)
+
+ * Cluster name could be returned by HTTP API as a non-string value
+
+ GitHub issue: [rabbitmq-management#143](https://github.com/rabbitmq/rabbitmq-management/issues/143)
+
+ * Improved IE 11 compatibility
+
+ GitHub issue: [rabbitmq-management#123](https://github.com/rabbitmq/rabbitmq-management/issues/123)
+
+#### Enhancements
+
+ * The management plugin has a new, better parallelised event collector that is less likely to
+   fall behind.
+
+ GitHub issues: [rabbitmq-management#41](https://github.com/rabbitmq/rabbitmq-management/issues/41),
+ [rabbitmq-management#166](https://github.com/rabbitmq/rabbitmq-management/issues/166),
+ [rabbitmq-management#173](https://github.com/rabbitmq/rabbitmq-management/issues/173),
+ [rabbitmq-management#185](https://github.com/rabbitmq/rabbitmq-management/issues/185),
+ [rabbitmq-management#174](https://github.com/rabbitmq/rabbitmq-management/issues/174)
+
+ * Clients can now provide a human-readable connection name that will be displayed
+   in the management UI. Currently the Java, .NET and Erlang clients support this.
+   In order to use this feature, set the `connection_name` key in client properties.
+   Note that this name doesn't have to be unique and cannot be used as a connection identifier,
+   for example, in HTTP API requests. See the sketch at the end of this section.
+
+ GitHub issue: [rabbitmq-server#104](https://github.com/rabbitmq/rabbitmq-server/issues/104)
+
+ * `GET /api/nodes/{node}/memory` and `GET /api/nodes/{node}/memory/relative` are new HTTP API
+ endpoints that return memory usage breakdown in absolute (same as `rabbitmqctl status`) and
+ relative terms.
+
+ GitHub issue: [rabbitmq-management#161](https://github.com/rabbitmq/rabbitmq-management/issues/161)
+
+ * HTTPS related improvements in `rabbitmqadmin`.
+
+ GitHub issues: [rabbitmq-management#152](https://github.com/rabbitmq/rabbitmq-management/issues/152), [rabbitmq-management#151](https://github.com/rabbitmq/rabbitmq-management/issues/151), [rabbitmq-management#149](https://github.com/rabbitmq/rabbitmq-management/issues/149).
+
+ * Policies now can be listed (read) by the users tagged with `management` and `monitoring`
+
+ As well as those tagged with `policymaker` and `administrator`, of course.
+
+ GitHub issue: [rabbitmq-management#156](https://github.com/rabbitmq/rabbitmq-management/issues/156)
+
+ * Effective rates mode is now displayed for cluster nodes by default
+
+ GitHub issue: [rabbitmq-management#177](https://github.com/rabbitmq/rabbitmq-management/issues/177)
+
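+As a minimal sketch of the connection naming feature above using the Erlang client (the module name, connection name and host below are arbitrary), the name is passed via the `client_properties` field of the connection parameters:
+
+``` erlang
+-module(named_connection_example).
+
+-include_lib("amqp_client/include/amqp_client.hrl").
+
+-export([connect/0]).
+
+%% Opens a connection whose name will show up in the management UI.
+connect() ->
+    Params = #amqp_params_network{
+        host = "localhost",
+        client_properties = [{<<"connection_name">>, longstr, <<"order-processor-1">>}]
+    },
+    {ok, Connection} = amqp_connection:start(Params),
+    Connection.
+```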
+
+### MQTT plugin
+
+#### Bug Fixes
+
+ * Resource alarms are handled correctly by MQTT connections
+
+   GitHub issue: [rabbitmq-mqtt#62](https://github.com/rabbitmq/rabbitmq-mqtt/issues/62)
+
+ * Session [pre-existing] presence is now correctly communicated to clients
+
+ GitHub issue: [rabbitmq-mqtt#61](https://github.com/rabbitmq/rabbitmq-mqtt/issues/61)
+
+#### Enhancements
+
+ * Connections now emit stats unconditionally when they are blocked and unblocked
+   by resource alarms. This keeps management UI and HTTP API-reported data more up-to-date.
+
+ GitHub issue: [rabbitmq-mqtt#71](https://github.com/rabbitmq/rabbitmq-mqtt/issues/71)
+
+ * More connection details for MQTT connections reported to management UI
+
+ GitHub issue: [rabbitmq-mqtt#66](https://github.com/rabbitmq/rabbitmq-mqtt/pull/66)
+
+### STOMP plugin
+
+#### Bug Fixes
+
+ * Resource alarms are handled correctly by STOMP connections
+
+ GitHub issues: [rabbitmq-stomp#68](https://github.com/rabbitmq/rabbitmq-stomp/issues/68),
+ [rabbitmq-stomp#67](https://github.com/rabbitmq/rabbitmq-stomp/issues/67)
+
+#### Enhancements
+
+ * Connections now emit stats unconditionally when they are blocked and unblocked
+   by resource alarms. This keeps management UI and HTTP API-reported data more up-to-date.
+
+ GitHub issue: [rabbitmq-stomp#70](https://github.com/rabbitmq/rabbitmq-stomp/issues/70)
+
+ * More connection details for STOMP connections reported to management UI
+
+ GitHub issue: [rabbitmq-stomp#55](https://github.com/rabbitmq/rabbitmq-stomp/issues/55)
+
+
+
+### Web STOMP plugin
+
+#### Enhancements
+
+ * More connection details for STOMP-over-WebSockets connections reported to management UI
+
+ GitHub issue: [rabbitmq-web-stomp#45](https://github.com/rabbitmq/rabbitmq-web-stomp/pull/45)
+
+ * The plugin now sends a protocol header (`Sec-WebSocket-Protocol`) response when
+   the client presents one.
+
+ GitHub issue: [rabbitmq-web-stomp#53](https://github.com/rabbitmq/rabbitmq-web-stomp/issues/53)
+
+
+### Web MQTT plugin
+
+#### Bug Fixes
+
+ * More connection details for MQTT-over-WebSockets connections reported to management UI
+
+ GitHub issue: [rabbitmq-web-mqtt#3](https://github.com/rabbitmq/rabbitmq-web-mqtt/pull/3)
+
+ * Plugin deactivation now correctly stops TCP listener
+
+ GitHub issue: [rabbitmq-web-mqtt#7](https://github.com/rabbitmq/rabbitmq-web-mqtt/issues/7)
+
+
+### .NET client
+
+#### Bug Fixes
+
+ * Compatibility with SQL Server 2014 CLR restored
+
+ GitHub issue: [rabbitmq-dotnet-client#167](https://github.com/rabbitmq/rabbitmq-dotnet-client/issues/167)
+
+ * Autorecovering connections now respect all provided hostnames
+ when reconnecting.
+
+ GitHub issue: [rabbitmq-dotnet-client#157](https://github.com/rabbitmq/rabbitmq-dotnet-client/issues/157)
+
+ * `ConnectionFactory#CreateConnection` now respects all provided hostnames
+ when automatic connection recovery is disabled.
+
+ GitHub issue: [rabbitmq-dotnet-client#176](https://github.com/rabbitmq/rabbitmq-dotnet-client/issues/176)
+
+### Erlang client
+
+#### Bug Fixes
+
+ * Certain channel failures resulted in a race condition during process [tree] shutdown.
+
+ GitHub issue: [rabbitmq-erlang-client#42](https://github.com/rabbitmq/rabbitmq-erlang-client/issues/42)
+
+
+### LDAP Authentication/Authorisation Backend
+
+#### Bug Fixes
+
+ * LDAP connection pool is now more resilient to TCP connection closure/loss on Erlang/OTP 18.3
+
+ GitHub issue: [rabbitmq-auth-backend-ldap#41](https://github.com/rabbitmq/rabbitmq-auth-backend-ldap/issues/41)
+
+ * A non-existent group in `tag_queries` no longer terminates authorisation
+
+ GitHub issue: [rabbitmq-auth-backend-ldap#15](https://github.com/rabbitmq/rabbitmq-auth-backend-ldap/issues/15)
+
+ * `attribute` queries no longer fail when multiple values are returned
+
+ GitHub issue: [rabbitmq-auth-backend-ldap#16](https://github.com/rabbitmq/rabbitmq-auth-backend-ldap/issues/16)
+
+#### Enhancements
+
+ * Virtual host is now available as a variable in `tag_queries` (see the sketch at the end of this section)
+
+ GitHub issue: [rabbitmq-auth-backend-ldap#13](https://github.com/rabbitmq/rabbitmq-auth-backend-ldap/issues/13)
+
+ * Default LDAP connection pool size was increased from `10` to `64`
+
+ GitHub issue: [rabbitmq-auth-backend-ldap#35](https://github.com/rabbitmq/rabbitmq-auth-backend-ldap/issues/35)
+
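+A sketch of using the new variable in the classic config format, assuming the plugin's `${vhost}` substitution syntax (the group DN below is hypothetical):
+
+``` erlang
+[
+  {rabbitmq_auth_backend_ldap, [
+    {tag_queries, [
+      %% grant the administrator tag to members of a per-vhost LDAP group
+      {administrator, {in_group, "cn=${vhost}-admins,ou=groups,dc=example,dc=com"}},
+      {management,    {constant, true}}
+    ]}
+  ]}
+].
+```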
+
+### HTTP Authentication/Authorisation Backend
+
+#### Enhancements
+
+ * The plugin now uses HTTP 1.1 and keep-alive connections for requests.
+
+ GitHub issue: [rabbitmq-auth-backend-http#20](https://github.com/rabbitmq/rabbitmq-auth-backend-http/issues/20)
+
+ * It is now possible to configure the plugin to use `POST` requests instead of
+   `GET`, so that no sensitive information is logged. See the sketch at the end of this section.
+
+ GitHub issue: [rabbitmq-auth-backend-http#7](https://github.com/rabbitmq/rabbitmq-auth-backend-http/issues/7)
+
+ * The plugin now supports HTTP client TLS options, so authenticating apps can use HTTPS
+
+ GitHub issue: [rabbitmq-auth-backend-http#29](https://github.com/rabbitmq/rabbitmq-auth-backend-http/issues/29)
+
+ * Vhost access requests now include client IP address
+
+ GitHub issue: [rabbitmq-auth-backend-http#33](https://github.com/rabbitmq/rabbitmq-auth-backend-http/pull/33)
+
+ Contributed by Abdulrazak Alkl.
+
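+As a rough sketch, switching the plugin to `POST` might look like this in the classic config format (the `http_method` key and the endpoint URLs are illustrative assumptions):
+
+``` erlang
+[
+  {rabbitmq_auth_backend_http, [
+    %% send credentials in request bodies instead of query strings
+    {http_method,   post},
+    {user_path,     "http://auth.eng.example.local:8000/auth/user"},
+    {vhost_path,    "http://auth.eng.example.local:8000/auth/vhost"},
+    {resource_path, "http://auth.eng.example.local:8000/auth/resource"}
+  ]}
+].
+```
+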
+### Event Exchange plugin
+
+#### Enhancements
+
+ * Policy events now include a vhost field
+
+ GitHub issue: [rabbitmq-event-exchange#17](https://github.com/rabbitmq/rabbitmq-event-exchange/issues/17)
+
+ * Binding events now include a vhost field
+
+ GitHub issue: [rabbitmq-event-exchange#9](https://github.com/rabbitmq/rabbitmq-event-exchange/issues/9)
+
+ * Millisecond resolution timestamp of events is now back as a message header
+
+ GitHub issue: [rabbitmq-event-exchange#12](https://github.com/rabbitmq/rabbitmq-event-exchange/issues/12)
+
+ * `user.authentication.success` no longer has the `vhost` field
+   since the vhost is only available at a later point. `connection.created`
+   events should be used to track successful connections if vhost
+   information is desired (see the example at the end of this section).
+
+ GitHub issue: [rabbitmq-event-exchange#13](https://github.com/rabbitmq/rabbitmq-event-exchange/issues/13)
+
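+As a rough sketch of the suggested approach using `rabbitmqadmin` (the queue name is arbitrary), a queue bound to the `amq.rabbitmq.event` exchange with the `connection.created` routing key receives one message per accepted connection:
+
+    rabbitmqadmin declare queue name=connection-events durable=true
+    rabbitmqadmin declare binding source=amq.rabbitmq.event destination=connection-events destination_type=queue routing_key=connection.created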
+
+## Upgrading
+
+To upgrade a non-clustered RabbitMQ simply install the new version. All configuration and persistent message data are retained. When upgrading using definitions export/import from versions earlier than 3.6.1, see http://rabbitmq.com/passwords.html.
+
+To upgrade a RabbitMQ cluster, follow the instructions [in RabbitMQ documentation](https://www.rabbitmq.com/clustering.html#upgrading). All nodes that have RabbitMQ management plugin enabled
+must be upgraded in lock step.
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker, not the plugins or the client libraries. Please download the archive named `rabbitmq-3.6.2.tar.gz`.
diff --git a/release-notes/3.6.3.md b/release-notes/3.6.3.md
new file mode 100644
index 0000000000..dd3e6ed9ba
--- /dev/null
+++ b/release-notes/3.6.3.md
@@ -0,0 +1,218 @@
+## RabbitMQ 3.6.3
+
+RabbitMQ `3.6.3` is a maintenance release that includes
+bug fixes, packaging and build system enhancements.
+
+As of `3.6.3`, RabbitMQ also comes with several additional plugins in the distribution:
+
+ * [rabbitmq_top](http://github.com/rabbitmq/rabbitmq-top), which helps identify (Erlang) processes
+ that consume most RAM and CPU resources
+
+ * [rabbitmq_trust_store](https://github.com/rabbitmq/rabbitmq-trust-store) which manages
+ trusted client x509/TLS certificates
+
+ * [rabbitmq_jms_topic_exchange](https://github.com/rabbitmq/rabbitmq-jms-topic-exchange/) which, together with [RabbitMQ JMS Java client](https://github.com/rabbitmq/rabbitmq-jms-client), provides support for the JMS 1.1 API
+ atop RabbitMQ.
+
+### Breaking Changes
+
+ * Debian package now [supports systemd and also drops support for some older distributions](https://groups.google.com/forum/#!topic/rabbitmq-users/orFuEYExcxk).
+
+ * All artifacts in this release are signed with a [new 4096-bit OpenPGP key](https://groups.google.com/forum/#!msg/rabbitmq-users/BO5cmEsdEhc/Jupz1_Q4AwAJ).
+
+### Known Issues on Erlang 19.0
+
+An undocumented runtime metric change in Erlang 19.0 [breaks management plugin](https://github.com/rabbitmq/rabbitmq-management/issues/244).
+
+### Server
+
+#### Bug Fixes
+
+ * (Newly elected) queue leader process could terminate immediately after promotion
+
+ GitHub issue: [rabbitmq-server#812](https://github.com/rabbitmq/rabbitmq-server/issues/812)
+
+ * Priority queue mirror could fail to synchronize after restart
+
+ GitHub issues: [rabbitmq-server#687](https://github.com/rabbitmq/rabbitmq-server/issues/687), [rabbitmq-server#802](https://github.com/rabbitmq/rabbitmq-server/issues/802)
+
+ * Shutdown timeouts for worker processes are now much more reasonable.
+   Previously the timeouts were virtually infinite, which could lead to a deadlock during node shutdown.
+
+ GitHub issue: [rabbitmq-server#541](https://github.com/rabbitmq/rabbitmq-server/issues/541)
+
+ * Purging of a durable queue could result in undelivered publisher confirms.
+ Contributed by Andreas Ländle.
+
+ GitHub issue: [rabbitmq-server#854](https://github.com/rabbitmq/rabbitmq-server/issues/854)
+
+ * Old incarnations of queue mirrors are now guaranteed to be stopped before new ones are started
+
+ GitHub issue: [rabbitmq-server#863](https://github.com/rabbitmq/rabbitmq-server/issues/863)
+
+#### Enhancements
+
+ * `systemd` support in Debian and RPM packages
+
+ Contributed by Alexey Lebedeff (Mirantis).
+
+ GitHub issue: [rabbitmq-server#570](https://github.com/rabbitmq/rabbitmq-server/issues/570)
+
+ * Debian repository and release artifacts are now signed with a [new 4096-bit OpenPGP key](https://groups.google.com/forum/#!msg/rabbitmq-users/BO5cmEsdEhc/Jupz1_Q4AwAJ)
+
+ GitHub issue: [rabbitmq-server#718](https://github.com/rabbitmq/rabbitmq-server/issues/718)
+
+ * More resilient `worker_pool`, particularly important for LDAP plugin users
+
+ GitHub issue: [rabbitmq-server#834](https://github.com/rabbitmq/rabbitmq-server/issues/834)
+
+ * More efficient file handle management
+
+ GitHub issue: [rabbitmq-server#828](https://github.com/rabbitmq/rabbitmq-server/issues/828)
+
+ * Test suite ported to Common Test
+
+ Plugin developers will be encouraged to port their test suites to Common Test
+ in the future.
+
+ GitHub issue: [rabbitmq-server#725](https://github.com/rabbitmq/rabbitmq-server/issues/725)
+
+ * Compilation on [Erlang/OTP 19.0](http://erlang.org/download/otp_src_19.0.readme).
+
+ GitHub issue: [rabbitmq-server#860](https://github.com/rabbitmq/rabbitmq-server/issues/860)
+
+### Management Plugin
+
+#### Enhancements
+
+ * New API endpoint for reporting ETS tables memory consumption (absolute and relative)
+
+ GitHub issue: [rabbitmq-management#219](https://github.com/rabbitmq/rabbitmq-management/issues/219)
+
+ * Node information page now includes GC and context switching stats
+
+ GitHub issue: [rabbitmq-management#208](https://github.com/rabbitmq/rabbitmq-management/issues/208)
+
+ * Connection, channel, and queue information pages now include several runtime metrics
+
+ GitHub issues: [rabbitmq-management#209](https://github.com/rabbitmq/rabbitmq-management/issues/209), [rabbitmq-management#210](https://github.com/rabbitmq/rabbitmq-management/issues/210), [rabbitmq-management#211](https://github.com/rabbitmq/rabbitmq-management/issues/211)
+
+ * Node metrics now include information about how long it takes to obtain a file handle
+
+ GitHub issue: [rabbitmq-server#825](https://github.com/rabbitmq/rabbitmq-server/issues/825)
+
+ * Partial HTTPS support in `rabbitmqadmin` on Python versions older than `2.7.9`
+
+ GitHub issue: [rabbitmq-management#225](https://github.com/rabbitmq/rabbitmq-management/issues/225)
+
+#### Bug Fixes
+
+ * A channel statistics-related memory leak was fixed.
+
+ GitHub issue: [rabbitmq-management#214](https://github.com/rabbitmq/rabbitmq-management/issues/214)
+
+ * Stats collector's event queue is now bounded (again)
+
+ GitHub issue: [rabbitmq-management#221](https://github.com/rabbitmq/rabbitmq-management/issues/221)
+
+ * Stats table name was logged as a tuple
+
+ GitHub issue: [rabbitmq-management#206](https://github.com/rabbitmq/rabbitmq-management/issues/206)
+
+
+### .NET Client
+
+#### Bug Fixes
+
+ * Setting user-provided connection name didn't have an effect.
+
+ GitHub issue: [rabbitmq-dotnet-client#187](https://github.com/rabbitmq/rabbitmq-dotnet-client/issues/187)
+
+ * `IConnection#CreateModel` could throw a `NullReferenceException` due to a race
+ condition.
+
+ GitHub issue: [rabbitmq-dotnet-client#185](https://github.com/rabbitmq/rabbitmq-dotnet-client/issues/185)
+
+ * Connections with automatic recovery enabled ignored ports from the hostname
+ list.
+
+ GitHub issues: [rabbitmq-dotnet-client#157](https://github.com/rabbitmq/rabbitmq-dotnet-client/issues/157),
+ [rabbitmq-dotnet-client#190](https://github.com/rabbitmq/rabbitmq-dotnet-client/pull/190)
+
+ * Fixes an underflow in delivery tag handling after connection recovery
+
+ Contributed by @harishvadali and @Entroper.
+
+ GitHub issue: [rabbitmq-dotnet-client#200](https://github.com/rabbitmq/rabbitmq-dotnet-client/issues/200)
+
+#### Enhancements
+
+ * New [FAKE](https://fsharp.github.io/FAKE/) and [Paket](https://fsprojects.github.io/Paket/)-based build tooling: development on OS X and Linux with Mono is now
+ possible again.
+
+ GitHub issue: [rabbitmq-dotnet-client#169](https://github.com/rabbitmq/rabbitmq-dotnet-client/issues/169)
+
+
+### LDAP Plugin
+
+#### Enhancements
+
+ * Nested groups are now supported with a separate query type
+
+ GitHub issue: [rabbitmq-auth-backend-ldap#3](https://github.com/rabbitmq/rabbitmq-auth-backend-ldap/issues/3)
+
+#### Bug Fixes
+
+ * Query failures (e.g. `noSuchObject` responses) from LDAP servers are now correctly treated as `false` in
+ `or` and `and` queries instead of erroring.
+
+ GitHub issue: [rabbitmq-auth-backend-ldap#48](https://github.com/rabbitmq/rabbitmq-auth-backend-ldap/issues/48)
+
+### Trust Store Plugin
+
+#### Bug Fixes
+
+ * Invalid/unparseable certificates no longer prevent other certificates from (re)loading
+
+ GitHub issue: [rabbitmq-trust-store#27](https://github.com/rabbitmq/rabbitmq-trust-store/issues/27)
+
+#### Enhancements
+
+ * Certificate verification now can validate only leaf certificates.
+ This is more convenient in some cases. It will be one of the opt-in modes
+ in future versions.
+
+ GitHub issue: [rabbitmq-trust-store#34](https://github.com/rabbitmq/rabbitmq-trust-store/issues/34)
+
+ * The plugin now logs when a certificate is added (loaded) or removed (unloaded)
+
+ GitHub issue: [rabbitmq-trust-store#28](https://github.com/rabbitmq/rabbitmq-trust-store/issues/28)
+
+ * The plugin now provides a function for listing trust store certificates
+ via `rabbitmqctl eval`: `rabbitmqctl eval 'io:format(rabbit_trust_store:list()).'`
+
+ GitHub issue: [rabbitmq-trust-store#31](https://github.com/rabbitmq/rabbitmq-trust-store/issues/31)
+
+### Erlang Client
+
+#### Enhancements
+
+ * `amqp_connection:close/4` is now exported
+
+ GitHub issue: [rabbitmq-erlang-client#59](https://github.com/rabbitmq/rabbitmq-erlang-client/issues/59)
+
+#### Bug Fixes
+
+ * The client now compiles on Erlang/OTP 19.0
+
+ GitHub issue: [rabbitmq-erlang-client#41](https://github.com/rabbitmq/rabbitmq-erlang-client/issues/41)
+
+### Delayed Message Exchange Plugin
+
+#### Enhancements
+
+ * Efficiency improvements when publications and delayed delivery happen concurrently.
+
+ Contributed by Richard Larocque.
+
+ GitHub issues: [rabbitmq-delayed-message-exchange#51](https://github.com/rabbitmq/rabbitmq-delayed-message-exchange/pull/51), [rabbitmq-delayed-message-exchange#54](https://github.com/rabbitmq/rabbitmq-delayed-message-exchange/pull/54)
diff --git a/release-notes/3.6.4.md b/release-notes/3.6.4.md
new file mode 100644
index 0000000000..f2b3c86f78
--- /dev/null
+++ b/release-notes/3.6.4.md
@@ -0,0 +1,77 @@
+## RabbitMQ 3.6.4
+
+`3.6.4` is a maintenance release.
+
+
+### Server
+
+#### Bug Fixes
+
+ * Nodes on Windows will successfully start if Erlang is installed in a directory with spaces
+ in the path.
+
+ GitHub issue: [rabbitmq-server#859](https://github.com/rabbitmq/rabbitmq-server/issues/859)
+
+ * Node health check is now node-local, as it was meant to be.
+
+ GitHub issue: [rabbitmq-server#818](https://github.com/rabbitmq/rabbitmq-server/issues/818)
+
+ * Queue deletion and termination will no longer leave "tombstone" messages in message store
+   files. This eliminates the confusing behavior where a node or cluster with no messages
+   enqueued could still have many thousands of such "marker" messages in the message store.
+
+ GitHub issue: [rabbitmq-server#839](https://github.com/rabbitmq/rabbitmq-server/issues/839)
+
+ * `rabbitmqctl cluster_status` will now output an accurate error message
+ if invoked shortly after a node was removed from the cluster and the node cannot be
+ contacted.
+
+ GitHub issue: [rabbitmq-server#894](https://github.com/rabbitmq/rabbitmq-server/pull/894)
+
+ Contributed by Peter Lemenkov (Red Hat)
+
+#### Enhancements
+
+ * Erlang VM scheduler binding type default has changed to `db`. This means fewer
+ CPU context switches for some workloads.
+
+ GitHub issue: [rabbitmq-server#612](https://github.com/rabbitmq/rabbitmq-server/issues/612)
+
+### Management Plugin
+
+#### Bug Fixes
+
+ * HTTP API is now compatible with Erlang 19.0.
+
+ GitHub issue: [rabbitmq-management#244](https://github.com/rabbitmq/rabbitmq-management/issues/244)
+
+ * Temporary tables are no longer named using atoms, preventing a potential
+ exhaustion of the runtime atom table.
+
+ GitHub issue: [rabbitmq-management#245](https://github.com/rabbitmq/rabbitmq-management/issues/245)
+
+ * Cluster links and traffic charts are correctly displayed again
+
+ GitHub issue: [rabbitmq-management#250](https://github.com/rabbitmq/rabbitmq-management/issues/250)
+
+ * Runtime metric charts now have a description pop-up associated with them.
+
+ GitHub issue: [rabbitmq-management#247](https://github.com/rabbitmq/rabbitmq-management/issues/247)
+
+### AMQP 1.0 Plugin
+
+#### Bug Fixes
+
+ * When an AMQP 1.0 client specified its own list of source outcomes in a link
+   attachment, it caused a crash of its session on the server. Now a
+   protocol error is raised if the list contains invalid outcomes.
+
+ GitHub issue: [rabbitmq-amqp1.0#31](https://github.com/rabbitmq/rabbitmq-amqp1.0/issues/31)
+
+### Java Client
+
+#### Enhancements
+
+ * The client will now try to use TLS v1.2 and v1.1 before falling back to v1.0 by default.
+
+ GitHub issue: [rabbitmq-java-client#155](https://github.com/rabbitmq/rabbitmq-java-client/issues/155)
diff --git a/release-notes/3.6.5.md b/release-notes/3.6.5.md
new file mode 100644
index 0000000000..461146a8ab
--- /dev/null
+++ b/release-notes/3.6.5.md
@@ -0,0 +1,24 @@
+## RabbitMQ 3.6.5
+
+`3.6.5` is a maintenance release.
+
+### Server
+
+#### Bug Fixes
+
+ * Loading JSON definitions that included queue leader locator policies could prevent a node
+ from starting.
+
+ GitHub issue: [rabbitmq-server#904](https://github.com/rabbitmq/rabbitmq-server/issues/904)
+
+### Management Plugin
+
+#### Bug Fixes
+
+ * Rate stats returned by `GET /api/overview` were always `0.0`.
+
+ GitHub issue: [rabbitmq-management#266](https://github.com/rabbitmq/rabbitmq-management/issues/266)
+
+ * Some message rates chart colors were ambiguous.
+
+ GitHub issue: [rabbitmq-management#224](https://github.com/rabbitmq/rabbitmq-management/issues/224)
diff --git a/release-notes/3.6.6.md b/release-notes/3.6.6.md
new file mode 100644
index 0000000000..2ee6203046
--- /dev/null
+++ b/release-notes/3.6.6.md
@@ -0,0 +1,234 @@
+## RabbitMQ 3.6.6
+
+`3.6.6` is a maintenance release that includes a fix for an **important security vulnerability** ([CVE-2016-9877](https://pivotal.io/security/cve-2016-9877)) in the MQTT plugin.
+
+### Upgrades and Compatibility
+
+> :warning:
+> **Upgrading from versions between 3.6.0 and 3.6.5 to this release requires a cluster shutdown.**
+This is also true for versions prior to 3.6.0.
+See the ["Upgrading clusters" section of the documentation](https://www.rabbitmq.com/clustering.html#upgrading).
+
+### Server
+
+#### Bug Fixes
+
+ * Changing queue mode to `lazy` could block queue process in certain circumstances.
+
+ GitHub issue: [rabbitmq-server#850](https://github.com/rabbitmq/rabbitmq-server/issues/850)
+
+ * Mirroring implementation didn't handle certain internal protocol messages arriving
+ out of order.
+
+ GitHub issue: [rabbitmq-server#922](https://github.com/rabbitmq/rabbitmq-server/issues/922)
+
+ * Mirror set changes weren't handled gracefully in certain circumstances (e.g. in the middle of a mirror promotion).
+
+ GitHub issue: [rabbitmq-server#914](https://github.com/rabbitmq/rabbitmq-server/issues/914)
+
+ * Mirrored queue could terminate after promotion if HA policy was reapplied during sync.
+
+ GitHub issue: [rabbitmq-server#803](https://github.com/rabbitmq/rabbitmq-server/issues/803)
+
+ * Autoheal partition handling mode could run into a multi-node deadlock.
+
+ GitHub issue: [rabbitmq-server#928](https://github.com/rabbitmq/rabbitmq-server/issues/928)
+
+ * Queue mirror group membership now correctly handles certain partial partition cases.
+
+ GitHub issues: [rabbitmq-server#950](https://github.com/rabbitmq/rabbitmq-server/issues/950),
+ [rabbitmq-server#953](https://github.com/rabbitmq/rabbitmq-server/issues/953)
+
+ * Eager queue sync will now stop as soon as a mirror shutdown/unavailability is detected.
+
+ GitHub issue: [rabbitmq-server#975](https://github.com/rabbitmq/rabbitmq-server/issues/975)
+
+ * Lazy queues now trigger runtime GC significantly less frequently, which makes moving data
+   to the message store more efficient.
+
+ GitHub issue: [rabbitmq-server#973](https://github.com/rabbitmq/rabbitmq-server/issues/973)
+
+ * Mirroring policies now respect the `nodes` parameter when re-electing a master.
+
+   GitHub issue: [rabbitmq-server#990](https://github.com/rabbitmq/rabbitmq-server/issues/990)
+
+ * After a partial network partition, nodes with newly elected masters will now be more defensive
+ about the (no longer relevant) policy change notifications.
+
+ GitHub issue: [rabbitmq-server#1007](https://github.com/rabbitmq/rabbitmq-server/issues/1007)
+
+ * Late responses from peers in certain partial network partition scenarios are now
+ ignored by channels.
+
+ GitHub issue: [rabbitmq-server#1005](https://github.com/rabbitmq/rabbitmq-server/issues/1005)
+
+ * After a partial network partition, nodes with newly elected masters will now be more defensive
+ about the (no longer relevant) synchronization start requests.
+
+ GitHub issue: [rabbitmq-server#1006](https://github.com/rabbitmq/rabbitmq-server/issues/1006)
+
+ * Late and duplicate acknowledgements triggered by network partitions are now ignored/filtered out
+   by queue leader processes.
+
+ GitHub issue: [rabbitmq-server#960](https://github.com/rabbitmq/rabbitmq-server/issues/960)
+
+ * Queue mirrors now try to detect leader replica changes during partial partitions and do a clean stop
+ instead of failing because their state is in sync with the old leader and not the newly promoted one.
+
+ GitHub issue: [rabbitmq-server#944](https://github.com/rabbitmq/rabbitmq-server/issues/944)
+
+ * `rabbitmqctl` and server startup could be affected by some DNS configurations.
+
+ GitHub issue: [rabbitmq-server#890](https://github.com/rabbitmq/rabbitmq-server/issues/890)
+
+ * RPM package was split into two: for CentOS 6 and 7. The latter includes systemd support.
+
+ GitHub issue: [rabbitmq-server#932](https://github.com/rabbitmq/rabbitmq-server/issues/932)
+
+ * Workaround for Erlang/OTP bug OTP-13425 where crash dump generation failed on UNIX
+ platforms.
+
+ GitHub issue: [rabbitmq-server#956](https://github.com/rabbitmq/rabbitmq-server/issues/956)
+
+#### Enhancements
+
+ * Inter-node traffic buffer increased to 32 MB by default, configurable with `RABBITMQ_DISTRIBUTION_BUFFER_SIZE` (in kilobytes).
+
+ This improves network bandwidth utilization for inter-node traffic and reduces the probability
+ of spurious network partitions due to latency spikes caused by the buffer being full.
+
+ GitHub issue: [rabbitmq-server#908](https://github.com/rabbitmq/rabbitmq-server/issues/908)
+
+ * Default flow control settings were increased to `{200, 100}`. This helps with consumer throughput
+   in some cases. The value can still be configured using the `rabbit.credit_flow_default_credit` setting
+   (see the example at the end of this section).
+
+ GitHub issue: [rabbitmq-server#949](https://github.com/rabbitmq/rabbitmq-server/issues/949)
+
+ * `rabbitmqctl list_queues` now has more filtering options.
+
+ GitHub issue: [rabbitmq-server#851](https://github.com/rabbitmq/rabbitmq-server/issues/851)
+
+ * `RABBITMQ_IGNORE_SIGINT` is a new environment variable that lets the user disable the
+ `+B` VM flag that is known to have issues in Erlang 18.x series (prevents crash dumps from being produced).
+
+ GitHub issue: [rabbitmq-server#956](https://github.com/rabbitmq/rabbitmq-server/issues/956)
+
+ * The `pg2_fixed` module is now replaced by the standard `pg2` from a newer Erlang/OTP release.
+
+ GitHub issue: [rabbitmq-server#980](https://github.com/rabbitmq/rabbitmq-server/issues/980)
+
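+To override the new credit flow default mentioned above, the setting can be adjusted in the classic config format, for example:
+
+``` erlang
+[
+  {rabbit, [
+    %% {InitialCredit, MoreCreditAfter}
+    {credit_flow_default_credit, {200, 100}}
+  ]}
+].
+```
+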
+### MQTT Plugin
+
+#### Bug Fixes
+
+ * Authentication with a correct username but an omitted password succeeded when a TLS/x509
+ certificate wasn't provided by the client. CVE allocation for this vulnerability is pending.
+
+ GitHub issue: [rabbitmq-mqtt#96](https://github.com/rabbitmq/rabbitmq-mqtt/issues/96)
+
+
+### JMS Topic Exchange Plugin
+
+#### Bug Fixes
+
+ * The plugin no longer tries to check whether the JMS client's version is identical/compatible.
+ This caused certain legitimate use cases, such as exchange declaration over HTTP API, to fail.
+
+ GitHub issue: [rabbitmq-jms-topic-exchange#9](https://github.com/rabbitmq/rabbitmq-jms-topic-exchange/issues/9)
+
+
+### Java Client
+
+#### Bug Fixes
+
+ * Fixed an NPE that could occur when a thread attempts to create a new channel after a connection
+ has been recovered but before channel recovery has completed.
+
+ GitHub issue: [rabbitmq-java-client#197](https://github.com/rabbitmq/rabbitmq-java-client/pull/197)
+
+ Contributed by Michael Dent.
+
+ * Binding cache entries of auto-delete queues are now cleaned up when the last consumer is cancelled.
+
+ GitHub issue: [rabbitmq-java-client#199](https://github.com/rabbitmq/rabbitmq-java-client/pull/199)
+
+ Contributed by Michael Dent.
+
+ * When a channel is closed, consumers will be correctly cleaned up from AutorecoveringConnection cache.
+
+ GitHub issue: [rabbitmq-java-client#208](https://github.com/rabbitmq/rabbitmq-java-client/issues/208)
+
+ * `junit` is no longer listed as a dependency in the OSGi manifest
+
+ GitHub issue: [rabbitmq-java-client#211](https://github.com/rabbitmq/rabbitmq-java-client/issues/211)
+
+#### Enhancements
+
+ * `com.rabbitmq.client.AddressResolver` is a new interface that can be used to "expand"
+ endpoints into groups of IP addresses, e.g. to a list of IP addresses resolved from a DNS
+ record.
+
+ GitHub issue: [rabbitmq-java-client#153](https://github.com/rabbitmq/rabbitmq-java-client/issues/153)
+
+ * Building on the new `AddressResolver` interface, the client now includes a resolver
+   that uses DNS SRV records to discover endpoints.
+
+ GitHub issue: [rabbitmq-java-client#104](https://github.com/rabbitmq/rabbitmq-java-client/issues/104)
+
+ * `com.rabbitmq.client.BuiltinExchangeType` is an enum that contains built-in exchange types.
+
+ GitHub issue: [rabbitmq-java-client#150](https://github.com/rabbitmq/rabbitmq-java-client/issues/150)
+
+
+### .NET Client
+
+#### Bug Fixes
+
+ * Fixed a possible `NullReferenceException` in `HeartbeatReadTimerCallback` during
+ connection closure.
+
+ GitHub issue: [rabbitmq-dotnet-client#257](https://github.com/rabbitmq/rabbitmq-dotnet-client/issues/257)
+
+ * Fixed a potential race condition in `EventingBasicConsumer`
+
+ GitHub issue: [rabbitmq-dotnet-client#242](https://github.com/rabbitmq/rabbitmq-dotnet-client/issues/242)
+
+### Federation Plugin
+
+#### Enhancements
+
+ * Federation links now specify a name, which makes them easier to tell apart from other connections
+ in the management UI (requires RabbitMQ 3.6.3+).
+
+ GitHub issue: [rabbitmq-federation#39](https://github.com/rabbitmq/rabbitmq-federation/issues/39)
+
+
+### Shovel Plugin
+
+#### Enhancements
+
+ * Shovel connections now specify a name, which makes them easier to tell apart from other connections
+ in the management UI (requires RabbitMQ 3.6.3+).
+
+ GitHub issue: [rabbitmq-shovel#19](https://github.com/rabbitmq/rabbitmq-shovel/issues/19)
+
+## Release Artifacts
+
+Available from:
+
+ * GitHub (see below)
+ * [Bintray](https://bintray.com/rabbitmq/)
+ * [Package Cloud](https://packagecloud.io/rabbitmq/)
+ * [rabbitmq.com](https://www.rabbitmq.com/download.html)
+
+
+## Upgrading
+
+To upgrade a non-clustered RabbitMQ simply install the new version. All configuration and persistent message data are retained. When upgrading using definitions export/import from versions earlier than 3.6.0, see http://rabbitmq.com/passwords.html.
+
+To upgrade a RabbitMQ cluster, follow the instructions [in RabbitMQ documentation](https://www.rabbitmq.com/clustering.html#upgrading).
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins or the client libraries. Please download the archive named `rabbitmq-3.6.6.tar.gz`.
diff --git a/release-notes/3.6.7.md b/release-notes/3.6.7.md
new file mode 100644
index 0000000000..4229651116
--- /dev/null
+++ b/release-notes/3.6.7.md
@@ -0,0 +1,413 @@
+## RabbitMQ 3.6.7
+
+RabbitMQ `3.6.7` is a maintenance release that includes a [new reworked management plugin](https://github.com/rabbitmq/rabbitmq-management/issues/236) that stores collected stats on all cluster nodes (as opposed to one dedicated node).
+
+### Upgrades and Compatibility
+
+See the ["Upgrading clusters" section of the documentation](https://www.rabbitmq.com/clustering.html#upgrading) and a note at the end of this release notes document for general documentation on upgrades.
+
+#### ⚠️ Erlang/OTP R16B03 and 17.x Compatibility
+
+Erlang/OTP R16B03 users on Debian and Ubuntu may run into [runtime code compilation errors](https://groups.google.com/forum/#!topic/rabbitmq-users/XfQgta5v6Z0).
+While this [was addressed in 3.6.8](https://github.com/rabbitmq/rabbitmq-server/releases/tag/rabbitmq_v3_6_8), we recommend upgrading to at least Erlang 18.0 if possible
+(see [Debian installation guide](https://www.rabbitmq.com/install-debian.html) and [RPM installation guide](https://www.rabbitmq.com/install-rpm.html)).
+
+#### ⚠️ New Management Plugin
+
+In clusters that have the `rabbitmq_management` or `rabbitmq_management_agent` plugin enabled, all nodes must be upgraded at the same time.
+
+There are a couple of public-facing changes in the HTTP API.
+
+##### Test Suites that use HTTP API
+
+Integration test suites that rely on the HTTP API for listing
+or closing connections, channels, and so on need to be adjusted for this plugin:
+
+ * We recommend reducing stats retention periods (see [Hop](https://github.com/rabbitmq/hop/blob/master/bin/before_build.sh#L11), [rabbit-hole](https://github.com/michaelklishin/rabbit-hole/blob/master/bin/ci/before_build.sh))
+ * Because stats emission is now two-step and asynchronous, test suites may need
+ to wait for events to propagate before asserting on them (see [Hop](https://github.com/rabbitmq/hop/blob/master/src/test/groovy/com/rabbitmq/http/client/ClientSpec.groovy#L168), [rabbit-hole](https://github.com/michaelklishin/rabbit-hole/blob/master/rabbithole_test.go#L130)).
+
+##### POST and PUT Responses Use `201 Created`
+
+POST and PUT responses now use `201 Created` instead of `204 No Content`.
+
+
+#### ⚠️ Single Atom TCP Listener Options
+
+Single atom TCP listener options such as `binary` in this example:
+
+``` erlang
+[
+ {rabbit, [
+ {tcp_listen_options, [
+ binary,
+ {backlog, 4096},
+ {sndbuf, 32768},
+ {recbuf, 32768}
+ ]}
+ %% …
+ }].
+```
+
+won't serialise to JSON correctly in this release. This is [addressed for 3.6.8](https://github.com/rabbitmq/rabbitmq-management-agent/issues/34).
+Note that `binary` is one of the default options and there is no need to override it.
+
+There are no other known incompatibilities with 3.6.2 or later releases.
+
+### Server
+
+#### Bug Fixes
+
+ * Queue leader process could terminate with a `{bad_match, {error, not_found}}` error.
+
+ GitHub issue: [rabbitmq-server#1035](https://github.com/rabbitmq/rabbitmq-server/issues/1035)
+
+ * `rabbitmq-service.bat` now exits with a non-zero code when installation fails.
+
+ GitHub issue: [rabbitmq-server#1052](https://github.com/rabbitmq/rabbitmq-server/issues/1052)
+
+ * `rabbitmqctl stop_app` now produces a more technically correct output about
+ what the operation does.
+
+ GitHub issue: [rabbitmq-server#1043](https://github.com/rabbitmq/rabbitmq-server/issues/1043)
+
+#### Enhancements
+
+ * Nodes in a cluster can now be restarted in arbitrary order. They will attempt to contact
+   one of the previously seen peers periodically (10 times with 30-second intervals by default).
+
+ GitHub issue: [rabbitmq-server#1033](https://github.com/rabbitmq/rabbitmq-server/issues/1033)
+
+ * Credit flow default settings were increased, and background GC is now disabled by default
+   for more predictable latency.
+
+ GitHub issue: [rabbitmq-server#1098](https://github.com/rabbitmq/rabbitmq-server/pull/1098)
+
+ * TLS listeners now support SNI (via [Ranch 1.3.0](https://ninenines.eu/articles/ranch-1.3/)).
+
+ GitHub issue: [rabbitmq-server#789](https://github.com/rabbitmq/rabbitmq-server/issues/789)
+
+ * Successful connection authentication now leaves additional log entries, just like authentication
+ failures. It is now easier to see from the log what user connected to what vhost.
+
+ GitHub issue: [rabbitmq-server#1140](https://github.com/rabbitmq/rabbitmq-server/issues/1140)
+
+ * Internal authentication backend now supports credential validators (see the sketch at the end of this section).
+
+ GitHub issue: [rabbitmq-server#1054](https://github.com/rabbitmq/rabbitmq-server/issues/1054)
+
+ * Worker process shutdown timeout now can be configured.
+
+ GitHub issue: [rabbitmq-server#847](https://github.com/rabbitmq/rabbitmq-server/issues/847)
+
+ * Default Erlang VM ETS limit was increased from 1400 to 50000. This is necessary to support
+ a larger number of virtual hosts with the new management plugin.
+
+ GitHub issue: [rabbitmq-server#1059](https://github.com/rabbitmq/rabbitmq-server/issues/1059)
+
+ * `RABBITMQ_PLUGINS_DIR` now supports multiple directories (colon-separated on Linux and other UNIX-like
+ platforms, semicolon-separated on Windows).
+
+ Contributed by Alexey Lebedeff.
+
+ GitHub issue: [rabbitmq-server#1001](https://github.com/rabbitmq/rabbitmq-server/issues/1001)
+
+ * Background GC can now be disabled and configured to use a different target interval value.
+ Original patch by Tim Stewart.
+
+ GitHub issue: [rabbitmq-server#1026](https://github.com/rabbitmq/rabbitmq-server/issues/1026)
+
+ * `L`-prefixed (`long-long-int`) attribute table (header) keys are now accepted by the parser.
+
+ GitHub issue: [rabbitmq-server#1093](https://github.com/rabbitmq/rabbitmq-server/issues/1093)
+
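+A sketch of what enabling a credential validator might look like in the classic config format (the `credential_validator` key, the backend module and the `min_length` option are assumptions for illustration):
+
+``` erlang
+[
+  {rabbit, [
+    {credential_validator, [
+      %% hypothetical example: reject passwords shorter than 12 characters
+      {validation_backend, rabbit_credential_validator_min_password_length},
+      {min_length, 12}
+    ]}
+  ]}
+].
+```
+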
+### Management Plugin
+
+#### Bug Fixes
+
+ * Node health check responded with status 500 if health check failed due to a timeout.
+
+ GitHub issue: [rabbitmq-management#307](https://github.com/rabbitmq/rabbitmq-management/issues/307)
+
+ * `GET /api/nodes` response entries contained a duplicate JSON document property (key).
+
+ GitHub issue: [rabbitmq-management#305](https://github.com/rabbitmq/rabbitmq-management/issues/305)
+
+ * `rabbitmqadmin` listing commands failed to output values that contained non-ASCII characters
+ (such as queue names in Asian languages).
+
+ GitHub issue: [rabbitmq-management#343](https://github.com/rabbitmq/rabbitmq-management/issues/343)
+
+ * Queue details page now correctly displays the number of paged out transient messages.
+
+ GitHub issue: [rabbitmq-management#345](https://github.com/rabbitmq/rabbitmq-management/issues/345)
+
+ * When a user filter returned no results, the message incorrectly said "no vhosts."
+
+ GitHub issue: [rabbitmq-management#357](https://github.com/rabbitmq/rabbitmq-management/issues/357)
+
+#### Enhancements
+
+ * New management plugin implementation that stores stats on all cluster nodes.
+
+ GitHub issue: [rabbitmq-management#236](https://github.com/rabbitmq/rabbitmq-management/issues/236)
+
+ * Purging a queue via UI now requires a confirmation.
+
+ GitHub issue: [rabbitmq-management#195](https://github.com/rabbitmq/rabbitmq-management/issues/195)
+
+ * Queue deletion and purging buttons now use separate UI panes.
+
+ GitHub issue: [rabbitmq-management#158](https://github.com/rabbitmq/rabbitmq-management/issues/158)
+
+ * Plugins that use HTTP (management, Web STOMP, Web MQTT) now register their ports
+ as TCP listeners.
+
+ GitHub issue: [rabbitmq-web-dispatch#14](https://github.com/rabbitmq/rabbitmq-web-dispatch/issues/14)
+
+ * Overview chart legend labels are now clearer and grouped in a more useful way.
+
+ GitHub issue: [rabbitmq-management#339](https://github.com/rabbitmq/rabbitmq-management/issues/339)
+
+ * Creating a queue in a vhost the user has no access to now provides
+ reasonable feedback.
+
+ GitHub issue: [rabbitmq-management#241](https://github.com/rabbitmq/rabbitmq-management/issues/241)
+
+ * Listing queues in a vhost the user has no access to now provides
+ reasonable feedback.
+
+ GitHub issue: [rabbitmq-management#237](https://github.com/rabbitmq/rabbitmq-management/issues/237)
+
+ * Deletion UI dialog has clearer wording.
+
+ GitHub issue: [rabbitmq-management#159](https://github.com/rabbitmq/rabbitmq-management/issues/159)
+
+ * When creating exchanges/queues, virtual host is now pre-selected to match the "current" one.
+
+ GitHub issue: [rabbitmq-management#235](https://github.com/rabbitmq/rabbitmq-management/issues/235)
+
+ * User creation form now features the "impersonator" tag.
+
+ GitHub issue: [rabbitmq-management#284](https://github.com/rabbitmq/rabbitmq-management/issues/284)
+
+ * Queue declaration form now includes a shortcut button for switching queue
+ mode to "lazy."
+
+ GitHub issue: [rabbitmq-management#205](https://github.com/rabbitmq/rabbitmq-management/issues/205)
+
+
+### MQTT Plugin
+
+#### Bug Fixes
+
+ * Fixed a memory leak in statistics tables in case of certain
+ abnormal connection termination scenarios.
+
+ GitHub issue: [rabbitmq-mqtt#117](https://github.com/rabbitmq/rabbitmq-mqtt/issues/117)
+
+ * Last Will messages that had the `retained` flag set were not retained.
+
+ GitHub issue: [rabbitmq-mqtt#74](https://github.com/rabbitmq/rabbitmq-mqtt/issues/74)
+
+ * More metrics are reported for MQTT connections.
+
+ GitHub issue: [rabbitmq-mqtt#121](https://github.com/rabbitmq/rabbitmq-mqtt/issues/121)
+
+ * Certain virtual host names could cause MQTT retainer to not start.
+
+ GitHub issue: [rabbitmq-mqtt#123](https://github.com/rabbitmq/rabbitmq-mqtt/issues/123)
+
+#### Enhancements
+
+ * Improved logging around connection termination due to unparseable traffic,
+ e.g. HTTP requests sent to MQTT plugin port.
+
+ GitHub issue: [rabbitmq-mqtt#119](https://github.com/rabbitmq/rabbitmq-mqtt/issues/119)
+
+ * There is now a way to map MQTT listener ports (target client connection ports)
+   to vhosts (see the example at the end of this section).
+
+ GitHub issue: [rabbitmq-mqtt#111](https://github.com/rabbitmq/rabbitmq-mqtt/issues/111)
+
+ * There is now a way to map client certificate CN values to vhosts and thus make
+ specific clients connect to designated vhosts.
+
+ GitHub issue: [rabbitmq-mqtt#73](https://github.com/rabbitmq/rabbitmq-mqtt/issues/73)
+
+ * Connections to non-existent vhosts are now gracefully closed with a reasonable
+ error code (invalid credentials).
+
+ GitHub issue: [rabbitmq-mqtt#100](https://github.com/rabbitmq/rabbitmq-mqtt/issues/100)
+
+ * More configuration values are accepted as strings (as well as binaries).
+
+ GitHub issue: [rabbitmq-mqtt#86](https://github.com/rabbitmq/rabbitmq-mqtt/issues/86)
+
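+Assuming the listener-to-vhost mapping mentioned above is defined via an `mqtt_port_to_vhost_mapping` global runtime parameter, a sketch (the vhost names are hypothetical):
+
+    rabbitmqctl set_global_parameter mqtt_port_to_vhost_mapping '{"1883": "vhost1", "8883": "vhost2"}'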
+
+### STOMP Plugin
+
+#### Bug Fixes
+
+ * Fixed a memory leak in statistics tables in case of certain
+ abnormal connection termination scenarios.
+
+ GitHub issue: [rabbitmq-stomp#103](https://github.com/rabbitmq/rabbitmq-stomp/pull/103)
+
+ * More metrics are reported for STOMP connections.
+
+ GitHub issue: [rabbitmq-stomp#102](https://github.com/rabbitmq/rabbitmq-stomp/issues/102)
+
+
+### Federation Plugin
+
+#### Bug Fixes
+
+ * The plugin now tolerates a higher rate of link [re-]connection failures.
+ The rate sustained depends on the `reconnect-delay` parameter configured for the upstream
+ (see the issue below for details).
+
+ GitHub issue: [rabbitmq-federation#46](https://github.com/rabbitmq/rabbitmq-federation/issues/46)
+
+#### Enhancements
+
+ * Improved logging for federation links that voluntarily stop, e.g. because
+ something in their controlling policy has changed.
+
+ GitHub issue: [rabbitmq-federation#43](https://github.com/rabbitmq/rabbitmq-federation/issues/43)
+
+ * Improved logging around network partitions that can affect federation links.
+
+ GitHub issue: [rabbitmq-federation#53](https://github.com/rabbitmq/rabbitmq-federation/issues/53)
+
+
+### Federation Management Plugin
+
+#### Enhancements
+
+ * New HTTP API endpoint that lists only down links.
+
+ GitHub issue: [rabbitmq-federation-management#18](https://github.com/rabbitmq/rabbitmq-federation-management/issues/18)
+
+ * New HTTP API endpoint that restarts a link.
+
+ GitHub issue: [rabbitmq-federation-management#17](https://github.com/rabbitmq/rabbitmq-federation-management/issues/17)
+
+### LDAP Plugin
+
+#### Bug Fixes
+
+ * Match query was matching too eagerly ("bi-directionally").
+
+ GitHub issue: [rabbitmq-auth-backend-ldap#56](https://github.com/rabbitmq/rabbitmq-auth-backend-ldap/issues/56)
+
+ * Default LDAP server port was unintentionally set to `3890` (instead of `389`).
+
+ GitHub issue: [rabbitmq-auth-backend-ldap#64](https://github.com/rabbitmq/rabbitmq-auth-backend-ldap/issues/64)
+
+#### Enhancements
+
+ * LDAP server connection pool now supports time-based expiration (and closure) of inactive
+ LDAP connections.
+
+ GitHub issue: [rabbitmq-auth-backend-ldap#7](https://github.com/rabbitmq/rabbitmq-auth-backend-ldap/issues/7)
+
+
+### Top Plugin
+
+#### Enhancements
+
+ * Internal buffer lengths of processes that implement the `gen_server2` behaviour
+   (most notably queues) are now collected and visible in `rabbitmq_top`. Combined
+   with the Erlang mailbox length, this provides a more or less complete picture of the Erlang
+   message backlog a process has.
+
+ GitHub issue: [rabbitmq-top#20](https://github.com/rabbitmq/rabbitmq-top/issues/20)
+
+
+### Event Exchange Plugin
+
+#### Enhancements
+
+ * Resource alarm events are now accessible via the event exchange.
+
+ GitHub issue: [rabbitmq-server#1120](https://github.com/rabbitmq/rabbitmq-server/issues/1120)
+
+
+
+### Web MQTT Plugin
+
+#### Enhancements
+
+ * Web MQTT port is now registered as a TCP listener and shows up in the management UI
+ listeners section.
+
+ GitHub issue: [rabbitmq-web-mqtt#17](https://github.com/rabbitmq/rabbitmq-web-mqtt/issues/17)
+
+
+### Web STOMP Plugin
+
+#### Bug Fixes
+
+ * When HTTP basic authentication is used but credentials are not provided, the plugin now
+   falls back to the default configured STOMP plugin credentials.
+
+ GitHub issue: [rabbitmq-web-stomp#60](https://github.com/rabbitmq/rabbitmq-web-stomp/issues/60)
+
+#### Enhancements
+
+ * Web STOMP port is now registered as a TCP listener and shows up in the management UI
+ listeners section.
+
+ GitHub issue: [rabbitmq-web-stomp#65](https://github.com/rabbitmq/rabbitmq-web-stomp/issues/65)
+
+
+### Trust Store Plugin
+
+#### Enhancements
+
+ * Pluggable certificate providers (sources).
+
+ GitHub issue: [rabbitmq-trust-store#1](https://github.com/rabbitmq/rabbitmq-trust-store/issues/1)
+
+ * HTTP(S) certificate provider (source).
+
+ GitHub issue: [rabbitmq-trust-store#54](https://github.com/rabbitmq/rabbitmq-trust-store/issues/54)
+
+
+### .NET Client
+
+#### Bug Fixes
+
+ * `AutorecoveringConnection` had unsynchronised concurrent access to several of its mutable fields.
+
+ GitHub issues: [rabbitmq-dotnet-client#288](https://github.com/rabbitmq/rabbitmq-dotnet-client/issues/288),
+ [rabbitmq-dotnet-client#291](https://github.com/rabbitmq/rabbitmq-dotnet-client/issues/291)
+
+
+### Erlang Client
+
+#### Bug Fixes
+
+ * The client OTP app now correctly defines dependencies on
+ `compiler` and `syntax_tools`.
+
+ GitHub issue: [rabbitmq-erlang-client#72](https://github.com/rabbitmq/rabbitmq-erlang-client/issues/72)
+
+#### Enhancements
+
+ * URI parser now accepts binaries as well as strings.
+
+ GitHub issue: [rabbitmq-erlang-client#76](https://github.com/rabbitmq/rabbitmq-erlang-client/issues/76)
+
+
+## Upgrading
+
+To upgrade a non-clustered RabbitMQ simply install the new version. All configuration and persistent message data are retained. When upgrading using definitions export/import from versions earlier than 3.6.0, see http://rabbitmq.com/passwords.html.
+
+To upgrade a RabbitMQ cluster, follow the instructions [in RabbitMQ documentation](https://www.rabbitmq.com/clustering.html#upgrading).
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins or the client libraries. Please download the archive named `rabbitmq-3.6.7.tar.gz`.
diff --git a/release-notes/3.6.8.md b/release-notes/3.6.8.md
new file mode 100644
index 0000000000..47929271ac
--- /dev/null
+++ b/release-notes/3.6.8.md
@@ -0,0 +1,59 @@
+## RabbitMQ 3.6.8
+
+RabbitMQ `3.6.8` is a maintenance release that restores Erlang/OTP R16B03 and 17.x compatibility
+that was unintentionally broken in [3.6.7](https://github.com/rabbitmq/rabbitmq-server/releases/tag/rabbitmq_v3_6_7), which was produced on a significantly newer Erlang/OTP version.
+
+### Upgrades and Compatibility
+
+See the ["Upgrading clusters" section of the documentation](https://www.rabbitmq.com/clustering.html#upgrading) for general documentation on upgrades.
+
+This release has no incompatibilities with 3.6.7. See the [3.6.7 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/rabbitmq_v3_6_7) upgrade and compatibility notes if upgrading from an earlier release.
+
+### Server
+
+#### Bug Fixes
+
+ * Erlang/OTP R16B03 and 17.x are supported again; this release was produced on R16B03.
+ Note that [some configurations or community plugins](https://www.rabbitmq.com/which-erlang.html) may require a later version.
+   We recommend that all users upgrade to a 19.x series release if possible. See the [Debian](https://www.rabbitmq.com/install-debian.html) and [RPM installation](https://www.rabbitmq.com/install-rpm.html) guides for some Erlang/OTP installation options.
+
+ GitHub issues: [rabbitmq-server#1148](https://github.com/rabbitmq/rabbitmq-server/issues/1148), [rabbitmq-server#1149](https://github.com/rabbitmq/rabbitmq-server/issues/1149)
+
+
+### Management Plugin
+
+#### Bug Fixes
+
+ * Certain user permission violations were resulting in a 500 response and scary log entries.
+
+ GitHub issue: [rabbitmq-management#360](https://github.com/rabbitmq/rabbitmq-management/issues/360)
+
+
+### Management Agent Plugin
+
+#### Bug Fixes
+
+ * Single atom proplist values no longer break JSON serialisation for endpoints such as `GET /api/overview`.
+
+ GitHub issue: [rabbitmq-management-agent#34](https://github.com/rabbitmq/rabbitmq-management-agent/issues/34)
+
+
+### Web STOMP Plugin
+
+#### Bug Fixes
+
+ * Web STOMP now accepts the same TCP listener options as core RabbitMQ and other plugins.
+
+ GitHub issue: [rabbitmq-web-stomp#69](https://github.com/rabbitmq/rabbitmq-web-stomp/issues/69)
+
+
+## Upgrading
+
+To upgrade a non-clustered RabbitMQ simply install the new version. All configuration and persistent message data are retained. When upgrading using definitions export/import from versions earlier than 3.6.0, see http://rabbitmq.com/passwords.html.
+
+To upgrade a RabbitMQ cluster, follow the instructions [in RabbitMQ documentation](https://www.rabbitmq.com/clustering.html#upgrading).
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins or the client libraries. Please download the archive named `rabbitmq-3.6.8.tar.gz`.
diff --git a/release-notes/3.6.9.md b/release-notes/3.6.9.md
new file mode 100644
index 0000000000..a685ff2db8
--- /dev/null
+++ b/release-notes/3.6.9.md
@@ -0,0 +1,63 @@
+## RabbitMQ 3.6.9
+
+RabbitMQ `3.6.9` is a security and maintenance release.
+
+### Upgrades and Compatibility
+
+See the ["Upgrading clusters" section of the documentation](https://www.rabbitmq.com/clustering.html#upgrading)
+for general documentation on upgrades.
+
+This release has no incompatibilities with 3.6.7. See [3.6.7 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/rabbitmq_v3_6_7)
+upgrade and compatibility notes if upgrading from an earlier release.
+
+
+### Management and Management Agent Plugins
+
+#### Security Vulnerability Patches
+
+Details for the CVEs below are pending publication.
+
+ * `CVE-2017-4965`: XSS vulnerabilities in management UI
+ * `CVE-2017-4966`: authentication details are stored in browser-local storage without expiration
+ * `CVE-2017-4967`: XSS vulnerabilities in management UI
+
+As part of the patch addressing `CVE-2017-4966` management UI sessions were limited to 8 hours.
+
+#### Bug Fixes
+
+ * Certain TCP and TLS listener configuration settings could break JSON serialisation of
+ `GET /api/overview` responses.
+
+ GitHub issues: [rabbitmq-management-agent#39](https://github.com/rabbitmq/rabbitmq-management-agent/issues/39),
+ [rabbitmq-management#364](https://github.com/rabbitmq/rabbitmq-management/issues/364),
+ [rabbitmq-management-agent#36](https://github.com/rabbitmq/rabbitmq-management-agent/issues/36)
+
+### Federation Plugin
+
+#### Bug Fixes
+
+ * More numerical types are now handled for the "hops" property.
+
+ GitHub issue: [rabbitmq-federation#56](https://github.com/rabbitmq/rabbitmq-federation/issues/56)
+
+
+### .NET Client
+
+#### Bug Fixes
+
+ * Calling ExchangeBind more than once with the same arguments threw an exception.
+
+ GitHub issues: [rabbitmq-dotnet-client#314](https://github.com/rabbitmq/rabbitmq-dotnet-client/issues/314),
+ [rabbitmq-dotnet-client#317](https://github.com/rabbitmq/rabbitmq-dotnet-client/issues/317)
+
+
+## Upgrading
+
+To upgrade a non-clustered RabbitMQ simply install the new version. All configuration and persistent message data are retained. When upgrading using definitions export/import from versions earlier than 3.6.0, see http://rabbitmq.com/passwords.html.
+
+To upgrade a RabbitMQ cluster, follow the instructions [in RabbitMQ documentation](https://www.rabbitmq.com/clustering.html#upgrading).
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins or the client libraries. Please download the archive named `rabbitmq-3.6.9.tar.gz`.
diff --git a/release-notes/3.7.0.md b/release-notes/3.7.0.md
new file mode 100644
index 0000000000..846fcceae8
--- /dev/null
+++ b/release-notes/3.7.0.md
@@ -0,0 +1,475 @@
+This release introduces new features, changes to the RabbitMQ distribution (e.g. which plugins ship
+with it), and bug fixes. It also includes all the changes from the [3.6.x release series](https://www.rabbitmq.com/changelog.html) unless stated otherwise.
+
+Documentation for this release is available at [the RabbitMQ website](https://www.rabbitmq.com).
+
+
+## Breaking Changes
+
+ * Minimum required Erlang/OTP version is now 19.3. Recent Erlang versions can be obtained from [Erlang Solutions](https://www.erlang-solutions.com/resources/download.html), [RabbitMQ zero dependency Erlang RPM](https://github.com/rabbitmq/erlang-rpm), as well as main and backports repositories of recent Ubuntu and Debian releases.
+
+ GitHub issues: [rabbitmq-server#1305](https://github.com/rabbitmq/rabbitmq-server/issues/1305), [rabbitmq-server#1307](https://github.com/rabbitmq/rabbitmq-server/issues/1307), [rabbitmq-common#234](https://github.com/rabbitmq/rabbitmq-common/pull/234).
+
+ * HTTP API has minor breaking changes in several endpoints (see below).
+
+ * .NET client is now [.NET Core-compatible](https://github.com/rabbitmq/rabbitmq-dotnet-client/issues/148).
+ Starting with `3.7.0`, [.NET client releases](https://github.com/rabbitmq/rabbitmq-dotnet-client/releases) are decoupled from RabbitMQ server releases and exclusively distributed via the [RabbitMQ.Client NuGet package](https://www.nuget.org/packages/RabbitMQ.Client/).
+
+ * Starting with `3.7.0`, [Java client releases](https://github.com/rabbitmq/rabbitmq-java-client/releases) are decoupled from RabbitMQ server releases and exclusively distributed via Maven: [RabbitMQ Milestones Maven repository](https://bintray.com/rabbitmq/maven-milestones), [RabbitMQ Maven repository](https://bintray.com/rabbitmq/maven), as well as Maven Central.
+
+ * Users tagged with `administrator` now implicitly have access to most operations in all vhosts,
+ including those they don't have explicit permissions for.
+
+ GitHub issue: [rabbitmq-management#461](https://github.com/rabbitmq/rabbitmq-management/issues/461)
+
+ * Plugins now must depend on the `rabbit` application [in order to be recognised as such by `rabbitmq-plugins list`](https://github.com/rabbitmq/rabbitmq-server/issues/1124).
+ Plugins that do not have the dependency will still function, can be enabled or disabled but won't appear
+ in `rabbitmq-plugins list` output.
+
+ * `rabbitmq_management_visualiser` plugin [no longer ships with RabbitMQ](https://groups.google.com/forum/#!searchin/rabbitmq-users/ANN$20rabbitmq_management_visualiser%7Csort:relevance/rabbitmq-users/WiHmPcmzNtI/YJ356EW4BAAJ) and is considered to be deprecated. Installations that have this plugin enabled **must disable it before upgrading**. This can
+ be done using `rabbitmq-plugins disable` (which supports offline modifications)
+ or by updating the list of plugins in `RABBITMQ_ENABLED_PLUGINS_FILE`.
+
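+   A minimal sketch of disabling the plugin before the upgrade; the plugin name comes from the item
+   above, and `--offline` assumes the change is made while the node is stopped:
+
+   ``` shell
+   # remove the deprecated plugin from the enabled plugins list without contacting a running node
+   rabbitmq-plugins disable rabbitmq_management_visualiser --offline
+   ```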
+
+### Erlang/OTP 20 Support
+
+Erlang/OTP 20 has [breaking changes](https://groups.google.com/d/msg/rabbitmq-users/_imbAavBYjY/xHzMiGgMAgAJ) that affected RabbitMQ.
+
+3.7.0 supports OTP 20, including upgrades of existing installations to OTP 20. This required changes to the core and multiple plugins that ship with RabbitMQ.
+
+GitHub issues: [rabbitmq-server#1243](https://github.com/rabbitmq/rabbitmq-server/issues/1243), [rabbitmq-server#1250](https://github.com/rabbitmq/rabbitmq-server/pull/1250), [rabbitmq-server#1268](https://github.com/rabbitmq/rabbitmq-server/pull/1268),
+ [rabbitmq-server#1272](https://github.com/rabbitmq/rabbitmq-server/issues/1272), [rabbitmq-federation#58](https://github.com/rabbitmq/rabbitmq-federation/pull/58), [rabbitmq-management-agent#47](https://github.com/rabbitmq/rabbitmq-management-agent/pull/47),
+ [rabbitmq-management#415](https://github.com/rabbitmq/rabbitmq-management/pull/415), [rabbitmq-stomp#115](https://github.com/rabbitmq/rabbitmq-stomp/issues/115)
+
+
+### Core Server
+
+#### Enhancements
+
+ * New configuration file format
+
+ RabbitMQ now supports a [new configuration file format](http://next.rabbitmq.com/configure.html#config-file).
+ The format is [based on sysctl](https://github.com/basho/cuttlefish/wiki/Cuttlefish-for-Application-Users)
+ and is similar to `.ini` files. Erlang term configuration files are still supported, it is also
+ possible to combine the two formats (use both `rabbitmq.conf` and `advanced.config`).
+
+ Most documentation examples were updated to use both new and classic config formats. See the docs
+ at [next.rabbitmq.com](http://next.rabbitmq.com), in particular [the configuration guide](http://next.rabbitmq.com/configure.html),
+ for more information.
+
+ GitHub issues: [rabbitmq-server#550](https://github.com/rabbitmq/rabbitmq-server/issues/550),
+ [rabbitmq-server#1103](https://github.com/rabbitmq/rabbitmq-server/pull/1103),
+ [rabbitmq-auth-backend-ldap#59](https://github.com/rabbitmq/rabbitmq-auth-backend-ldap/issues/59)
+
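+    As an illustration, a hedged sketch of a couple of settings in the new sysctl-like format
+    (key names follow the 3.7 configuration docs; the values and file path are illustrative):
+
+    ``` shell
+    # append two settings in the new rabbitmq.conf format
+    printf '%s\n' \
+      'listeners.tcp.default = 5672' \
+      'log.file.level = info' >> /etc/rabbitmq/rabbitmq.conf
+    ```
+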
+ * Pluggable cluster peer discovery
+
+    Blank nodes (ones without an existing database) now can automatically discover cluster peers
+    using pluggable backends. This is a set of features adopted from the [rabbitmq-autocluster](https://github.com/rabbitmq/rabbitmq-autocluster/) plugin by Gavin Roy.
+    Two implementations are available out of the box: one uses a config file and another uses DNS A records.
+ Several more are available via plugins:
+
+ * [AWS](https://github.com/rabbitmq/rabbitmq-peer-discovery-aws)
+ * [Consul](https://github.com/rabbitmq/rabbitmq-peer-discovery-consul)
+ * [etcd](https://github.com/rabbitmq/rabbitmq-peer-discovery-etcd)
+ * [Kubernetes](https://github.com/rabbitmq/rabbitmq-peer-discovery-k8s)
+
+ GitHub issues: [rabbitmq-server#486](https://github.com/rabbitmq/rabbitmq-server/issues/486), [rabbitmq-server#988](https://github.com/rabbitmq/rabbitmq-server/issues/988), [rabbitmq-server#1143](https://github.com/rabbitmq/rabbitmq-server/issues/1143)
+ [rabbitmq-server#1202](https://github.com/rabbitmq/rabbitmq-server/issues/1202), [rabbitmq-server#1171](https://github.com/rabbitmq/rabbitmq-server/issues/1171), [rabbitmq-server#1257](https://github.com/rabbitmq/rabbitmq-server/issues/1257), [rabbitmq-server#1247](https://github.com/rabbitmq/rabbitmq-server/issues/1247)
+
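+    As an illustration, a hedged sketch of selecting the config file backend in the new config format
+    (backend and key names follow the 3.7 cluster formation docs; host names are hypothetical):
+
+    ``` shell
+    # use the classic config file backend and list two peers to contact on first boot
+    printf '%s\n' \
+      'cluster_formation.peer_discovery_backend = rabbit_peer_discovery_classic_config' \
+      'cluster_formation.classic_config.nodes.1 = rabbit@hostname1' \
+      'cluster_formation.classic_config.nodes.2 = rabbit@hostname2' >> /etc/rabbitmq/rabbitmq.conf
+    ```
+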
+ * Nodes in a cluster now can be restarted in arbitrary order. They will attempt to contact
+ one of the previously seen peers periodically (10 times with 30 second intervals by default).
+
+ GitHub issue: [rabbitmq-server#1022](https://github.com/rabbitmq/rabbitmq-server/issues/1022),
+ [rabbitmq-server#487](https://github.com/rabbitmq/rabbitmq-server/issues/487)
+
+ * Every virtual host now has separate message stores.
+    This improves resiliency and reduces contention in multitenant environments,
+    and will make it easier to develop tools that back up and restore data at rest.
+
+ During the upgrade nodes will migrate data to this new layout. This can take some time.
+ To reduce the amount of time, drain as many queues as possible before upgrading, e.g.
+ by stopping publishers without stopping consumers for a period of time.
+
+ GitHub issues: [rabbitmq-server#567](https://github.com/rabbitmq/rabbitmq-server/issues/567), [rabbitmq-server#1146](https://github.com/rabbitmq/rabbitmq-server/issues/1146), [rabbitmq-server#1280](https://github.com/rabbitmq/rabbitmq-server/issues/1280),
+ [rabbitmq-management#446](https://github.com/rabbitmq/rabbitmq-management/issues/446), [rabbitmq-server#1303](https://github.com/rabbitmq/rabbitmq-server/issues/1303), [rabbitmq-management#460](https://github.com/rabbitmq/rabbitmq-management/issues/460), [rabbitmq-server#1321](https://github.com/rabbitmq/rabbitmq-server/issues/1321)
+
+ * New [LevelDB-based message store index plugin](https://github.com/rabbitmq/rabbitmq-msg-store-index-eleveldb).
+
+ GitHub issue: [rabbitmq-server#838](https://github.com/rabbitmq/rabbitmq-server/issues/838)
+
+ * Support config file path values with and without file extensions
+
+    RabbitMQ no longer *requires* `RABBITMQ_CONFIG_FILE` values to exclude the `.config` or `.conf` suffix. It will try appending both suffixes as needed when searching for suitable config file(s) to load.
+
+ GitHub Issue: [rabbitmq-server#691](https://github.com/rabbitmq/rabbitmq-server/issues/691)
+
+ * Operators now can configure how many concurrent connections are allowed in a vhost.
+
+ GitHub issue: [rabbitmq-server#500](https://github.com/rabbitmq/rabbitmq-server/issues/500)
+
+ * Operators now can configure how many queues can exist in a vhost.
+
+ GitHub issue: [rabbitmq-server#501](https://github.com/rabbitmq/rabbitmq-server/issues/501)
+
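+    Both per-vhost limits above can be set with `rabbitmqctl set_vhost_limits`; a hedged sketch
+    (the vhost name and numbers are illustrative):
+
+    ``` shell
+    # cap concurrent connections and queues in the "production" virtual host
+    rabbitmqctl set_vhost_limits -p production '{"max-connections": 256, "max-queues": 1024}'
+    ```
+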
+ * Operator policies: their definitions are merged with the standard (user) policies and allow operators to put global
+   limits in place, e.g. max queue length or message TTL.
+
+ GitHub issue: [rabbitmq-server#930](https://github.com/rabbitmq/rabbitmq-server/issues/930)
+
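+    A hedged sketch of an operator policy that caps queue length and message TTL
+    (the policy name, vhost and values are illustrative):
+
+    ``` shell
+    # operator policy definitions are merged with any matching user policy
+    rabbitmqctl set_operator_policy -p production limits ".*" '{"max-length": 100000, "message-ttl": 3600000}'
+    ```
+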
+ * It is now possible to configure a limited [prefetch value](https://www.rabbitmq.com/confirms.html) for all newly opened channels by default.
+ This helps prevent consumers that never acknowledge deliveries from exhausting server resources.
+ It is important to make sure that consumers that use [manual acknowledgements](https://www.rabbitmq.com/confirms.html) can cope with this
+ change before enabling it.
+
+ GitHub issue: [rabbitmq-server#1367](https://github.com/rabbitmq/rabbitmq-server/issues/1367)
+
+ * Plugin version constraints
+
+ RabbitMQ now has a mechanism for plugin authors to indicate what versions a plugin is compatible with.
+ Incompatible plugins are logged and ignored.
+
+ GitHub issues: [rabbitmq-server#591](https://github.com/rabbitmq/rabbitmq-server/issues/591), [rabbitmq-server#735](https://github.com/rabbitmq/rabbitmq-server/issues/735), [rabbitmq-server#1090](https://github.com/rabbitmq/rabbitmq-server/issues/1090)
+
+ * Lager-based logging: [less prone to overload](https://s3.us-east-2.amazonaws.com/ferd.erlang-in-anger/text.v1.1.0.pdf), pluggable backends, debug log level, more flexibility in configuration.
+
+ RabbitMQ now uses [Lager](https://github.com/basho/lager) for its logging subsystem.
+ This brings a group of benefits: (quite verbose) debug log level, pluggable logging
+ backends (will require Lager plugins), and much
+ more flexibility in logging configuration.
+
+ See [RabbitMQ 3.7.0 logging guide](http://next.rabbitmq.com/logging.html), [rabbitmq.conf.example](https://github.com/rabbitmq/rabbitmq-server/blob/master/docs/rabbitmq.conf.example) (new style config)
+ and [rabbitmq.config.example](https://github.com/rabbitmq/rabbitmq-server/blob/v3.7.x/docs/rabbitmq.config.example) (classic/advanced config format) to learn more.
+
+ GitHub issues: [rabbitmq-server#94](https://github.com/rabbitmq/rabbitmq-server/issues/94), [rabbitmq-website#418](https://github.com/rabbitmq/rabbitmq-website/issues/418)
+
+ * Topic authorisation built around topic exchanges. This includes support for certain variables,
+ such as virtual host and username.
+
+ GitHub issues: [rabbitmq-server#505](https://github.com/rabbitmq/rabbitmq-server/issues/505), [rabbitmq-server#1085](https://github.com/rabbitmq/rabbitmq-server/issues/1085), [rabbitmq-server#1099](https://github.com/rabbitmq/rabbitmq-server/issues/1099), [rabbitmq-server#1229](https://github.com/rabbitmq/rabbitmq-server/issues/1229)
+
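+    Topic permissions are granted per exchange with `rabbitmqctl set_topic_permissions`; a hedged sketch
+    using the username variable mentioned above (the vhost, user and patterns are illustrative):
+
+    ``` shell
+    # allow "app1" to publish and consume only on routing keys that start with its own username
+    rabbitmqctl set_topic_permissions -p production app1 amq.topic "^{username}\." "^{username}\."
+    ```
+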
+ * When the queue length limit is exceeded, publishers of messages that did not fit
+    now can opt in to receive a nack by configuring an overflow behavior.
+    When the behavior is configured to reject, messages are also discarded more efficiently.
+
+ GitHub issue: [rabbitmq-server#995](https://github.com/rabbitmq/rabbitmq-server/issues/995)
+
+ * [Proxy Protocol](http://www.haproxy.org/download/1.8/doc/proxy-protocol.txt) support.
+
+ GitHub issue: [rabbitmq-server#589](https://github.com/rabbitmq/rabbitmq-server/issues/589)
+
+ * Deleting a vhost now will force close all connections in it.
+
+ GitHub issue: [rabbitmq-server#627](https://github.com/rabbitmq/rabbitmq-server/issues/627)
+
+ * Deleting a user account now will force close all of its connections.
+
+ GitHub issue: [rabbitmq-server#628](https://github.com/rabbitmq/rabbitmq-server/issues/628)
+
+ * OpenSUSE RPM package now supports systemd and requires Leap 42.2.
+
+ GitHub issue: [rabbitmq-server-release#31](https://github.com/rabbitmq/rabbitmq-server-release/pull/31)
+
+ * Standalone MacOS release now bundles Erlang 19.x.
+
+ GitHub issue: [rabbitmq-server-release#10](https://github.com/rabbitmq/rabbitmq-server-release/issues/10)
+
+ * First dead-lettering event now injects a separate set of top-level headers — `x-first-death-queue`, `x-first-death-reason`,
+ and `x-first-death-exchange` — in addition to the `x-deaths` entries.
+
+ GitHub issue: [rabbitmq-server#1332](https://github.com/rabbitmq/rabbitmq-server/issues/1332)
+
+ * LF and CR characters are now stripped from the names of queues and exchanges
+
+ Although line feeds in names are permitted under the AMQP 0-9-1 spec,
+ in practice they can make it very difficult to work with queues and exchanges.
+ Stripping them out makes life much easier for developers.
+
+ GitHub issue: [rabbitmq-server#710](https://github.com/rabbitmq/rabbitmq-server/issues/710)
+
+ * Deletion of auto-delete and exclusive queues now leaves (debug) log entries
+
+ GitHub issue: [rabbitmq-server#590](https://github.com/rabbitmq/rabbitmq-server/issues/590)
+
+ * `rabbitmqctl set_vm_high_watermark` now produces more useful error messages when
+ relative values provided are outside of the `0..1.0` range.
+
+ GitHub issue: [rabbitmq-cli#157](https://github.com/rabbitmq/rabbitmq-cli/issues/157)
+
+#### Bug Fixes
+
+ * Mirrored queue could terminate if a policy is re-applied concurrently after promotion
+
+ GitHub issue: [rabbitmq-server#803](https://github.com/rabbitmq/rabbitmq-server/issues/803)
+
+ * Old incarnations of queue mirrors are stopped before new ones start
+
+ GitHub issue: [rabbitmq-server#863](https://github.com/rabbitmq/rabbitmq-server/issues/863)
+
+ * Channel interceptors are enabled/disabled together with plugins
+
+ GitHub issue: [rabbitmq-server#559](https://github.com/rabbitmq/rabbitmq-server/issues/559)
+
+
+### CLI Tools
+
+CLI tools were [significantly redesigned](https://groups.google.com/forum/#!searchin/rabbitmq-users/ANN$20New$20CLI$20tools%7Csort:relevance/rabbitmq-users/x0XugmBt-IE/t2wdjIdSBgAJ),
+now can be extended from plugins and support command
+aliases (à la Git). There's also a new tool, `rabbitmq-diagnostics`, and several new commands available.
+
+GitHub issues: [rabbitmq-server#577](https://github.com/rabbitmq/rabbitmq-server/issues/577), [rabbitmq-cli#38](https://github.com/rabbitmq/rabbitmq-cli/issues/38), [rabbitmq-server#1085](https://github.com/rabbitmq/rabbitmq-server/issues/1085),
+ [rabbitmq-cli#10](https://github.com/rabbitmq/rabbitmq-cli/issues/10), [rabbitmq-cli#178](https://github.com/rabbitmq/rabbitmq-cli/issues/178), [rabbitmq-cli#180](https://github.com/rabbitmq/rabbitmq-cli/issues/180).
+
+#### Enhancements
+
+ * New CLI command for closing connections in bulk (e.g. a specific vhost).
+
+ GitHub issue: [rabbitmq-server#849](https://github.com/rabbitmq/rabbitmq-server/issues/849)
+
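+    A hedged sketch, assuming the command is `close_all_connections` (the vhost name and reason are illustrative):
+
+    ``` shell
+    # assumption: close_all_connections is the bulk closing command; closes every connection in the vhost
+    rabbitmqctl close_all_connections -p production "closing connections before maintenance"
+    ```
+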
+ * New CLI command for detecting potentially stuck
+ processes (previously invoked as `rabbit_diagnostics:maybe_stuck/0` via `rabbitmqctl eval`).
+
+ GitHub issue: [rabbitmq-cli#144](https://github.com/rabbitmq/rabbitmq-cli/issues/144)
+
+ * New CLI command that lists non-AMQP connections (e.g. MQTT).
+
+ GitHub issue: [rabbitmq-cli#121](https://github.com/rabbitmq/rabbitmq-cli/issues/121)
+
+ * New CLI command that outputs effective Erlang cookie hash.
+
+ GitHub issue: [rabbitmq-cli#151](https://github.com/rabbitmq/rabbitmq-cli/issues/151)
+
+ * `rabbitmq-plugins list` will now only recognise Erlang applications that depend on
+    `rabbit` as plugins. Plugins that lack the dependency will still function and
+    can be enabled or disabled but won't be listed.
+
+ GitHub issue: [rabbitmq-server#1124](https://github.com/rabbitmq/rabbitmq-server/issues/1124)
+
+
+
+### Management plugin
+
+#### Enhancements
+
+ * Statistics database is now distributed across the cluster: each node stores its own stats.
+
+ GitHub issue: [rabbitmq-management#236](https://github.com/rabbitmq/rabbitmq-management/issues/236)
+
+ * Migration to Cowboy REST
+
+ RabbitMQ management plugin as well as its extensions (e.g. those of
+ Federation and Shovel, `rabbitmq-top`) now uses [Cowboy REST](http://ninenines.eu/docs/en/cowboy/HEAD/guide/rest_handlers/)
+ instead of Webmachine. Cowboy is a state-of-the-art open source Erlang HTTP 1.1 server and REST micro framework
+ that is also used in the plugins that provide WebSocket support.
+
+ The change is largely invisible to management UI and HTTP API
+ clients but there are minor changes that can affect test suites: for example,
+ `POST` and `PUT` responses now use `201 Created` instead of `204 No Content`.
+
+ Plugins that extend management UI need to be ported to Cowboy REST and Cowboy 2.
+
+ GitHub issue: [rabbitmq-management#63](https://github.com/rabbitmq/rabbitmq-management/issues/63)
+
+ * Users tagged with `administrator` now implicitly have access to most operations in all vhosts,
+ including those they don't have explicit permissions for.
+
+ GitHub issue: [rabbitmq-management#461](https://github.com/rabbitmq/rabbitmq-management/issues/461)
+
+ * A user that creates a vhost is automatically granted full permissions in it. This is a usability
+ improvement to the management UI. Note that this assumes that a user that has the permissions to create
+ vhosts also has the permission to grant themselves permissions to any vhost, so this changes nothing in
+ terms of security.
+
+ GitHub issue: [rabbitmq-management#445](https://github.com/rabbitmq/rabbitmq-management/issues/445)
+
+ * New HTTP API endpoint allows for bulk deletion of users.
+
+ GitHub issue: [rabbitmq-management#448](https://github.com/rabbitmq/rabbitmq-management/issues/448)
+
+ * Node endpoint now returns more information about [memory use breakdown](http://next.rabbitmq.com/memory-use.html).
+    This is a **breaking change**.
+
+ GitHub issue: [rabbitmq-management#499](https://github.com/rabbitmq/rabbitmq-management/issues/499)
+
+ * Report more TLS-related values via HTTP API
+
+ The HTTP API now reports all SSL/TLS options available for the RabbitMQ server as well as for the management plugin.
+
+ GitHub issue: [rabbitmq-management#163](https://github.com/rabbitmq/rabbitmq-management/issues/163)
+
+ * Topic authorisation permission management.
+
+ GitHub issue: [rabbitmq-management#405](https://github.com/rabbitmq/rabbitmq-management/issues/405)
+
+ * Dual IP Stack Support
+
+ Management plugin now can be configured to use IPv6 or both IPv6 and IPv4.
+
+ GitHub issue: [rabbitmq-management#64](https://github.com/rabbitmq/rabbitmq-management/issues/64)
+
+ * "Get messages" in the UI now offers more requeueing options. This is a **breaking change** for HTTP API
+ clients as the list of accepted `ackmode` field values has [changed](https://rawcdn.githack.com/rabbitmq/rabbitmq-management/v3.7.0/priv/www/api/index.html).
+
+ GitHub issue: [rabbitmq-management#68](https://github.com/rabbitmq/rabbitmq-management/issues/68)
+
+
+### Java client
+
+These release notes are for the 4.0 release. Please see [Java client release notes](https://github.com/rabbitmq/rabbitmq-java-client/releases)
+for information about later releases.
+
+#### Enhancements
+
+ * Bump default TLS version to v1.2 with a fallback for older JDKs
+
+ The Java client now attempts to use TLS v1.2 by default (which many RabbitMQ servers prefer due to vulnerabilities in TLS v1)
+ but falls back to TLS v1 for older JDKs such as JDK 6.
+
+ GitHub issue: [rabbitmq-java-client#139](https://github.com/rabbitmq/rabbitmq-java-client/issues/139)
+
+ * Begin recovery after all shutdown listeners have been given a chance to run
+
+ GitHub issue: [rabbitmq-java-client#135](https://github.com/rabbitmq/rabbitmq-java-client/issues/135)
+
+ * `com.rabbitmq.client.Connection` and `com.rabbitmq.client.Channel` now implement `java.io.Closeable`
+
+ GitHub issue: [rabbitmq-java-client#131](https://github.com/rabbitmq/rabbitmq-java-client/issues/131)
+
+
+### .NET Client
+
+These release notes are for the 4.0 release. Please see [.NET client release notes](https://github.com/rabbitmq/rabbitmq-dotnet-client/releases)
+for information about later releases.
+
+#### Enhancements
+
+ * .NET Core support (as of .NET client `4.0.0`).
+
+ GitHub issues: [rabbitmq-dotnet-client#148](https://github.com/rabbitmq/rabbitmq-dotnet-client/issues/148), [rabbitmq-dotnet-client#213](https://github.com/rabbitmq/rabbitmq-dotnet-client/issues/213), [rabbitmq-dotnet-client#206](https://github.com/rabbitmq/rabbitmq-dotnet-client/issues/206)
+
+ * AppVeyor CI and NuGet feed of Nightly Builds
+
+ GitHub issue: [rabbitmq-dotnet-client#206](https://github.com/rabbitmq/rabbitmq-dotnet-client/issues/206)
+
+
+### MQTT plugin
+
+#### Enhancements
+
+ * Topic authorisation.
+
+ GitHub issues: [rabbitmq-mqtt#95](https://github.com/rabbitmq/rabbitmq-mqtt/issues/95), [rabbitmq-server#505](https://github.com/rabbitmq/rabbitmq-server/issues/505), [rabbitmq-mqtt#114](https://github.com/rabbitmq/rabbitmq-mqtt/issues/114).
+
+ * Client ID is propagated to [authentication backends](http://rabbitmq.com/access-control.html).
+
+ GitHub issue: [rabbitmq-mqtt#139](https://github.com/rabbitmq/rabbitmq-mqtt/issues/139)
+
+ * QoS 2 subscriptions are downgraded to QoS 1
+
+ GitHub issue: [rabbitmq-mqtt#21](https://github.com/rabbitmq/rabbitmq-mqtt/issues/21)
+
+
+### Shovel Plugin
+
+#### Enhancements
+
+ * Support for AMQP 1.0 and a foundation for more protocols supported in the future.
+ This means cross-protocol shoveling (AMQP 0-9-1 to AMQP 1.0 or the other way around) is now
+ supported. [Erlang client for AMQP 1.0](https://github.com/rabbitmq/rabbitmq-amqp1.0-client) is now also available
+ as a standalone project.
+
+ GitHub issue: [rabbitmq-shovel#26](https://github.com/rabbitmq/rabbitmq-shovel/issues/26)
+
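+    A hedged sketch of a dynamic shovel that consumes over AMQP 0-9-1 and publishes over AMQP 1.0,
+    assuming the `src-protocol`/`dest-protocol` parameter keys (URIs, queue and address are illustrative):
+
+    ``` shell
+    # declare a cross-protocol dynamic shovel as a runtime parameter
+    rabbitmqctl set_parameter shovel cross-protocol-example \
+      '{"src-protocol": "amqp091", "src-uri": "amqp://localhost", "src-queue": "source",
+        "dest-protocol": "amqp10", "dest-uri": "amqp://localhost:5672", "dest-address": "target"}'
+    ```
+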
+ * Message timestamping.
+
+    Shovel now adds an extra header that contains the timestamp
+    indicating when the message was shovelled.
+
+ GitHub issue: [rabbitmq-shovel#2](https://github.com/rabbitmq/rabbitmq-shovel/issues/2)
+
+
+### Federation Plugin
+
+#### Enhancements
+
+ * New CLI command that restarts a link.
+
+ GitHub issue: [rabbitmq-federation#45](https://github.com/rabbitmq/rabbitmq-federation/issues/45)
+
+#### Bug Fixes
+
+ * Internal exchanges and queues are now cleaned up if the policy goes out of effect or the plugin is disabled.
+
+ GitHub issue: [rabbitmq-federation#63](https://github.com/rabbitmq/rabbitmq-federation/issues/63)
+
+### Event Exchange Plugin
+
+#### Enhancements
+
+ * Acting user information is now included into the emitted events where possible.
+
+ GitHub issue: [rabbitmq-event-exchange#10](https://github.com/rabbitmq/rabbitmq-event-exchange/issues/10)
+
+
+### AMQP 1.0 Plugin
+
+This release introduces a new sub-project, an [Erlang client for AMQP 1.0](https://github.com/rabbitmq/rabbitmq-amqp1.0-client), and AMQP 1.0 support
+in the Shovel plugin.
+
+#### Bug Fixes
+
+ * Transfer frames are no longer sent before credit has been granted.
+
+ GitHub issue: [rabbitmq-amqp1.0#43](https://github.com/rabbitmq/rabbitmq-amqp1.0/issues/43)
+
+ * Ensure messages with `uint` TTL can round trip.
+
+ GitHub issue: [rabbitmq-amqp1.0#13](https://github.com/rabbitmq/rabbitmq-amqp1.0/issues/13)
+
+
+### LDAP Authn/Authz Backend
+
+#### Enhancements
+
+ * Topic authorisation support with variable expansion.
+
+ GitHub issue: [rabbitmq-auth-backend-ldap#71](https://github.com/rabbitmq/rabbitmq-auth-backend-ldap/issues/71)
+
+
+### Delayed Message Exchange Plugin
+
+#### Enhancements
+
+ * It is now possible to see how many messages are delayed in management UI
+
+ GitHub issue: [rabbitmq-delayed-message-exchange#3](https://github.com/rabbitmq/rabbitmq-delayed-message-exchange/issues/3)
+
+
+### Erlang Client
+
+#### Bug Fixes
+
+ * Connection operations now use a reasonable timeout.
+
+ GitHub issue: [rabbitmq-erlang-client#85](https://github.com/rabbitmq/rabbitmq-erlang-client/issues/85)
+
+
+### AMQP 0-9-1 Authn/Authz Backend
+
+ * Topic authorisation support with variable expansion.
+
+ GitHub issue: [rabbitmq-auth-backend-amqp#16](https://github.com/rabbitmq/rabbitmq-auth-backend-amqp/issues/16)
+
+
+## Upgrading
+
+To upgrade a non-clustered RabbitMQ, simply install the new version. All configuration and persistent message data are retained.
+When upgrading using definitions export/import from versions earlier than 3.6.0, see http://rabbitmq.com/passwords.html.
+
+To upgrade a RabbitMQ cluster, follow the instructions [in RabbitMQ documentation](https://www.rabbitmq.com/clustering.html#upgrading).
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins, CLI tools or other sub-projects. Please download the archive named `rabbitmq-server-<version>.tar.xz` from this release
+page.
diff --git a/release-notes/3.7.1.md b/release-notes/3.7.1.md
new file mode 100644
index 0000000000..6fa0765a16
--- /dev/null
+++ b/release-notes/3.7.1.md
@@ -0,0 +1,121 @@
+## RabbitMQ 3.7.1
+
+RabbitMQ `3.7.1` is a bug fix release.
+
+### Upgrades and Compatibility
+
+This release has no known incompatibilities with 3.7.0. See [3.7.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.7.0)
+for upgrade and compatibility notes if upgrading from an earlier release.
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html)
+for general documentation on upgrades.
+
+See [RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+
+## Changes
+
+### Core Server
+
+#### Bug Fixes
+
+ * Standalone Mac build now bundles a version of Erlang/OTP that is compatible with High Sierra (20.1, to be specific).
+
+ GitHub issue: [rabbitmq/rabbitmq-server-release#68](https://github.com/rabbitmq/rabbitmq-server-release/issues/68)
+
+ * RabbitMQ distribution now again can be built in an offline environment (one without access to the Internet).
+
+ GitHub issue: [rabbitmq/rabbitmq-server-release#61](https://github.com/rabbitmq/rabbitmq-server-release/issues/61)
+
+ * `total_memory_available_override_value` was not available via the [new config format](https://www.rabbitmq.com/configure.html).
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1445](https://github.com/rabbitmq/rabbitmq-server/issues/1445)
+
+
+### CLI Tools
+
+#### Bug Fixes
+
+ * `rabbitmqctl set_parameter` ignored the virtual host argument and defaulted to the default (`/`) vhost.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#232](https://github.com/rabbitmq/rabbitmq-cli/issues/232)
+
+
+### AWS Peer Discovery Plugin
+
+#### Bug Fixes
+
+ * HTTP client proxy settings were applied too late and thus didn't have any effect.
+
+ GitHub issue: [rabbitmq/rabbitmq-peer-discovery-common#5](https://github.com/rabbitmq/rabbitmq-peer-discovery-common/issues/5)
+
+
+### Consul Peer Discovery Plugin
+
+#### Bug Fixes
+
+ * HTTP client proxy settings were applied too late and thus didn't have any effect.
+
+ GitHub issue: [rabbitmq/rabbitmq-peer-discovery-common#5](https://github.com/rabbitmq/rabbitmq-peer-discovery-common/issues/5)
+
+
+### etcd Peer Discovery Plugin
+
+#### Bug Fixes
+
+ * HTTP client proxy settings were applied too late and thus didn't have any effect.
+
+ GitHub issue: [rabbitmq/rabbitmq-peer-discovery-common#5](https://github.com/rabbitmq/rabbitmq-peer-discovery-common/issues/5)
+
+
+### Kubernetes Peer Discovery Plugin
+
+#### Bug Fixes
+
+ * HTTP client proxy settings were applied too late and thus didn't have any effect.
+
+ GitHub issue: [rabbitmq/rabbitmq-peer-discovery-common#5](https://github.com/rabbitmq/rabbitmq-peer-discovery-common/issues/5)
+
+
+### Management and Management Agent Plugins
+
+#### Bug Fixes
+
+ * Certain node metrics in multi-cluster environment were not merged correctly
+ when aggregated.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#525](https://github.com/rabbitmq/rabbitmq-management/issues/525)
+
+
+### Auth Backend HTTP Plugin
+
+#### Bug Fixes
+
+ * It was not possible to configure request HTTP method using the [new style config file](https://www.rabbitmq.com/configure.html).
+
+ GitHub issue: [rabbitmq/rabbitmq-auth-backend-http#61](https://github.com/rabbitmq/rabbitmq-auth-backend-http/issues/61)
+
+
+### Shovel Management Plugin
+
+#### Bug Fixes
+
+ * The Move Messages form on the individual queue page didn't provide all the Shovel parameters required as
+    of 3.7.0 and thus did not transfer any messages.
+
+ GitHub issue: [rabbitmq/rabbitmq-shovel-management#24](https://github.com/rabbitmq/rabbitmq-shovel-management/issues/24)
+
+
+### Erlang AMQP 1.0 Client
+
+#### Bug Fixes
+
+ * Continuation frames without `delivery_id` were not handled correctly.
+
+ GitHub issue: [rabbitmq/rabbitmq-amqp1.0-client#11](https://github.com/rabbitmq/rabbitmq-amqp1.0-client/issues/11)
+
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins or the client libraries. Please download the archive named `rabbitmq-3.7.1.tar.gz`.
diff --git a/release-notes/3.7.10.md b/release-notes/3.7.10.md
new file mode 100644
index 0000000000..7b38f3833e
--- /dev/null
+++ b/release-notes/3.7.10.md
@@ -0,0 +1,248 @@
+## RabbitMQ 3.7.10
+
+RabbitMQ `3.7.10` is a maintenance release. It focuses on bug fixes and minor usability improvements.
+
+### Compatibility Notes
+
+`rabbitmq-plugins` in this release will produce an extra leading line of output, as it did in `3.6.x` releases,
+when `-q` is not provided. Tools that parse `rabbitmq-plugins` output should use `-q --no-table-headers` to suppress
+all output meant for interactive use.
+
+### Erlang/OTP Compatibility Notes
+
+This is the [**last release to support Erlang/OTP 19.3**](https://groups.google.com/d/msg/rabbitmq-users/G4UJ9zbIYHs/qCeyjkjyCQAJ). Users are highly encouraged to
+upgrade to a more recent [supported Erlang version](https://www.rabbitmq.com/which-erlang.html).
+
+#### Upgrading to Erlang 21.x
+
+When upgrading to this release **and upgrading Erlang to 21.x** at the same time, extra care has to be taken.
+Since CLI tools from RabbitMQ releases older than 3.7.7 will fail on Erlang 21, RabbitMQ **must be upgraded before Erlang**.
+
+#### Documentation Guides and Change Log
+
+See [3.7.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.7.0) for upgrade and
+compatibility notes if upgrading from an earlier release.
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades and
+[RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+
+## Changes
+
+### Core Server
+
+#### Bug Fixes
+
+ * Channel stats emission performed some operations without a timeout, so listing channels or running
+ a node health check could block and potentially cause temporary process accumulation on the node.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1783](https://github.com/rabbitmq/rabbitmq-server/issues/1783)
+
+ * Virtual host process memory usage was higher than necessary.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1770](https://github.com/rabbitmq/rabbitmq-server/pull/1770)
+
+ * Node could run into a failed assertion after a partial partition.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1682](https://github.com/rabbitmq/rabbitmq-server/issues/1682)
+
+ * Topic authorisation could fail with an exception if connection terminated (cleanly or abruptly) concurrently.
+
+ GitHub issues: [rabbitmq/rabbitmq-common#290](https://github.com/rabbitmq/rabbitmq-common/pull/290), [rabbitmq/rabbitmq-server#1538](https://github.com/rabbitmq/rabbitmq-server/issues/1538)
+
+ * Pointing `RABBITMQ_SERVER_CODE_PATH` at a HiPE-precompiled code directory didn't result in HiPE-compiled
+ modules being loaded.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1777](https://github.com/rabbitmq/rabbitmq-server/issues/1777)
+
+#### Enhancements
+
+ * `kernel.inet_dist_listen_min` and `kernel.inet_dist_listen_max` could not be configured via new style config.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1784](https://github.com/rabbitmq/rabbitmq-server/issues/1784)
+
+
+### CLI Tools
+
+#### Bug Fixes
+
+ * `rabbitmqctl list_consumers` only listed consumers on queues whose leader replica was hosted on
+    the target node. Instead, consumers of all queues in the virtual host should have been listed.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#284](https://github.com/rabbitmq/rabbitmq-cli/issues/284)
+
+ * A combination of `RABBITMQ_NODENAME` and `RABBITMQ_USE_LONGNAME` now works the same way for CLI
+ tools as prior to 3.7.0.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#278](https://github.com/rabbitmq/rabbitmq-cli/issues/278)
+
+ * New command: `rabbitmq-diagnostics tls_versions`, that lists TLS versions available in the runtime
+ on the target nodes.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#289](https://github.com/rabbitmq/rabbitmq-cli/pull/289)
+
+ * `rabbitmqadmin declare user` now supports providing a password hash and algorithm instead of a password.
+
+ Contributed by John W. Higgings.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#641](https://github.com/rabbitmq/rabbitmq-management/pull/641)
+
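+   A hedged sketch, assuming the arguments mirror the HTTP API user fields (the hash below is a placeholder):
+
+   ``` shell
+   # declare a user from a pre-computed salted hash instead of a plain text password
+   rabbitmqadmin declare user name=monitoring tags=monitoring \
+     password_hash='<base64-encoded-salted-hash>' hashing_algorithm=rabbit_password_hashing_sha256
+   ```
+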
+ * `-q` was hardcoded for `rabbitmq-plugins`.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1781](https://github.com/rabbitmq/rabbitmq-server/issues/1781)
+
+#### Enhancements
+
+ * `rabbitmqctl set_log_level` is a new command that changes effective log level of a node at runtime.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#276](https://github.com/rabbitmq/rabbitmq-cli/issues/276)
+
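+   For example (the target level is illustrative):
+
+   ``` shell
+   # change the effective log level to debug without restarting the node
+   rabbitmqctl set_log_level debug
+   ```
+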
+ * `rabbitmq-diagnostics runtime_thread_stats` is a new command that outputs runtime thread state stats.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#282](https://github.com/rabbitmq/rabbitmq-cli/pull/282)
+
+ * `rabbitmq-plugins directories` is a new command that outputs effective [plugin directories](https://www.rabbitmq.com/plugins.html#plugin-directories).
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#261](https://github.com/rabbitmq/rabbitmq-cli/issues/261)
+
+ * `--silent` is a new flag that combines `--quiet` and `--no-table-headers` (where applicable).
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#274](https://github.com/rabbitmq/rabbitmq-cli/issues/274)
+
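+   A hedged sketch of using it for machine-readable output (the queue columns are illustrative):
+
+   ``` shell
+   # suppress the banner and table headers, e.g. when the output is consumed by a script
+   rabbitmqctl list_queues --silent name messages
+   ```
+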
+ * Improved error messages for several common issues that can make `rabbitmq-plugins` commands fail.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#283](https://github.com/rabbitmq/rabbitmq-cli/pull/283)
+
+
+### Management and Management Agent Plugins
+
+#### Bug Fixes
+
+ * Exported definitions for an individual virtual host were missing [runtime parameters](https://www.rabbitmq.com/parameters.html).
+
+ GitHub issue: [rabbitmq/rabbitmq-management#628](https://github.com/rabbitmq/rabbitmq-management/issues/628)
+
+ * Filter inputs now escape incomplete HTML tags from their values. Previously they only stripped complete and valid tags.
+ Note that the inputs are not populated from request parameters and have to be manually populated by the user.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#646](https://github.com/rabbitmq/rabbitmq-management/issues/646)
+
+ * Exclusive queue owner connection now again can be navigated to using a link on the queue page.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#467](https://github.com/rabbitmq/rabbitmq-management/issues/467)
+
+#### Enhancements
+
+ * More TLS options, including cipher suites, now can be configured using new style config format.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#643](https://github.com/rabbitmq/rabbitmq-management/issues/643)
+
+ * Metric chart ranges now only list options for which enough data is [retained](https://www.rabbitmq.com/management.html#sample-retention).
+
+ GitHub issue: [rabbitmq/rabbitmq-management#635](https://github.com/rabbitmq/rabbitmq-management/issues/635)
+
+
+### STOMP Plugin
+
+#### Enhancements
+
+ * TCP listener options now can be configured using new style configuration format.
+
+ GitHub issue: [rabbitmq/rabbitmq-stomp#129](https://github.com/rabbitmq/rabbitmq-stomp/issues/129)
+
+
+### Shovel Plugin
+
+#### Bug Fixes
+
+ * Backported several stability improvements for interaction with AMQP 1.0 endpoints from the 3.8 development
+ branch (master).
+
+ Changes: [one](https://github.com/rabbitmq/rabbitmq-amqp1.0-client/commit/8f0f7df34904abdee67e9764406837a5a7ecc091), [two](https://github.com/rabbitmq/rabbitmq-amqp1.0-common/commit/43bf4486fe6b993e5bc000056e78767889a23bbe)
+
+ * Shovels now can be created with blank publish properties.
+
+ GitHub issue: [rabbitmq/rabbitmq-shovel#46](https://github.com/rabbitmq/rabbitmq-shovel/issues/46)
+
+
+### Consistent Hash Exchange Plugin
+
+#### Bug Fixes
+
+ * The plugin relied on a non-deterministic ordering behavior when bindings were removed (e.g. because a queue was
+ deleted). This could lead to inconsistent hash ring state and affect routing behavior.
+
+ GitHub issue: [rabbitmq/rabbitmq-consistent-hash-exchange#40](http://github.com/rabbitmq/rabbitmq-consistent-hash-exchange/issues/40)
+
+
+### Web STOMP Plugin
+
+#### Bug Fixes
+
+ * When `web_stomp.ssl.listener` is set, TLS certificate, key and other TLS settings were ignored.
+
+    GitHub issue: [rabbitmq/rabbitmq-web-stomp#104](https://github.com/rabbitmq/rabbitmq-web-stomp/issues/104)
+
+#### Enhancements
+
+ * [Proxy protocol](https://www.rabbitmq.com/networking.html#proxy-protocol) support.
+
+ GitHub issue: [rabbitmq/rabbitmq-web-stomp#84](https://github.com/rabbitmq/rabbitmq-web-stomp/issues/84)
+
+ * TCP (as in, non-TLS) listener now can be disabled.
+
+ GitHub issue: [rabbitmq/rabbitmq-web-stomp#100](https://github.com/rabbitmq/rabbitmq-web-stomp/issues/100)
+
+ * More TLS options, including cipher suites, now can be configured using new style config format.
+
+ GitHub issue: [rabbitmq/rabbitmq-web-stomp#101](https://github.com/rabbitmq/rabbitmq-web-stomp/pull/101)
+
+ * Proxy protocol now can be enabled via new style config format.
+
+ GitHub issue: [rabbitmq/rabbitmq-web-stomp#99](https://github.com/rabbitmq/rabbitmq-web-stomp/pull/99)
+
+ * Resource-driven flow control support.
+
+ GitHub issue: [rabbitmq/rabbitmq-web-stomp#40](https://github.com/rabbitmq/rabbitmq-web-stomp/issues/40)
+
+
+### Web MQTT Plugin
+
+#### Bug Fixes
+
+ * When `web_mqtt.ssl.listener` is set, TLS certificate, key and other TLS settings were ignored.
+
+ GitHub issue: [rabbitmq/rabbitmq-web-mqtt#48](https://github.com/rabbitmq/rabbitmq-web-mqtt/issues/48)
+
+ * Last Will message was not sent when a connection that had it configured was closed cleanly.
+
+ Contributed by Karen Mae Bajador.
+
+ GitHub issue: [rabbitmq/rabbitmq-web-mqtt#52](https://github.com/rabbitmq/rabbitmq-web-mqtt/pull/52)
+
+#### Enhancements
+
+ * [Proxy protocol](https://www.rabbitmq.com/networking.html#proxy-protocol) support.
+
+ GitHub issue: [rabbitmq/rabbitmq-web-mqtt#29](https://github.com/rabbitmq/rabbitmq-web-mqtt/issues/29)
+
+ * TCP (as in, non-TLS) listener now can be disabled.
+
+ GitHub issue: [rabbitmq/rabbitmq-web-mqtt#31](https://github.com/rabbitmq/rabbitmq-web-mqtt/issues/31)
+
+ * More TLS options, including cipher suites, now can be configured using new style config format.
+
+ GitHub issue: [rabbitmq/rabbitmq-web-mqtt#45](https://github.com/rabbitmq/rabbitmq-web-mqtt/pull/45)
+
+ * Proxy protocol now can be enabled via new style config format.
+
+ GitHub issue: [rabbitmq/rabbitmq-web-mqtt#46](https://github.com/rabbitmq/rabbitmq-web-mqtt/issues/46)
+
+ * Resource-driven flow control support.
+
+ GitHub issue: [rabbitmq/rabbitmq-web-mqtt#40](https://github.com/rabbitmq/rabbitmq-web-mqtt/pull/40)
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins or the client libraries. Please download the archive named `rabbitmq-3.7.10.tar.gz`.
diff --git a/release-notes/3.7.11.md b/release-notes/3.7.11.md
new file mode 100644
index 0000000000..c93941c337
--- /dev/null
+++ b/release-notes/3.7.11.md
@@ -0,0 +1,139 @@
+## RabbitMQ 3.7.11
+
+RabbitMQ `3.7.11` is a maintenance release. It focuses on bug fixes and
+minor usability improvements. This is the first release to **require Erlang/OTP 20.3+**.
+
+### Erlang/OTP Compatibility Notes
+
+Per the new Erlang version support policy in effect starting with January 2019,
+this release [**drops support for Erlang/OTP 19.3**](https://groups.google.com/d/msg/rabbitmq-users/G4UJ9zbIYHs/qCeyjkjyCQAJ). Make sure a [supported Erlang version](https://www.rabbitmq.com/which-erlang.html) is used before upgrading.
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains what package repositories and tools can be used to provision latest patch versions of Erlang `20.3.x` and `21.x`.
+
+### Compatibility Notes
+
+ * This release requires Erlang/OTP 20.3 or later.
+ * Default CLI command exit code has changed from 70 (`EX_SOFTWARE`) to 69 (`EX_UNAVAILABLE`).
+   This does not affect all commands and all error conditions, e.g. argument validation still uses code 64 (`EX_USAGE`).
+
+#### Upgrading to Erlang 21.x
+
+When upgrading to this release **and upgrading Erlang to 21.x** at the same time, extra care has to be taken.
+Since CLI tools from RabbitMQ releases older than 3.7.7 will fail on Erlang 21, RabbitMQ **must be upgraded before Erlang**.
+
+#### Documentation Guides and Change Log
+
+See [3.7.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.7.0) for upgrade and
+compatibility notes if upgrading from an earlier release.
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades and
+[RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+
+## Changes
+
+### Core Server
+
+#### Erlang Version Requirement
+
+ * This release **requires Erlang/OTP `20.3.x`** or `21.x`.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1820](https://github.com/rabbitmq/rabbitmq-server/issues/1820)
+
+#### Bug Fixes
+
+ * Number of socket file descriptors used is now correctly incremented.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#652](https://github.com/rabbitmq/rabbitmq-management/issues/652)
+
+ * Configuration schema file was not copied during upgrades.
+
+ GitHub issues: [rabbitmq/rabbitmq-server#1843](https://github.com/rabbitmq/rabbitmq-server/issues/1843)
+
+ * First heartbeat frame on a successfully negotiated connection is now sent after half of the negotiated heartbeat
+   timeout. Previously it was sent after the full timeout. Since most connections have other activity after
+   connection handshake is complete, this didn't have any visible effects on most systems.
+
+ GitHub issue: [rabbitmq/rabbitmq-common#293](https://github.com/rabbitmq/rabbitmq-common/pull/293)
+
+ * If Syslog was configured to use TCP, the node could fail to start.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1850](https://github.com/rabbitmq/rabbitmq-server/issues/1850)
+
+### CLI Tools
+
+#### Enhancements
+
+ * Several new `rabbitmq-diagnostics` commands useful for diagnostics and health checks.
+
+   GitHub issues: [rabbitmq/rabbitmq-cli#292](https://github.com/rabbitmq/rabbitmq-cli/issues/292), [rabbitmq/rabbitmq-cli#303](https://github.com/rabbitmq/rabbitmq-cli/pull/303)
+
+ * All CLI tools now can display command-specific help with `[command name] --help`, e.g. `rabbitmqctl set_policy --help`
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#306](https://github.com/rabbitmq/rabbitmq-cli/pull/306)
+
+
+### Management Plugin
+
+#### Enhancements
+
+ * `rabbitmqadmin` now defines an exception class that is not available in Python 2, making TCP connection
+    error reporting more consistent with that on Python 3.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#647](https://github.com/rabbitmq/rabbitmq-management/issues/647)
+
+
+### AWS Peer Discovery Plugin
+
+#### Bug Fixes
+
+ * When an EC2 API endpoint could not be reached, the plugin terminated in a way that
+   brought down the entire node with it.
+
+   GitHub issue: [rabbitmq/rabbitmq-peer-discovery-aws#25](https://github.com/rabbitmq/rabbitmq-peer-discovery-aws/issues/25)
+
+
+### STOMP Plugin
+
+#### Enhancements
+
+ * Avoid using a private socket API function that's likely going away in Erlang 22.
+
+ GitHub issue: [rabbitmq/rabbitmq-stomp#128](https://github.com/rabbitmq/rabbitmq-stomp/pull/128)
+
+ * Default topic exchange used by the plugin is now configurable.
+
+ GitHub issue: [rabbitmq/rabbitmq-stomp#94](https://github.com/rabbitmq/rabbitmq-stomp/issues/94)
+
+
+### MQTT Plugin
+
+#### Enhancements
+
+ * Avoid using a private socket API function that's likely going away in Erlang 22.
+
+ GitHub issue: [rabbitmq/rabbitmq-mqtt#171](https://github.com/rabbitmq/rabbitmq-mqtt/pull/171)
+
+
+### AuthN/AuthZ Backend Cache Plugin
+
+#### Bug Fixes
+
+ * The new style config format did not support `auth_cache.cache_refusals`.
+
+ GitHub issue: [rabbitmq/rabbitmq-auth-backend-cache#17](https://github.com/rabbitmq/rabbitmq-auth-backend-cache/issues/17)
+
+### Erlang Client
+
+#### Enhancements
+
+ * Avoid using a private socket API function that's likely going away in Erlang 22.
+
+ GitHub issue: [rabbitmq/rabbitmq-erlang-client#114](https://github.com/rabbitmq/rabbitmq-erlang-client/pull/114)
+
+
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins or the client libraries. Please download the archive named `rabbitmq-3.7.11.tar.gz`.
diff --git a/release-notes/3.7.12.md b/release-notes/3.7.12.md
new file mode 100644
index 0000000000..efd142b174
--- /dev/null
+++ b/release-notes/3.7.12.md
@@ -0,0 +1,209 @@
+## RabbitMQ 3.7.12
+
+RabbitMQ `3.7.12` is a maintenance release. It focuses on bug fixes and
+minor usability improvements. This release [**requires Erlang/OTP 20.3**](https://www.rabbitmq.com/which-erlang.html) or later.
+
+### Erlang/OTP Compatibility Notes
+
+Per the new Erlang version support policy in effect starting with January 2019,
+this release [**no longer supports Erlang/OTP 19.3**](https://groups.google.com/d/msg/rabbitmq-users/G4UJ9zbIYHs/qCeyjkjyCQAJ).
+Make sure a [supported Erlang version](https://www.rabbitmq.com/which-erlang.html) is used before upgrading.
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories)
+explains what package repositories and tools can be used to provision latest patch versions of
+Erlang `20.3.x` and `21.x`.
+
+### Compatibility Notes
+
+#### Minimum Required Erlang Version
+
+This release requires Erlang/OTP 20.3 or later.
+
+#### `rabbitmqctl shutdown` Behavior with Remote Nodes
+
+In earlier releases, when `rabbitmqctl shutdown` was used against a remote node, it would successfully
+shut down the node but would not actually wait for its termination because the pid file path
+reported by the remote node is not available locally. This is because `rabbitmqctl shutdown` was
+meant to be used as a `rabbitmqctl stop` alternative that doesn't require the caller to know the
+pid file path.
+
+Starting with this version, `rabbitmqctl shutdown` will refuse to run against a remote node
+by default. If the earlier behavior is desired and expected, it can be restored by passing `--no-wait`:
+
+``` shell
+# will refuse to run against a remote node in --wait mode as of 3.7.12
+# and --wait is on by default
+rabbitmqctl shutdown -n rabbit@hostname.different.from.local --longnames
+
+# behaves the same way as in 3.7.0 through 3.7.11
+rabbitmqctl shutdown -n rabbit@hostname.different.from.local --longnames --no-wait
+```
+
+#### `inet_dist_listen_min` and `inet_dist_listen_max` in New Style Configuration
+
+`inet_dist_listen_min` and `inet_dist_listen_max` were **removed from new style configuration**.
+They wouldn't have any effect due to how configuration translation is performed.
+Use `advanced.config` or the `RABBITMQ_DIST_PORT` env variable to configure these settings:
+
+``` erlang
+[
+ {kernel, [
+ {inet_dist_listen_min, 33672},
+ {inet_dist_listen_max, 33672}
+ ]},
+ {rabbit, [
+ %% ...
+ ]}
+].
+```
+
+Note that some runtime parameters, e.g. `net_ticktime`, can be set via new style configuration.
+This change is specific to the distribution port settings.
+
+#### Upgrading to Erlang 21.x
+
+When upgrading to this release **and upgrading Erlang to 21.x** at the same time, extra care has to be taken.
+Since CLI tools from RabbitMQ releases older than 3.7.7 will fail on Erlang 21,
+RabbitMQ **must be upgraded before Erlang**.
+
+#### Upgrade Doc Guides and Change Log
+
+See [3.7.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.7.0) for upgrade and
+compatibility notes if upgrading from an earlier release.
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades and
+[RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+### Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the
+[RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Changes
+
+### Core Server
+
+#### Bug Fixes
+
+ * When a node was configured to allow for unlimited frame size, an empty message published
+ via HTTP API resulted in an infinite recursion loop in the parser.
+
+ GitHub issue: [rabbitmq/rabbitmq-common#299](https://github.com/rabbitmq/rabbitmq-common/issues/299)
+
+ * `inet_dist_listen_min` and `inet_dist_listen_max` were **removed from new style configuration**.
+ They wouldn't have any effect due to how configuration translation is performed.
+ Use `advanced.config` or the `RABBITMQ_DIST_PORT` env variable to configure these settings:
+
+ ``` erlang
+ [
+ {kernel, [
+ {inet_dist_listen_min, 33672},
+ {inet_dist_listen_max, 33672}
+ ]},
+ {rabbit, [
+ %% ...
+ ]}
+ ].
+ ```
+
+ Note that some runtime parameters, e.g. `net_ticktime`, can be set via new style configuration.
+    This change is specific to the distribution port settings.
+
+ Contributed by [Gabriele Santomaggio](https://github.com/Gsantomaggio).
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1881](https://github.com/rabbitmq/rabbitmq-server/pull/1881)
+
+
+### CLI Tools
+
+#### Bug Fixes
+
+ * `rabbitmqctl` and other CLI tools could fail when used with nodes using long node names.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#311](https://github.com/rabbitmq/rabbitmq-cli/issues/311)
+
+ * `rabbitmqctl --help` now exits with exit code of 0. `rabbitmqctl` (no arguments) uses code
+ 64 (`EX_USAGE`).
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#307](https://github.com/rabbitmq/rabbitmq-cli/issues/307)
+
+#### Enhancements
+
+ * `rabbitmqctl shutdown` now requires the user to opt in and provide `--no-wait`
+ to be used with remote nodes. There are no behavior changes when the command is invoked
+ against a locally running node. See also the compatibility note to this release.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#309](https://github.com/rabbitmq/rabbitmq-cli/pull/309)
+
+
+### Management Plugin
+
+#### Bug Fixes
+
+ * Large HTTP API request bodies (e.g. importing a large [definition file](https://www.rabbitmq.com/backup.html#rabbitmq-definitions))
+ were not guaranteed to be fully consumed before parsing.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#657](https://github.com/rabbitmq/rabbitmq-management/issues/657)
+
+ * `management_db_cache_multiplier` configuration setting was ignored due to a typo.
+
+ Contributed by [Josh Soref](https://github.com/jsoref).
+
+ GitHub issue: [rabbitmq/rabbitmq-management#659](https://github.com/rabbitmq/rabbitmq-management/pull/659)
+
+
+### MQTT Plugin
+
+#### Enhancements
+
+ * When a retained message is published, its topic is now correctly translated
+ to use MQTT topic separators (a slash) regardless of the internal representation.
+
+ Contributed by [Ryan Sandbach](https://github.com/rsandbach).
+
+ GitHub issue: [rabbitmq/rabbitmq-mqtt#175](https://github.com/rabbitmq/rabbitmq-mqtt/issues/175)
+
+ * When a client sent a duplicate `CONNECT` frame (tried to "reauthenticate"), the connection failed
+ [with a scary looking exception](https://groups.google.com/d/msg/rabbitmq-users/1MBdymMAZzw/HQLDwI_8GAAJ).
+
+ Now the frame is ignored (reauthentication is therefore still impossible) and a warning is logged
+ but the connection is kept open.
+
+ Kudos to Grigory Starinkin for confirming our original hypothesis and providing the steps
+ to reproduce.
+
+ GitHub issue: [rabbitmq/rabbitmq-mqtt#179](https://github.com/rabbitmq/rabbitmq-mqtt/issues/179).
+
+#### Bug Fixes
+
+ * Clearer and shorter log messages when a socket write fails.
+
+ GitHub issue: [rabbitmq/rabbitmq-mqtt#178](https://github.com/rabbitmq/rabbitmq-mqtt/pull/178)
+
+
+### LDAP Plugin
+
+#### Enhancements
+
+ * The plugin now uses its own [logging sink](https://www.rabbitmq.com/logging.html#advanced-configuration), which makes it possible to direct
+ all LDAP logs to a separate file.
+
+ Contributed by [Yury Alioshinov](https://github.com/Haster2004).
+
+ GitHub issue: [rabbitmq/rabbitmq-auth-backend-ldap#105](https://github.com/rabbitmq/rabbitmq-auth-backend-ldap/pull/105)
+
+
+### Shovel Plugin
+
+#### Bug Fixes
+
+ * Configuring a Shovel with a valid AMQP 1.0 destination could fail with an exception.
+
+ GitHub issue: [rabbitmq/rabbitmq-shovel#50](https://github.com/rabbitmq/rabbitmq-shovel/issues/50)
+
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins or the client libraries. Please download the archive named `rabbitmq-3.7.12.tar.gz`.
diff --git a/release-notes/3.7.13.md b/release-notes/3.7.13.md
new file mode 100644
index 0000000000..800ca28fea
--- /dev/null
+++ b/release-notes/3.7.13.md
@@ -0,0 +1,177 @@
+## RabbitMQ 3.7.13
+
+RabbitMQ `3.7.13` is a maintenance release. It focuses on bug fixes and
+minor usability improvements. This release [**requires Erlang/OTP 20.3**](https://www.rabbitmq.com/which-erlang.html) or later.
+
+### Erlang/OTP Compatibility Notes
+
+Per the new Erlang version support policy in effect starting with January 2019,
+this release [**no longer supports Erlang/OTP 19.3**](https://groups.google.com/d/msg/rabbitmq-users/G4UJ9zbIYHs/qCeyjkjyCQAJ).
+Make sure a [supported Erlang version](https://www.rabbitmq.com/which-erlang.html) is used before upgrading.
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories)
+explains what package repositories and tools can be used to provision latest patch versions of
+Erlang `20.3.x` and `21.x`.
+
+### Compatibility Notes
+
+#### Minimum Required Erlang Version
+
+This release requires Erlang/OTP 20.3 or later.
+
+#### Upgrading to Erlang 21.x
+
+When upgrading to this release **and upgrading Erlang to 21.x** at the same time, extra care has to be taken.
+Since CLI tools from RabbitMQ releases older than 3.7.7 will fail on Erlang 21,
+RabbitMQ **must be upgraded before Erlang**.
+
+#### Upgrade Doc Guides and Change Log
+
+See [3.7.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.7.0) for upgrade and
+compatibility notes if upgrading from an earlier release.
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades and
+[RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+### Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the
+[RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Changes
+
+### Core Server
+
+#### Bug Fixes
+
+ * Binding and unbinding operations could fail with a `NOT_FOUND` channel exception if binding tables
+ got out of sync.
+
+ GitHub issues: [rabbitmq/rabbitmq-server#1873](https://github.com/rabbitmq/rabbitmq-server/issues/1873), [rabbitmq/rabbitmq-server#1878](https://github.com/rabbitmq/rabbitmq-server/pull/1878), [rabbitmq/rabbitmq-server#1884](https://github.com/rabbitmq/rabbitmq-server/pull/1884)
+
+ * Eliminated an accumulating inefficiency around topic-based authorisation (only topic
+ exchanges are affected).
+
+ Contributed by [Ayanda Dube](https://github.com/Ayanda-D).
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1886](https://github.com/rabbitmq/rabbitmq-server/pull/1886)
+
+ * TCP connections could accumulate when [Proxy protocol](https://www.rabbitmq.com/networking.html#proxy-protocol)
+ was enabled and proxy's TCP connections were closed before Proxy protocol header was received and parsed.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1901](https://github.com/rabbitmq/rabbitmq-server/issues/1901)
+
+ * When a queue was deleted, [internal events](https://github.com/rabbitmq/rabbitmq-event-exchange)
+ about consumer cancellation (`consumer.deleted`) were not emitted.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1892](https://github.com/rabbitmq/rabbitmq-server/issues/1892)
+
+ * Connection tracking subsystem failed and logged an exception when connection was closed before it could
+ be registered.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1898](https://github.com/rabbitmq/rabbitmq-server/pull/1898)
+
+#### Enhancements
+
+ * Exclusive queue property mismatch during queue declaration now produces a more helpful
+ error message.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1887](https://github.com/rabbitmq/rabbitmq-server/issues/1887)
+
+ * Erlang 22 forward compatibility: support more cipher suites for config value encryption.
+
+ GitHub issue: [rabbitmq/rabbitmq-common#307](https://github.com/rabbitmq/rabbitmq-common/pull/307)
+
+
+### CLI Tools
+
+#### Enhancements
+
+ * `rabbitmqctl shutdown` in `--wait` mode now uses node names instead of `gethostname(2)` when inferring
+ if target node is remote. This is more reliable as it takes explicitly set node names and `ERL_INETRC`
+ settings into account. In addition, it treats all nodes that have `@localhost` in the name as local
+ (running on the same host).
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#327](https://github.com/rabbitmq/rabbitmq-cli/issues/327).
+
+ * `rabbitmqctl forget_cluster_node` produces a more helpful error message when asked to remove a node with running
+ RabbitMQ application.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#323](https://github.com/rabbitmq/rabbitmq-cli/issues/323)
+
+ * New command, `rabbitmqctl force_gc`, that requests fullsweep GC for all processes on the target node.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#319](https://github.com/rabbitmq/rabbitmq-cli/issues/319)
+
+ * All RabbitMQ CLI tools now support `--version` as well as `version` as a command.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#314](https://github.com/rabbitmq/rabbitmq-cli/issues/314)
+
+
+### Management Plugin
+
+#### Bug Fixes
+
+ * `rabbitmqadmin`'s `declare vhost_limit` and `delete vhost_limit` commands did not support the vhost
+ parameter.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#667](https://github.com/rabbitmq/rabbitmq-management/issues/667)
+
+ * `PUT /api/vhost-limits/{vhost}/{limit}` documentation was incorrect and used confusing language.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#667](https://github.com/rabbitmq/rabbitmq-management/issues/667)
+
+
+### Debian and RPM Packages
+
+#### Bug Fixes
+
+ * Debian and RPM packages no longer source `/etc/profile` in post installation scripts.
+
+ GitHub issue: [rabbitmq/rabbitmq-server-release#93](https://github.com/rabbitmq/rabbitmq-server-release/issues/93)
+
+
+### Federation Plugin
+
+#### Bug Fixes
+
+ * Federation links could fail to start with an `{error, already_started}`.
+
+ GitHub issue: [rabbitmq/rabbitmq-federation#84](https://github.com/rabbitmq/rabbitmq-federation/issues/84)
+
+
+### Shovel Plugin
+
+#### Bug Fixes
+
+ * AMQP 1.0 endpoint URIs could fail to parse.
+
+ GitHub issue: [rabbitmq/rabbitmq-amqp1.0-client#23](https://github.com/rabbitmq/rabbitmq-amqp1.0-client/issues/23)
+
+
+### HTTP AuthN/AuthZ Backend Plugin
+
+#### Enhancements
+
+ * User tags are now passed to virtual host, resource and topic authorisation check endpoints as a space-separated string.
+
+ Contributed by [romerod](https://github.com/romerod).
+
+ GitHub issue: [rabbitmq/rabbitmq-auth-backend-http#79](https://github.com/rabbitmq/rabbitmq-auth-backend-http/issues/79)
+
+
+### Trust Store Plugin
+
+#### Bug Fixes
+
+ * HTTP provider did not correctly update `If-Modified-Since` request header when certificates
+ (and thus their `Last-Modified` response header) were updated.
+
+ GitHub issue: [rabbitmq/rabbitmq-trust-store#68](https://github.com/rabbitmq/rabbitmq-trust-store/issues/68)
+
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins or the client libraries. Please download the archive named `rabbitmq-3.7.13.tar.gz`.
diff --git a/release-notes/3.7.14.md b/release-notes/3.7.14.md
new file mode 100644
index 0000000000..13547c32f2
--- /dev/null
+++ b/release-notes/3.7.14.md
@@ -0,0 +1,100 @@
+## RabbitMQ 3.7.14
+
+RabbitMQ `3.7.14` is a maintenance release. It focuses on bug fixes and minor usability improvements. This release [**requires Erlang/OTP 20.3**](https://www.rabbitmq.com/which-erlang.html) or later.
+
+### Erlang/OTP Compatibility Notes
+
+Per the new Erlang version support policy in effect starting with January 2019,
+this release [**no longer supports Erlang/OTP 19.3**](https://groups.google.com/d/msg/rabbitmq-users/G4UJ9zbIYHs/qCeyjkjyCQAJ).
+Make sure a [supported Erlang version](https://www.rabbitmq.com/which-erlang.html) is used before upgrading.
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains what package repositories and tools can be used to provision latest patch versions of Erlang `20.3.x` and `21.x`.
+
+### Compatibility Notes
+
+#### Minimum Required Erlang Version
+
+This release requires Erlang/OTP 20.3 or later.
+
+#### Upgrading to Erlang 21.x
+
+When upgrading to this release **and upgrading Erlang to 21.x** at the same time, extra care has to be taken.
+Since CLI tools from RabbitMQ releases older than 3.7.7 will fail on Erlang 21,
+RabbitMQ **must be upgraded before Erlang**.
+
+#### Upgrade Doc Guides and Change Log
+
+See [3.7.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.7.0) upgrade and compatibility notes if upgrading from an earlier release.
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades and [RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+### Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the
+[RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Changes
+
+### Core Server
+
+#### Enhancements
+
+ * TCP and TLS listeners are started later to reduce the probability of clients connecting to and performing operations
+ on a node that's not fully initialised.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1869](https://github.com/rabbitmq/rabbitmq-server/issues/1869)
+
+ * Reduced schema data store locking for binding operations.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1900](https://github.com/rabbitmq/rabbitmq-server/pull/1900)
+
+
+### CLI Tools
+
+#### Enhancements
+
+ * More informative and user-friendly help messages produced by `help` and `help [command]` commands.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#332](https://github.com/rabbitmq/rabbitmq-cli/pull/332)
+
+
+### Management Plugin
+
+#### Bug Fixes
+
+ * Groups of upstream URIs were not formatted correctly.
+
+ Contributed by [Grigory Starinkin](https://github.com/velimir).
+
+ GitHub issue: [rabbitmq/rabbitmq-management#683](https://github.com/rabbitmq/rabbitmq-management/pull/683)
+
+#### Enhancements
+
+ * `management.load_definitions` can now point at a directory of definition files, not just a single file (see the example below).
+
+ GitHub issue: [rabbitmq/rabbitmq-management#665](https://github.com/rabbitmq/rabbitmq-management/issues/665)
+
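+   A minimal `rabbitmq.conf` sketch; the directory path below is purely illustrative:
+
+   ``` ini
+   # every definition file found in this directory will be imported on node boot
+   management.load_definitions = /etc/rabbitmq/definitions.d
+   ```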
+
+### Federation Plugin
+
+#### Bug Fixes
+
+ * Federation links could fail to start with an `{error, already_started}`.
+
+ GitHub issues: [rabbitmq/rabbitmq-federation#84](https://github.com/rabbitmq/rabbitmq-federation/issues/84), [rabbitmq/rabbitmq-federation#87](https://github.com/rabbitmq/rabbitmq-federation/pull/87)
+
+
+### RPM Package
+
+#### Bug Fixes
+
+ * RPM package post-install scripts now restart the node in case it was running before the upgrade.
+
+ GitHub issue: [rabbitmq/rabbitmq-server-release#94](https://github.com/rabbitmq/rabbitmq-server-release/issues/94)
+
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins or the client libraries. Please download the archive named `rabbitmq-3.7.14.tar.gz`.
diff --git a/release-notes/3.7.15.md b/release-notes/3.7.15.md
new file mode 100644
index 0000000000..a0f3d9903c
--- /dev/null
+++ b/release-notes/3.7.15.md
@@ -0,0 +1,255 @@
+## RabbitMQ 3.7.15
+
+RabbitMQ `3.7.15` is a maintenance release. It focuses on bug fixes and minor usability improvements.
+This release [**requires Erlang/OTP 20.3**](https://www.rabbitmq.com/which-erlang.html) or later.
+It is also the first release to support Erlang 22.
+
+### Erlang/OTP Compatibility Notes
+
+Per the new Erlang version support policy in effect starting with January 2019,
+this release [**no longer supports Erlang/OTP 19.3**](https://groups.google.com/d/msg/rabbitmq-users/G4UJ9zbIYHs/qCeyjkjyCQAJ).
+Make sure a [supported Erlang version](https://www.rabbitmq.com/which-erlang.html) is used before upgrading.
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision latest patch versions of Erlang `20.3.x` and `21.x`.
+
+### Compatibility Notes
+
+#### Minimum Required Erlang Version
+
+This release requires Erlang/OTP 20.3 or later.
+
+#### Upgrading to Erlang 21.x
+
+When upgrading to this release **and upgrading Erlang to 21.x** at the same time, extra care has to be taken.
+Since CLI tools from RabbitMQ releases older than 3.7.7 will fail on Erlang 21,
+RabbitMQ **must be upgraded before Erlang**.
+
+#### Upgrade Doc Guides and Change Log
+
+See [3.7.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.7.0) upgrade
+and compatibility notes if upgrading from an earlier release.
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades
+and [RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+### Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the
+[RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Changes
+
+### Core Server
+
+#### Bug Fixes
+
+ * `rabbitmq.conf` validation errors on startup unintentionally lacked a lot of relevant details.
+
+ Contributed by Grigory Starinkin.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1985](https://github.com/rabbitmq/rabbitmq-server/issues/1985)
+
+ * Message store compaction could enter a race condition with queue operations.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2000](https://github.com/rabbitmq/rabbitmq-server/issues/2000)
+
+ * Internal schema data store consistency checks used an on-disk location which could be unintentionally
+ preserved between deployments and make subsequent deployments fail. Now an entirely in-memory set of tables
+ is used for schema consistency checks.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1960](https://github.com/rabbitmq/rabbitmq-server/pull/1960)
+
+ * When [publisher confirms](https://www.rabbitmq.com/confirms.html) had to send both positive and negative acknowledgements around the same time,
+ it could be done in a way that would violate expectations of most client libraries.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1719](https://github.com/rabbitmq/rabbitmq-server/pull/1719)
+
+ * Policy validation could allow invalid values in certain cases.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1999](https://github.com/rabbitmq/rabbitmq-server/pull/1999)
+
+ * `amq.rabbitmq.log` exchange was lazily initialised and could be temporarily unavailable after node start.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1973](https://github.com/rabbitmq/rabbitmq-server/issues/1973)
+
+ * Clearing a policy with `overflow` behaviour or removing the key from it did not
+ roll back the overflow behaviour to `drop-head` (the default).
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1980](https://github.com/rabbitmq/rabbitmq-server/issues/1980)
+
+ * Missing `getconf` command (used to fetch platform's memory page size) is now handled more gracefully
+ with a default value.
+
+ GitHub issue: [rabbitmq/rabbitmq-common#318](https://github.com/rabbitmq/rabbitmq-common/issues/318)
+
+
+#### Enhancements
+
+ * Initial Erlang 22 compatibility.
+
+ GitHub issues: [rabbitmq/rabbitmq-common#315](https://github.com/rabbitmq/rabbitmq-common/issues/315),
+ [rabbitmq/rabbitmq-common#313](https://github.com/rabbitmq/rabbitmq-common/pull/313),
+ [rabbitmq/rabbitmq-common#323](https://github.com/rabbitmq/rabbitmq-common/pull/323)
+
+ * Connection tracking is now more efficient, reducing node load
+ and internal event backlog in case of [high connection churn](https://www.rabbitmq.com/networking.html#dealing-with-high-connection-churn).
+
+ GitHub issues: [rabbitmq/rabbitmq-server#1971](https://github.com/rabbitmq/rabbitmq-server/pull/1971),
+ [rabbitmq/rabbitmq-server#1975](https://github.com/rabbitmq/rabbitmq-server/pull/1975)
+
+
+### Management Plugin
+
+#### Bug Fixes
+
+ * Using kernel sendfile to serve HTTP API documentation pages could lead to leaked file descriptors
+   when those pages were accessed repeatedly (e.g. a browser tab left open for weeks).
+ The plugin now disables sendfile for those static files by default. It can be manually enabled
+ via configuration as needed.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#698](https://github.com/rabbitmq/rabbitmq-management/pull/698)
+
+ * Listing permissions could result in a 500 response.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#688](https://github.com/rabbitmq/rabbitmq-management/pull/688)
+
+ * When CORS headers were enabled, binding endpoints were inaccessible due to an exception.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#687](https://github.com/rabbitmq/rabbitmq-management/pull/687)
+
+ * An option to display message rates for the last 8 and 24 hours was hidden in the UI even when data
+ for those time intervals was available.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#693](https://github.com/rabbitmq/rabbitmq-management/pull/693)
+
+ * Information unit suffixes incorrectly suggested that values were powers of 10 (gigabytes) instead of
+   powers of 2 (gibibytes).
+
+ GitHub issue: [rabbitmq/rabbitmq-management#694](https://github.com/rabbitmq/rabbitmq-management/pull/694)
+
+
+### AMQP 1.0 Plugin
+
+#### Bug Fixes
+
+ * The default virtual host specified in `amqp1_0.default_vhost` (`rabbitmq_amqp1_0.default_vhost` in classic config format)
+   now takes precedence over `default_vhost` (`rabbit.default_vhost`) in the core broker. See the example below.
+
+ GitHub issue: [rabbitmq/rabbitmq-amqp1.0#86](https://github.com/rabbitmq/rabbitmq-amqp1.0/pull/86)
+
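+   A sketch of the relevant `rabbitmq.conf` keys; the virtual host name used here is illustrative:
+
+   ``` ini
+   # used for AMQP 1.0 connections; takes precedence over the core key below
+   amqp1_0.default_vhost = amqp10-vhost
+   # core broker default virtual host
+   default_vhost = /
+   ```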
+
+### Shovel Plugin
+
+#### Bug Fixes
+
+ * Connections to source and destination nodes are now correctly closed if topology operations
+ fail on either end (e.g. due to insufficient permissions).
+
+ Contributed by Grigory Starinkin.
+
+ GitHub issue: [rabbitmq/rabbitmq-shovel#54](https://github.com/rabbitmq/rabbitmq-shovel/pull/54)
+
+ * Improved handling of cases where AMQP 1.0 endpoint connection was refused access to a virtual host.
+
+ GitHub issue: [rabbitmq/rabbitmq-shovel#57](https://github.com/rabbitmq/rabbitmq-shovel/pull/57)
+
+#### Enhancements
+
+ * The plugin now provides a new CLI command and HTTP API endpoint for restarting
+ a dynamic Shovel.
+
+ GitHub issue: [rabbitmq/rabbitmq-shovel#48](https://github.com/rabbitmq/rabbitmq-shovel/issues/48)
+
+
+### Federation Plugin
+
+#### Enhancements
+
+ * Queue federation now uses a more informative consumer tag value.
+
+ GitHub issue: [rabbitmq/rabbitmq-federation#66](https://github.com/rabbitmq/rabbitmq-federation/issues/66)
+
+
+### CLI Tools
+
+#### Bug Fixes
+
+ * Streaming `list_*` commands could hang when encountering an error from one of the cluster nodes.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#336](https://github.com/rabbitmq/rabbitmq-cli/issues/336)
+
+ * `rabbitmqctl delete_queue -u` failed argument validation due to a typo.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#339](https://github.com/rabbitmq/rabbitmq-cli/issues/339)
+
+ * `rabbitmq-diagnostics cipher_suites` now provides a way to display all available (not just enabled)
+ cipher suites.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#342](https://github.com/rabbitmq/rabbitmq-cli/issues/342)
+
+#### Enhancements
+
+ * When an unsupported node name is used, CLI tools will now produce more specific error messages.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#345](https://github.com/rabbitmq/rabbitmq-cli/issues/345)
+
+ * `rabbitmq-diagnostics erlang_version` now supports `--offline`.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#315](https://github.com/rabbitmq/rabbitmq-cli/issues/315)
+
+
+### Web STOMP Plugin
+
+#### Enhancements
+
+ * Default STOMP plugin login is now used for `CONNECT` frames without credentials.
+
+ Contributed by Nick Goossens.
+
+ GitHub issue: [rabbitmq/rabbitmq-web-stomp#110](https://github.com/rabbitmq/rabbitmq-web-stomp/pull/110)
+
+
+### Erlang AMQP 0-9-1 Client
+
+#### Bug Fixes
+
+ * Direct connections (via Erlang distribution) from outside of a RabbitMQ node should no longer fail due to
+   a dependency on a module that was previously a part of the core RabbitMQ broker.
+
+ GitHub issue: [rabbitmq/rabbitmq-erlang-client#91](https://github.com/rabbitmq/rabbitmq-erlang-client/issues/91)
+
+
+### Auth Backend Cache Plugin
+
+#### Bug Fixes
+
+ * Virtual host endpoint requests were not cached.
+
+ GitHub issues: [rabbitmq/rabbitmq-auth-backend-cache#20](https://github.com/rabbitmq/rabbitmq-auth-backend-cache/issues/20),
+ [rabbitmq/rabbitmq-auth-backend-cache#24](https://github.com/rabbitmq/rabbitmq-auth-backend-cache/issues/24)
+
+
+### Top Plugin
+
+#### Bug Fixes
+
+ * Calculation worker process could fail due to an exception.
+
+ GitHub issue: [rabbitmq/rabbitmq-top#34](https://github.com/rabbitmq/rabbitmq-top/issues/34)
+
+
+### JMS Topic Exchange Plugin
+
+#### Bug Fixes
+
+ * `LIKE` operator could be incorrectly evaluated.
+
+ GitHub issue: [rabbitmq/rabbitmq-jms-topic-exchange#18](https://github.com/rabbitmq/rabbitmq-jms-topic-exchange/issues/18)
+
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins or the client libraries. Please download the archive named `rabbitmq-server-3.7.15.tar.xz`.
diff --git a/release-notes/3.7.16.md b/release-notes/3.7.16.md
new file mode 100644
index 0000000000..234c0614f2
--- /dev/null
+++ b/release-notes/3.7.16.md
@@ -0,0 +1,174 @@
+## RabbitMQ 3.7.16
+
+RabbitMQ `3.7.16` is a maintenance release. It focuses on bug fixes and minor usability improvements.
+
+### Erlang/OTP Compatibility Notes
+
+Per the new Erlang version support policy in effect starting with January 2019,
+this release [**no longer supports Erlang/OTP 19.3**](https://groups.google.com/d/msg/rabbitmq-users/G4UJ9zbIYHs/qCeyjkjyCQAJ).
+Make sure a [supported Erlang version](https://www.rabbitmq.com/which-erlang.html) is used before upgrading.
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision latest patch versions of Erlang `21.3.x` and `22.x`.
+
+### Compatibility Notes
+
+#### OpenSUSE Leap 42.3 Support Dropped
+
+OpenSUSE Leap 42.3 has [gone out of support on July 1st](https://en.opensuse.org/Lifetime), 2019. Packages for
+that distribution are therefore no longer produced because upstream OpenSUSE repositories are no longer
+available.
+
+#### Upgrading to Erlang 21.x or Later Versions
+
+When upgrading to this release **and upgrading Erlang to 21.x or later** at the same time, extra care has to be taken.
+Since CLI tools from RabbitMQ releases older than 3.7.7 will fail on Erlang 21 or later,
+RabbitMQ **must be upgraded before Erlang**.
+
+#### Upgrade Doc Guides and Change Log
+
+See [3.7.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.7.0) upgrade
+and compatibility notes first if upgrading from an earlier release.
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades
+and [RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+### Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the
+[RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Changes
+
+### Core Server
+
+#### Bug Fixes
+
+ * When a node hosting an exclusive queue was restarted with queue leader locator being something other than
+ "node local", redeclaration of the queue failed.
+
+ Contributed by Lajos Gerecs.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2039](https://github.com/rabbitmq/rabbitmq-server/issues/2039)
+
+#### Enhancements
+
+ * It is now possible to configure default worker pool size:
+
+ ``` ini
+ default_worker_pool_size = 1024
+ ```
+
+ This may be necessary in environments with a large number of queues (say, hundreds of thousands)
+ to speed up queue and binding recovery on node boot. By default the size of the pool is
+ computed as the number of [available runtime schedulers](https://www.rabbitmq.com/runtime.html).
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2030](https://github.com/rabbitmq/rabbitmq-server/issues/2030)
+
+
+### CLI Tools
+
+#### Bug Fixes
+
+ * `rabbitmqctl report` and `rabbitmqctl list_queues` could fail with a `{bad_argument,type}` error because a 3.8-specific column was erroneously listed as supported.
+
+ Contributed by Lajos Gerecs.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#354](https://github.com/rabbitmq/rabbitmq-cli/pull/354)
+
+#### Enhancements
+
+ * New health check: `rabbitmq-diagnostics check_virtual_hosts`.
+
+   GitHub issue: [rabbitmq/rabbitmq-cli#302](https://github.com/rabbitmq/rabbitmq-cli/issues/302)
+
+ * New observability command, `rabbitmq-diagnostics log_location [--all]`, displays log file location(s)
+ on target node. The node is assumed to be running.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#325](https://github.com/rabbitmq/rabbitmq-cli/issues/325)
+
+ * New observability commands, `rabbitmq-diagnostics log_tail` and `rabbitmq-diagnostics log_tail_stream`,
+ provide a way to remotely inspect recent log file entries on a running node. The commands are meant
+ to be used for troubleshooting and not to automate log aggregation.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#326](https://github.com/rabbitmq/rabbitmq-cli/issues/326)
+
+ * `rabbitmq-diagnostics schema_info` displays information about internal database schema
+   (tables, their properties and metadata).
+
+   GitHub issue: [rabbitmq/rabbitmq-cli#330](https://github.com/rabbitmq/rabbitmq-cli/issues/330)
+
+
+### Management Plugin
+
+#### Bug Fixes
+
+ * When multiple definition files were [imported on node boot](https://www.rabbitmq.com/management.html#load-definitions), errors were not reported
+ the same way they would be with a single file.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#710](https://github.com/rabbitmq/rabbitmq-management/pull/710)
+
+#### Enhancements
+
+ * The plugin now disables [client inactivity timeout](https://www.rabbitmq.com/management.html#advanced-options) for the definition import endpoints and reports
+   progress using periodic [`102 Processing` responses](https://httpstatuses.com/102) so that the connection is kept alive even if the
+   definition import takes a long time.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#711](https://github.com/rabbitmq/rabbitmq-management/pull/711)
+
+
+### Shovel Plugin
+
+#### Bug Fixes
+
+ * `3.7.15` introduced a regression where Shovels would not try to reconnect after a network
+ failure.
+
+ GitHub issue: [rabbitmq/rabbitmq-shovel#60](https://github.com/rabbitmq/rabbitmq-shovel/issues/60)
+
+
+### Web MQTT Plugin
+
+#### Bug Fixes
+
+ * Default maximum connection limit set by a dependency (1024) has been lifted.
+
+ GitHub issue: [rabbitmq/rabbitmq-web-mqtt#55](https://github.com/rabbitmq/rabbitmq-web-mqtt/issues/55)
+
+ * Client ID wasn't propagated to connection metadata for MQTT connections over WebSockets.
+
+ GitHub issue: [rabbitmq/rabbitmq-web-mqtt#57](https://github.com/rabbitmq/rabbitmq-web-mqtt/issues/57)
+
+
+### Web STOMP Plugin
+
+#### Bug Fixes
+
+ * Default maximum connection limit set by a dependency (1024) has been lifted.
+
+ GitHub issue: [rabbitmq/rabbitmq-web-stomp#113](https://github.com/rabbitmq/rabbitmq-web-stomp/issues/113)
+
+
+### Trust Store Plugin
+
+#### Enhancements
+
+ * It is now possible to configure HTTPS proxy settings for the HTTPS backend.
+
+ GitHub issue: [rabbitmq/rabbitmq-trust-store#69](https://github.com/rabbitmq/rabbitmq-trust-store/issues/69)
+
+
+### Debian Package
+
+#### Bug Fixes
+
+ * Exit code 69 is now considered a success again in the systemd service file.
+
+ GitHub issues: [rabbitmq/rabbitmq-server-release#104](https://github.com/rabbitmq/rabbitmq-server-release/pull/104), [rabbitmq/rabbitmq-server-release#51](https://github.com/rabbitmq/rabbitmq-server-release/issues/51)
+
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins or the client libraries. Please download the archive named `rabbitmq-server-3.7.16.tar.xz`.
diff --git a/release-notes/3.7.17.md b/release-notes/3.7.17.md
new file mode 100644
index 0000000000..f0b0e0947f
--- /dev/null
+++ b/release-notes/3.7.17.md
@@ -0,0 +1,122 @@
+## RabbitMQ 3.7.17
+
+RabbitMQ `3.7.17` is a maintenance release. It focuses on bug fixes and upgrades
+a JavaScript dependency in the management UI to patch three CVEs in that library.
+
+### Erlang/OTP Compatibility Notes
+
+Per the new Erlang version support policy in effect starting with January 2019,
+this release [**no longer supports Erlang/OTP 19.3**](https://groups.google.com/d/msg/rabbitmq-users/G4UJ9zbIYHs/qCeyjkjyCQAJ).
+Make sure a [supported Erlang version](https://www.rabbitmq.com/which-erlang.html) is used before upgrading.
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision latest patch versions of Erlang `21.3.x` and `22.x`.
+
+### Compatibility Notes
+
+#### Upgrading to Erlang 21.x or Later Versions
+
+When upgrading to this release **and upgrading Erlang to 21.x or later** at the same time, extra care has to be taken.
+Since CLI tools from RabbitMQ releases older than 3.7.7 will fail on Erlang 21 or later,
+RabbitMQ **must be upgraded before Erlang**.
+
+#### Upgrade Doc Guides and Change Log
+
+See [3.7.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.7.0) upgrade
+and compatibility notes first if upgrading from an earlier release.
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades
+and [RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+### Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the
+[RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Changes
+
+### Core Server
+
+#### Enhancements
+
+ * `amqqueue` module and the feature flag subsystem were backported from 3.8. This makes it possible
+ for plugin developers to target both `3.7.x` and `3.8.x` release series.
+
+ Note that this only backports the API. There are no feature flags to enable in `3.7.17`.
+ No 3.8-specific features that must be enabled via feature flags were backported and there are no
+ plans to do it in later `3.7.x` releases.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2028](https://github.com/rabbitmq/rabbitmq-server/issues/2028)
+
+
+### CLI Tools
+
+#### Bug Fixes
+
+ * `rabbitmqctl stop` description had a typo.
+
+ Contributed by Eugene Pirogov.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#363](https://github.com/rabbitmq/rabbitmq-cli/pull/363)
+
+#### Enhancements
+
+ * Commands that take passwords as arguments now accept them via standard input.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#365](https://github.com/rabbitmq/rabbitmq-cli/issues/365)
+
+ * `rabbitmq-diagnostics observer` no longer requires RabbitMQ application to be running on
+ the target node.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#368](https://github.com/rabbitmq/rabbitmq-cli/pull/368)
+
+
+### Management Plugin
+
+#### Bug Fixes
+
+ * jQuery has been upgraded to `3.4.x`. This series addresses a number of known CVEs in the library: [CVE-2015-9251](https://nvd.nist.gov/vuln/detail/CVE-2015-9251), [CVE-2017-16012](https://nvd.nist.gov/vuln/detail/CVE-2017-16012), [CVE-2019-11358](https://nvd.nist.gov/vuln/detail/CVE-2019-11358).
+
+ GitHub issue: [rabbitmq/rabbitmq-management#714](https://github.com/rabbitmq/rabbitmq-management/issues/714)
+
+ * Blank effective policy definitions were formatted as a JSON array instead of an empty object in API responses.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#701](https://github.com/rabbitmq/rabbitmq-management/issues/701)
+
+#### Usability
+
+ * Definition import endpoint no longer reports progress via `102 Processing` responses. They proved to be problematic
+ for some HTTP clients, including `rabbitmqadmin`.
+
+ GitHub issues: [rabbitmq/rabbitmq-management#715](https://github.com/rabbitmq/rabbitmq-management/issues/715), [rabbitmq/rabbitmq-management#718](https://github.com/rabbitmq/rabbitmq-management/pull/718)
+
+ * Leading and trailing white space characters are now highlighted in queue and exchange names to make them easier to spot.
+ Tab characters are highlighted as well, even in the middle of the word. The idea is that those characters are typically used by mistake and should be easier to notice.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#721](https://github.com/rabbitmq/rabbitmq-management/pull/721)
+
+ * It wasn't possible to delete a binding declared with `routing_key` set to `null` (only possible via the HTTP API).
+ Now such bindings can be deleted but this practice is highly discouraged. Future HTTP API versions might refuse to accept
+ `null` values as they are impossible to declare via AMQP 0-9-1.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#723](https://github.com/rabbitmq/rabbitmq-management/pull/723)
+
+
+### Shovel Management Plugin
+
+#### Bug Fixes
+
+ * Shovel restart link conflicted with that in the Federation management plugin. When both were enabled, only one
+ was actually functional.
+
+ GitHub issue: [rabbitmq/rabbitmq-shovel-management#35](https://github.com/rabbitmq/rabbitmq-shovel-management/issues/35)
+
+ * Shovel URI was misformatted for a period of time after UI refresh happened and before the Shovel was fully initialised and running.
+
+ GitHub issue: [rabbitmq/rabbitmq-shovel-management#37](https://github.com/rabbitmq/rabbitmq-shovel-management/issues/37)
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins or the client libraries. Please download the archive named `rabbitmq-server-3.7.17.tar.xz`.
diff --git a/release-notes/3.7.18.md b/release-notes/3.7.18.md
new file mode 100644
index 0000000000..5060f9b8ea
--- /dev/null
+++ b/release-notes/3.7.18.md
@@ -0,0 +1,163 @@
+## RabbitMQ 3.7.18
+
+RabbitMQ `3.7.18` is a maintenance release that includes a fix to [CVE-2019-11281](https://pivotal.io/security/cve-2019-11281).
+Kudos to [Markus @RareData Alvila](https://twitter.com/RareData) for responsibly disclosing the vulnerability.
+
+The release also contains bug fixes and internal
+API changes that align the common plugin API with the [upcoming `3.8.0` version](https://github.com/rabbitmq/rabbitmq-server/releases/).
+
+### Erlang/OTP Compatibility Notes
+
+Per the new Erlang version support policy in effect starting with January 2019,
+this release [**no longer supports Erlang/OTP 19.3**](https://groups.google.com/d/msg/rabbitmq-users/G4UJ9zbIYHs/qCeyjkjyCQAJ).
+Make sure a [supported Erlang version](https://www.rabbitmq.com/which-erlang.html) is used before upgrading.
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision latest patch versions of Erlang `21.3.x` and `22.x`.
+
+### Compatibility Notes
+
+This release introduces credential obfuscation for Shovel connections and Federation links. This requires
+one more application (library) from Erlang/OTP, `tools`. Operators must make sure that the Erlang runtime
+provides it. On Debian-based systems, the package [`erlang-tools`](https://packages.ubuntu.com/bionic/erlang-tools) must be installed.
+
+This release contains a **breaking change to the plugin API** around queue state access introduced in `3.7.17`.
+Plugins that do not use that API revision are not affected. All plugins that ship with RabbitMQ have been updated
+as necessary.
+
+This revision was necessary to maintain a single API with the upcoming `3.8.0` release.
+
+#### Upgrading to Erlang 21.x or Later Versions
+
+When upgrading to this release **and upgrading Erlang to 21.x or later** at the same time, extra care has to be taken.
+Since CLI tools from RabbitMQ releases older than 3.7.7 will fail on Erlang 21 or later,
+RabbitMQ **must be upgraded before Erlang**.
+
+#### Upgrade Doc Guides and Change Log
+
+See [3.7.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.7.0) upgrade
+and compatibility notes first if upgrading from an earlier release.
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades
+and [RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+### Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the
+[RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Changes
+
+### Core Server
+
+#### Enhancements
+
+ * New style config file parser is now more forgiving: it handles trailing whitespace and empty lines
+ better.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2073](https://github.com/rabbitmq/rabbitmq-server/pull/2073)
+
+ * `gen_server2` now includes two new functions, `stop/1` and `stop/3`, which `gen_server` in OTP
+ introduced a couple of releases ago.
+
+ Contributed by @codeadict.
+
+ GitHub issue: [rabbitmq/rabbitmq-common#335](https://github.com/rabbitmq/rabbitmq-common/pull/335)
+
+#### Bug Fixes
+
+ * Log rotation is functional again on Windows.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2059](https://github.com/rabbitmq/rabbitmq-server/issues/2059)
+
+ * White space characters in `RABBITMQ_BASE` and other [configurable paths](https://www.rabbitmq.com/configure.html#customise-environment) are now handled better on Windows.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2025](https://github.com/rabbitmq/rabbitmq-server/issues/2025)
+
+ * Eliminated a "deprecated Ranch options" log warning.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2069](https://github.com/rabbitmq/rabbitmq-server/issues/2069)
+
+
+### Management Plugin
+
+#### Enhancements
+
+ * Two more TLS listener options are now available via the new style configuration file:
+   `management.ssl.verify` and `management.ssl.fail_if_no_peer_cert`. See the example below.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#735](https://github.com/rabbitmq/rabbitmq-management/issues/735)
+
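+   A sketch of the two new keys, to be combined with the existing management TLS listener settings; the values shown are illustrative:
+
+   ``` ini
+   # request and verify a certificate from connecting HTTPS clients
+   management.ssl.verify = verify_peer
+   # reject clients that do not present a certificate
+   management.ssl.fail_if_no_peer_cert = true
+   ```
+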
+#### Bug Fixes
+
+ * HTTP API documentation correction.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#737](https://github.com/rabbitmq/rabbitmq-management/issues/737)
+
+
+### AMQP 1.0 Plugin
+
+#### Bug Fixes
+
+ * Connections that fail due to insufficient client permissions (authorisation failures) are now handled
+ gracefully with reasonable log messages.
+
+ GitHub issue: [rabbitmq/rabbitmq-amqp1.0#92](https://github.com/rabbitmq/rabbitmq-amqp1.0/pull/92)
+
+
+### Shovel Plugin
+
+#### Bug Fixes
+
+ * Sensitive values in Shovel connection state (namely, the connection credentials) are now stored in
+ encrypted form. This avoids unintentional credential logging by the runtime (exception logger)
+ at the cost of making troubleshooting authentication failures harder.
+
+ GitHub issue: [rabbitmq-erlang-client#123](https://github.com/rabbitmq/rabbitmq-erlang-client/issues/123)
+
+
+### Federation Plugin
+
+#### Bug Fixes
+
+ * Sensitive values in Federation link state (namely, the connection credentials) are now stored in
+ encrypted form. This avoids unintentional credential logging by the runtime (exception logger)
+ at the cost of making troubleshooting authentication failures harder.
+
+ GitHub issue: [rabbitmq-erlang-client#123](https://github.com/rabbitmq/rabbitmq-erlang-client/issues/123)
+
+
+### RabbitMQ Erlang Client
+
+#### Bug Fixes
+
+ * Sensitive values in connection state (namely, the connection credentials) are now stored in
+ encrypted form. This avoids unintentional credential logging by the runtime (exception logger)
+ at the cost of making troubleshooting authentication failures harder.
+
+ GitHub issue: [rabbitmq-erlang-client#123](https://github.com/rabbitmq/rabbitmq-erlang-client/issues/123)
+
+
+### Web STOMP Examples Plugin
+
+#### Bug Fixes
+
+ * jQuery was upgraded to `3.4.x`.
+
+ GitHub issue: [rabbitmq/rabbitmq-web-stomp-examples#18](https://github.com/rabbitmq/rabbitmq-web-stomp-examples/issues/18)
+
+
+### Web MQTT Examples Plugin
+
+#### Bug Fixes
+
+ * jQuery was upgraded to `3.4.x`.
+
+ GitHub issue: [rabbitmq/rabbitmq-web-mqtt-examples#2](https://github.com/rabbitmq/rabbitmq-web-mqtt-examples/issues/2)
+
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins or the client libraries. Please download the archive named `rabbitmq-server-3.7.18.tar.xz`.
diff --git a/release-notes/3.7.19.md b/release-notes/3.7.19.md
new file mode 100644
index 0000000000..6f26fec960
--- /dev/null
+++ b/release-notes/3.7.19.md
@@ -0,0 +1,114 @@
+## RabbitMQ 3.7.19
+
+RabbitMQ `3.7.19` is a maintenance release that focuses on bug fixes.
+RabbitMQ `3.7.x` series is supported [through March 2020](https://www.rabbitmq.com/versions.html).
+
+### Erlang/OTP Compatibility Notes
+
+Per the new Erlang version support policy in effect starting with January 2019,
+this release [**no longer supports Erlang/OTP 20.3**](https://groups.google.com/d/msg/rabbitmq-users/G4UJ9zbIYHs/qCeyjkjyCQAJ).
+Make sure a [supported Erlang version](https://www.rabbitmq.com/which-erlang.html) is used before upgrading.
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision latest patch versions of Erlang `21.3.x` and `22.x`.
+
+### Compatibility Notes
+
+#### Upgrading to Erlang 21.x or Later Versions
+
+When upgrading to this release **and upgrading Erlang to 21.x or later** at the same time, extra care has to be taken.
+Since CLI tools from RabbitMQ releases older than 3.7.7 will fail on Erlang 21 or later,
+RabbitMQ **must be upgraded before Erlang**.
+
+#### Upgrade Doc Guides and Change Log
+
+See [3.7.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.7.0) upgrade
+and compatibility notes first if upgrading from an earlier release.
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades
+and [RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+### Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the
+[RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Changes
+
+### Core Server
+
+#### Bug Fixes
+
+ * `rabbit_diagnostics:binary_refs/1`, a predecessor to Recon used for binary heap inspection,
+ could fail with an exception due to a race condition.
+
+ Contributed by John Eckersberg (@jeckersb).
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2117](https://github.com/rabbitmq/rabbitmq-server/pull/2117)
+
+ * It was not possible to set `ERL_MAX_PORTS` in `rabbitmq-env-conf.bat`.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2084](https://github.com/rabbitmq/rabbitmq-server/issues/2084)
+
+
+### Management Plugin
+
+#### Bug Fixes
+
+ * Import of large definition files submitted via management UI form could fail.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#739](https://github.com/rabbitmq/rabbitmq-management/issues/739)
+
+
+### CLI Tools
+
+#### Bug Fixes
+
+ * `rabbitmq-diagnostics node_health_check` would pass even when some cluster nodes had
+ network partitions in effect.
+
+ Contributed by Lajos Gerecs (@luos).
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2127](https://github.com/rabbitmq/rabbitmq-server/pull/2127)
+
+ * `rabbitmqctl set_parameter` output could be misleading.
+
+ Contributed by @Whitespirit0.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#376](https://github.com/rabbitmq/rabbitmq-cli/issues/376)
+
+
+### STOMP Plugin
+
+#### Bug Fixes
+
+ * Cleanup of a subscription after a server-initiated [consumer cancelation](https://www.rabbitmq.com/consumer-cancel.html)
+ failed with an exception.
+
+ GitHub issue: [rabbitmq/rabbitmq-stomp#136](https://github.com/rabbitmq/rabbitmq-stomp/issues/136)
+
+
+### Event Exchange Plugin
+
+#### Bug Fixes
+
+ * Events could fail to be emitted in multi-node clusters.
+
+ GitHub issue: [rabbitmq/rabbitmq-event-exchange#41](https://github.com/rabbitmq/rabbitmq-event-exchange/issues/41)
+
+
+### Management Exchange Plugin
+
+#### Bug Fixes
+
+ * The plugin was not updated to correctly fetch a list of TCP and TLS listeners used
+ by the management plugin.
+
+ GitHub issue: [rabbitmq/rabbitmq-management-exchange#8](https://github.com/rabbitmq/rabbitmq-management-exchange/issues/8)
+
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins or the client libraries. Please download the archive named `rabbitmq-server-3.7.19.tar.xz`.
diff --git a/release-notes/3.7.2.md b/release-notes/3.7.2.md
new file mode 100644
index 0000000000..ea0fb98b30
--- /dev/null
+++ b/release-notes/3.7.2.md
@@ -0,0 +1,30 @@
+## RabbitMQ 3.7.2
+
+RabbitMQ `3.7.2` fixes an issue in the HTTP auth backend plugin that was introduced in `3.7.1`.
+It has no other functional changes compared to `3.7.1`.
+
+### Upgrades and Compatibility
+
+This release has no known incompatibilities with earlier 3.7.x versions.
+See [3.7.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.7.0) upgrade and compatibility notes if upgrading from an earlier release.
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades.
+
+See [RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+
+## Changes
+
+### Auth Backend HTTP Plugin
+
+#### Bug Fixes
+
+ * Plugin threw an exception while performing HTTP requests.
+
+ GitHub issue: [rabbitmq/rabbitmq-auth-backend-http#62](https://github.com/rabbitmq/rabbitmq-auth-backend-http/issues/62)
+
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins or the client libraries. Please download the archive named `rabbitmq-3.7.2.tar.gz`.
diff --git a/release-notes/3.7.20.md b/release-notes/3.7.20.md
new file mode 100644
index 0000000000..c3f9aa5325
--- /dev/null
+++ b/release-notes/3.7.20.md
@@ -0,0 +1,163 @@
+## RabbitMQ 3.7.20
+
+RabbitMQ `3.7.20` is a maintenance release that focuses on bug fixes.
+RabbitMQ `3.7.x` series is supported [through March 2020](https://www.rabbitmq.com/versions.html).
+
+### Erlang/OTP Compatibility Notes
+
+Per the new Erlang version support policy in effect starting with January 2019,
+this release [**no longer supports Erlang/OTP 20.3**](https://groups.google.com/d/msg/rabbitmq-users/G4UJ9zbIYHs/qCeyjkjyCQAJ).
+Make sure a [supported Erlang version](https://www.rabbitmq.com/which-erlang.html) is used before upgrading.
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision latest patch versions of Erlang `21.3.x` and `22.x`.
+
+### Compatibility Notes
+
+#### Upgrading to Erlang 21.x or Later Versions
+
+When upgrading to this release **and upgrading Erlang to 21.x or later** at the same time, extra care has to be taken.
+Since CLI tools from RabbitMQ releases older than 3.7.7 will fail on Erlang 21 or later,
+RabbitMQ **must be upgraded before Erlang**.
+
+#### Upgrade Doc Guides and Change Log
+
+See [3.7.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.7.0) upgrade
+and compatibility notes first if upgrading from an earlier release.
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades
+and [RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+### Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the
+[RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Changes
+
+### Core Server
+
+#### Bug Fixes
+
+ * Rolling cluster upgrade could fail if new versions were deployed to all cluster nodes at once instead
+ of a rolling upgrade-then-restart for each node individually.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2132](https://github.com/rabbitmq/rabbitmq-server/issues/2132)
+
+ * Improved error handling in a module that continuously registers the node with [epmd](https://www.rabbitmq.com/clustering.html) avoids
+ log noise.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2130](https://github.com/rabbitmq/rabbitmq-server/issues/2130)
+
+
+### CLI Tools
+
+#### Bug Fixes
+
+ * `rabbitmqctl await_startup` failed with an exception when effective timeout was set to `infinity`.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2144](https://github.com/rabbitmq/rabbitmq-server/pull/2144)
+
+ * `rabbitmq-diagnostics check_port_connectivity` produced a false positive in an IPv6-only environment.
+
+ Contributed by Gabriele Santomaggio.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#385](https://github.com/rabbitmq/rabbitmq-cli/pull/385)
+
+ * `rabbitmq-diagnostics status`, `rabbitmq-diagnostics cluster_status` and `rabbitmq-diagnostics listeners` now support
+   `--formatter=erlang` (raw Erlang data structure output).
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#383](https://github.com/rabbitmq/rabbitmq-cli/issues/383)
+
+
+### Kubernetes Peer Discovery Plugin
+
+#### Bug Fixes
+
+ * Requests to Kubernetes API endpoints failed with an `nxdomain` (domain name resolution failure)
+ in an IPv6-only environment. Now the plugin will configure its HTTP[S] client to use IPv6 if
+ the [inetrc file](http://erlang.org/doc/apps/erts/inet_cfg.html) tells it to.
+ See [Using IPv6 for Inter-node Communication (and CLI Tools)](https://www.rabbitmq.com/networking.html#distribution-ipv6) to learn more.
+
+ GitHub issue: [rabbitmq/rabbitmq-peer-discovery-k8s#55](https://github.com/rabbitmq/rabbitmq-peer-discovery-k8s/issues/55)
+
+### AWS Peer Discovery Plugin
+
+#### Bug Fixes
+
+ * The plugin will now configure its HTTP[S] client to use IPv6 if
+ the [inetrc file](http://erlang.org/doc/apps/erts/inet_cfg.html) tells it to.
+ See [Using IPv6 for Inter-node Communication (and CLI Tools)](https://www.rabbitmq.com/networking.html#distribution-ipv6) to learn more.
+
+ GitHub issue: [rabbitmq/rabbitmq-peer-discovery-aws#32](https://github.com/rabbitmq/rabbitmq-peer-discovery-aws/pull/32)
+
+
+### Consul Peer Discovery Plugin
+
+#### Enhancements
+
+ * It is now possible to specify service metadata values:
+
+ ``` ini
+ cluster_formation.consul.svc_meta.key1 = value1
+ cluster_formation.consul.svc_meta.key2 = value2
+ ```
+
+ GitHub issue: [rabbitmq/rabbitmq-peer-discovery-consul#34](https://github.com/rabbitmq/rabbitmq-peer-discovery-consul/issues/34)
+
+#### Bug Fixes
+
+ * The plugin will now configure its HTTP[S] client to use IPv6 if
+ the [inetrc file](http://erlang.org/doc/apps/erts/inet_cfg.html) tells it to.
+ See [Using IPv6 for Inter-node Communication (and CLI Tools)](https://www.rabbitmq.com/networking.html#distribution-ipv6) to learn more.
+
+ GitHub issue: [rabbitmq-peer-discovery-consul#36](https://github.com/rabbitmq/rabbitmq-peer-discovery-consul/pull/36)
+
+
+
+### etcd Peer Discovery Plugin
+
+#### Bug Fixes
+
+ * The plugin will now configure its HTTP[S] client to use IPv6 if
+ the [inetrc file](http://erlang.org/doc/apps/erts/inet_cfg.html) tells it to.
+ See [Using IPv6 for Inter-node Communication (and CLI Tools)](https://www.rabbitmq.com/networking.html#distribution-ipv6) to learn more.
+
+ GitHub issue: [rabbitmq/rabbitmq-peer-discovery-etcd#19](https://github.com/rabbitmq/rabbitmq-peer-discovery-etcd/pull/19)
+
+
+
+### Erlang Client
+
+#### Bug Fixes
+
+ * Connection could not be restarted after a heartbeat timeout due to strict pattern matching.
+
+ Contributed by Giuseppe D'Anna (@larrywax).
+
+ GitHub issue: [rabbitmq/rabbitmq-erlang-client#126](https://github.com/rabbitmq/rabbitmq-erlang-client/pull/126)
+
+
+### LDAP Plugin
+
+#### Enhancements
+
+ * The plugin will now log a warning when DN lookup settings are incomplete.
+
+ GitHub issue: [rabbitmq/rabbitmq-auth-backend-ldap#114](https://github.com/rabbitmq/rabbitmq-auth-backend-ldap/pull/114)
+
+
+### Tracing Plugin
+
+#### Bug Fixes
+
+ * On Windows, deleting the file used by an active (running) trace resulted in an `EACCES` exception.
+
+ GitHub issue: [rabbitmq/rabbitmq-tracing#31](https://github.com/rabbitmq/rabbitmq-tracing/issues/31)
+
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins or the client libraries. Please download the archive named `rabbitmq-server-3.7.20.tar.xz`.
diff --git a/release-notes/3.7.21.md b/release-notes/3.7.21.md
new file mode 100644
index 0000000000..e41b516f6f
--- /dev/null
+++ b/release-notes/3.7.21.md
@@ -0,0 +1,98 @@
+## RabbitMQ 3.7.21
+
+RabbitMQ `3.7.21` is a maintenance release that includes a patch for [CVE-2019-11287](https://pivotal.io/security/cve-2019-11287).
+RabbitMQ `3.7.x` series is supported [through March 2020](https://www.rabbitmq.com/versions.html).
+
+### Erlang/OTP Compatibility Notes
+
+Per the new Erlang version support policy in effect starting with January 2019,
+this release [**no longer supports Erlang/OTP 20.3**](https://groups.google.com/d/msg/rabbitmq-users/G4UJ9zbIYHs/qCeyjkjyCQAJ).
+Make sure a [supported Erlang version](https://www.rabbitmq.com/which-erlang.html) is used before upgrading.
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision latest patch versions of Erlang `21.3.x` and `22.x`.
+
+### Compatibility Notes
+
+#### Upgrading to Erlang 21.x or Later Versions
+
+When upgrading to this release **and upgrading Erlang to 21.x or later** at the same time, extra care has to be taken.
+Since CLI tools from RabbitMQ releases older than 3.7.7 will fail on Erlang 21 or later,
+RabbitMQ **must be upgraded before Erlang**.
+
+#### Upgrade Doc Guides and Change Log
+
+See [3.7.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.7.0) upgrade
+and compatibility notes first if upgrading from an earlier release.
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades
+and [RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+### Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the
+[RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Changes
+
+### Core Server
+
+#### Bug Fixes
+
+ * Avoid pattern expansion when logging connection closure reason.
+
+ This addresses [CVE-2019-11287](https://pivotal.io/security/cve-2019-11287).
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2155](https://github.com/rabbitmq/rabbitmq-server/pull/2155)
+
+
+### LDAP Plugin
+
+#### Enhancements
+
+ * Errors were not propagated back to the client correctly in case of an LDAP server authentication failure.
+
+ GitHub issue: [rabbitmq/rabbitmq-auth-backend-ldap#116](https://github.com/rabbitmq/rabbitmq-auth-backend-ldap/issues/116)
+
+
+### STOMP Plugin
+
+#### Bug Fixes
+
+ * `stomp.hide_server` is now available in the new style configuration format (see the example below).
+
+ GitHub issue: [rabbitmq/rabbitmq-stomp#140](https://github.com/rabbitmq/rabbitmq-stomp/issues/140)
+
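+   A minimal new style configuration sketch; a boolean value is assumed, mirroring the classic format setting:
+
+   ``` ini
+   stomp.hide_server = true
+   ```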
+
+### Web STOMP Plugin
+
+#### Bug Fixes
+
+ * The plugin emitted a warning on start.
+
+ GitHub issue: [rabbitmq/rabbitmq-web-stomp#115](https://github.com/rabbitmq/rabbitmq-web-stomp/issues/115)
+
+
+### Web MQTT Plugin
+
+#### Bug Fixes
+
+ * The plugin emitted a warning on start.
+
+ GitHub issue: [rabbitmq/rabbitmq-web-mqtt#59](https://github.com/rabbitmq/rabbitmq-web-mqtt/issues/59)
+
+
+### Event Exchange Plugin
+
+#### Bug Fixes
+
+ * `event_exchange.vhost` is now available in the new style configuration format (see the example below).
+
+ GitHub issue: [rabbitmq/rabbitmq-event-exchange#43](https://github.com/rabbitmq/rabbitmq-event-exchange/issues/43)
+
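+   A minimal new style configuration sketch; the virtual host value shown is illustrative:
+
+   ``` ini
+   # virtual host in which the event exchange will be declared
+   event_exchange.vhost = /
+   ```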
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins or the client libraries. Please download the archive named `rabbitmq-server-3.7.21.tar.xz`.
diff --git a/release-notes/3.7.22.md b/release-notes/3.7.22.md
new file mode 100644
index 0000000000..9c319a7920
--- /dev/null
+++ b/release-notes/3.7.22.md
@@ -0,0 +1,100 @@
+## RabbitMQ 3.7.22
+
+RabbitMQ `3.7.22` is a maintenance release that focuses on bug fixes.
+RabbitMQ `3.7.x` series is supported [through March 2020](https://www.rabbitmq.com/versions.html).
+
+### Erlang/OTP Compatibility Notes
+
+Per the new Erlang version support policy in effect starting with January 2019,
+this release [**no longer supports Erlang/OTP 20.3**](https://groups.google.com/d/msg/rabbitmq-users/G4UJ9zbIYHs/qCeyjkjyCQAJ).
+Make sure a [supported Erlang version](https://www.rabbitmq.com/which-erlang.html) is used before upgrading.
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision latest patch versions of Erlang `21.3.x` and `22.x`.
+
+### Compatibility Notes
+
+#### Upgrading to Erlang 21.x or Later Versions
+
+When upgrading to this release **and upgrading Erlang to 21.x or later** at the same time, extra care has to be taken.
+Since CLI tools from RabbitMQ releases older than 3.7.7 will fail on Erlang 21 or later,
+RabbitMQ **must be upgraded before Erlang**.
+
+#### Upgrade Doc Guides and Change Log
+
+See [3.7.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.7.0) upgrade
+and compatibility notes first if upgrading from an earlier release.
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades
+and [RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+### Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the
+[RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Changes
+
+### Core Server
+
+#### Bug Fixes
+
+ * One-off Erlang VM starts used by startup scripts now do as little work as possible
+ in order to avoid starting subcomponents, loading configuration and so on.
+
+ GitHub issue: [rabbitmq/rabbitmq-common#340](https://github.com/rabbitmq/rabbitmq-common/pull/340)
+
+
+### CLI Tools
+
+#### Bug Fixes
+
+ * `rabbitmqctl await_startup` failed with an exception when RabbitMQ application was stopped but the
+ runtime (Erlang VM) was running.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2158](https://github.com/rabbitmq/rabbitmq-server/issues/2158)
+
+
+### Management Plugin
+
+#### Bug Fixes
+
+ * Definition import could fail if performed via the HTTP API (but not the management UI)
+ and the imported definitions contained operator policies.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#751](https://github.com/rabbitmq/rabbitmq-management/issues/751)
+
+ * When a node cannot compute the number of file descriptors it uses, it will be more resilient and will
+ log warnings (e.g. about missing `handle.exe` in `PATH` on Windows) much more proactively.
+
+ GitHub issue: [rabbitmq/rabbitmq-management-agent#86](https://github.com/rabbitmq/rabbitmq-management-agent/issues/86)
+
+#### Enhancements
+
+ * Metric aggregation optimizations.
+
+ GitHub issue: [rabbitmq/rabbitmq-management-agent#84](https://github.com/rabbitmq/rabbitmq-management-agent/pull/84)
+
+
+### MQTT Plugin
+
+#### Enhancements
+
+ * Throughput improvements and reduced CPU usage, at the cost of a slightly higher per-connection RAM footprint.
+
+ Contributed by Grigory Starinkin (Erlang Solutions).
+
+ GitHub issues: [rabbitmq/rabbitmq-mqtt#216](https://github.com/rabbitmq/rabbitmq-mqtt/pull/216), [rabbitmq/rabbitmq-mqtt#217](https://github.com/rabbitmq/rabbitmq-mqtt/pull/217)
+
+#### Bug Fixes
+
+ * Last Will messages that use QoS 2 will now be downgraded to QoS 1, just like "regular" published messages.
+
+ GitHub issue: [rabbitmq/rabbitmq-mqtt#214](https://github.com/rabbitmq/rabbitmq-mqtt/issues/214)
+
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins or the client libraries. Please download the archive named `rabbitmq-server-3.7.22.tar.xz`.
diff --git a/release-notes/3.7.23.md b/release-notes/3.7.23.md
new file mode 100644
index 0000000000..15b5d29a4d
--- /dev/null
+++ b/release-notes/3.7.23.md
@@ -0,0 +1,78 @@
+## RabbitMQ 3.7.23
+
+RabbitMQ `3.7.23` is a maintenance release.
+RabbitMQ `3.7.x` series is supported [through March 2020](https://www.rabbitmq.com/versions.html).
+
+### Erlang/OTP Compatibility Notes
+
+Per the new Erlang version support policy in effect starting with January 2019,
+this release [**no longer supports Erlang/OTP 20.3**](https://groups.google.com/d/msg/rabbitmq-users/G4UJ9zbIYHs/qCeyjkjyCQAJ).
+Make sure a [supported Erlang version](https://www.rabbitmq.com/which-erlang.html) is used before upgrading.
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision latest patch versions of Erlang `21.3.x` and `22.x`.
+
+### Compatibility Notes
+
+#### Upgrading to Erlang 21.x or Later Versions
+
+When upgrading to this release **and upgrading Erlang to 21.x or later** at the same time, extra care has to be taken.
+Since CLI tools from RabbitMQ releases older than 3.7.7 will fail on Erlang 21 or later,
+RabbitMQ **must be upgraded before Erlang**.
+
+#### Upgrade Doc Guides and Change Log
+
+See [3.7.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.7.0) upgrade
+and compatibility notes first if upgrading from an earlier release.
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades
+and [RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+### Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the
+[RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Changes
+
+### MQTT Plugin
+
+#### Enhancements
+
+ * Throughput improvements ranging from 14 to 60 percent depending on workload.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2168](https://github.com/rabbitmq/rabbitmq-server/issues/2168)
+
+
+### AMQP 1.0 Plugin
+
+#### Enhancements
+
+ * Throughput improvements.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2168](https://github.com/rabbitmq/rabbitmq-server/issues/2168)
+
+
+### STOMP Plugin
+
+#### Enhancements
+
+ * Throughput improvements.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2168](https://github.com/rabbitmq/rabbitmq-server/issues/2168)
+
+
+### Web STOMP Plugin
+
+#### Enhancements
+
+ * Clients now can authenticate using an x.509 (TLS) certificate.
+
+ GitHub issue: [rabbitmq/rabbitmq-web-stomp#116](https://github.com/rabbitmq/rabbitmq-web-stomp/issues/116)
+
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins or the client libraries. Please download the archive named `rabbitmq-server-3.7.23.tar.xz`.
diff --git a/release-notes/3.7.24.md b/release-notes/3.7.24.md
new file mode 100644
index 0000000000..458fee94f0
--- /dev/null
+++ b/release-notes/3.7.24.md
@@ -0,0 +1,114 @@
+## RabbitMQ 3.7.24
+
+RabbitMQ `3.7.24` is a maintenance release.
+RabbitMQ `3.7.x` series is supported [through March 2020](https://www.rabbitmq.com/versions.html).
+
+### Erlang/OTP Compatibility Notes
+
+Per the new Erlang version support policy in effect starting with January 2019,
+this release [**no longer supports Erlang/OTP 20.3**](https://groups.google.com/d/msg/rabbitmq-users/G4UJ9zbIYHs/qCeyjkjyCQAJ).
+Make sure a [supported Erlang version](https://www.rabbitmq.com/which-erlang.html) is used before upgrading.
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision latest patch versions of Erlang `21.3.x` and `22.x`.
+
+### Compatibility Notes
+
+#### Upgrading to Erlang 21.x or Later Versions
+
+When upgrading to this release **and upgrading Erlang to 21.x or later** at the same time, extra care has to be taken.
+Since CLI tools from RabbitMQ releases older than 3.7.7 will fail on Erlang 21 or later,
+RabbitMQ **must be upgraded before Erlang**.
+
+#### Upgrade Doc Guides and Change Log
+
+See [3.7.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.7.0) upgrade
+and compatibility notes first if upgrading from an earlier release.
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades
+and [RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+### Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the
+[RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Changes
+
+### Core Server
+
+#### Bug Fixes
+
+ * Autoheal could fail to finish if one of its state transitions initiated by a remote node timed out.
+
+ Contributed by @tomyouyou.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2209](https://github.com/rabbitmq/rabbitmq-server/pull/2209)
+
+ * Reduced scheduled GC activity in the connection socket writer to one run per 1 GiB of data transferred,
+ with an option to change the value or disable scheduled runs entirely. More frequent binary heap GC runs seem
+ unnecessary on Erlang 22.
+
+ Contributed by Aleksander Nycz.
+
+ GitHub issue: [rabbitmq/rabbitmq-common#343](https://github.com/rabbitmq/rabbitmq-common/pull/343)
+
+ * The node can now recover even if a virtual host recovery terms file was corrupted.
+
+ Contributed by @tomyouyou.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2195](https://github.com/rabbitmq/rabbitmq-server/pull/2195)
+
+ * Better error reporting in an edge case with classic queue leader replica election.
+
+ Contributed by Ayanda Dube.
+
+ GitHub issue: [rabbitmq/rabbitmq-common#355](https://github.com/rabbitmq/rabbitmq-common/pull/355)
+
+ * Fixed an incorrect return value in the internal queue data store module.
+
+ Contributed by Sergey Loguntsov.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2221](https://github.com/rabbitmq/rabbitmq-server/pull/2221)
+
+ * Syslog client is now started even when Syslog logging is configured only for some log sinks.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2044](https://github.com/rabbitmq/rabbitmq-server/issues/2044)
+
+#### Enhancements
+
+ * RabbitMQ nodes will now gracefully shut down when receiving a `SIGTERM` or `SIGQUIT` signal.
+ Previously the runtime would invoke a default handler that terminated the VM, giving
+ RabbitMQ no chance to execute its shutdown steps.
+
+ In addition, `SIGUSR1` and `SIGUSR2` signals are now ignored.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2222](https://github.com/rabbitmq/rabbitmq-server/issues/2222)
+
+ * Some proxy protocol errors are now logged at debug level. This reduces log noise in environments
+ where TCP load balancers and proxies perform health checks by opening a TCP connection but never sending
+ any data.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2204](https://github.com/rabbitmq/rabbitmq-server/pull/2204)
+
+
+### Management and Management Agent Plugins
+
+#### Enhancements
+
+ * A new `POST /login` endpoint can be used by custom management UI login forms to authenticate the user
+ and set the cookie.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#764](https://github.com/rabbitmq/rabbitmq-management/issues/764)
+
+ * Warning about a missing `handle.exe` in `PATH` on Windows is now only logged every few minutes.
+
+ GitHub issue: [rabbitmq/rabbitmq-management-agent#90](https://github.com/rabbitmq/rabbitmq-management-agent/issues/90)
+
+
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins or the client libraries. Please download the archive named `rabbitmq-server-3.7.24.tar.xz`.
diff --git a/release-notes/3.7.25.md b/release-notes/3.7.25.md
new file mode 100644
index 0000000000..ef5d6ab3c7
--- /dev/null
+++ b/release-notes/3.7.25.md
@@ -0,0 +1,115 @@
+## RabbitMQ 3.7.25
+
+RabbitMQ `3.7.25` is a maintenance release. It focuses on bug fixes and usability
+improvements.
+
+This is **the last generally supported `3.7.x` release**.
+RabbitMQ `3.7.x` series are now covered by the [limited extended support](https://www.rabbitmq.com/versions.html) policy.
+
+Please consider [upgrading](https://www.rabbitmq.com/upgrade.html) to [RabbitMQ `3.8.x`](https://www.rabbitmq.com/changelog.html).
+
+### Erlang/OTP Compatibility Notes
+
+This release [**no longer supports Erlang/OTP 20.3**](https://groups.google.com/d/msg/rabbitmq-users/G4UJ9zbIYHs/qCeyjkjyCQAJ).
+Previously, recent `3.7.x` nodes could start on Erlang 20.3 and were to some extent tested on it, despite
+the Erlang `21.3+` requirement stated in release notes and documentation.
+
+Starting with this release, `21.3+` is a **hard requirement** enforced in the code
+to [ensure forward compatibility with Erlang 23](https://github.com/rabbitmq/rabbitmq-server/pull/2269).
+
+Make sure a [supported Erlang version](https://www.rabbitmq.com/which-erlang.html) is used before upgrading.
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision latest patch versions of Erlang `21.3.x` and `22.x`.
+
+### Compatibility Notes
+
+#### Upgrading to Erlang 21.x or Later Versions
+
+When upgrading to this release **and upgrading Erlang to 21.x or later** at the same time, extra care has to be taken.
+Since CLI tools from RabbitMQ releases older than 3.7.7 will fail on Erlang 21 or later,
+RabbitMQ **must be upgraded before Erlang**.
+
+#### Upgrade Doc Guides and Change Log
+
+See [3.7.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.7.0) upgrade
+and compatibility notes first if upgrading from an earlier release.
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades
+and [RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+### Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the
+[RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Changes
+
+### Core Server
+
+#### Enhancements
+
+ * Support for Erlang `20.3` and earlier versions has been completely removed after months of being documented as unsupported.
+ This release, however, gains Erlang `23.0` forward compatibility.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2269](https://github.com/rabbitmq/rabbitmq-server/pull/2269)
+
+
+### Management Plugin
+
+#### Enhancements
+
+ * More frequent garbage collection by stats DB cache processes in environments where HTTP API queries produced
+ large data sets.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#788](https://github.com/rabbitmq/rabbitmq-management/pull/788)
+
+ * `rabbitmqadmin` binding deletion now defaults to the same property key value as binding creation.
+
+ Contributed by Ivan Kaloyanov.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#779](https://github.com/rabbitmq/rabbitmq-management/pull/779)
+
+#### Bug Fixes
+
+ * `rabbitmqadmin import` mangled non-ASCII characters.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#785](https://github.com/rabbitmq/rabbitmq-management/issues/785)
+
+
+### Federation Plugin
+
+#### Enhancements
+
+ * Internal resource cleanup is now more selective on abnormal link termination. A new upstream parameter, `resource-cleanup-mode`,
+ can be set to `"never"` to prevent internal queues from being deleted by exchange federation links on termination.
+ Use this when data safety is more important than protection from resource leaks. It will then be up to the operator to make
+ sure such resources are periodically deleted or use a suitable [TTL](https://www.rabbitmq.com/ttl.html).
+
+ GitHub issue: [rabbitmq/rabbitmq-federation#105](https://github.com/rabbitmq/rabbitmq-federation/issues/105)
+
+
+### Shovel Management Plugin
+
+#### Bug Fixes
+
+ * Shovels are now correctly removed even when deletion commands are executed against a different node
+ (not the one on which the target Shovel has been created).
+
+ GitHub issue: [rabbitmq/rabbitmq-shovel#68](https://github.com/rabbitmq/rabbitmq-shovel/issues/68)
+
+
+### Trust Store Plugin
+
+#### Enhancements
+
+ * HTTPS certificate provider will no longer terminate if upstream service response contains
+ invalid JSON.
+
+ GitHub issue: [rabbitmq/rabbitmq-trust-store#73](https://github.com/rabbitmq/rabbitmq-trust-store/issues/73)
+
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins or the client libraries. Please download the archive named `rabbitmq-server-3.7.25.tar.xz`.
diff --git a/release-notes/3.7.26.md b/release-notes/3.7.26.md
new file mode 100644
index 0000000000..743706a146
--- /dev/null
+++ b/release-notes/3.7.26.md
@@ -0,0 +1,79 @@
+## RabbitMQ 3.7.26
+
+RabbitMQ `3.7.26` is a maintenance release. It focuses on bug fixes and usability
+improvements.
+
+RabbitMQ `3.7.x` series are out of general support and covered by the [limited extended support](https://www.rabbitmq.com/versions.html) policy.
+
+Please consider [upgrading](https://www.rabbitmq.com/upgrade.html) to [RabbitMQ `3.8.x`](https://www.rabbitmq.com/changelog.html).
+
+### Erlang/OTP Compatibility Notes
+
+This release [**no longer supports Erlang/OTP 20.3**](https://groups.google.com/d/msg/rabbitmq-users/G4UJ9zbIYHs/qCeyjkjyCQAJ).
+Previously, recent `3.7.x` nodes could start on Erlang 20.3 and were to some extent tested on it, despite
+the Erlang `21.3+` requirement stated in release notes and documentation.
+
+Starting with this release, `21.3+` is a **hard requirement** enforced in the code
+to [ensure forward compatibility with Erlang 23](https://github.com/rabbitmq/rabbitmq-server/pull/2269).
+
+Make sure a [supported Erlang version](https://www.rabbitmq.com/which-erlang.html) is used before upgrading.
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision latest patch versions of Erlang `21.3.x` and `22.x`.
+
+### Compatibility Notes
+
+#### Upgrading to Erlang 21.x or Later Versions
+
+When upgrading to this release **and upgrading Erlang to 21.x or later** at the same time, extra care has to be taken.
+Since CLI tools from RabbitMQ releases older than 3.7.7 will fail on Erlang 21 or later,
+RabbitMQ **must be upgraded before Erlang**.
+
+#### Upgrade Doc Guides and Change Log
+
+See [3.7.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.7.0) upgrade
+and compatibility notes first if upgrading from an earlier release.
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades
+and [RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+### Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the
+[RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Changes
+
+### Federation Plugin
+
+#### Enhancements
+
+ * "Command" operations such as binding propagation now use a separate channel for all links, preventing latency
+ spikes for asynchronous operations such as message publishing (a head-of-line blocking problem).
+
+ Contributed by Grigory Starinkin.
+
+ GitHub issue: [rabbitmq/rabbitmq-federation#97](https://github.com/rabbitmq/rabbitmq-federation/pull/97)
+
+### Management Plugin
+
+#### Bug Fixes
+
+ * `rabbitmqadmin` failed with an exception on Python 3 when `-f pretty_json` flag was used.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#794](https://github.com/rabbitmq/rabbitmq-management/issues/794)
+
+
+### Shovel Plugin
+
+#### Bug Fixes
+
+ * AMQP 1.0 client used by Shovel now correctly encodes Erlang atoms.
+
+ GitHub issue: [rabbitmq/rabbitmq-amqp1.0-client#45](https://github.com/rabbitmq/rabbitmq-amqp1.0-client/issues/45)
+
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins or the client libraries. Please download the archive named `rabbitmq-server-3.7.26.tar.xz`.
diff --git a/release-notes/3.7.27.md b/release-notes/3.7.27.md
new file mode 100644
index 0000000000..a69634efd9
--- /dev/null
+++ b/release-notes/3.7.27.md
@@ -0,0 +1,83 @@
+## RabbitMQ 3.7.27
+
+RabbitMQ `3.7.27` is a maintenance release. It contains a couple of backports
+from the `3.8.x` branch.
+
+RabbitMQ `3.7.x` series are out of general support and covered by the [limited extended support policy](https://www.rabbitmq.com/versions.html) only.
+
+Please consider [upgrading](https://www.rabbitmq.com/upgrade.html) to [RabbitMQ `3.8.x`](https://www.rabbitmq.com/changelog.html).
+
+### Erlang/OTP Compatibility Notes
+
+This release [**no longer supports Erlang/OTP 20.3**](https://groups.google.com/d/msg/rabbitmq-users/G4UJ9zbIYHs/qCeyjkjyCQAJ).
+Erlang `21.3+` is now a **hard requirement** checked on node startup.
+
+Make sure a [supported Erlang version](https://www.rabbitmq.com/which-erlang.html) is used before upgrading.
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision latest patch versions of Erlang `21.3.x` and `22.x`.
+
+### Compatibility Notes
+
+#### Upgrading to Erlang 21.x or Later Versions
+
+When upgrading to this release **and upgrading Erlang to 21.x or later** at the same time, extra care has to be taken.
+Since CLI tools from RabbitMQ releases older than 3.7.7 will fail on Erlang 21 or later,
+RabbitMQ **must be upgraded before Erlang**.
+
+#### Upgrade Doc Guides and Change Log
+
+See [3.7.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.7.0) upgrade
+and compatibility notes first if upgrading from an earlier release.
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades
+and [RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+### Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the
+[RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Changes
+
+### Core Server
+
+#### Enhancements
+
+ * More peer discovery errors are retried.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1627](https://github.com/rabbitmq/rabbitmq-server/issues/1627)
+
+ * Initial Erlang/OTP 24 compatibility.
+
+ Erlang/OTP 24 is expected to ship in mid-2021. While RabbitMQ 3.7 will be [out of support](https://www.rabbitmq.com/versions.html) by then,
+ some initial compatibility work was backported to this branch due to continuous integration on Erlang/OTP master.
+
+### Federation Plugin
+
+#### Bug Fixes
+
+ * One-off connections periodically used by exchange federation links now handle timeouts in certain
+ operations gracefully. This avoids unnecessary link disruption and restarts in certain environments.
+ The default interval has also been bumped from 30s to 90s.
+
+ In environments with stable topologies the interval can be significantly increased to decrease connection
+ churn, simplify monitoring and in general conserve some resources:
+
+ ``` erlang
+ %% advanced.config
+ [
+ {rabbitmq_federation, [
+ %% 86400 seconds or 24 hours
+ {internal_exchange_check_interval, 86400000}
+ ]}
+ ].
+ ```
+
+ GitHub issue: [rabbitmq/rabbitmq-federation#107](https://github.com/rabbitmq/rabbitmq-federation/pull/107)
+
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins or the client libraries. Please download the archive named `rabbitmq-server-3.7.27.tar.xz`.
diff --git a/release-notes/3.7.28.md b/release-notes/3.7.28.md
new file mode 100644
index 0000000000..0156fa2dee
--- /dev/null
+++ b/release-notes/3.7.28.md
@@ -0,0 +1,64 @@
+## RabbitMQ 3.7.28
+
+RabbitMQ `3.7.28` is a security patch release.
+
+
+RabbitMQ `3.7.x` series are out of general support and covered by the [limited extended support policy](https://www.rabbitmq.com/versions.html)
+through October 1st, 2020.
+Please consider [upgrading](https://www.rabbitmq.com/upgrade.html) to [RabbitMQ `3.8.x`](https://www.rabbitmq.com/changelog.html).
+
+The RabbitMQ Core team would like to thank Ofir Hamam and Tomer Hadad at Ernst & Young's Hacktics Advanced Security Center
+for researching and responsibly disclosing the vulnerability addressed in this release.
+
+### Erlang/OTP Compatibility Notes
+
+This release [**no longer supports Erlang/OTP 20.3**](https://groups.google.com/d/msg/rabbitmq-users/G4UJ9zbIYHs/qCeyjkjyCQAJ).
+Erlang `21.3+` is now a **hard requirement** checked on node startup.
+
+Make sure a [supported Erlang version](https://www.rabbitmq.com/which-erlang.html) is used before upgrading.
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision latest patch versions of Erlang `21.3.x` and `22.x`.
+
+### Compatibility Notes
+
+#### Upgrading to Erlang 21.x or Later Versions
+
+When upgrading to this release **and upgrading Erlang to 21.x or later** at the same time, extra care has to be taken.
+Since CLI tools from RabbitMQ releases older than 3.7.7 will fail on Erlang 21 or later,
+RabbitMQ **must be upgraded before Erlang**.
+
+#### Upgrade Doc Guides and Change Log
+
+See [3.7.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.7.0) upgrade
+and compatibility notes first if upgrading from an earlier release.
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades
+and [RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+### Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the
+[RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Changes
+
+### Core Server
+
+#### Bug Fixes
+
+ * Addressed a Windows-specific binary planting security vulnerability [CVE-2020-5419](https://tanzu.vmware.com/security/cve-2020-5419) that allowed for arbitrary code execution.
+ The vulnerability requires the attacker to have local access and elevated privileges,
+ and cannot be executed remotely.
+
+ [CVSS score](https://www.first.org/cvss/calculator/3.0#CVSS:3.0/AV:L/AC:L/PR:H/UI:N/S:U/C:H/I:H/A:H): `6.7` (medium severity).
+
+ This vulnerability was researched and [responsibly disclosed](https://www.rabbitmq.com/contact.html#security) by
+ Ofir Hamam and Tomer Hadad at Ernst & Young's Hacktics Advanced Security Center.
+
+
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins or the client libraries. Please download the archive named `rabbitmq-server-3.7.28.tar.xz`.
diff --git a/release-notes/3.7.3.md b/release-notes/3.7.3.md
new file mode 100644
index 0000000000..46f276ecc6
--- /dev/null
+++ b/release-notes/3.7.3.md
@@ -0,0 +1,110 @@
+## RabbitMQ 3.7.3
+
+RabbitMQ `3.7.3` is a patch release with some bug fixes and enhancements.
+
+### Upgrades and Compatibility
+
+This release has no known incompatibilities with earlier 3.7.x versions.
+See [3.7.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.7.0) upgrade and compatibility notes if upgrading from an earlier release.
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades.
+
+See [RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+
+## Changes
+
+### Core Server
+
+#### Bug Fixes
+
+ * Logging to `amq.rabbitmq.log` exchange is available again as a logging mechanism. Note that
+ the user must opt-in by setting `log.exchange = true` in the [config file](http://rabbitmq.com/configure.html).
+
+ GitHub issue: [rabbitmq-server#1456](https://github.com/rabbitmq/rabbitmq-server/issues/1456)
+
+ * During node boot, the logger will temporarily accept a much higher maximum rate so that no messages are dropped.
+
+ GitHub issue: [rabbitmq-server#1471](https://github.com/rabbitmq/rabbitmq-server/issues/1471)
+
+ * Plugin activation could hang if a plugin's channel interceptor failed to activate with an exception.
+
+ GitHub issue: [rabbitmq-server#1477](https://github.com/rabbitmq/rabbitmq-server/pull/1477)
+
+ * During node boot, virtual hosts that are not yet initialised could be queried for status, which
+ resulted in log noise.
+
+ GitHub issue: [rabbitmq-server#1472](https://github.com/rabbitmq/rabbitmq-server/pull/1482)
+
+#### Enhancements
+
+ * [Internal authN backend](https://www.rabbitmq.com/access-control.html) will now prohibit logins with blank passwords. Such attempts
+ are usually made by mistake because a passwordless user that was meant to authenticate
+ using [x509 (TLS) certificates](https://github.com/rabbitmq/rabbitmq-auth-mechanism-ssl/) was not configured to use the correct [authentication
+ mechanism](https://www.rabbitmq.com/authentication.html).
+
+ Note that the same behavior can already be achieved using a [credential validator](https://www.rabbitmq.com/passwords.html), so this is
+ just an extra safety measure.
+
+ GitHub issue: [rabbitmq-server#1465](https://github.com/rabbitmq/rabbitmq-server/pull/1465)
+
+
+### Management Plugin
+
+#### Bug Fixes
+
+ * Definition import now stores global parameters in a way that the MQTT plugin expects.
+
+ GitHub issue: [rabbitmq-management#528](https://github.com/rabbitmq/rabbitmq-management/issues/528)
+
+ * User update form now successfully updates tags for passwordless users (or if the password is cleared at the same time).
+
+ GitHub issue: [rabbitmq-management#533](https://github.com/rabbitmq/rabbitmq-management/issues/533)
+
+#### Enhancements
+
+ * Requeue is now the default option in the Get Messages section.
+
+ Contributed by @dparkinson.
+
+ GitHub issue: [rabbitmq-management#542](https://github.com/rabbitmq/rabbitmq-management/pull/542)
+
+ * For [passwordless users](https://www.rabbitmq.com/passwords.html), the "no password" option on the user editing form is now preselected.
+
+ GitHub issue: [rabbitmq-management#537](https://github.com/rabbitmq/rabbitmq-management/issues/537)
+
+
+### LDAP AuthN/AuthZ Backend Plugin
+
+#### Bug Fixes
+
+ * More config keys are now supported via [new style config format](https://www.rabbitmq.com/configure.html).
+
+ GitHub issue: [rabbitmq-auth-backend-ldap#78](https://github.com/rabbitmq/rabbitmq-auth-backend-ldap/issues/78)
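+
+ The issue above lists the exact keys that were added. As an illustration only, typical `auth_ldap` settings
+ in the new style format look like this (the hostname and values below are made up):
+
+ ``` ini
+ # rabbitmq.conf (illustrative values only)
+ auth_ldap.servers.1 = ldap.eng.example.local
+ auth_ldap.use_ssl = false
+ auth_ldap.user_dn_pattern = cn=${username},ou=People,dc=example,dc=local
+ ```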
+
+
+### Peer Discovery Consul Plugin
+
+ * When a node registers with Consul it will now set its initial health check status to `passing`.
+ Previously the node would be in the `critical` state until it reported its health for the
+ first time (after 15 seconds with default settings), which could prevent its peers from discovering it.
+
+ Kudos to @andersenleo for investigating this issue and recommending a solution.
+
+ GitHub issue: [rabbitmq-peer-discovery-consul#8](https://github.com/rabbitmq/rabbitmq-peer-discovery-consul/issues/8)
+
+
+### CLI Tools
+
+#### Bug Fixes
+
+ * CLI tools honor `RABBITMQ_CTL_ERL_ARGS` again.
+
+ GitHub issue: [rabbitmq-server#1461](https://github.com/rabbitmq/rabbitmq-server/issues/1461)
+
+
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins or the client libraries. Please download the archive named `rabbitmq-3.7.3.tar.gz`.
diff --git a/release-notes/3.7.4.md b/release-notes/3.7.4.md
new file mode 100644
index 0000000000..82499b0493
--- /dev/null
+++ b/release-notes/3.7.4.md
@@ -0,0 +1,117 @@
+## RabbitMQ 3.7.4
+
+RabbitMQ `3.7.4` is a patch release that primarily includes bug fixes.
+
+### Upgrades and Compatibility
+
+This release has no known incompatibilities with earlier 3.7.x versions.
+See [3.7.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.7.0) upgrade and compatibility notes if upgrading from an earlier release.
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades.
+
+See [RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+
+## Changes
+
+### Core Server
+
+#### Bug Fixes
+
+ * Priority queues could terminate when the node was under memory pressure and needed to
+ offload messages to disk.
+
+ GitHub issue: [rabbitmq-server#1535](https://github.com/rabbitmq/rabbitmq-server/issues/1535)
+
+ * The `min-masters` queue leader locator strategy was affected by queue binding count and could
+ select the wrong node.
+
+ GitHub issue: [rabbitmq-server#1519](https://github.com/rabbitmq/rabbitmq-server/issues/1519)
+
+ * It was not possible to set the [randomized startup delay](https://www.rabbitmq.com/cluster-formation.html#initial-formation-race-condition) range via the new style config file (see the example at the end of this list).
+
+ GitHub issue: [rabbitmq-server#1531](https://github.com/rabbitmq/rabbitmq-server/issues/1531)
+
+ * Windows scripts used `USE_LONGNAME` from the env file incorrectly.
+
+ GitHub issue: [rabbitmq-server#1508](https://github.com/rabbitmq/rabbitmq-server/issues/1508)
+
+ * systemd notification implementation was prone to shell escaping issues.
+
+ GitHub issue: [rabbitmq-server#1187](https://github.com/rabbitmq/rabbitmq-server/issues/1187)
+
+ * It wasn't possible to configure `delegate_count` via new style config file.
+
+ GitHub issue: [rabbitmq-server#1497](https://github.com/rabbitmq/rabbitmq-server/issues/1497)
+
+ * It wasn't possible to configure `net_ticktime` via new style config file.
+
+ GitHub issue: [rabbitmq-server#1522](https://github.com/rabbitmq/rabbitmq-server/issues/1522)
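+
+ For the randomized startup delay fix above, a minimal sketch, assuming the keys are
+ `cluster_formation.randomized_startup_delay_range.min` and `.max` (the values are illustrative, not recommendations):
+
+ ``` ini
+ # rabbitmq.conf (values in seconds, chosen only for illustration)
+ cluster_formation.randomized_startup_delay_range.min = 5
+ cluster_formation.randomized_startup_delay_range.max = 60
+ ```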
+
+#### Enhancements
+
+ * It is now possible to configure max number of Erlang processes and atoms using
+ `RABBITMQ_MAX_NUMBER_OF_PROCESSES` (`MAX_NUMBER_OF_PROCESSES` in `rabbitmq-env.conf`) and `RABBITMQ_MAX_NUMBER_OF_ATOMS`
+ (`MAX_NUMBER_OF_ATOMS`) environment variables, respectively.
+
+ GitHub issue: [rabbitmq-server#1528](https://github.com/rabbitmq/rabbitmq-server/pull/1528)
+
+ * `SCHEDULER_BIND_TYPE` and `DISTRIBUTION_BUFFER_SIZE` now can be used in `rabbitmq-env.conf` without
+ the `RABBITMQ_` prefix.
+
+ GitHub issue: [rabbitmq-server#1528](https://github.com/rabbitmq/rabbitmq-server/pull/1528)
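+
+ A minimal sketch of the settings from the two enhancements above in `rabbitmq-env.conf`
+ (all values are illustrative, not recommendations):
+
+ ``` ini
+ # rabbitmq-env.conf (illustrative values only)
+ MAX_NUMBER_OF_PROCESSES=2097152
+ MAX_NUMBER_OF_ATOMS=5000000
+ SCHEDULER_BIND_TYPE=db
+ DISTRIBUTION_BUFFER_SIZE=128000
+ ```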
+
+
+### CLI Tools
+
+#### Bug Fixes
+
+ * `rabbitmq-plugins set` failed when used in offline mode (with `--offline`).
+
+ GitHub issue: [rabbitmq-cli#241](https://github.com/rabbitmq/rabbitmq-cli/issues/241)
+
+ * `rabbitmqctl list_*` command streaming now can be restricted to a set of client TCP ports
+ used to contact RabbitMQ nodes.
+
+ GitHub issue: [rabbitmq-cli#237](https://github.com/rabbitmq/rabbitmq-cli/issues/237)
+
+
+### Management Plugin
+
+#### Bug Fixes
+
+ * Valid definitions could be rejected as invalid during import via HTTP API.
+
+ GitHub issue: [rabbitmq-management#552](https://github.com/rabbitmq/rabbitmq-management/issues/552)
+
+ * It wasn't possible to configure `path_prefix` via new style config file.
+
+ GitHub issue: [rabbitmq-management#547](https://github.com/rabbitmq/rabbitmq-management/issues/547)
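+
+ A minimal sketch, assuming the full key is `management.path_prefix` (the prefix value is hypothetical):
+
+ ``` ini
+ # rabbitmq.conf (prefix chosen only for illustration)
+ management.path_prefix = /rmq
+ ```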
+
+
+### Federation Plugin
+
+#### Bug Fixes
+
+ * Valid upstreams were rejected as invalid when imported from a definition file.
+
+ GitHub issue: [rabbitmq-federation#70](https://github.com/rabbitmq/rabbitmq-federation/issues/70)
+
+ * Valid upstream sets were rejected as invalid when added using CLI tools.
+
+ GitHub issue: [rabbitmq-federation#67](https://github.com/rabbitmq/rabbitmq-federation/issues/67)
+
+
+### Peer Discovery AWS Plugin
+
+#### Bug Fixes
+
+ * The plugin skipped registration and thus [randomized startup delay](https://www.rabbitmq.com/cluster-formation.html#initial-formation-race-condition) injection.
+
+ GitHub issue: [rabbitmq-peer-discovery-aws#17](https://github.com/rabbitmq/rabbitmq-peer-discovery-aws/issues/17)
+
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins or the client libraries. Please download the archive named `rabbitmq-3.7.4.tar.gz`.
diff --git a/release-notes/3.7.5.md b/release-notes/3.7.5.md
new file mode 100644
index 0000000000..62539fc2a0
--- /dev/null
+++ b/release-notes/3.7.5.md
@@ -0,0 +1,201 @@
+## RabbitMQ 3.7.5
+
+RabbitMQ `3.7.5` is a maintenance release that primarily includes bug fixes.
+
+### Upgrades and Compatibility
+
+This release includes one **potential incompatibility** with earlier `3.7.x` releases. It will only affect applications that use a lot (at least hundreds) of channels per connection. Previous behavior can be restored via configuration.
+
+See [3.7.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.7.0) upgrade and compatibility notes if upgrading from an earlier release.
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades.
+
+See [RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+
+## Changes
+
+### Core Server
+
+#### Bug Fixes
+
+ * Topic authorisation check could fail with an exception.
+
+ GitHub issue: [rabbitmq-server#1538](https://github.com/rabbitmq/rabbitmq-server/issues/1538)
+
+ * Some runtime parameters and operator policies were rejected during definition import.
+
+ GitHub issue: [rabbitmq-management#565](https://github.com/rabbitmq/rabbitmq-management/issues/565), [rabbitmq-federation#73](https://github.com/rabbitmq/rabbitmq-federation/issues/73)
+
+ * Queues could run into an exception under memory pressure.
+
+ GitHub issue: [rabbitmq-server#1582](https://github.com/rabbitmq/rabbitmq-server/issues/1582)
+
+ * During parallel cluster formation nodes could run into an exception when inserting seed data
+ (default virtual host, user and permissions).
+
+ GitHub issue: [rabbitmq-server#1556](https://github.com/rabbitmq/rabbitmq-server/pull/1556)
+
+#### Enhancements
+
+ * New `ha-promote-on-failure` queue mirroring setting to augment already existing `ha-promote-on-shutdown`.
+
+ GitHub issue: [rabbitmq-server#1578](https://github.com/rabbitmq/rabbitmq-server/pull/1578)
+
+ * `queue.delete` operations will now force delete queues that don't have a [promotable
+ master](https://www.rabbitmq.com/ha.html#unsynchronised-mirrors).
+
+ GitHub issue: [rabbitmq-server#1501](https://github.com/rabbitmq/rabbitmq-server/issues/1501)
+
+ * Lock contention in internal database is now much lower when a node with a lot of exclusive queues
+ shuts down or is otherwise considered to be unavailable by peers. This means fewer CPU cycles
+ spent cleaning up the exclusive queues.
+
+ GitHub issue: [rabbitmq-server#1570](https://github.com/rabbitmq/rabbitmq-server/pull/1570)
+
+ * Default [max number of channels allowed on a connection](https://www.rabbitmq.com/networking.html#tuning-for-large-number-of-connections-channel-max) (a.k.a. `channel_max`) has been lowered from `65535` to `2047`.
+ The new default is much safer and will reduce the effect application channel leaks have on node resource consumption. **This is a potentially breaking change**.
+
+ Systems where a lot (at least hundreds) of channels *per connection* are used should explicitly override `channel_max` in the [config file](https://www.rabbitmq.com/configure.html) to a higher suitable value, e.g.:
+
+ ```
+ channel_max = 4000
+ ```
+
+ GitHub issue: [rabbitmq-server#1593](https://github.com/rabbitmq/rabbitmq-server/issues/1593)
+
+ * The [Ranch](https://github.com/ninenines/ranch) dependency has been updated to `1.5.x`, which should prevent rare cases of mass termination of client connections.
+
+
+### CLI Tools
+
+#### Bug Fixes
+
+ * `-t` (`--timeout`) is now only displayed for commands that will actually use it.
+
+ GitHub issue: [rabbitmq-cli#236](https://github.com/rabbitmq/rabbitmq-cli/issues/236)
+
+
+### Management Plugin
+
+#### Bug Fixes
+
+ * `GET /api/overview` could fail if SNI for management plugin listener was configured.
+
+ GitHub issue: [rabbitmq-management#567](https://github.com/rabbitmq/rabbitmq-management/issues/567)
+
+ * `rabbitmqadmin` could run into an exception with certain formatting settings on Python 2.x.
+
+ GitHub issue: [rabbitmq-management#568](https://github.com/rabbitmq/rabbitmq-management/issues/568)
+
+ * HTTP API stats documentation used incorrect field names.
+
+ GitHub issue: [rabbitmq-management#561](https://github.com/rabbitmq/rabbitmq-management/pull/561)
+
+
+### LDAP Plugin
+
+#### Bug Fixes
+
+ * LDAP server-initiated connection closure was not handled gracefully by connection pool.
+ Kudos to Adam Gardner for [doing all the hard work](https://groups.google.com/forum/#!topic/rabbitmq-users/4Gva3h-yJzM) investigating this issue.
+
+ GitHub issues: [rabbitmq-auth-backend-ldap#82](https://github.com/rabbitmq/rabbitmq-auth-backend-ldap/issues/82), [rabbitmq-auth-backend-ldap#90](https://github.com/rabbitmq/rabbitmq-auth-backend-ldap/issues/90)
+
+ * `auth_ldap.idle_timeout` now defaults to 300 seconds instead of `infinity`.
+
+ GitHub issue: [rabbitmq-auth-backend-ldap#81](https://github.com/rabbitmq/rabbitmq-auth-backend-ldap/issues/81)
+
+ * `group_lookup_base` was not configurable via the new style config format.
+
+ GitHub issue: [rabbitmq-auth-backend-ldap#85](https://github.com/rabbitmq/rabbitmq-auth-backend-ldap/issues/85)
+
+ * `auth_ldap.log` didn't accept `network_unsafe` for severity value.
+
+ GitHub issue: [rabbitmq-auth-backend-ldap#86](https://github.com/rabbitmq/rabbitmq-auth-backend-ldap/issues/86)
+
+#### Enhancements
+
+ * It is now possible to configure TLS options for LDAP server connections via the [new style config format](https://www.rabbitmq.com/configure.html).
+
+ GitHub issue: [rabbitmq-auth-backend-ldap#88](https://github.com/rabbitmq/rabbitmq-auth-backend-ldap/issues/88)
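+
+ A minimal sketch, assuming the TLS settings live under `auth_ldap.ssl_options.*` (the file path and values below are made up):
+
+ ``` ini
+ # rabbitmq.conf (assumed key names; illustrative values)
+ auth_ldap.use_ssl = true
+ auth_ldap.ssl_options.cacertfile = /path/to/ca_certificate.pem
+ auth_ldap.ssl_options.verify = verify_peer
+ ```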
+
+
+### Federation Plugin
+
+#### Bug Fixes
+
+ * Federation upstream sets could run into a type error with exchange federation.
+
+ GitHub issue: [rabbitmq-federation#75](https://github.com/rabbitmq/rabbitmq-federation/issues/75)
+
+
+### Shovel Plugin
+
+#### Bug Fixes
+
+ * Valid dynamic Shovel properties could be rejected by validation.
+
+ GitHub issue: [rabbitmq-shovel#38](https://github.com/rabbitmq/rabbitmq-shovel/issues/38)
+
+ * Shovel names are again included in (application-provided) connection names and can therefore
+ be seen in the management UI.
+
+ GitHub issue: [rabbitmq-shovel#40](https://github.com/rabbitmq/rabbitmq-shovel/issues/40)
+
+
+### Peer Discovery AWS Plugin
+
+#### Bug Fixes
+
+ * China-based AWS regions now use a correct base API URI.
+
+ GitHub issue: [rabbitmq-peer-discovery-aws#23](https://github.com/rabbitmq/rabbitmq-peer-discovery-aws/issues/23)
+
+ * The plugin could fail to discover all nodes in a returned instance set.
+
+ GitHub issue: [rabbitmq-peer-discovery-aws#20](https://github.com/rabbitmq/rabbitmq-peer-discovery-aws/issues/20)
+
+
+### Peer Discovery Kubernetes Plugin
+
+#### Enhancements
+
+ * The plugin now supports [randomized startup delay](https://www.rabbitmq.com/cluster-formation.html#initial-formation-race-condition) with much lower default values
+ since RSD is [not really necessary](https://www.rabbitmq.com/cluster-formation.html#peer-discovery-k8s) for stateful set deployments.
+
+ GitHub issue: [rabbitmq-peer-discovery-k8s#23](https://github.com/rabbitmq/rabbitmq-peer-discovery-k8s/issues/23)
+
+
+### AMQP 1.0 Client
+
+#### Bug Fixes
+
+ * It wasn't possible to send some messages using QPid JMS client over AMQP 1.0.
+
+ GitHub issue: [rabbitmq-amqp1.0#61](https://github.com/rabbitmq/rabbitmq-amqp1.0/issues/61)
+
+
+### Web MQTT Plugin
+
+#### Bug Fixes
+
+ * New style config format was not interpreting IP addresses correctly.
+
+ GitHub issue: [rabbitmq-web-mqtt#26](https://github.com/rabbitmq/rabbitmq-web-mqtt/issues/26)
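+
+ A minimal sketch of a listener address in the new style format, assuming the `web_mqtt.tcp.*` keys
+ (the address and port are examples):
+
+ ``` ini
+ # rabbitmq.conf (assumed key names; example values)
+ web_mqtt.tcp.ip = 0.0.0.0
+ web_mqtt.tcp.port = 15675
+ ```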
+
+
+### Web STOMP Plugin
+
+#### Bug Fixes
+
+ * It wasn't possible to configure TCP listener(s) in the new style config format.
+
+ GitHub issue: [rabbitmq-web-stomp#82](https://github.com/rabbitmq/rabbitmq-web-stomp/issues/82)
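+
+ A minimal sketch of a TCP listener in the new style format, assuming the `web_stomp.tcp.*` keys
+ (the address and port are examples):
+
+ ``` ini
+ # rabbitmq.conf (assumed key names; example values)
+ web_stomp.tcp.ip = 0.0.0.0
+ web_stomp.tcp.port = 15674
+ ```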
+
+
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins or the client libraries. Please download the archive named `rabbitmq-3.7.5.tar.gz`.
diff --git a/release-notes/3.7.6.md b/release-notes/3.7.6.md
new file mode 100644
index 0000000000..0aaaceb4db
--- /dev/null
+++ b/release-notes/3.7.6.md
@@ -0,0 +1,122 @@
+## RabbitMQ 3.7.6
+
+RabbitMQ `3.7.6` is a maintenance release that primarily includes bug fixes.
+
+### Upgrades and Compatibility
+
+This release includes one **potential incompatibility** with earlier `3.7.x` releases. Only systems
+that use a high number (hundreds) of [queue priorities](https://www.rabbitmq.com/priority.html) are affected.
+
+See [3.7.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.7.0) upgrade and compatibility notes if upgrading from an earlier release.
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades.
+
+See [RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+
+## Changes
+
+### Core Server
+
+#### Bug Fixes
+
+ * Max priority cap for queues is now enforced and set to 255. Applications that rely on a higher number of priorities **will break**.
+ Such applications must be updated to use no more than 255 priorities.
+
+ GitHub issue: [rabbitmq-server#1590](https://github.com/rabbitmq/rabbitmq-server/issues/1590)
+
+ * Bulk deletion of exclusive queues when their owning connection closes has been optimized.
+
+ GitHub issues: [rabbitmq/rabbitmq-server#1566](https://github.com/rabbitmq/rabbitmq-server/issues/1566), [rabbitmq/rabbitmq-server#1513](https://github.com/rabbitmq/rabbitmq-server/issues/1513)
+
+
+#### Enhancements
+
+ * On Erlang 20.2.3 or later, more optimal memory allocators will be used. On some workloads this leads
+ to a [significant reduction in the node's RAM consumption](https://groups.google.com/d/msg/rabbitmq-users/LSYaac9frYw/LNZDZUlrBAAJ). The effect is workload-specific; on the workloads
+ tested that did show a negative effect (more RAM used), the difference was very small (about 1%).
+
+
+ It is possible to go back to the previous settings, `+MBas aoffcbf +MHas aoffcbf +MBlmbcs 5120 +MHlmbcs 5120 +MMmcs 10`,
+ by specifying them in the `RABBITMQ_SERVER_ADDITIONAL_ERL_ARGS` [environment variable](https://www.rabbitmq.com/configure.html#customise-environment).
+
+ On Erlang/OTP releases older than 20.2.3 there will be no default changes.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1604](https://github.com/rabbitmq/rabbitmq-server/pull/1604)
+
+ * [Logging to Syslog](https://www.rabbitmq.com/logging.html#logging-to-syslog) without 3rd party plugins.
+
+ GitHub issue: [rabbitmq-server#1600](https://github.com/rabbitmq/rabbitmq-server/pull/1600)
+
+ * `connection.closed` events now include client-provided connection name and more client details.
+ That information is in turn available via the [rabbitmq_event_exchange](https://github.com/rabbitmq/rabbitmq-event-exchange).
+
+ GitHub issue: [rabbitmq-server#1596](https://github.com/rabbitmq/rabbitmq-server/issues/1596)
+
+
+### CLI Tools
+
+#### Enhancements
+
+ * `await_online_nodes <count> [--timeout <timeout>]` is a new command that will wait until the target
+ node's cluster has at least `<count>` nodes. The command supports a timeout. It is meant to simplify
+ cluster provisioning automation and can be used as the very first check performed after starting a node.
+
+ Note that the command requires at least one node to be running and reachable.
+
+ GitHub issue: [rabbitmq-cli#235](https://github.com/rabbitmq/rabbitmq-cli/issues/235)
+
+ * `rabbitmq-diagnostics ping [--node <target> --timeout <timeout>]` is a new minimalistic health check command.
+ It only verifies if the target node is up, registered with epmd and accepts/authenticates CLI tool connections.
+ It complements `rabbitmq-diagnostics node_health_check`, which performs a lot of checks, takes a lot more time
+ and is fairly opinionated in what it does. `ping` is more suitable in environments where
+ failing aliveness checks result in node restarts, such as Kubernetes.
+
+ GitHub issue: [rabbitmq-cli#253](https://github.com/rabbitmq/rabbitmq-cli/issues/253)
+
+
+### Management and Management Agent Plugins
+
+#### Bug Fixes
+
+ * Queue metrics could be missing after a node restart or plugin [re-]enablement.
+
+ GitHub issue: [rabbitmq/rabbitmq-management-agent#58](https://github.com/rabbitmq/rabbitmq-management-agent/issues/58)
+
+ * `rabbitmqadmin` now correctly percent-encodes spaces in connection names.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#576](https://github.com/rabbitmq/rabbitmq-management/issues/576)
+
+ * When creating a new vhost, user permissions will only be granted to the users that exist in the
+ internal database. If an external authentication/authorisation service is used, the step is skipped.
+
+ GitHub issue: [rabbitmq-management#531](https://github.com/rabbitmq/rabbitmq-management/issues/531)
+
+ * Virtual hosts imported from definitions or created via the HTTP API will now explicitly await vhost initialisation
+ on all cluster nodes.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#575](https://github.com/rabbitmq/rabbitmq-management/issues/575)
+
+
+### LDAP Plugin
+
+#### Bug Fixes
+
+ * LDAP connection pool now correctly purges connections in more failure scenarios (e.g. LDAP server restart)
+
+ GitHub issue: [rabbitmq-auth-backend-ldap#92](https://github.com/rabbitmq/rabbitmq-auth-backend-ldap/issues/92)
+
+
+### HTTP Authentication/authorisation Plugin
+
+#### Enhancements
+
+ * `201 Created` responses are now accepted from HTTP services that back the plugin.
+
+ GitHub issue: [rabbitmq-auth-backend-http#68](https://github.com/rabbitmq/rabbitmq-auth-backend-http/pull/68)
+
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins or the client libraries. Please download the archive named `rabbitmq-3.7.6.tar.gz`.
diff --git a/release-notes/3.7.7.md b/release-notes/3.7.7.md
new file mode 100644
index 0000000000..e4994c4fc6
--- /dev/null
+++ b/release-notes/3.7.7.md
@@ -0,0 +1,86 @@
+## RabbitMQ 3.7.7
+
+RabbitMQ `3.7.7` is a maintenance release. This release introduces
+compatibility with [Erlang/OTP 21](http://www.erlang.org/news/123).
+
+### Upgrades and Compatibility
+
+This release has no known incompatibilities with earlier `3.7.x` releases.
+
+#### Upgrading to Erlang 21.0
+
+When upgrading to this release **and upgrading Erlang to 21.0** at the same time, extra care has to be taken.
+Since CLI tools from any earlier RabbitMQ release will fail on Erlang 21, RabbitMQ **must be upgraded before Erlang**.
+
+#### Documentation Guides and Change Log
+
+See [3.7.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.7.0) upgrade and
+compatibility notes if upgrading from an earlier release.
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades and
+[RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+
+## Changes
+
+### Core Server
+
+#### Enhancements
+
+ * This release is compatible with [Erlang/OTP 21](http://www.erlang.org/news/123), the newest major
+ Erlang runtime release.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1616](https://github.com/rabbitmq/rabbitmq-server/issues/1616)
+
+ * Windows installer will now take default [Erlang cookie](https://www.rabbitmq.com/cli.html#erlang-cookie) location differences between Erlang versions into account.
+
+ GitHub issue: [rabbitmq/rabbitmq-server-release#82](https://github.com/rabbitmq/rabbitmq-server-release/pull/82)
+
+
+### Management Plugin
+
+#### Enhancements
+
+ * It is now possible to configure the Web browser session timeout. The default is still 8 hours.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#584](https://github.com/rabbitmq/rabbitmq-management/pull/584)
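+
+ A minimal sketch, assuming the setting is `management.login_session_timeout` expressed in minutes
+ (the value below keeps the 8 hour default):
+
+ ``` ini
+ # rabbitmq.conf (assumed key name; 480 minutes = 8 hours)
+ management.login_session_timeout = 480
+ ```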
+
+
+### Federation Plugin
+
+#### Bug Fixes
+
+ * Direct connections downstream were not closed in some cases.
+
+ Contributed by Ricardo Gonçalves.
+
+ GitHub issue: [rabbitmq/rabbitmq-federation#76](https://github.com/rabbitmq/rabbitmq-federation/issues/76)
+
+
+### LDAP Plugin
+
+#### Bug Fixes
+
+ * It wasn't possible to set `auth_ldap.dn_lookup_bind` to the value of `anon` in the new style config format.
+
+ GitHub issue: [rabbitmq/rabbitmq-auth-backend-ldap#94](https://github.com/rabbitmq/rabbitmq-auth-backend-ldap/issues/94)
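+
+ The setting in question, in the new style format (key and value as given above):
+
+ ``` ini
+ # rabbitmq.conf
+ auth_ldap.dn_lookup_bind = anon
+ ```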
+
+### Consul Peer Discovery Plugin
+
+#### Bug Fixes
+
+ * The following combination of settings was rejected by the plugin, even though it is reasonable
+ and was recommended by the docs:
+
+ ``` ini
+ cluster_formation.consul.svc_addr_auto = true
+ # en0 is just an example, it could be any other NIC name
+ cluster_formation.consul.svc_addr_nic = en0
+ ```
+
+ GitHub issue: [rabbitmq/rabbitmq-peer-discovery-consul#12](https://github.com/rabbitmq/rabbitmq-peer-discovery-consul/issues/12)
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins or the client libraries. Please download the archive named `rabbitmq-3.7.7.tar.gz`.
diff --git a/release-notes/3.7.8.md b/release-notes/3.7.8.md
new file mode 100644
index 0000000000..76270e4d86
--- /dev/null
+++ b/release-notes/3.7.8.md
@@ -0,0 +1,339 @@
+## RabbitMQ 3.7.8
+
+RabbitMQ `3.7.8` is a maintenance release. It focuses on bug fixes and
+further improves [Erlang/OTP 21](http://www.erlang.org/news/123) compatibility.
+
+### Compatibility Notes
+
+This release has no known incompatibilities with earlier `3.7.x` releases.
+
+#### Upgrading to Erlang 21.0
+
+When upgrading to this release **and upgrading Erlang to 21.0** at the same time, extra care has to be taken.
+Since CLI tools from RabbitMQ releases older than 3.7.7 will fail on Erlang 21, RabbitMQ **must be upgraded before Erlang**.
+
+#### Documentation Guides and Change Log
+
+See [3.7.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.7.0) upgrade and
+compatibility notes if upgrading from an earlier release.
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades and
+[RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+
+## Changes
+
+### Core Server
+
+#### Bug Fixes
+
+ * Virtual host (specifically binding) recovery has been optimized.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1650](https://github.com/rabbitmq/rabbitmq-server/pull/1650)
+
+ * A binding removal optimization was reverted because it could result in custom exchange type state not being cleaned up
+ as plugin authors intended.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1691](https://github.com/rabbitmq/rabbitmq-server/pull/1691)
+
+ * A queue sync operation now can be cancelled when a resource alarm is in effect on the receiving node.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1636](https://github.com/rabbitmq/rabbitmq-server/issues/1636)
+
+ * Inter-node communication port (a.k.a. distribution port) unintentionally used an excessively large
+ buffer size (1.2 GB instead of 128 MB). Kudos to Chris Friesen for noticing and reporting this.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1659](https://github.com/rabbitmq/rabbitmq-server/issues/1659)
+
+ * More idempotent binding removal.
+
+ When transactions that removed bindings were rolled back and retried, the code
+ that performed the deletion could fail with a misleading assertion failure, `{error, not_found}`.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1689](https://github.com/rabbitmq/rabbitmq-server/pull/1689)
+
+ * A resource alarm triggered during boot could prevent the node from starting.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1644](https://github.com/rabbitmq/rabbitmq-server/issues/1644)
+
+ * Environments with high queue or channel churn could run into an exception in local process group.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1699](https://github.com/rabbitmq/rabbitmq-server/issues/1699)
+
+ * Priority queues no longer fail with an exception when used together with other `rabbit_backing_queue`
+ behaviour implementations.
+
+ Contributed by Matteo Cafasso.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1669](https://github.com/rabbitmq/rabbitmq-server/pull/1669)
+
+ * Syslog configuration parameters could be rejected by config validator.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1639](https://github.com/rabbitmq/rabbitmq-server/issues/1639)
+
+ * `rabbitmq-echopid.bat` now loads `rabbitmq-env.bat` correctly.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1642](https://github.com/rabbitmq/rabbitmq-server/issues/1642)
+
+#### Enhancements
+
+ * Improved config file validation and loading. For example, `advanced.config` now can be used
+ without the main `rabbitmq.conf` config file.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1671](https://github.com/rabbitmq/rabbitmq-server/pull/1671)
+
+ * On Erlang/OTP 21 Syslog server now can be configured using a hostname (not just an IP address).
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1658](https://github.com/rabbitmq/rabbitmq-server/pull/1658)
+
+ * `rabbitmqctl rename_cluster_node` documentation improvements.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1641](https://github.com/rabbitmq/rabbitmq-server/pull/1641)
+
+
+### CLI Tools
+
+#### Bug Fixes
+
+ * Policies were unintentionally excluded from `rabbitmqctl report` output.
+
+ Contributed by Ayanda Dube.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#256](https://github.com/rabbitmq/rabbitmq-cli/pull/256)
+
+ * Global parameters were unintentionally excluded from `rabbitmqctl report` output.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#258](https://github.com/rabbitmq/rabbitmq-cli/pull/258)
+
+
+### Management Plugin
+
+#### Bug Fixes
+
+ * Virtual hosts imported from definitions or created via the HTTP API will explicitly await vhost initialisation
+ on all cluster nodes. This change was previously scheduled for `3.7.6` but was only partially applied (it didn't have any effect on HTTP API clients).
+
+ GitHub issues: [rabbitmq/rabbitmq-management#575](https://github.com/rabbitmq/rabbitmq-management/issues/575), [rabbitmq/rabbitmq-management#591](https://github.com/rabbitmq/rabbitmq-management/pull/591)
+
+ * HTTP API connection endpoints returned a duplicate key, `node`.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#601](https://github.com/rabbitmq/rabbitmq-management/issues/601)
+
+ * Response compression wasn't enabled by default for compatible clients.
+
+ * Failed login error message now clears automatically after successful login.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#609](https://github.com/rabbitmq/rabbitmq-management/issues/609)
+
+#### Enhancements
+
+ * Cowboy listener settings now can be configured using new style config file.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#596](https://github.com/rabbitmq/rabbitmq-management/issues/596)
+
+ * Cowboy timeouts now can be configured.
+
+ GitHub issues: [rabbitmq/rabbitmq-management#594](https://github.com/rabbitmq/rabbitmq-management/issues/594)
+
+ * `rabbitmqadmin` now supports a new argument, `--request-timeout`.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#593](https://github.com/rabbitmq/rabbitmq-management/issues/593)
+
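+   A usage sketch (the timeout value below is arbitrary):
+
+   ``` sh
+   # allow up to 120 seconds for the HTTP API request to complete
+   rabbitmqadmin --request-timeout 120 list queues
+   ```
+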
+ * Switching sections will now scroll to the top of the newly rendered page.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#590](https://github.com/rabbitmq/rabbitmq-management/pull/590)
+
+
+### Consistent Hashing Exchange Plugin
+
+#### Enhancements
+
+ * Message distribution uniformity has been significantly improved. [Jump Consistent Hash](https://arxiv.org/abs/1406.2294) is now used for
+ the hashing function.
+
+ GitHub issue: [rabbitmq/rabbitmq-consistent-hash-exchange#37](https://github.com/rabbitmq/rabbitmq-consistent-hash-exchange/issues/37)
+
+
+### STOMP Plugin
+
+#### Bug Fixes
+
+ * Nodes experiencing very high connection churn could run out of Erlang processes.
+
+ GitHub issues: [rabbitmq/rabbitmq-stomp#125](https://github.com/rabbitmq/rabbitmq-stomp/pull/125), [rabbitmq/rabbitmq-server#1640](https://github.com/rabbitmq/rabbitmq-server/pull/1640)
+
+
+### Web STOMP Plugin
+
+#### Enhancements
+
+ * WebSocket endpoint path is now configurable.
+
+ GitHub issue: [rabbitmq/rabbitmq-web-stomp#86](https://github.com/rabbitmq/rabbitmq-web-stomp/issues/86)
+
+
+### MQTT Plugin
+
+#### Bug Fixes
+
+ * Session presence flag was always set to `true` regardless of the state of the client's session.
+
+ GitHub issue: [rabbitmq/rabbitmq-mqtt#163](https://github.com/rabbitmq/rabbitmq-mqtt/issues/163)
+
+
+### Web MQTT Plugin
+
+#### Enhancements
+
+ * WebSocket endpoint path is now configurable.
+
+ Contributed by Thomas Sauzeau.
+
+ GitHub issue: [rabbitmq/rabbitmq-web-mqtt#30](https://github.com/rabbitmq/rabbitmq-web-mqtt/pull/30)
+
+
+### Shovel Plugin
+
+#### Bug Fixes
+
+ * The plugin could try to forward messages consumed from the source before receiving
+   credit from the AMQP 1.0 destination.
+
+ GitHub issue: [rabbitmq/rabbitmq-shovel#44](https://github.com/rabbitmq/rabbitmq-shovel/pull/44)
+
+#### Enhancements
+
+* `sasl=plain` is now added to connection URIs if no other mechanism is specified explicitly
+ by the user.
+
+ GitHub issue: [rabbitmq/rabbitmq-amqp1.0-client#10](https://github.com/rabbitmq/rabbitmq-amqp1.0-client/issues/10)
+
+ * TLS version for AMQP 1.0 endpoints now can be configured via a connection URI query parameter.
+
+ GitHub issue: [rabbitmq/rabbitmq-amqp1.0-client#19](https://github.com/rabbitmq/rabbitmq-amqp1.0-client/issues/19)
+
+ * Specifying a port without a hostname in connection URI is now considered
+ to be an unsupported combination and will throw an error.
+
+ GitHub issue: [rabbitmq/rabbitmq-erlang-client#107](https://github.com/rabbitmq/rabbitmq-erlang-client/issues/107)
+
+
+### Federation Plugin
+
+#### Bug Fixes
+
+ * Improved OTP release handling compatibility.
+
+ Contributed by Ayanda Dube.
+
+ GitHub issue: [rabbitmq/rabbitmq-federation#78](https://github.com/rabbitmq/rabbitmq-federation/pull/78)
+
+#### Enhancements
+
+ * Link initialization optimizations.
+
+ Contributed by Ayanda Dube.
+
+ GitHub issues: [rabbitmq/rabbitmq-federation#79](https://github.com/rabbitmq/rabbitmq-federation/pull/79), [rabbitmq/rabbitmq-federation#80](https://github.com/rabbitmq/rabbitmq-federation/pull/80)
+
+ * Specifying a port without a hostname in connection URI is now considered
+ to be an unsupported combination and will throw an error.
+
+ GitHub issue: [rabbitmq/rabbitmq-erlang-client#107](https://github.com/rabbitmq/rabbitmq-erlang-client/issues/107)
+
+
+### Peer Discovery Etcd Plugin
+
+#### Bug Fixes
+
+ * Node name is now correctly extracted from the keys returned by
+ etcd even when key prefix includes slashes.
+
+ GitHub issue: [rabbitmq/rabbitmq-peer-discovery-etcd#14](https://github.com/rabbitmq/rabbitmq-peer-discovery-etcd/issues/14)
+
+
+### Debian Package
+
+#### Bug Fixes
+
+ * Package installation scripts will make sure that the `/etc/rabbitmq` directory is readable.
+
+ GitHub issue: [rabbitmq/rabbitmq-server-release#72](https://github.com/rabbitmq/rabbitmq-server-release/pull/72)
+
+
+### RPM Package
+
+#### Bug Fixes
+
+ * Package installation scripts will make sure that the `/etc/rabbitmq` directory is readable.
+
+ GitHub issue: [rabbitmq/rabbitmq-server-release#72](https://github.com/rabbitmq/rabbitmq-server-release/pull/72)
+
+ * If `rabbitmq-plugins` is invoked before first server start, Erlang cookie permissions will
+ be correctly set to those of the RabbitMQ user and group (currently `rabbitmq`).
+
+ GitHub issue: [rabbitmq/rabbitmq-server-release#85](https://github.com/rabbitmq/rabbitmq-server-release/issues/85)
+
+
+### Event Exchange Plugin
+
+#### Bug Fixes
+
+ * `amq.rabbitmq.event` exchange is now deleted when plugin is disabled.
+
+ GitHub issue: [rabbitmq/rabbitmq-event-exchange#33](https://github.com/rabbitmq/rabbitmq-event-exchange/issues/33)
+
+#### Enhancements
+
+ * The plugin now can be configured using new style config file:
+
+``` ini
+rabbitmq_event_exchange.vhost = audit
+```
+
+ GitHub issue: [rabbitmq/rabbitmq-event-exchange#31](https://github.com/rabbitmq/rabbitmq-event-exchange/issues/31)
+
+
+### Tracing Plugin
+
+#### Enhancements
+
+ * It is now possible to set up a tracer on any cluster node via management UI and HTTP API.
+
+ GitHub issue: [rabbitmq/rabbitmq-tracing#24](https://github.com/rabbitmq/rabbitmq-tracing/pull/24)
+
+ * Tracer connection credentials now can be specified via the management UI and HTTP API.
+
+ GitHub issue: [rabbitmq/rabbitmq-tracing#25](https://github.com/rabbitmq/rabbitmq-tracing/pull/25)
+
+
+### AMQP 1.0 Erlang Client
+
+#### Enhancements
+
+ * `sasl=plain` is now added to connection URIs if no other mechanism is specified explicitly
+ by the user.
+
+ GitHub issue: [rabbitmq/rabbitmq-amqp1.0-client#10](https://github.com/rabbitmq/rabbitmq-amqp1.0-client/issues/10)
+
+ * `hostname` connection parameter will now be set to the value of the `address` parameter for improved
+ compatibility with AMQP 1.0 implementations (e.g. Azure ServiceBus) that expect it to be set.
+
+ GitHub issue: [rabbitmq/rabbitmq-amqp1.0-client#1](https://github.com/rabbitmq/rabbitmq-amqp1.0-client/issues/1)
+
+ * TLS version now can be configured via a connection URI query parameter.
+
+ GitHub issue: [rabbitmq/rabbitmq-amqp1.0-client#19](https://github.com/rabbitmq/rabbitmq-amqp1.0-client/issues/19)
+
+### AMQP 0-9-1 Erlang Client
+
+#### Enhancements
+
+ * Specifying a port without a hostname in connection URI is now considered
+ to be an unsupported combination and will throw an error.
+
+ GitHub issue: [rabbitmq/rabbitmq-erlang-client#107](https://github.com/rabbitmq/rabbitmq-erlang-client/issues/107)
+
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins or the client libraries. Please download the archive named `rabbitmq-3.7.8.tar.gz`.
diff --git a/release-notes/3.7.9.md b/release-notes/3.7.9.md
new file mode 100644
index 0000000000..bf8319ba73
--- /dev/null
+++ b/release-notes/3.7.9.md
@@ -0,0 +1,251 @@
+## RabbitMQ 3.7.9
+
+RabbitMQ `3.7.9` is a maintenance release. It focuses on bug fixes and minor usability improvements.
+
+### Compatibility Notes
+
+CLI tools in this release will often produce an extra line of output, as they did in `3.6.x` releases,
+when `-q` is not provided. Tools that parse CLI command output should use `-q --no-table-headers` to suppress all additional output meant for interactive use or
+avoid parsing output entirely (e.g. use the HTTP API).
+
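+For example, a script that lists queues could invoke (the queue columns shown are illustrative):
+
+``` sh
+# suppress the extra informational line and table headers
+rabbitmqctl list_queues name messages -q --no-table-headers
+```
+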
+#### Upgrading to Erlang 21.0
+
+When upgrading to this release **and upgrading Erlang to 21.0** at the same time, extra care has to be taken.
+Since CLI tools from RabbitMQ releases older than 3.7.7 will fail on Erlang 21, RabbitMQ **must be upgraded before Erlang**.
+
+#### Documentation Guides and Change Log
+
+See [3.7.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.7.0) upgrade and
+compatibility notes if upgrading from an earlier release.
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades and
+[RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+
+## Changes
+
+### Core Server
+
+#### Bug Fixes
+
+ * Queue deletion loaded bindings in an inefficient way.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1690](https://github.com/rabbitmq/rabbitmq-server/issues/1690)
+
+ * Heartbeat monitor now correctly sends heartbeats at half the negotiated timeout interval.
+ It previously could fail to do so because it considered its own traffic to be on-the-wire
+ activity from the peer.
+
+ GitHub issue: [rabbitmq/rabbitmq-common#278](https://github.com/rabbitmq/rabbitmq-common/pull/278)
+
+ * Nodes were using a [client] Syslog UDP socket even when [logging to Syslog](https://www.rabbitmq.com/logging.html#logging-to-syslog) was not enabled.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1718](https://github.com/rabbitmq/rabbitmq-server/issues/1718)
+
+ * `ERL_EPMD_PORT` was ignored when configured in `rabbitmq-env.conf`.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1709](https://github.com/rabbitmq/rabbitmq-server/issues/1709)
+
+ * Proxy Protocol dependency is now compatible with Erlang/OTP 21.
+
+ GitHub issue: [rabbitmq/rabbitmq-common#269](https://github.com/rabbitmq/rabbitmq-common/issues/269)
+
+#### Enhancements
+
+ * It is now possible to configure [cipher suites](https://www.rabbitmq.com/ssl.html#configuring-ciphers) using new style config format.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1712](https://github.com/rabbitmq/rabbitmq-server/issues/1712)
+
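+   A minimal sketch of the new style format (the cipher suite names are examples only):
+
+   ``` ini
+   ssl_options.ciphers.1 = ECDHE-ECDSA-AES256-GCM-SHA384
+   ssl_options.ciphers.2 = ECDHE-RSA-AES256-GCM-SHA384
+   ```
+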
+ * When a listener fails to start (bind to a server socket), error messages involve less context and are
+ easier to read.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1711](https://github.com/rabbitmq/rabbitmq-server/issues/1711)
+
+ * Improved error reporting for when `erl` or `erl.exe` are not in the node's `PATH`.
+
+ GitHub issue: [rabbitmq/rabbitmq-common#275](https://github.com/rabbitmq/rabbitmq-common/pull/275)
+
+ * 10 TLS connection acceptors are now used by default.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1729](https://github.com/rabbitmq/rabbitmq-server/issues/1729)
+
+
+### CLI Tools
+
+#### Bug Fixes
+
+ * `rabbitmqctl list_*` commands did not include table column headers.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#264](https://github.com/rabbitmq/rabbitmq-cli/issues/264)
+
+ * If `RABBITMQ_NODENAME` is configured, CLI tools will use its hostname part when generating their own
+   Erlang node names.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#270](https://github.com/rabbitmq/rabbitmq-cli/issues/270)
+
+ * On Windows, CLI tool batch scripts exited with a `0` exit code when they failed to connect to the node.
+
+ Contributed by Artem Zinenko.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1735](https://github.com/rabbitmq/rabbitmq-server/issues/1735).
+
+#### Enhancements
+
+ * `rabbitmqctl stop` now supports an `--idempotent` option that makes the command exit successfully when
+   the target node is not running.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#272](https://github.com/rabbitmq/rabbitmq-cli/pull/272)
+
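+   For example:
+
+   ``` sh
+   # exits with code 0 even if the target node is already stopped
+   rabbitmqctl stop --idempotent
+   ```
+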
+ * `rabbitmqctl add_vhost` is now idempotent (it no longer returns an error when the vhost already exists).
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#260](https://github.com/rabbitmq/rabbitmq-cli/issues/260)
+
+
+### Management and Management Agent Plugins
+
+#### Bug Fixes
+
+ * Logo link now works better with a non-blank API endpoint prefix.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#604](https://github.com/rabbitmq/rabbitmq-management/issues/604)
+
+ * Decimal headers and argument values are now serialised to JSON correctly.
+
+ GitHub issue: [rabbitmq/rabbitmq-management-agent#71](https://github.com/rabbitmq/rabbitmq-management-agent/pull/71)
+
+#### Enhancements
+
+ * It is now possible to configure both HTTPS and HTTP listeners using new syntax that's consistent with
+ Web STOMP and Web MQTT plugins:
+
+ ``` ini
+ management.tcp.port = 15672
+
+ management.ssl.port = 15671
+ management.ssl.cacertfile = /path/to/ca_certificate.pem
+ management.ssl.certfile = /path/to/server_certificate.pem
+ management.ssl.keyfile = /path/to/server_key.pem
+ ```
+
+ GitHub issue: [rabbitmq/rabbitmq-management#563](https://github.com/rabbitmq/rabbitmq-management/issues/563)
+
+ * It is now possible to configure `Content-Security-Policy` (CSP) header set by the API.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#623](https://github.com/rabbitmq/rabbitmq-management/issues/623)
+
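+   A configuration sketch (the `management.csp.policy` key name and the policy value are assumptions,
+   to be checked against the plugin documentation):
+
+   ``` ini
+   management.csp.policy = default-src 'self'
+   ```
+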
+ * It is now possible to configure the `Strict-Transport-Security` (HSTS) header set by the API.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#624](https://github.com/rabbitmq/rabbitmq-management/issues/624)
+
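+   A configuration sketch (the `management.hsts.policy` key name and value are assumptions,
+   to be checked against the plugin documentation):
+
+   ``` ini
+   management.hsts.policy = max-age=31536000; includeSubDomains
+   ```
+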
+ * `GET /api/nodes/{node}` endpoint no longer aggregates data for other cluster nodes only to discard it later.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#615](https://github.com/rabbitmq/rabbitmq-management/issues/615)
+
+ * When `Handle.exe` is used and returns no file handle information in its output, a warning will be logged.
+
+ GitHub issue: [rabbitmq/rabbitmq-management-agent#67](https://github.com/rabbitmq/rabbitmq-management-agent/issues/67)
+
+
+### LDAP Plugin
+
+#### Enhancements
+
+ * String matching queries now support multi-value results.
+
+ GitHub issue: [rabbitmq/rabbitmq-auth-backend-ldap#100](https://github.com/rabbitmq/rabbitmq-auth-backend-ldap/issues/100)
+
+ * `ad_domain` and `ad_user` are new variables available in LDAP plugin queries. They are extracted from the username
+ when it's in `Domain\User` format, which is typically specific to ActiveDirectory.
+
+ GitHub issue: [rabbitmq/rabbitmq-auth-backend-ldap#98](https://github.com/rabbitmq/rabbitmq-auth-backend-ldap/issues/98)
+
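+   For illustration only, assuming the new variables use the same `${variable}` substitution syntax
+   as `${username}` (the DN below is hypothetical):
+
+   ``` ini
+   auth_ldap.user_dn_pattern = cn=${ad_user},ou=People,dc=example,dc=com
+   ```
+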
+ * Search queries that return referrals will result in an error instead of an exception.
+
+ GitHub issue: [rabbitmq/rabbitmq-auth-backend-ldap#97](https://github.com/rabbitmq/rabbitmq-auth-backend-ldap/issues/97)
+
+
+### Web STOMP Plugin
+
+#### Enhancements
+
+ * Advanced WebSocket options now can be configured. Compression is enabled by default.
+ Compression won't be used with clients that do not support it.
+
+ GitHub issues: [rabbitmq/rabbitmq-web-stomp#89](https://github.com/rabbitmq/rabbitmq-web-stomp/pull/89), [rabbitmq/rabbitmq-web-stomp#90](https://github.com/rabbitmq/rabbitmq-web-stomp/issues/90)
+
+
+### Web MQTT Plugin
+
+#### Bug Fixes
+
+ * WebSocket `PING` frames are now ignored instead of being propagated to MQTT frame handler.
+
+ GitHub issue: [rabbitmq/rabbitmq-web-mqtt#32](https://github.com/rabbitmq/rabbitmq-web-mqtt/pull/32)
+
+#### Enhancements
+
+* Advanced WebSocket options now can be configured. Compression is enabled by default.
+ Compression won't be used with clients that do not support it.
+
+ GitHub issues: [rabbitmq/rabbitmq-web-mqtt#34](https://github.com/rabbitmq/rabbitmq-web-mqtt/issues/34)
+
+
+### AWS Peer Discovery Plugin
+
+#### Bug Fixes
+
+ * EC2 API endpoint requests used an unreasonably low timeout (100 ms). The new value
+ is 10 seconds.
+
+ GitHub issue: [rabbitmq/rabbitmq-peer-discovery-aws#24](https://github.com/rabbitmq/rabbitmq-peer-discovery-aws/issues/24)
+
+
+### Consul Peer Discovery Plugin
+
+#### Bug Fixes
+
+ * It wasn't possible to specify Consul service tags via new style config format.
+
+ GitHub issue: [rabbitmq/rabbitmq-peer-discovery-consul#18](https://github.com/rabbitmq/rabbitmq-peer-discovery-consul/issues/18)
+
+ * It wasn't possible to configure lock key prefix via new style config format.
+
+ GitHub issues: [rabbitmq/rabbitmq-peer-discovery-consul#16](https://github.com/rabbitmq/rabbitmq-peer-discovery-consul/issues/16)
+
+#### Enhancements
+
+* Lock acquisition timeout now can be configured using `cluster_formation.consul.lock_timeout` as well as `cluster_formation.consul.lock_wait_time` (an alias),
+ to be consistent with the Etcd implementation.
+
+ GitHub issue: [rabbitmq/rabbitmq-peer-discovery-consul#20](https://github.com/rabbitmq/rabbitmq-peer-discovery-consul/pull/20)
+
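+   For example (the value is assumed to be in seconds; 300 is only an illustration):
+
+   ``` ini
+   cluster_formation.consul.lock_timeout = 300
+   ```
+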
+
+### Etcd Peer Discovery Plugin
+
+#### Enhancements
+
+ * Lock acquisition timeout now can be configured using `cluster_formation.etcd.lock_timeout` as well as `cluster_formation.etcd.lock_wait_time` (an alias),
+ to be consistent with the Consul implementation.
+
+ GitHub issue: [rabbitmq/rabbitmq-peer-discovery-etcd#16](https://github.com/rabbitmq/rabbitmq-peer-discovery-etcd/pull/16)
+
+
+### Event Exchange Plugin
+
+#### Enhancements
+
+ * Throughput optimizations reduce probability of high memory consumption by `rabbit_event` processes due to
+ event backlog accumulation.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1722](https://github.com/rabbitmq/rabbitmq-server/issues/1722)
+
+
+### RPM Package
+
+#### Bug Fixes
+
+ * Post-installation script renamed `rabbitmq.conf` to `rabbitmq-env.conf`. A long time ago `rabbitmq.conf` was used to configure
+ environment variables (like `rabbitmq-env.conf` today) and old post-installation steps were not removed when `rabbitmq.conf`
+ was re-adopted for new style config files.
+
+ GitHub issue: [rabbitmq/rabbitmq-server-release#87](https://github.com/rabbitmq/rabbitmq-server-release/issues/87)
diff --git a/release-notes/3.8.0.md b/release-notes/3.8.0.md
new file mode 100644
index 0000000000..de3c946ace
--- /dev/null
+++ b/release-notes/3.8.0.md
@@ -0,0 +1,288 @@
+## RabbitMQ 3.8.0
+
+RabbitMQ `3.8.0` is a feature release. It contains several major improvements in areas of
+data safety, replication, observability, and ease of upgrades. Some highlight features are
+
+ * [Quorum Queues](http://next.rabbitmq.com/quorum-queues.html)
+ * Built-in [Prometheus support](http://next.rabbitmq.com/prometheus.html) with a set of Grafana dashboards to complement it
+ * [Feature Flags](http://next.rabbitmq.com/feature-flags.html)
+ * [Single Active Consumer](http://next.rabbitmq.com/consumers.html#single-active-consumer)
+ * New authentication and authorisation [backend that uses OAuth 2.0 (JWT)](https://github.com/rabbitmq/rabbitmq-auth-backend-oauth2) tokens and scopes
+
+A recorded webinar, [What's New in RabbitMQ 3.8](https://content.pivotal.io/webinars/may-23-what-s-new-in-rabbitmq-3-8-webinar), covers some of the highlights in this release.
+
+Several features in this release are backed by a new Raft implementation for Erlang and Elixir,
+[Ra](https://github.com/rabbitmq/ra/).
+
+### Erlang/OTP Compatibility Notes
+
+This release [**requires Erlang/OTP 21.3**](https://www.rabbitmq.com/which-erlang.html) or later.
+`22.x` series is recommended.
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision latest patch versions of Erlang `21.3.x` and `22.x`.
+
+
+### Rolling Upgrades to 3.8
+
+RabbitMQ 3.8.0 nodes can run alongside `3.7.18` or later `3.7.x` nodes.
+No 3.8.0-specific features will be available in a mixed version cluster. Mixed version clusters are meant
+to simplify rolling upgrades, not to be run for long periods of time.
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for documentation on upgrades.
+
+
+### Compatibility Notes
+
+#### Upgrading to Erlang 21.x or Later Versions
+
+When upgrading to this release **and upgrading Erlang to 21.x or later** at the same time, extra care has to be taken.
+Since CLI tools from RabbitMQ releases older than 3.7.7 will fail on Erlang 21 or later,
+RabbitMQ **must be upgraded before Erlang**.
+
+#### Upgrade Doc Guides and Change Log
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for documentation on upgrades
+and [RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+#### Client Library Compatibility
+
+Client libraries that were compatible with RabbitMQ `3.7.x` will be compatible with `3.8.0`.
+
+
+### Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the
+[RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Changes
+
+### Core Server
+
+#### Enhancements
+
+ * [Quorum Queues](http://next.rabbitmq.com/quorum-queues.html) built on top of the [Raft consensus algorithm](https://raft.github.io/) for data safety,
+ more predictable failure recovery, more efficient synchronisation of new and recovered followers, and parallel replication.
+
+ * [Feature Flags](http://next.rabbitmq.com/feature-flags.html) allow for mixed-version clusters and safer rolling upgrades.
+
+ * [Single Active Consumer](http://next.rabbitmq.com/consumers.html#single-active-consumer) makes it possible
+ to run a set of consumers for redundancy while ensuring that only one consumer is getting deliveries.
+
+ * New metric: dropped unroutable messages. Spotting faulty publishers and routing topology issues is now easier.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1904](https://github.com/rabbitmq/rabbitmq-server/issues/1904)
+
+ * New metrics: [connection](https://www.rabbitmq.com/connections.html#monitoring) and [channel churn](https://www.rabbitmq.com/channels.html#monitoring).
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1723](https://github.com/rabbitmq/rabbitmq-server/issues/1723)
+
+ * Quorum queues support redelivery tracking, which can be used by consumers to protect themselves from
+ poison message redeliveries.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#502](https://github.com/rabbitmq/rabbitmq-server/issues/502)
+
+ * New queue overflow behaviour: `reject-publish-dlx`. It is identical to `reject-publish`
+ but also dead letters rejected messages.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1443](https://github.com/rabbitmq/rabbitmq-server/issues/1443)
+
+ * Default bindings are now explicit instead of being rows in the internal data store. That means
+ that high queue churn results in significantly less binding churn, reducing lock contention on
+ schema database tables, and peak schema operation latencies with it.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1721](https://github.com/rabbitmq/rabbitmq-server/pull/1721)
+
+#### Internal API Changes
+
+ * [`amqqueue`](https://github.com/rabbitmq/rabbitmq-server/blob/master/src/amqqueue.erl) is a new public API module
+ that should be used to access queue state instead of using queue state records directly. This allows the record
+ to evolve a lot more rapidly with fewer or no code changes in the plugins that access queue state.
+
+ * `authn` and `authz` functions now have access to additional (e.g. protocol-specific) context information.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1767](https://github.com/rabbitmq/rabbitmq-server/issues/1767)
+
+ * Backing queue interface now exposes a function for message deduplication. Plugins now can track duplicate messages
+ in a way that makes it possible to negatively confirm duplicates back to the publisher.
+
+ Contributed by Matteo Cafasso.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1774](https://github.com/rabbitmq/rabbitmq-server/pull/1774).
+
+#### Usability
+
+ * New style configuration format is now more forgiving: it will ignore lines that only contain
+ whitespace and tab characters, as well as leading and trailing whitespace.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2072](https://github.com/rabbitmq/rabbitmq-server/pull/2072)
+
+ * Maximum message size is now configurable. The default was reduced to 128 MiB.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1812](https://github.com/rabbitmq/rabbitmq-server/pull/1812)
+
+#### Bug Fixes
+
+Most bug fixes in this release previously shipped in [`3.7.x` release series](https://rabbitmq.com/changelog.html).
+The list below contains community contributions and most important issues that were not backported to `3.7.x` releases.
+
+ * Queue index consistency and safety improvements.
+
+ Contributed by @tomyouyou.
+
+ GitHub issues: [rabbitmq/rabbitmq-server#2092](https://github.com/rabbitmq/rabbitmq-server/pull/2092), [rabbitmq/rabbitmq-server#2096](https://github.com/rabbitmq/rabbitmq-server/pull/2096), [rabbitmq/rabbitmq-server#2100](https://github.com/rabbitmq/rabbitmq-server/pull/2100).
+
+
+### CLI Tools
+
+#### Enhancements
+
+ * More user-friendly `rabbitmq-diagnostics status` output.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#340](https://github.com/rabbitmq/rabbitmq-cli/issues/340)
+
+ * New `help` command.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#316](https://github.com/rabbitmq/rabbitmq-cli/pull/316)
+
+ * A new set of fine-grained [health check](https://www.rabbitmq.com/monitoring.html#health-checks) commands.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#292](https://github.com/rabbitmq/rabbitmq-cli/issues/292)
+
+ * New tool, `rabbitmq-queues`, with commands that display Raft state metrics and manage nodes that
+ host quorum queue replicas.
+
+ GitHub issues: [rabbitmq/rabbitmq-cli#287](https://github.com/rabbitmq/rabbitmq-cli/issues/287), [rabbitmq/rabbitmq-cli#286](https://github.com/rabbitmq/rabbitmq-cli/issues/286)
+
+ * Feature flag status reporting.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#346](https://github.com/rabbitmq/rabbitmq-cli/issues/346)
+
+#### Usability
+
+ * `rabbitmq-diagnostics cipher_suites` now uses OpenSSL cipher suite format by default.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#267](https://github.com/rabbitmq/rabbitmq-cli/issues/267)
+
+
+### Management Plugin
+
+#### Enhancements
+
+ * Metric collection and visualisation in management UI now can be disabled in favor of built-in
+ Prometheus support and Grafana dashboard from Team RabbitMQ.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#707](https://github.com/rabbitmq/rabbitmq-management/pull/707)
+
+ * Feature flag management interface.
+
+ [rabbitmq/rabbitmq-management#648](https://github.com/rabbitmq/rabbitmq-management/pull/648)
+
+ * Quorum Queue support.
+
+ * Single Active Consumer support.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#650](https://github.com/rabbitmq/rabbitmq-management/pull/650)
+
+ * Support for more TLS options.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#644](https://github.com/rabbitmq/rabbitmq-management/pull/644)
+
+ * OAuth 2.0 Single Sign On support for [UAA](https://docs.cloudfoundry.org/concepts/architecture/uaa.html)
+
+ GitHub issue: [rabbitmq/rabbitmq-management#722](https://github.com/rabbitmq/rabbitmq-management/pull/722)
+
+ * Username and password-based Basic HTTP Auth authentication for API operations now can be disabled
+ in favor of OAuth 2.0 Single Sign On via [UAA](https://docs.cloudfoundry.org/concepts/architecture/uaa.html).
+
+ GitHub issue: [rabbitmq/rabbitmq-management#724](https://github.com/rabbitmq/rabbitmq-management/pull/724)
+
+
+### Shovel Plugin
+
+#### Bug Fixes
+
+Most bug fixes in this release previously shipped in [`3.7.x` release series](https://rabbitmq.com/changelog.html).
+The list below contains most prominent fixes.
+
+ * Sensitive values in Shovel connection state (namely, the connection credentials) are now stored in
+ encrypted form. This avoids unintentional credential logging by the runtime (exception logger)
+ at the cost of making troubleshooting authentication failures harder.
+
+ GitHub issue: [rabbitmq-erlang-client#123](https://github.com/rabbitmq/rabbitmq-erlang-client/issues/123)
+
+
+### Federation Plugin
+
+#### Bug Fixes
+
+Most bug fixes in this release previously shipped in [`3.7.x` release series](https://rabbitmq.com/changelog.html).
+The list below contains most prominent fixes.
+
+ * Sensitive values in federation link state (namely, the connection credentials) are now stored in
+ encrypted form. This avoids unintentional credential logging by the runtime (exception logger)
+ at the cost of making troubleshooting authentication failures harder.
+
+ GitHub issue: [rabbitmq-erlang-client#123](https://github.com/rabbitmq/rabbitmq-erlang-client/issues/123)
+
+
+### JWT and OAuth 2.0 Plugin
+
+Initial release.
+
+GitHub repository: [rabbitmq/rabbitmq-auth-backend-oauth2](https://github.com/rabbitmq/rabbitmq-auth-backend-oauth2)
+
+
+### MQTT Plugin
+
+#### Enhancements
+
+ * Client ID tracking is now cluster-wide (state is replicated across all nodes). A majority of nodes is required
+   for client connections to be accepted. This is a consequence of the new consistency-oriented design.
+
+ GitHub issue: [rabbitmq/rabbitmq-mqtt#91](https://github.com/rabbitmq/rabbitmq-mqtt/issues/91)
+
+
+### Web STOMP Plugin
+
+#### Bug Fixes
+
+Most bug fixes in this release previously shipped in [`3.7.x` release series](https://rabbitmq.com/changelog.html).
+The list below contains most prominent fixes.
+
+ * Maximum concurrent client connection limit now defaults to "infinity" (so, there is no limit).
+
+ GitHub issue: [rabbitmq/rabbitmq-web-stomp#113](https://github.com/rabbitmq/rabbitmq-web-stomp/issues/113)
+
+
+### Web MQTT Plugin
+
+#### Bug Fixes
+
+Most bug fixes in this release previously shipped in [`3.7.x` release series](https://rabbitmq.com/changelog.html).
+The list below contains most prominent fixes.
+
+ * Maximum concurrent client connection limit now defaults to "infinity" (so, there is no limit).
+
+ GitHub issue: [rabbitmq/rabbitmq-web-mqtt#28](https://github.com/rabbitmq/rabbitmq-web-mqtt/issues/28)
+
+
+### RabbitMQ Erlang Client
+
+#### Bug Fixes
+
+Most bug fixes in this release previously shipped in [`3.7.x` release series](https://rabbitmq.com/changelog.html).
+The list below contains most prominent fixes.
+
+ * Sensitive values in connection state (namely, the connection credentials) are now stored in
+ encrypted form. This avoids unintentional credential logging by the runtime (exception logger)
+ at the cost of making troubleshooting authentication failures harder.
+
+ GitHub issue: [rabbitmq-erlang-client#123](https://github.com/rabbitmq/rabbitmq-erlang-client/issues/123)
+
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins or the client libraries. Please download the archive named `rabbitmq-server-3.8.0.tar.xz`.
diff --git a/release-notes/3.8.1.md b/release-notes/3.8.1.md
new file mode 100644
index 0000000000..4415c7e9e2
--- /dev/null
+++ b/release-notes/3.8.1.md
@@ -0,0 +1,219 @@
+## RabbitMQ 3.8.1
+
+RabbitMQ `3.8.1` is a maintenance release that focuses on bug fixes.
+
+### Erlang/OTP Compatibility Notes
+
+This release [**requires Erlang/OTP 21.3**](https://www.rabbitmq.com/which-erlang.html) or later.
+`22.x` series is recommended.
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision latest patch versions of Erlang `21.3.x` and `22.x`.
+
+### Compatibility Notes
+
+#### Upgrading to Erlang 21.x or Later Versions
+
+When upgrading to this release from `3.7.6` or an older version, extra care has to be taken.
+
+Since CLI tools from RabbitMQ releases older than 3.7.7 will fail on Erlang 21 or later,
+RabbitMQ **must be upgraded at the same time as Erlang**. Alternatively the node can be upgraded
+to `3.7.18` first, then Erlang 21.x or 22.x, then to RabbitMQ 3.8.x.
+
+#### Upgrade Doc Guides and Change Log
+
+See [3.8.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.8.0) upgrade
+and compatibility notes first if upgrading from an earlier release.
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades
+and [RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+### Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the
+[RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Changes
+
+### Core Server
+
+#### Bug Fixes
+
+ * Rolling cluster upgrade could fail if new versions were deployed to all cluster nodes at once instead
+ of a rolling upgrade-then-restart for each node individually.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2132](https://github.com/rabbitmq/rabbitmq-server/issues/2132)
+
+ * Avoid pattern expansion when logging connection closure reason.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2155](https://github.com/rabbitmq/rabbitmq-server/pull/2155)
+
+ * Improved error handling in a module that continuously registers the node with [epmd](https://www.rabbitmq.com/clustering.html) avoids
+ log noise.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2130](https://github.com/rabbitmq/rabbitmq-server/issues/2130)
+
+#### Enhancements
+
+ * Peak [quorum queue](https://www.rabbitmq.com/quorum-queues.html) memory usage was reduced by up to 25% on some workloads.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2138](https://github.com/rabbitmq/rabbitmq-server/pull/2138)
+
+
+### CLI Tools
+
+#### Bug Fixes
+
+ * `rabbitmqctl await_startup` failed with an exception when effective timeout was set to `infinity`.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2144](https://github.com/rabbitmq/rabbitmq-server/pull/2144)
+
+ * `rabbitmq-diagnostics check_port_connectivity` produced a false positive in an IPv6-only environment.
+
+ Contributed by Gabriele Santomaggio.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#385](https://github.com/rabbitmq/rabbitmq-cli/pull/385)
+
+ * `rabbitmq-diagnostics list_unresponsive_queues` failed in environments that had quorum queues.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#386](https://github.com/rabbitmq/rabbitmq-cli/issues/386)
+
+ * `rabbitmq-diagnostics status`, `rabbitmq-diagnostics cluster_status`, `rabbitmq-diagnostics listeners` now support
+ `--formatter=erlang` (raw Erlang data structure output)
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#383](https://github.com/rabbitmq/rabbitmq-cli/issues/383)
+
+#### Enhancements
+
+ * `rabbitmq-diagnostics consume_event_stream` is a new command that makes it easier to consume a stream
+   of internal events. This can be useful for troubleshooting and auditing. Previously this was only
+ possible via the [rabbitmq-event-exchange](https://github.com/rabbitmq/rabbitmq-event-exchange) plugin.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#299](https://github.com/rabbitmq/rabbitmq-cli/issues/299)
+
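+   For example:
+
+   ``` sh
+   # streams internal events to standard output
+   rabbitmq-diagnostics consume_event_stream
+   ```
+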
+ * `rabbitmq-diagnostics check_certificate_expiration` is a new [health check](https://www.rabbitmq.com/monitoring.html#health-checks) command
+ that fails when any of the certificates used by target node expire within a specified time window.
+
+ `rabbitmq-diagnostics certificates` is its diagnostics (informative) counterpart that displays all CA and leaf certificates
+ used by target node.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#305](https://github.com/rabbitmq/rabbitmq-cli/issues/305)
+
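+   For example:
+
+   ``` sh
+   # fails if any certificate used by the target node is due to expire within the (default) time window
+   rabbitmq-diagnostics check_certificate_expiration
+
+   # lists the CA and leaf certificates used by the target node
+   rabbitmq-diagnostics certificates
+   ```
+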
+
+### Prometheus Plugin
+
+#### Bug Fixes
+
+ * Prometheus scraping API endpoint was unreasonably strict about the set of content types it accepted from clients,
+   which caused Telegraf Prometheus input requests to fail with a `406 Not Acceptable` response.
+
+ GitHub issue: [rabbitmq/rabbitmq-prometheus#12](https://github.com/rabbitmq/rabbitmq-prometheus/issues/12)
+
+
+### Management Plugin
+
+#### Bug Fixes
+
+ * Some metrics were omitted in the UI for queues that did not have `x-queue-type` set.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#741](https://github.com/rabbitmq/rabbitmq-management/issues/741)
+
+
+### LDAP Plugin
+
+#### Bug Fixes
+
+ * Errors were not propagated back to the client correctly in case of an LDAP server authentication failure.
+
+ GitHub issue: [rabbitmq/rabbitmq-auth-backend-ldap#116](https://github.com/rabbitmq/rabbitmq-auth-backend-ldap/issues/116)
+
+
+### Kubernetes Peer Discovery Plugin
+
+#### Bug Fixes
+
+ * Requests to Kubernetes API endpoints failed with an `nxdomain` (domain name resolution failure)
+ in an IPv6-only environment.
+
+ GitHub issue: [rabbitmq/rabbitmq-peer-discovery-k8s#55](https://github.com/rabbitmq/rabbitmq-peer-discovery-k8s/issues/55)
+
+
+### Consul Peer Discovery Plugin
+
+#### Enhancements
+
+ * It is now possible to specify service metadata values:
+
+ ``` ini
+ cluster_formation.consul.svc_meta.key1 = value1
+ cluster_formation.consul.svc_meta.key2 = value2
+ ```
+
+ GitHub issue: [rabbitmq/rabbitmq-peer-discovery-consul#34](https://github.com/rabbitmq/rabbitmq-peer-discovery-consul/issues/34)
+
+
+### MQTT Plugin
+
+ * Client ID tracking is now more resilient to node failures and decommissioning.
+
+ GitHub issue: [rabbitmq/rabbitmq-mqtt#213](https://github.com/rabbitmq/rabbitmq-mqtt/issues/213)
+
+
+### STOMP Plugin
+
+#### Enhancements
+
+ * The [`x-queue-type` header](https://www.rabbitmq.com/quorum-queues.html) is now accepted from STOMP clients.
+
+ GitHub issue: [rabbitmq/rabbitmq-stomp#138](https://github.com/rabbitmq/rabbitmq-stomp/issues/138)
+
+#### Bug Fixes
+
+ * `stomp.hide_server` is now available in the new style configuration format.
+
+ GitHub issue: [rabbitmq/rabbitmq-stomp#140](https://github.com/rabbitmq/rabbitmq-stomp/issues/140)
+
+
+### Web STOMP Plugin
+
+#### Bug Fixes
+
+ * The plugin emitted a warning on start.
+
+ GitHub issue: [rabbitmq/rabbitmq-web-stomp#115](https://github.com/rabbitmq/rabbitmq-web-stomp/issues/115)
+
+
+### Web MQTT Plugin
+
+#### Bug Fixes
+
+ * The plugin emitted a warning on start.
+
+ GitHub issue: [rabbitmq/rabbitmq-web-mqtt#59](https://github.com/rabbitmq/rabbitmq-web-mqtt/issues/59)
+
+
+### Erlang Client
+
+#### Bug Fixes
+
+ * Connection could not be restarted after a heartbeat timeout due to strict pattern matching.
+
+ Contributed by Giuseppe D'Anna (@larrywax).
+
+ GitHub issue: [rabbitmq/rabbitmq-erlang-client#126](https://github.com/rabbitmq/rabbitmq-erlang-client/pull/126)
+
+
+### Tracing Plugin
+
+#### Bug Fixes
+
+ * On Windows, deleting the file used by an active (running) trace resulted in an `EACCES` exception.
+
+ GitHub issue: [rabbitmq/rabbitmq-tracing#31](https://github.com/rabbitmq/rabbitmq-tracing/issues/31)
+
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins or the client libraries. Please download the archive named `rabbitmq-server-3.8.1.tar.xz`.
diff --git a/release-notes/3.8.10.md b/release-notes/3.8.10.md
new file mode 100644
index 0000000000..547eede86c
--- /dev/null
+++ b/release-notes/3.8.10.md
@@ -0,0 +1,351 @@
+## RabbitMQ 3.8.10
+
+**Important**: this release has been [superseded by `3.8.11`](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.8.11)
+which addresses a couple of bugs specific to this release (`3.8.10`)
+
+All users are advised to consult the set of changes in this release but then **upgrade straight
+to `3.8.11` or a newer version if available** and skip this release.
+
+
+RabbitMQ `3.8.10` is a maintenance release.
+Besides containing several bug fixes, it introduces a couple new features for quorum queues.
+
+This release [**no longer supports** Erlang 21.3](https://groups.google.com/forum/#!topic/rabbitmq-users/v3K5nZNsfwM).
+
+### Erlang/OTP Compatibility Notes
+
+This release [requires Erlang 22](https://www.rabbitmq.com/which-erlang.html) or
+[Erlang 23](http://blog.erlang.org/OTP-23-Highlights/).
+
+Erlang 21.3 [**is no longer supported**](https://groups.google.com/forum/#!topic/rabbitmq-users/v3K5nZNsfwM).
+`22.3` or `23.1` releases are recommended.
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision latest patch versions of Erlang `22.3.x`.
+
+
+## Upgrade and Compatibility Notes
+
+### Deprecations
+
+This release deprecates [boot time definition import](https://www.rabbitmq.com/definitions.html#import-on-boot) built into the management plugin.
+The feature still exists but is no longer documented or tested. Please switch to
+the built-in definition import mechanism:
+
+``` ini
+# DEPRECATED, requires management plugin to be enabled,
+# does not support import of definitions that depend on plugins
+# (e.g. Shovels or custom exchanges)
+management.load_definitions = /path/to/definitions/file.json
+```
+
+to
+
+``` ini
+# built-in feature, supports import of definitions
+# that depend on plugins (e.g. Shovels or custom exchanges)
+load_definitions = /path/to/definitions/file.json
+```
+
+Definition import in the management plugin (`management.load_definitions`) will be **removed** in
+a future RabbitMQ release.
+
+### Upgrade Doc Guides and Change Log
+
+See [3.8.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.8.0) upgrade and
+compatibility notes first if upgrading from an earlier release series (e.g. `3.7.x`).
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades and
+[RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+### Upgrading to Erlang 22.x or Later Versions
+
+When upgrading to this release from `3.7.15` or an older version, extra care has to be taken.
+
+Because older RabbitMQ CLI tools can be incompatible with Erlang 22+ releases,
+RabbitMQ **must be upgraded at the same time as Erlang**.
+
+Alternatively the node can be upgraded to `3.7.18` on Erlang 21.3 first,
+then Erlang 22.x or 23.x, then RabbitMQ to the most recent 3.8.x release.
+
+### Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users)
+and [RabbitMQ community Slack](https://rabbitmq-slack.herokuapp.com/).
+
+
+## Changes Worth Mentioning
+
+### Core Server
+
+#### Bug Fixes
+
+ * Starting with `3.8.0`, an unintentionally introduced assertion could prevent classic mirrored queue
+ mirrors from starting successfully in case the primary replica changed concurrently with their startup.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2449](https://github.com/rabbitmq/rabbitmq-server/pull/2449)
+
+ * Repeated polling of a quorum queue with `basic.get` that yielded an empty response could
+   result in an unbounded growth of log segment files on disk.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2651](https://github.com/rabbitmq/rabbitmq-server/pull/2651)
+
+ * RabbitMQ core plugin activation was revisited to be closer to that of `3.7.x`. This does not affect any
+   related improvements w.r.t. definition import, e.g. those introduced in [RabbitMQ `3.8.6`](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.8.6).
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2656](https://github.com/rabbitmq/rabbitmq-server/pull/2656)
+
+ * Syslog dependency was not started correctly. This meant no log messages were sent to Syslog.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2456](https://github.com/rabbitmq/rabbitmq-server/pull/2456)
+
+ * `rabbitmq-diagnostics check_if_node_is_quorum_critical` returned a false positive for a node [marked for maintenance](https://www.rabbitmq.com/upgrade.html#maintenance-mode).
+ Given the refinement to the `rabbitmq-upgrade drain` command in [rabbitmq/rabbitmq-server#2474](https://github.com/rabbitmq/rabbitmq-server/issues/2474),
+ `rabbitmq-diagnostics check_if_node_is_quorum_critical` now will unconditionally return a success
+   if target node is under maintenance.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2469](https://github.com/rabbitmq/rabbitmq-server/issues/2469)
+
+ * Queues could update their internal bookkeeping state incorrectly in some cases, leading to a `file_handle_cache`
+ operation exception.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2488](https://github.com/rabbitmq/rabbitmq-server/issues/2488)
+
+#### Enhancements
+
+ * [Quorum queues](https://www.rabbitmq.com/quorum-queues.html) now can use the [`reject-publish`](https://www.rabbitmq.com/maxlength.html#overflow-behaviour)
+ max length overflow strategy.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2407](https://github.com/rabbitmq/rabbitmq-server/pull/2407)
+
+ * [Quorum queues](https://www.rabbitmq.com/quorum-queues.html) now support [consumer priority](https://www.rabbitmq.com/consumers.html#priority).
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2451](https://github.com/rabbitmq/rabbitmq-server/pull/2451)
+
+ * Per-user connection and queue limits. This is similar to [per-vhost limits](https://www.rabbitmq.com/vhosts.html#limits) but,
+   as the name suggests, these limits are associated with a given user.
+
+ The limits are controlled using CLI tools or the HTTP API:
+
+ ``` sh
+ # limits user "user-e8d3f85c" to up to five connections and ten channels
+ rabbitmqctl set_user_limits "user-e8d3f85c" '{"max-connections": 5, "max-channels": 10}'
+
+ # clears the maximum number of connections limit for the user
+ rabbitmqctl clear_user_limits "user-e8d3f85c" "max-connections"
+
+ # clears all limits for the user
+ rabbitmqctl clear_user_limits "user-e8d3f85c" "all"
+ ```
+
+ Contributed by Erlang Solutions, sponsored by [CloudAMQP](https://cloudamqp.com).
+
+ GitHub issue: [rabbitmq/rabbitmq-server#607](https://github.com/rabbitmq/rabbitmq-server/issues/607), [rabbitmq/rabbitmq-server#2380](https://github.com/rabbitmq/rabbitmq-server/pull/2380)
+
+ * TLSv1.3 is no longer considered experimental (on Erlang 23) and excluded from TLS listener configuration.
+
+ GitHub issue: [rabbitmq/rabbitmq-common#415](https://github.com/rabbitmq/rabbitmq-common/issues/415)
+
+ * When a node is put under maintenance, it will now stop all local quorum queue replicas after completing
+ leadership transfer. This means that no quorum queue replicas on the node will be considered for
+ leader elections.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2474](https://github.com/rabbitmq/rabbitmq-server/issues/2474)
+
+ * Nodes now keep track of failed client authentication attempts and expose it as a metric via both the [Prometheus endpoint](https://www.rabbitmq.com/prometheus.html)
+ and the HTTP API at `GET /api/auth/attempts/{node}`.
+
+ The tracking is done for every cluster node individually. The protocols that support authentication attempt tracking
+ right now are AMQP 0-9-1, AMQP 1.0 and MQTT.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2436](https://github.com/rabbitmq/rabbitmq-server/pull/2436)
+
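+   A usage sketch for the HTTP API endpoint (credentials and node name are placeholders):
+
+   ``` sh
+   curl -s -u guest:guest "http://localhost:15672/api/auth/attempts/rabbit@target-host"
+   ```
+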
+ * `bypass_pem_cache` is a new configuration key that makes it possible to disable the cache of PEM files
+ [used by the Erlang TLS implementation](https://blog.heroku.com/how-we-sped-up-sni-tls-handshakes-by-5x).
+   This makes rotated certificates quicker for nodes to detect but can also increase latency for inbound
+ TLS connections, e.g. under [high connection churn scenarios](https://www.rabbitmq.com/connections.html#high-connection-churn).
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2686](https://github.com/rabbitmq/rabbitmq-server/pull/2686)
+
+ * Definition import now safely handles some missing queue object arguments.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2668](https://github.com/rabbitmq/rabbitmq-server/issues/2668)
+
+ * `rabbitmq.conf` schema now allows for [peer certificate chain verification depth](https://www.rabbitmq.com/ssl.html#peer-verification) to be set to zero.
+ When this value is used, peer certificate must be [signed by a trusted CA certificate directly](https://erlang.org/doc/man/ssl.html#type-allowed_cert_chain_length) in order
+ for the verification to pass.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2493](https://github.com/rabbitmq/rabbitmq-server/issues/2493)
+
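+   A configuration sketch using the standard TLS listener options:
+
+   ``` ini
+   ssl_options.verify = verify_peer
+   # peer certificates must be signed directly by a trusted CA
+   ssl_options.depth = 0
+   ```
+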
+ * `RABBITMQ_IO_THREAD_POOL_SIZE` will no longer be respected by RabbitMQ nodes. It [is no longer relevant](http://blog.erlang.org/IO-Polling/)
+ with supported Erlang versions and will simplify [`rabbitmq-diagnostics runtime_thread_stats`](https://www.rabbitmq.com/runtime.html#thread-stats) output.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2473](https://github.com/rabbitmq/rabbitmq-server/issues/2473)
+
+ * The `+K` [runtime parameter](https://www.rabbitmq.com/runtime.html) is no longer used by RabbitMQ nodes as it has [no effect](http://blog.erlang.org/IO-Polling/)
+ with supported Erlang versions.
+
+ Contributed by Ayanda Dube (Erlang Solutions).
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2470](https://github.com/rabbitmq/rabbitmq-server/pull/2470)
+
+
+### CLI
+
+#### Enhancements
+
+ * It is now possible to enable all feature flags in a single command:
+
+ ``` sh
+ rabbitmqctl enable_feature_flag all
+ ```
+
+ Note that this will only enable the flags supported by all cluster members.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#455](https://github.com/rabbitmq/rabbitmq-cli/issues/455)
+
+ * `rabbitmq-queues peek` is a new command that allows for peeking at a queue position. It is only
+ supported by quorum queues (and not classic queues):
+
+ ``` sh
+ # peeks at the head of queue "qq.1" in virtual host "staging"
+   rabbitmq-queues peek --vhost "staging" "qq.1" 1
+ ```
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#452](https://github.com/rabbitmq/rabbitmq-cli/pull/452/)
+
+ * `rabbitmq-queues reclaim_quorum_memory` is a new command that forces Raft WAL compaction which in most cases should reduce memory footprint of a quorum queue:
+
+ ``` sh
+ rabbitmq-queues reclaim_quorum_memory --vhost "staging" "qq.1"
+ ```
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#453](https://github.com/rabbitmq/rabbitmq-cli/pull/453)
+
+#### Bug Fixes
+
+ * `rabbitmqctl rename_cluster_node` failed with an error about a missing directory.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#457](https://github.com/rabbitmq/rabbitmq-cli/issues/457)
+
+ * `rabbitmqctl export_definitions` exported optional binding arguments as blank.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2500](https://github.com/rabbitmq/rabbitmq-server/issues/2500)
+
+ * Force removal of a node did not remove it from quorum queue member lists.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2414](https://github.com/rabbitmq/rabbitmq-server/issues/2414)
+
+ * CLI tools now pick a unique identifier from a pool. Previously the pool of possible values was theoretically
+ unbounded, which could result in an atom table exhaustion condition on long running nodes that
+ had a lot of CLI tool invocations (e.g. CLI tools were used to collect monitoring data frequently).
+
+ Such use of CLI tools for [monitoring](https://www.rabbitmq.com/monitoring.html) is not recommended:
+ there are better options such as [Prometheus and Grafana](https://www.rabbitmq.com/prometheus.html).
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#461](https://github.com/rabbitmq/rabbitmq-cli/pull/461)
+
+
+### MQTT Plugin
+
+#### Bug Fixes
+
+ * Raft-based client ID tracker is now significantly more efficient in handling failing
+ or closed connections. This prevents potential [high memory usage of tracking tables](https://github.com/rabbitmq/rabbitmq-server/discussions/2688)
+ on the node.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2692](https://github.com/rabbitmq/rabbitmq-server/pull/2692)
+
+### Federation Plugin
+
+#### Bug Fixes
+
+ * Federation links that failed to connect due to a TCP connection timeout [leaked resources](https://www.rabbitmq.com/connections.html#monitoring).
+ Other connection failure scenarios did not result in a leak.
+
+ GitHub issue: [rabbitmq/rabbitmq-federation#119](https://github.com/rabbitmq/rabbitmq-federation/pull/119)
+
+
+### Shovel Plugin
+
+#### Bug Fixes
+
+ * An autodeleted Shovel would re-appear on other cluster nodes after the node that hosted it originally was restarted.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2655](https://github.com/rabbitmq/rabbitmq-server/issues/2655)
+
+### Prometheus Plugin
+
+#### Enhancements
+
+ * `GET /metrics/per-object` is a new endpoint that always returns [individual entity metrics](https://www.rabbitmq.com/prometheus.html#metric-aggregation), even if
+ `GET /metrics` is configured to return aggregated results.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2659](https://github.com/rabbitmq/rabbitmq-server/pull/2659)
+
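+   A usage sketch (15692 is the plugin's conventional default port):
+
+   ``` sh
+   # always returns unaggregated, per-object metrics
+   curl -s "http://localhost:15692/metrics/per-object"
+   ```
+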
+### Management Plugin
+
+#### Enhancements
+
+ * [Individual health checks](https://www.rabbitmq.com/monitoring.html#health-checks) are now available through the HTTP API.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#844](https://github.com/rabbitmq/rabbitmq-management/issues/844)
+
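+   A usage sketch (the `alarms` check name is an assumption; credentials are placeholders):
+
+   ``` sh
+   curl -s -u guest:guest "http://localhost:15672/api/health/checks/alarms"
+   ```
+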
+#### Bug Fixes
+
+ * Quorum queue consumer counter on the queue list page was not updated when consumers were cancelled or failed.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2421](https://github.com/rabbitmq/rabbitmq-server/issues/2421)
+
+ * The aliveness check endpoint now responds with a `503 Service Unavailable` instead of a 500 when
+ it cannot publish a test message, e.g. because a policy with zero message TTL was applied to all queues.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#855](https://github.com/rabbitmq/rabbitmq-management/issues/855)
+
+
+### AWS Peer Discovery Plugin
+
+#### Bug Fixes
+
+ * AWS API request errors are now propagated. This means that should an AWS API request fail,
+ the peer discovery subsystem will retry it a limited number of times.
+
+   Contributed by Stefan Moser (@stefanmoser).
+
+ GitHub issue: [rabbitmq/rabbitmq-peer-discovery-aws#40](https://github.com/rabbitmq/rabbitmq-peer-discovery-aws/pull/40)
+
+
+### RabbitMQ Erlang Client
+
+#### Bug Fixes
+
+ * `amqp_channel:call/4` timeouts are now safely reconciled with connection-level timeouts.
+
+ Contributed by Ayanda Dube.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2660](https://github.com/rabbitmq/rabbitmq-server/pull/2660)
+
+### Windows Installer
+
+#### Bug Fixes
+
+ * Windows installer is now signed by a new (rolled) key.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2666](https://github.com/rabbitmq/rabbitmq-server/issues/2666)
+
+ * The installer now correctly creates directories when RabbitMQ is installed to
+ a different drive from the shell (`cmd.exe`).
+
+   Contributed by Jiahao Guo (@Y2Nk4).
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2682](https://github.com/rabbitmq/rabbitmq-server/pull/2682)
+
+## Dependency Upgrades
+
+ * `credentials_obfuscation` was upgraded [from 2.2.3 to 2.3.0](https://github.com/rabbitmq/credentials-obfuscation/blob/master/ChangeLog.md)
+ * `ranch` is [no longer a dependency](https://github.com/rabbitmq/rabbitmq-server/pull/2707) of `rabbit_common`
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker, not the plugins or the client libraries.
+Please download the archive named `rabbitmq-server-3.8.10.tar.xz`.
diff --git a/release-notes/3.8.11.md b/release-notes/3.8.11.md
new file mode 100644
index 0000000000..b29f562d4f
--- /dev/null
+++ b/release-notes/3.8.11.md
@@ -0,0 +1,72 @@
+## RabbitMQ 3.8.11
+
+RabbitMQ `3.8.11` is a maintenance release that contains bug fixes.
+
+This release [**no longer supports** Erlang 21.3](https://groups.google.com/forum/#!topic/rabbitmq-users/v3K5nZNsfwM).
+
+### Erlang/OTP Compatibility Notes
+
+This release [requires Erlang 22](https://www.rabbitmq.com/which-erlang.html) or
+[Erlang 23](http://blog.erlang.org/OTP-23-Highlights/).
+
+Erlang 21.3 [**is no longer supported**](https://groups.google.com/forum/#!topic/rabbitmq-users/v3K5nZNsfwM).
+`22.3` or `23.2` releases are recommended.
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision latest patch versions of Erlang `22.3.x`.
+
+
+## Upgrade and Compatibility Notes
+
+### Upgrade Doc Guides and Change Log
+
+See [3.8.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.8.0) upgrade and
+compatibility notes first if upgrading from an earlier release series (e.g. `3.7.x`).
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades and
+[RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+### Upgrading to Erlang 22.x or Later Versions
+
+When upgrading to this release from `3.7.15` or an older version, extra care has to be taken.
+
+Because older RabbitMQ CLI tools can be incompatible with Erlang 22+ releases,
+RabbitMQ **must be upgraded at the same time as Erlang**.
+
+Alternatively the node can be upgraded to `3.7.18` on Erlang 21.3 first,
+then Erlang 22.x or 23.x, then RabbitMQ to the most recent 3.8.x release.
+
+### Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users)
+and [RabbitMQ community Slack](https://rabbitmq-slack.herokuapp.com/).
+
+
+## Changes Worth Mentioning
+
+### Core Server
+
+#### Bug Fixes
+
+ * Quorum queues that had active consumers during a rolling cluster upgrade could run into an exception and restart loop.
+
+ This issue is specific to [RabbitMQ `3.8.10`](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.8.10).
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2728](https://github.com/rabbitmq/rabbitmq-server/pull/2728)
+
+ * Nodes that had `reverse_dns_lookup` set to `true` could not accept client connections.
+
+ This issue is specific to [RabbitMQ `3.8.10`](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.8.10).
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2730](https://github.com/rabbitmq/rabbitmq-server/issues/2730)
+
+
+## Dependency Upgrades
+
+No dependency changes.
+
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker, not the plugins or the client libraries.
+Please download the archive named `rabbitmq-server-3.8.11.tar.xz`.
diff --git a/release-notes/3.8.12.md b/release-notes/3.8.12.md
new file mode 100644
index 0000000000..e9ea998eac
--- /dev/null
+++ b/release-notes/3.8.12.md
@@ -0,0 +1,214 @@
+## RabbitMQ 3.8.12
+
+RabbitMQ `3.8.12` is a maintenance release that contains bug fixes.
+
+### Erlang/OTP Compatibility Notes
+
+This release [requires Erlang 22](https://www.rabbitmq.com/which-erlang.html) or
+[Erlang 23](http://blog.erlang.org/OTP-23-Highlights/).
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision latest patch versions of Erlang `22.3.x`.
+
+
+## Upgrade and Compatibility Notes
+
+### Upgrade Doc Guides and Change Log
+
+See [3.8.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.8.0) upgrade and
+compatibility notes first if upgrading from an earlier release series (e.g. `3.7.x`).
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades and
+[RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+### Upgrading to Erlang 22.x or Later Versions
+
+When upgrading to this release from `3.7.15` or an older version, extra care has to be taken.
+
+Because older RabbitMQ CLI tools can be incompatible with Erlang 22+ releases,
+RabbitMQ **must be upgraded at the same time as Erlang**.
+
+Alternatively the node can be upgraded to `3.7.18` on Erlang 21.3 first,
+then Erlang 22.x or 23.x, then RabbitMQ to the most recent 3.8.x release.
+
+### Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users)
+and [RabbitMQ community Slack](https://rabbitmq-slack.herokuapp.com/).
+
+
+## Changes Worth Mentioning
+
+### Core Server
+
+#### Bug Fixes
+
+ * [Maintenance mode](https://www.rabbitmq.com/upgrade.html#maintenance-mode) no longer transfers leaders of classic mirrored queues.
+
+    The original transfer implementation was not guaranteed to be safe when only a
+    subset of cluster nodes hosted replicas. In addition, in environments with many
+    classic mirrored queues the transfer could take significantly longer than a
+    node shutdown would.
+
+ Quorum queue leadership transfer is still performed as it is dramatically more
+ efficient and never suffered from any downsides of leadership transfer of CMQs.
+
+ GitHub issue: [#2749](https://github.com/rabbitmq/rabbitmq-server/issues/2749)
+
+ * Exclusive queues that happen to be matched by classic queue mirroring policies
+ will no longer be considered for rebalancing operations since they are not
+ actually mirrored (the lifecycle is by design tied to a single client connection).
+
+ GitHub issue: [#2795](https://github.com/rabbitmq/rabbitmq-server/issues/2795)
+
+ * Default replica count for quorum queues was using an incorrect configuration key name.
+ The default value is now `3`, so in a cluster of five or seven nodes, only three
+   will host replicas for newly declared quorum queues. One replica is placed on the node
+   the declaring client is connected to, the other two are selected at random.
+
+ Declaring connections can control the number of replicas using the `x-quorum-initial-group-size`
+ optional queue argument at declaration time.
+
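+   For example, a declaration that asks for five replicas could look like the following. This is a
+   sketch using `rabbitmqadmin` (an assumption: the management plugin and its CLI are available;
+   the queue name is a placeholder, and any AMQP 0-9-1 client can pass the same arguments):
+
+   ``` shell
+   # declare a quorum queue whose initial replica group spans five nodes
+   rabbitmqadmin declare queue name=qq.orders durable=true \
+     arguments='{"x-queue-type": "quorum", "x-quorum-initial-group-size": 5}'
+   ```
+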
+ GitHub issue: [#2759](https://github.com/rabbitmq/rabbitmq-server/pull/2759)
+
+ * [Runtime busy waiting](https://www.rabbitmq.com/runtime.html#cpu-reduce-idle-usage) settings now
+   default to `none`, which reduces CPU footprint on nodes that are mostly idle. The values
+ can be overridden for workloads that benefit from different values.
+
+ Contributed by @carlhoerberg (CloudAMQP)
+
+ GitHub issue: [#2803](https://github.com/rabbitmq/rabbitmq-server/pull/2803)
+
+ * Definition import now imports runtime parameters after exchanges, queues, and bindings
+   to reduce the likelihood that starting dynamic Shovels races with topology
+   imports.
+
+ GitHub issue: [#2798](https://github.com/rabbitmq/rabbitmq-server/issues/2798)
+
+ * Memory monitor could run into an exception if queried very early on node boot
+ before it was fully initialized.
+
+ Contributed by @tomyouyou.
+
+ GitHub issue: [#2733](https://github.com/rabbitmq/rabbitmq-server/pull/2733)
+
+#### Enhancements
+
+ * `rabbitmq.conf` validation of TLS certificate and private key paths is now more thorough:
+   it checks whether the node has read (or read/write) permissions on the files instead of only
+   checking that it can read the files' metadata.
+
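+   A representative `rabbitmq.conf` snippet with the kind of paths that are now validated
+   (the paths themselves are placeholders):
+
+   ``` ini
+   listeners.ssl.default = 5671
+   # these files are now checked for read (or read/write) permissions at config time
+   ssl_options.cacertfile = /path/to/ca_certificate.pem
+   ssl_options.certfile   = /path/to/server_certificate.pem
+   ssl_options.keyfile    = /path/to/server_key.pem
+   ```
+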
+ GitHub issue: [#2685](https://github.com/rabbitmq/rabbitmq-server/issues/2685)
+
+### CLI Tools
+
+#### Enhancements
+
+ * `rabbitmqctl close_all_user_connections` is a new command that closes all connections that
+ authenticated using a specific username:
+
+ ```
+ rabbitmqctl close_all_user_connections "user-99fc15d3f" "closed to force reconnection"
+ ```
+
+ GitHub issue: [#2715](https://github.com/rabbitmq/rabbitmq-server/issues/2715)
+
+
+### Prometheus Plugin
+
+#### Bug Fixes
+
+ * AuthN authentication attempts metrics produced duplicate `TYPE` and `HELP` strings in
+ the scraping API endpoint output.
+
+ Some metrics were renamed to avoid duplication. The new names are `auth_attempts_detailed_total`,
+ `auth_attempts_detailed_succeeded_total`, and `auth_attempts_detailed_failed_total`.
+
+ This issue is specific to RabbitMQ `3.8.10` and `3.8.11`.
+
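+   To verify the renamed metrics on a node, scraping the Prometheus endpoint and filtering for
+   the new names is enough. A sketch that assumes the `rabbitmq_prometheus` plugin listens on
+   its default port, 15692:
+
+   ``` shell
+   # each renamed metric should now appear with a single TYPE/HELP pair
+   curl -s http://localhost:15692/metrics | grep "auth_attempts"
+   ```
+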
+ GitHub issue: [#2740](https://github.com/rabbitmq/rabbitmq-server/issues/2740)
+
+
+### LDAP Plugin
+
+#### Enhancements
+
+ * The plugin now supports two new configuration settings that control SNI and hostname verification
+ behavior for TLS-enabled LDAP client connections:
+
+ ``` ini
+ # leave out to keep the default behavior (exact hostname matching)
+ auth_ldap.ssl_options.hostname_verification = wildcard
+
+ # set to none to disable SNI
+ auth_ldap.ssl_options.sni = a.rabbitmq.hostname.dev
+ ```
+
+ These changes are based on the [research and suggestions](https://pcable.net/posts/2021-02-10-rmq-ldap/) from Patrick @patcable Cable.
+
+ GitHub issue: [#2805](https://github.com/rabbitmq/rabbitmq-server/issues/2805)
+
+
+### Shovel Plugin
+
+#### Enhancements
+
+ * Dynamic Shovels now can specify [optional queue arguments](https://www.rabbitmq.com/queues.html#optional-arguments) for
+   the queues they declare. Note that these settings have no effect when the plugin
+   uses already existing queues.
+
+   This allows Shovel-managed queues to be [quorum](https://www.rabbitmq.com/quorum-queues.html) queues. For arguments
+   that should not be fixed at queue declaration time, [policies](https://www.rabbitmq.com/parameters.html#policies)
+   are the right approach.
+
+   The arguments are specified using the `src-queue-args` and `dest-queue-args` keys for source and destination queues, respectively:
+
+ ``` json
+ {
+ "src-protocol": "amqp091",
+ "src-uri": ["amqp://localhost"],
+ "src-queue": "src-queue",
+ "dest-protocol": "amqp091",
+ "dest-uri": ["amqp://localhost"],
+ "dest-queue": "dest-queue",
+ "src-queue-args": {
+ "x-queue-type": "quorum"
+ },
+ "dest-queue-args": {
+ "x-queue-type": "quorum"
+ }
+ }
+ ```
+
+ GitHub issue: [#2799](https://github.com/rabbitmq/rabbitmq-server/issues/2799)
+
+
+### AuthN/AuthZ Cache Plugin
+
+#### Enhancements
+
+ * Cache operation timeout has been increased to 15s (same as the channel operation timeout in the core).
+ This prevents sporadic timeouts in heavily loaded systems.
+
+ GitHub issue: [#2792](https://github.com/rabbitmq/rabbitmq-server/pull/2792)
+
+
+### RabbitMQ Erlang Client
+
+#### Bug Fixes
+
+ * The client performed hostname resolution twice even if IP version preference was specified by the user.
+
+ Contributed by Pawel @haljin Antemijczuk.
+
+ GitHub issue: [#2748](https://github.com/rabbitmq/rabbitmq-server/issues/2748)
+
+## Dependency Upgrades
+
+No dependency changes.
+
+
+## Source Code Archives
+
+To obtain source code of the entire distribution, please download the archive named `rabbitmq-server-3.8.12.tar.xz`
+instead of the source tarball produced by GitHub.
diff --git a/release-notes/3.8.13.md b/release-notes/3.8.13.md
new file mode 100644
index 0000000000..407cf222e6
--- /dev/null
+++ b/release-notes/3.8.13.md
@@ -0,0 +1,155 @@
+## RabbitMQ 3.8.13
+
+RabbitMQ `3.8.13` is a maintenance release.
+
+### Erlang/OTP Compatibility Notes
+
+This release [requires Erlang 22.3](https://www.rabbitmq.com/which-erlang.html).
+[Erlang 23](http://blog.erlang.org/OTP-23-Highlights/) is highly recommended
+for best forward compatibility with future RabbitMQ versions.
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision latest patch versions of Erlang `22.3.x`.
+
+
+## Upgrade and Compatibility Notes
+
+### Upgrade Doc Guides and Change Log
+
+See [3.8.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.8.0) upgrade and
+compatibility notes first if upgrading from an earlier release series (e.g. `3.7.x`).
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades and
+[RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+### Upgrading to Erlang 22.x or Later Versions
+
+When upgrading to this release from `3.7.15` or an older version, extra care has to be taken.
+
+Because older RabbitMQ CLI tools can be incompatible with Erlang 22+ releases,
+RabbitMQ **must be upgraded at the same time as Erlang**.
+
+Alternatively the node can be upgraded to `3.7.18` on Erlang 21.3 first,
+then Erlang 22.3 or 23.x, then RabbitMQ to the most recent 3.8.x release.
+
+### Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users)
+and [RabbitMQ community Slack](https://rabbitmq-slack.herokuapp.com/).
+
+
+## Changes Worth Mentioning
+
+### Core Server
+
+#### Bug Fixes
+
+ * Nodes that had an unusually long name (e.g. 150 or more characters) could overflow
+ message property lengths with the [direct reply-to](https://www.rabbitmq.com/direct-reply-to.html) identifier they generated.
+ Now the generated values are almost entirely decoupled from node name length.
+
+ GitHub issue: [#2842](https://github.com/rabbitmq/rabbitmq-server/pull/2842)
+
+
+### Management Plugin
+
+#### Bug Fixes
+
+ * Consumer utilisation metric has been renamed to consumer capacity which more accurately represents
+ what it measures. The metric indicates the percentage of time that RabbitMQ has attempted a delivery
+ to a consumer of a queue, and was allowed to do so by the [channel prefetch value](https://www.rabbitmq.com/confirms.html#channel-qos-prefetch) and
+ the number of currently unacknowledged deliveries.
+
+ For queues that have no consumers, the metric now returns 0%. For consumers that accept all deliveries
+ attempted, the metric will be around 100% (optimal).
+
+ For queues that do have consumers but no traffic the metric will be at 100%. The assumption here is that consumers
+ can absorb all the load. This case is intentionally treated differently from the one above with no online consumers.
+
+ GitHub issue: [#2843](https://github.com/rabbitmq/rabbitmq-server/pull/2843)
+
+
+### Prometheus Plugin
+
+#### Bug Fixes
+
+ * Consumer utilisation metric has been renamed to consumer capacity which more accurately represents
+ what it measures. See a more detailed note above in the Management plugin section.
+
+ GitHub issue: [#2843](https://github.com/rabbitmq/rabbitmq-server/pull/2843)
+
+
+### Federation Plugin
+
+#### Enhancements
+
+ * Quorum queues now can be federated.
+
+ GitHub issue: [#2756](https://github.com/rabbitmq/rabbitmq-server/issues/2756)
+
+ * Exchange federation now supports a new option, `channel_use_mode`, which allows a single
+ channel to be used for all federation link activity if set to `single`:
+
+ ``` shell
+ rabbitmqctl set_parameter federation-upstream dc-2 '{"uri":"amqp://dc2.messaging.coolcorp.local:5672", "channel-use-mode": "single"}'
+ ```
+
+   This helps avoid a race condition between topology changes and message flow, as well as
+   the cost of topology operations reducing message transfer rates and increasing
+   rate variability.
+
+ The default value is `multiple`. It means that every link will use multiple channels (one for message transfer,
+ one for topology changes, etc), which has been the case since rabbitmq/rabbitmq-federation#97.
+ This is the recommended mode for most environments, and all environments with a mostly static topology.
+
+ See [rabbitmq/rabbitmq-federation#97](https://github.com/rabbitmq/rabbitmq-federation/pull/97) for the background.
+
+ GitHub issue: [#2829](https://github.com/rabbitmq/rabbitmq-server/pull/2829)
+
+
+### AMQP 1.0 Plugin
+
+#### Bug Fixes
+
+ * The plugin could run into unhandled exceptions.
+
+ GitHub issue: [#2830](https://github.com/rabbitmq/rabbitmq-server/pull/2830)
+
+
+### OAuth 2 AuthN/AuthZ Backend
+
+#### Enhancements
+
+ * Several settings of the plugin are now exposed to `rabbitmq.conf` and provide validations:
+
+ ``` ini
+ auth_backends.1 = oauth2
+ auth_oauth2.resource_server_id = a_resource_server_id
+ auth_oauth2.additional_scopes_key = a_custom_scope_key
+ auth_oauth2.default_key = id1
+ auth_oauth2.signing_keys.id1 = /path/to/signing.key1.pem
+ auth_oauth2.signing_keys.id2 = /path/to/signing.key2.pem
+ ```
+
+ GitHub issue: [#2550](https://github.com/rabbitmq/rabbitmq-server/issues/2550)
+
+ * JWT tokens now can be fetched from a JWKS endpoint provided using the `rabbitmq_auth_backend_oauth2.key_config.jwks_url`
+ key in `advanced.config`.
+
+ Contributed by Teo @teozkr Klestrup Röijezon.
+
+ GitHub issue: [#2791](https://github.com/rabbitmq/rabbitmq-server/pull/2791)
+
+
+## Dependency Upgrades
+
+ * `lager` was [upgraded to 3.8.2](https://github.com/erlang-lager/lager/compare/3.8.0..3.8.2)
+ * `cuttlefish` was [upgraded to 2.6.0](https://github.com/kyorai/cuttlefish/compare/v2.4.0..v2.6.0)
+ * `credentials_obfuscation` was [upgraded to 2.4.0](https://github.com/rabbitmq/credentials-obfuscation/blob/master/ChangeLog.md#changes-between-230-and-240-february-18-2021)
+ * `observer_cli` was [upgraded to 1.6.0](https://github.com/zhongwencool/observer_cli/compare/1.5.4..v1.6.0)
+
+
+## Source Code Archives
+
+To obtain source code of the entire distribution, please download the archive named `rabbitmq-server-3.8.13.tar.xz`
+instead of the source tarball produced by GitHub.
diff --git a/release-notes/3.8.14.md b/release-notes/3.8.14.md
new file mode 100644
index 0000000000..bd963c847a
--- /dev/null
+++ b/release-notes/3.8.14.md
@@ -0,0 +1,77 @@
+## RabbitMQ 3.8.14
+
+RabbitMQ `3.8.14` is a maintenance release that restores
+Erlang 22.3 compatibility for environments that use [direct reply-to](https://www.rabbitmq.com/direct-reply-to.html).
+
+### Erlang/OTP Compatibility Notes
+
+This release [requires Erlang 22.3](https://www.rabbitmq.com/which-erlang.html).
+[Erlang 23](http://blog.erlang.org/OTP-23-Highlights/) is highly recommended
+for best forward compatibility with future RabbitMQ versions.
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision a recent version of Erlang `23.x`.
+
+
+## Upgrade and Compatibility Notes
+
+### Upgrade Doc Guides and Change Log
+
+See [3.8.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.8.0) upgrade and
+compatibility notes first if upgrading from an earlier release series (e.g. `3.7.x`).
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades and
+[RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+### Upgrading to Erlang 23.x or Later Versions
+
+When upgrading to this release from `3.7.15` or an older version, extra care has to be taken.
+
+Because older RabbitMQ CLI tools can be incompatible with Erlang 22+ releases,
+RabbitMQ **must be upgraded at the same time as Erlang**.
+
+Alternatively the node can be upgraded to `3.7.18` on Erlang 21.3 first,
+then Erlang 23.x, then RabbitMQ to the most recent 3.8.x release.
+
+### Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users)
+and [RabbitMQ community Slack](https://rabbitmq-slack.herokuapp.com/).
+
+
+## Changes Worth Mentioning
+
+### Core Server
+
+#### Bug Fixes
+
+ * [Direct reply-to](https://www.rabbitmq.com/direct-reply-to.html) failed with an exception on Erlang 22.3.
+ Nodes running on Erlang 23 are not affected.
+
+ GitHub issue: [#2857](https://github.com/rabbitmq/rabbitmq-server/pull/2857)
+
+ * A warning about async I/O threads was replaced with a similar one about dirty I/O
+   schedulers. This keeps the message in line with the settings used in modern Erlang releases.
+
+ GitHub issue: [#2854](https://github.com/rabbitmq/rabbitmq-server/pull/2854)
+
+### Other
+
+#### Enhancements
+
+ * Community-maintained [RabbitMQ OCF scripts](https://github.com/rabbitmq/rabbitmq-server/blob/master/scripts/rabbitmq-server-ha.ocf) were revisited.
+
+ Contributed by Michele @mbaldessari Baldessari.
+
+ GitHub issue: [#2853](https://github.com/rabbitmq/rabbitmq-server/pull/2853)
+
+
+## Dependency Upgrades
+
+ There were no dependency changes.
+
+
+## Source Code Archives
+
+To obtain source code of the entire distribution, please download the archive named `rabbitmq-server-3.8.14.tar.xz`
+instead of the source tarball produced by GitHub.
diff --git a/release-notes/3.8.15.md b/release-notes/3.8.15.md
new file mode 100644
index 0000000000..f69e6278ca
--- /dev/null
+++ b/release-notes/3.8.15.md
@@ -0,0 +1,175 @@
+## RabbitMQ 3.8.15
+
+RabbitMQ `3.8.15` is a maintenance release that includes two security patches.
+
+All users are advised to consult the set of changes in this release but then
+**upgrade straight to `3.8.16` or a newer version if available** and skip this release.
+
+### Security Patches
+
+This release addresses two CVEs:
+
+ * [CVE-2021-22117](https://tanzu.vmware.com/security/cve-2021-22117)
+ * [CVE-2021-22116](https://tanzu.vmware.com/security/cve-2021-22116)
+
+Our team would like to thank
+
+ * Jonathan Knudsen from the Synopsys Cybersecurity Research Center (CyRC)
+ * Robert Chen from DeepSurface Security
+
+for responsibly disclosing the vulnerabilities and helping assess and test
+the patches.
+
+### Obtaining Packages
+
+This is the first release in the [post-Bintray era](https://blog.rabbitmq.com/posts/2021/03/migrate-off-of-bintray/).
+Because Bintray shut down on May 1st, 2021, this version is not distributed via Bintray.
+From now on, releases are distributed via GitHub, [Cloudsmith](https://cloudsmith.io/~rabbitmq/repos/),
+and [PackageCloud](https://packagecloud.io/rabbitmq).
+
+### Erlang/OTP Compatibility Notes
+
+This release is the [last release to support Erlang 22.3](https://blog.rabbitmq.com/posts/2021/03/erlang-24-support-roadmap/).
+[Erlang 23](http://blog.erlang.org/OTP-23-Highlights/) is highly recommended
+for best forward compatibility with future RabbitMQ versions.
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision a recent version of Erlang `23.x`.
+
+
+## Upgrade and Compatibility Notes
+
+### Upgrade Doc Guides and Change Log
+
+See [3.8.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.8.0) upgrade and
+compatibility notes first if upgrading from an earlier release series (e.g. `3.7.x`).
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades and
+[RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+
+### Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users)
+and [RabbitMQ community Slack](https://rabbitmq-slack.herokuapp.com/).
+
+
+## Changes Worth Mentioning
+
+### Core Server
+
+#### Bug Fixes
+
+ * Quorum queues did not take snapshots as frequently as they are expected to with some configurations.
+ This resulted in much higher peak disk space usage for queues that did not have a meaningful backlog.
+
+ GitHub issue: [#2974](https://github.com/rabbitmq/rabbitmq-server/issues/2974)
+
+ * Quorum queue names were unintentionally limited: the combined length of the virtual host name and
+   the queue name was capped at 254 characters. Quorum queues with longer names failed to be declared.
+
+ * Deleting a quorum queue would leave some of its internal metrics data around.
+
+ GitHub issue: [#2846](https://github.com/rabbitmq/rabbitmq-server/pull/2846)
+
+ * Client destination address is now obtained with the [Proxy protocol](https://www.rabbitmq.com/networking.html#proxy-protocol) settings taken into account.
+
+ Contributed by @carlhoerberg (CloudAMQP).
+
+ GitHub issue: [#2942](https://github.com/rabbitmq/rabbitmq-server/pull/2942)
+
+#### Enhancements
+
+ * Consumers that consume messages but do not acknowledge them will now have a 15-minute
+   acknowledgement timeout applied to them by default. Operators can increase the timeout if necessary.
+
+   Such consumers delay or even prevent on-disk data compaction, which can run a node out of disk
+   space much earlier than anticipated.
+
+ GitHub issue: [#2990](https://github.com/rabbitmq/rabbitmq-server/pull/2990)
+
+ * Channel interceptors now can return channel-level exceptions.
+
+ Contributed by Ayanda @Ayanda-D Dube.
+
+ GitHub issue: [#2989](https://github.com/rabbitmq/rabbitmq-server/pull/2989)
+
+
+### CLI Tools
+
+#### Bug Fixes
+
+ * `rabbitmq-diagnostics status` could run into an exception when formatting responses
+ from nodes where high VM memory watermark was configured using `advanced.config`.
+
+ GitHub issue: [#2964](https://github.com/rabbitmq/rabbitmq-server/issues/2964)
+
+ * `rabbitmq-queues rebalance` will no longer pick nodes [under maintenance](https://www.rabbitmq.com/upgrade.html#maintenance-mode)
+ as new queue leader placement candidates.
+
+ GitHub issue: [#2993](https://github.com/rabbitmq/rabbitmq-server/pull/2993)
+
+#### Enhancements
+
+ * `rabbitmq-diagnostics remote_shell` is a new command that opens a remote Erlang shell
+ to the target node. This simplifies troubleshooting of a running node.
+
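+   A usage sketch (the `-n` flag, common to all RabbitMQ CLI tools, selects the target node;
+   the node name below is a placeholder):
+
+   ``` shell
+   # open a remote Erlang shell on the local node
+   rabbitmq-diagnostics remote_shell
+
+   # or target a specific node
+   rabbitmq-diagnostics remote_shell -n rabbit@target-host
+   ```
+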
+ GitHub issue: [#2860](https://github.com/rabbitmq/rabbitmq-server/pull/2860)
+
+ * `rabbitmq-queues await_online_quorum_plus_one` is now a no-op in a single node cluster
+ since the command does not make sense when there is only one node.
+
+ GitHub issue: [#2890](https://github.com/rabbitmq/rabbitmq-server/pull/2890)
+
+
+### Management Plugin
+
+#### Bug Fixes
+
+ * When a virtual host was created with tags via the HTTP API, the tags were unintentionally
+ concatenated together.
+
+ GitHub issue: [#2982](https://github.com/rabbitmq/rabbitmq-server/pull/2982)
+
+ * The UI now uses a more precise description for the message consumption mode that is destructive (consuming in automatic acknowledgement mode).
+
+ GitHub issue: [#3011](https://github.com/rabbitmq/rabbitmq-server/pull/3011)
+
+
+### AWS Peer Discovery Plugin
+
+#### Enhancements
+
+ * [AWS peer discovery mechanism](https://www.rabbitmq.com/cluster-formation.html#peer-discovery-aws) now supports
+ [Instance Metadata Service v2](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/configuring-instance-metadata-service.html).
+ In case it is not available, requests will fall back to the original metadata service endpoint.
+
+ The v2 endpoint offers substantial security improvements and is one of
+ the [AWS best practices](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#ec2-8-remediation).
+
+ Contributed by @thuandb (AWS).
+
+ GitHub issue: [#2952](https://github.com/rabbitmq/rabbitmq-server/pull/2952)
+
+
+### Auth Mechanism TLS Plugin
+
+#### Enhancements
+
+ * The plugin now has limited support for username extraction from SAN of type "other name".
+ Note that the type by definition supports arbitrary values, so supporting all possible
+ inputs is not realistic.
+
+ Contributed by @Thibi2000.
+
+ GitHub issue: [#2983](https://github.com/rabbitmq/rabbitmq-server/issues/2983), [#2985](https://github.com/rabbitmq/rabbitmq-server/issues/2985)
+
+## Dependency Upgrades
+
+ There were no dependency changes.
+
+
+## Source Code Archives
+
+To obtain source code of the entire distribution, please download the archive named `rabbitmq-server-3.8.15.tar.xz`
+instead of the source tarball produced by GitHub.
diff --git a/release-notes/3.8.16.md b/release-notes/3.8.16.md
new file mode 100644
index 0000000000..ff44bdb1f5
--- /dev/null
+++ b/release-notes/3.8.16.md
@@ -0,0 +1,87 @@
+## RabbitMQ 3.8.16
+
+RabbitMQ `3.8.16` is a maintenance release.
+
+It reintroduces the `rabbitmq_peer_discovery_aws` plugin, which was unintentionally removed
+from `3.8.15` due to release pipeline changes.
+
+This is the first release to drop support for Erlang 22.3 and introduce support for the [upcoming Erlang 24](https://blog.rabbitmq.com/posts/2021/03/erlang-24-support-roadmap/) release.
+
+### Obtaining Packages
+
+This release is distributed via GitHub, [Cloudsmith](https://cloudsmith.io/~rabbitmq/repos/),
+and [PackageCloud](https://packagecloud.io/rabbitmq).
+
+### Erlang/OTP Compatibility Notes
+
+This is the [first release to require Erlang 23.2 or later and support upcoming Erlang 24](https://blog.rabbitmq.com/posts/2021/03/erlang-24-support-roadmap/).
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision a recent version of Erlang `23.3`.
+
+
+## Upgrade and Compatibility Notes
+
+### Upgrade Doc Guides and Change Log
+
+See [3.8.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.8.0) upgrade and
+compatibility notes first if upgrading from an earlier release series (e.g. `3.7.x`).
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades and
+[RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+
+### Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users)
+and [RabbitMQ community Slack](https://rabbitmq-slack.herokuapp.com/).
+
+
+## Changes Worth Mentioning
+
+### Core Server
+
+#### Erlang Release Series Support
+
+ * This release [introduces Erlang 24 support and drops support for Erlang 22](https://blog.rabbitmq.com/posts/2021/03/erlang-24-support-roadmap/). Erlang 24 offers non-trivial real world throughput improvements for many
+ RabbitMQ installations and workloads. It is expected to ship in May 2021.
+
+ In the meantime, see [Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) to
+ learn how to provision a recent version of Erlang `23.3`.
+
+ GitHub issue: [#2900](https://github.com/rabbitmq/rabbitmq-server/pull/2900)
+
+
+### AWS Peer Discovery Plugin
+
+#### Bug Fixes
+
+ * This plugin was unintentionally excluded from the distribution in `3.8.15` and has now been
+ added back.
+
+ GitHub issue: [#3023](https://github.com/rabbitmq/rabbitmq-server/issues/3023)
+
+
+### Management Plugin
+
+#### Bug Fixes
+
+ * Message polling HTTP API endpoint leaked temporary connections it used in case of a polling timeout.
+   Note that **this endpoint is not recommended for use** outside of QA environments.
+ Use a [long-lived consumer](https://www.rabbitmq.com/consumers.html) via one of the supported messaging protocols.
+
+ Contributed by @luos.
+
+ GitHub issue: [#3024](https://github.com/rabbitmq/rabbitmq-server/pull/3024)
+
+
+## Dependency Upgrades
+
+* Cuttlefish has been upgraded from [`2.6.0` to `3.0.0`](https://github.com/kyorai/cuttlefish/compare/v2.6.0..v3.0.0)
+* Lager has been upgraded from [`3.8.2` to `3.9.1`](https://github.com/erlang-lager/lager/compare/3.8.2..3.9.1)
+
+
+## Source Code Archives
+
+To obtain source code of the entire distribution, please download the archive named `rabbitmq-server-3.8.16.tar.xz`
+instead of the source tarball produced by GitHub.
diff --git a/release-notes/3.8.17.md b/release-notes/3.8.17.md
new file mode 100644
index 0000000000..832de5bcae
--- /dev/null
+++ b/release-notes/3.8.17.md
@@ -0,0 +1,145 @@
+## RabbitMQ 3.8.17
+
+RabbitMQ `3.8.17` is a maintenance release that includes a security patch.
+
+### Security Patches
+
+This release addresses an undisclosed vulnerability with CVSS 3.1 [score of 3.1](https://www.first.org/cvss/calculator/3.1#CVSS:3.1/AV:N/AC:H/PR:H/UI:R/S:U/C:L/I:L/A:N/E:P/RL:O/RC:C) (low): [CVE-2021-32718](https://github.com/rabbitmq/rabbitmq-server/security/advisories/GHSA-c3hj-rg5h-2772).
+
+Our team would like to thank Christian Rellmann from [usd AG](https://www.usd.de/) for [responsibly disclosing](https://www.rabbitmq.com/contact.html#security)
+the vulnerability and helping us verify a fix.
+
+### Obtaining Packages
+
+RabbitMQ releases are distributed via [GitHub](https://github.com/rabbitmq/rabbitmq-server/releases), [Cloudsmith](https://cloudsmith.io/~rabbitmq/repos/),
+and [PackageCloud](https://packagecloud.io/rabbitmq).
+
+### Erlang/OTP Compatibility Notes
+
+This release [requires Erlang 23.2](https://www.rabbitmq.com/which-erlang.html) and [supports Erlang 24](https://blog.rabbitmq.com/posts/2021/03/erlang-24-support-roadmap/).
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision modern Erlang versions.
+
+
+## Upgrade and Compatibility Notes
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades and
+[RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+If upgrading from a `3.7.x` release, see [3.8.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.8.0)
+upgrade and compatibility notes first.
+
+If upgrading from a `3.6.x` or older [release series](https://www.rabbitmq.com/versions.html), first upgrade
+to [`3.7.27`](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.7.27) and then to this version.
+
+
+## Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users)
+and [RabbitMQ community Slack](https://rabbitmq-slack.herokuapp.com/).
+
+
+## Changes Worth Mentioning
+
+### Core Server
+
+#### Enhancements
+
+ * Queue index efficiency improvements, mostly around peak memory consumption with large backlogs of small messages.
+
+ Note that some of the improvements come from more optimal defaults: those will only be
+ applicable to newly created virtual hosts (or new clusters).
+
+ GitHub issues: [#2954](https://github.com/rabbitmq/rabbitmq-server/pull/2954), [#3041](https://github.com/rabbitmq/rabbitmq-server/pull/3041)
+
+ * [Consumer delivery acknowledgement timeout](https://www.rabbitmq.com/consumers.html#acknowledgement-timeout) default has been bumped to 30 minutes (previously 15 minutes). The value
+ can be overridden.
+
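+   The timeout is controlled by the `consumer_timeout` key in `rabbitmq.conf`. A hypothetical
+   override that raises it to one hour (the value is in milliseconds):
+
+   ``` ini
+   # raise the delivery acknowledgement timeout from the default 30 minutes to 1 hour
+   consumer_timeout = 3600000
+   ```
+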
+ GitHub issue: [#3033](https://github.com/rabbitmq/rabbitmq-server/pull/3033)
+
+#### Bug Fixes
+
+ * A classic queue with [single active consumer](https://www.rabbitmq.com/consumers.html#single-active-consumer) option enabled could run into
+ an exception after a restart or initial enablement of the management plugin.
+
+ GitHub issue: [#3072](https://github.com/rabbitmq/rabbitmq-server/issues/3072)
+
+
+### Management Plugin
+
+#### Bug Fixes
+
+ * A combination of HTTPS (TLS) settings and advanced HTTP server settings
+ resulted in an exception.
+
+ GitHub issue: [#3039](https://github.com/rabbitmq/rabbitmq-server/pull/3039)
+
+ * External stats collector is now restarted again if it fails for any reason.
+
+ Contributed by @luos.
+
+ GitHub issue: [#3040](https://github.com/rabbitmq/rabbitmq-server/pull/3040)
+
+
+### Consistent History Exchange Plugin
+
+#### Enhancements
+
+ * When nodes are restarted, schema database tables used by this plugin are now reconciled
+ with cluster peers the [same way RabbitMQ core does it](https://www.rabbitmq.com/clustering.html#restarting).
+
+ GitHub issue: [#3067](https://github.com/rabbitmq/rabbitmq-server/pull/3067)
+
+
+### Recent History Exchange Plugin
+
+#### Enhancements
+
+ * When nodes are restarted, schema database tables used by this plugin are now reconciled
+ with cluster peers the [same way RabbitMQ core does it](https://www.rabbitmq.com/clustering.html#restarting).
+
+ GitHub issue: [#3067](https://github.com/rabbitmq/rabbitmq-server/pull/3067)
+
+
+### Delayed Message Exchange Plugin
+
+#### Enhancements
+
+ * When nodes are restarted, schema database tables used by this plugin are now reconciled
+ with cluster peers the [same way RabbitMQ core does it](https://www.rabbitmq.com/clustering.html#restarting).
+
+ Contributed by @mwfriedm.
+
+ GitHub issue: [rabbitmq/rabbitmq-delayed-message-exchange#163](https://github.com/rabbitmq/rabbitmq-delayed-message-exchange/pull/163)
+
+
+### Last Value Cache Exchange Plugin
+
+#### Enhancements
+
+ * When nodes are restarted, schema database tables used by this plugin are now reconciled
+ with cluster peers the [same way RabbitMQ core does it](https://www.rabbitmq.com/clustering.html#restarting).
+
+ GitHub issue: [rabbitmq/rabbitmq-lvc-exchange#28](https://github.com/rabbitmq/rabbitmq-lvc-exchange/issues/28)
+
+
+
+### Web MQTT Plugin
+
+#### Bug Fixes
+
+ * MQTT-over-WebSockets client connections were not accounted for by the file handle tracking mechanism (metric and resource alarm).
+
+ GitHub issue: [#3076](https://github.com/rabbitmq/rabbitmq-server/pull/3076)
+
+
+## Dependency Upgrades
+
+None in this release.
+
+
+## Source Code Archives
+
+To obtain source code of the entire distribution, please download the archive named `rabbitmq-server-3.8.17.tar.xz`
+instead of the source tarball produced by GitHub.
diff --git a/release-notes/3.8.18.md b/release-notes/3.8.18.md
new file mode 100644
index 0000000000..6b4e121318
--- /dev/null
+++ b/release-notes/3.8.18.md
@@ -0,0 +1,179 @@
+## RabbitMQ 3.8.18
+
+RabbitMQ `3.8.18` is a maintenance release that includes a security patch.
+
+This release addresses an undisclosed vulnerability with CVSS 3.1 [score of 3.1](https://www.first.org/cvss/calculator/3.1#CVSS:3.1/AV:N/AC:H/PR:H/UI:R/S:U/C:L/I:L/A:N/E:P/RL:O/RC:C) (low): [CVE-2021-32719](https://github.com/rabbitmq/rabbitmq-server/security/advisories/GHSA-5452-hxj4-773x)
+
+Our team would like to thank Fahimhusain Raydurg for [responsibly disclosing](https://www.rabbitmq.com/contact.html#security)
+the vulnerability and Patrik Ragnarsson from CloudAMQP for contributing a fix.
+
+### Obtaining Packages
+
+RabbitMQ releases are distributed via [GitHub](https://github.com/rabbitmq/rabbitmq-server/releases), [Cloudsmith](https://cloudsmith.io/~rabbitmq/repos/),
+and [PackageCloud](https://packagecloud.io/rabbitmq).
+
+### Erlang/OTP Compatibility Notes
+
+This release [requires Erlang 23.2](https://www.rabbitmq.com/which-erlang.html) and [supports Erlang 24](https://blog.rabbitmq.com/posts/2021/03/erlang-24-support-roadmap/).
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision modern Erlang versions.
+
+
+## Upgrade and Compatibility Notes
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades and
+[RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+If upgrading from a `3.7.x` release, see [3.8.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.8.0)
+upgrade and compatibility notes first.
+
+If upgrading from a `3.6.x` or older [release series](https://www.rabbitmq.com/versions.html), first upgrade
+to [`3.7.27`](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.7.27) and then to this version.
+
+
+## Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users)
+and [RabbitMQ community Slack](https://rabbitmq-slack.herokuapp.com/).
+
+
+## Changes Worth Mentioning
+
+### Core Server
+
+#### Enhancements
+
+ * [Peer discovery](https://www.rabbitmq.com/cluster-formation.html) no longer uses randomized delays to avoid the inherent race condition
+ during [initial cluster formation](https://www.rabbitmq.com/cluster-formation.html#initial-formation-race-condition). Instead it relies on an internal
+ distributed locking mechanism available in modern Erlang releases.
+
+ While randomized startup delay was reasonably effective at ensuring only one cluster was formed even during parallel node
+ startup, it by definition assumes that with some low probability more than one node can still start as seed nodes,
+ and be joined by different groups of cluster peers. The locking mechanism does not have this downside.
+ Now that RabbitMQ requires Erlang 23.2, the internal locking library becomes an option for peer discovery.
+ Note that etcd and Consul-based peer discovery still use those service's support for distributed locks.
+
+ `cluster_formation.randomized_startup_delay_range.min` and `cluster_formation.randomized_startup_delay_range.max` configuration keys
+ will no longer have any effect.
+
+ GitHub issue: [#3075](https://github.com/rabbitmq/rabbitmq-server/pull/3075)
+
+ * Node startup banner now includes Erlang version, runtime operation mode (JIT vs. emulator),
+ and underlying crypto library version (e.g. OpenSSL 1.1.1k) it was built against.
+
+ GitHub issue: [#2777](https://github.com/rabbitmq/rabbitmq-server/pull/2777)
+
+ * Build system targets now use `hostname(1)` in a way that is compatible with Solaris.
+
+ Contributed by Pavel @tropikhajma Heimlich.
+
+ GitHub issue: [#3117](https://github.com/rabbitmq/rabbitmq-server/pull/3117)
+
+#### Bug Fixes
+
+ * Queue deletion could run into an exception in some rare cases.
+
+ Contributed by @tomyouyou.
+
+ GitHub issue: [#3086](https://github.com/rabbitmq/rabbitmq-server/issues/3086)
+
+
+### Prometheus Plugin
+
+#### Bug Fixes
+
+ * In certain dual-stack (IPv4 and IPv6) environments, the plugin would start a single listener
+ but register two internally. That extra non-existent listener would make `rabbitmq-diagnostics drain`
+ fail as it disables all registered listeners.
+
+ GitHub issue: [#3021](https://github.com/rabbitmq/rabbitmq-server/pull/3021)
+
+
+### Management Plugin
+
+#### Bug Fixes
+
+ * In certain dual-stack (IPv4 and IPv6) environments, the plugin would start a single listener
+ but register two internally. That extra non-existent listener would make `rabbitmq-diagnostics drain`
+ fail as it disables all registered listeners.
+
+ GitHub issue: [#3021](https://github.com/rabbitmq/rabbitmq-server/pull/3021)
+
+### Kubernetes Peer Discovery Plugin
+
+#### Enhancements
+
+ * Kubernetes [peer discovery](https://www.rabbitmq.com/cluster-formation.html) mechanism no longer uses randomized delays to avoid the inherent race condition
+ during [initial cluster formation](https://www.rabbitmq.com/cluster-formation.html#initial-formation-race-condition). Instead it relies on an internal
+ distributed locking mechanism available in modern Erlang releases.
+
+ While randomized startup delay was reasonably effective at ensuring only one cluster was formed even during parallel node
+   startup, it by definition assumes that with some low probability more than one node can still start as seed nodes,
+ and be joined by different groups of cluster peers. The locking mechanism does not have this downside.
+ Now that RabbitMQ requires Erlang 23.2, the internal locking library becomes an option for peer discovery.
+
+ `cluster_formation.randomized_startup_delay_range.min` and `cluster_formation.randomized_startup_delay_range.max` configuration keys
+ will no longer have any effect.
+
+ GitHub issue: [#3075](https://github.com/rabbitmq/rabbitmq-server/pull/3075)
+
+
+### AWS Peer Discovery Plugin
+
+#### Enhancements
+
+ * AWS [peer discovery](https://www.rabbitmq.com/cluster-formation.html) mechanism no longer uses randomized delays to avoid the inherent race condition
+ during [initial cluster formation](https://www.rabbitmq.com/cluster-formation.html#initial-formation-race-condition). Instead it relies on an internal
+ distributed locking mechanism available in modern Erlang releases.
+
+ While randomized startup delay was reasonably effective at ensuring only one cluster was formed even during parallel node
+   startup, it by definition assumes that with some low probability more than one node can still start as seed nodes,
+ and be joined by different groups of cluster peers. The locking mechanism does not have this downside.
+ Now that RabbitMQ requires Erlang 23.2, the internal locking library becomes an option for peer discovery.
+
+ `cluster_formation.randomized_startup_delay_range.min` and `cluster_formation.randomized_startup_delay_range.max` configuration keys
+ will no longer have any effect.
+
+ GitHub issue: [#3075](https://github.com/rabbitmq/rabbitmq-server/pull/3075)
+
+
+### Federation Plugin
+
+#### Bug Fixes
+
+ * Queue federation will now declare a queue on the upstream side only if it does not already
+   exist. This makes it possible to federate pre-declared queues of different types.
+
+ Note that such setups should be considered edge cases. Queue federation was never meant
+   to federate queues of different types under the same name in different clusters.
+ Such setups could be useful in [Blue/Green deployment upgrade](https://www.rabbitmq.com/blue-green-upgrade.html) scenarios but
+ otherwise should be avoided.
+
+ GitHub issue: [#3107](https://github.com/rabbitmq/rabbitmq-server/pull/3107)
+
+
+### AuthN/AuthZ Backend HTTP Plugin
+
+#### Bug Fixes
+
+ * `auth_http.connection_timeout` and `auth_http.request_timeout` are two new configuration parameters that control HTTP client connection
+ and overall request timeouts. They both default to 15 seconds. The value can be increased for services that can occasionally slow down
+ significantly.
+
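+   A hypothetical override that doubles both timeouts for a slow backing service (an assumption
+   in this sketch: the values are given in milliseconds, so 30000 corresponds to 30 seconds):
+
+   ``` ini
+   # both settings default to the equivalent of 15 seconds
+   auth_http.connection_timeout = 30000
+   auth_http.request_timeout = 30000
+   ```
+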
+ Contributed by Miłosz @SzumiecM Szumiec.
+
+ GitHub issue: [#3140](https://github.com/rabbitmq/rabbitmq-server/pull/3140)
+
+
+## Dependency Upgrades
+
+ * `jose` was updated [from `1.11.1` to `2b1d66b5f4`](https://github.com/potatosalad/erlang-jose/commit/2b1d66b5f4fbe33cb198149a8cb23895a2c877ea)
+ * `jsx` was updated [from `2.11.0` to `3.1.0`](https://github.com/talentdeficit/jsx/blob/main/CHANGES.md)
+ * `observer_cli` was updated [from `1.6.1` to `1.6.2`](https://github.com/zhongwencool/observer_cli/compare/1.6.1..1.6.2)
+
+
+## Source Code Archives
+
+To obtain source code of the entire distribution, please download the archive named `rabbitmq-server-3.8.18.tar.xz`
+instead of the source tarball produced by GitHub.
diff --git a/release-notes/3.8.19.md b/release-notes/3.8.19.md
new file mode 100644
index 0000000000..adecfe43db
--- /dev/null
+++ b/release-notes/3.8.19.md
@@ -0,0 +1,79 @@
+## RabbitMQ 3.8.19
+
+RabbitMQ `3.8.19` is a maintenance release.
+
+### Obtaining Packages
+
+RabbitMQ releases are distributed via [GitHub](https://github.com/rabbitmq/rabbitmq-server/releases), [Cloudsmith](https://cloudsmith.io/~rabbitmq/repos/),
+and [PackageCloud](https://packagecloud.io/rabbitmq).
+
+### Erlang/OTP Compatibility Notes
+
+This release [requires Erlang 23.2](https://www.rabbitmq.com/which-erlang.html) and [supports Erlang 24](https://blog.rabbitmq.com/posts/2021/03/erlang-24-support-roadmap/).
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision modern Erlang versions.
+
+
+## Upgrade and Compatibility Notes
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades and
+[RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+If upgrading from a `3.7.x` release, see [3.8.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.8.0)
+upgrade and compatibility notes first.
+
+If upgrading from a `3.6.x` or older [release series](https://www.rabbitmq.com/versions.html), first upgrade
+to [`3.7.27`](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.7.27) and then to this version.
+
+
+## Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users)
+and [RabbitMQ community Slack](https://rabbitmq-slack.herokuapp.com/).
+
+
+## Changes Worth Mentioning
+
+### Core Server
+
+#### Enhancements
+
+ * Binding recovery on node startup is now more efficient. This means that in clusters with many
+   queues and/or bindings, nodes start more quickly.
+
+ GitHub issue: [#3137](https://github.com/rabbitmq/rabbitmq-server/pull/3137)
+
+ * Plugin directory paths are now deduplicated, so if a single directory is specified
+   more than once, the node won't complain about duplicate plugins.
+
+ GitHub issue: [#3155](https://github.com/rabbitmq/rabbitmq-server/issues/3155)
+
+ * Startup banner and log entries now use path separators more consistently on Windows.
+
+ Thanks to Linda @LindaLawton Lawton for pointing out the inconsistency.
+
+ GitHub issue: [#3149](https://github.com/rabbitmq/rabbitmq-server/issues/3149)
+
+
+
+### Shovel Plugin
+
+#### Bug Fixes
+
+ * When dynamic Shovels are started the plugin now acquires a distributed lock.
+ This is generally not necessary but helps avoid duplicate Shovels being started
+ via import of a definition file on every cluster node during cluster formation.
+
+ GitHub issue: [#3167](https://github.com/rabbitmq/rabbitmq-server/pull/3167)
+
+
+## Dependency Upgrades
+
+No dependency changes in this release.
+
+
+## Source Code Archives
+
+To obtain source code of the entire distribution, please download the archive named `rabbitmq-server-3.8.19.tar.xz`
+instead of the source tarball produced by GitHub.
diff --git a/release-notes/3.8.2.md b/release-notes/3.8.2.md
new file mode 100644
index 0000000000..4de05b195c
--- /dev/null
+++ b/release-notes/3.8.2.md
@@ -0,0 +1,204 @@
+## RabbitMQ 3.8.2
+
+RabbitMQ `3.8.2` is a maintenance release.
+
+### Erlang/OTP Compatibility Notes
+
+This release [**requires Erlang/OTP 21.3**](https://www.rabbitmq.com/which-erlang.html) or later.
+`22.x` series is recommended.
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision latest patch versions of Erlang `21.3.x` and `22.x`.
+
+### Compatibility Notes
+
+#### Upgrading to Erlang 21.x or Later Versions
+
+When upgrading to this release from `3.7.6` or an older version, extra care has to be taken.
+
+Since CLI tools from RabbitMQ releases older than 3.7.7 will fail on Erlang 21 or later,
+RabbitMQ **must be upgraded at the same time as Erlang**. Alternatively the node can be upgraded
+to `3.7.18` first, then Erlang 21.x or 22.x, then to RabbitMQ 3.8.x.
+
+#### Upgrade Doc Guides and Change Log
+
+See [3.8.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.8.0) upgrade
+and compatibility notes first if upgrading from an earlier release.
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades
+and [RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+### Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the
+[RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Changes
+
+### Core Server
+
+#### Enhancements
+
+ * Raft implementation optimizations.
+
+ GitHub issues: [rabbitmq/ra#137](https://github.com/rabbitmq/ra/pull/137), [rabbitmq/ra#148](https://github.com/rabbitmq/ra/pull/148)
+
+ * Quorum queue optimization: enables local (not going through the leader) delivery from Raft followers
+ when appropriate and safe.
+
+ GitHub issues: [rabbitmq/rabbitmq-server#2146](https://github.com/rabbitmq/rabbitmq-server/pull/2146), [rabbitmq/ra#132](https://github.com/rabbitmq/ra/pull/132)
+
+ * If `x-queue-type` argument is not provided at queue declaration time, the type is assumed to be `classic`
+ instead of missing.
+
+ GitHub issue: [rabbitmq/rabbitmq-common#341](https://github.com/rabbitmq/rabbitmq-common/issues/341)
+
+ * Quorum queue consumer timeout now can be configured using new style configuration file:
+
+ ``` ini
+    # Hard timeout of two minutes for quorum queue consumer acknowledgements
+ consumer_timeout = 120000
+ ```
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2163](https://github.com/rabbitmq/rabbitmq-server/pull/2163)
+
+#### Bug Fixes
+
+ * A quorum queue could produce an empty Raft log segment if node is killed at a particular moment in time,
+ which would prevent the node from successfully recovering the log after restart.
+
+ GitHub issues: [rabbitmq/ra#138](https://github.com/rabbitmq/ra/pull/138)
+
+ * Quorum queue consumer count metric could be duplicated when reported via HTTP API or to a Prometheus scraper.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2176](https://github.com/rabbitmq/rabbitmq-server/pull/2176)
+
+
+### CLI Tools
+
+#### Enhancements
+
+ * It is now possible to [export and import definitions](https://www.rabbitmq.com/definitions.html) without the use of plugins with
+ `rabbitmqctl export_definitions` and `rabbitmqctl import_definitions`:
+
+ ``` sh
+ # export as a JSON file
+ rabbitmqctl export_definitions /path/to/target.file.json
+
+ # export as JSON to standard output and pipe to jq
+ rabbitmqctl export_definitions "-" | jq
+
+ # export as a compressed Erlang term file
+ rabbitmqctl export_definitions /path/to/target.file --format=erlang
+
+ # learn more
+ rabbitmqctl help export_definitions
+ ```
+
+ ``` sh
+ # import from a JSON file
+ rabbitmqctl import_definitions /path/to/target.file.json
+
+ # import JSON from standard input
+ cat /path/to/definitions.json | rabbitmqctl import_definitions "-"
+
+ # import from a compressed Erlang term file
+ rabbitmqctl import_definitions /path/to/target.file --format=erlang
+
+ # learn more
+ rabbitmqctl help import_definitions
+ ```
+
+ GitHub issue: [rabbitmq/rabbitmq-management#749](https://github.com/rabbitmq/rabbitmq-management/issues/749)
+
+ * Entities with `amq.*` prefixes are now skipped during import instead of producing an error.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2170](https://github.com/rabbitmq/rabbitmq-server/issues/2170)
+
+#### Bug Fixes
+
+ * `rabbitmqctl await_startup` failed with an exception when RabbitMQ application was stopped but the
+ runtime (Erlang VM) was running.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2158](https://github.com/rabbitmq/rabbitmq-server/issues/2158)
+
+
+### Management Plugin
+
+#### Bug Fixes
+
+ * Definition import could fail if performed via the HTTP API (but not the management UI)
+   and the imported definitions contained operator policies.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#751](https://github.com/rabbitmq/rabbitmq-management/issues/751)
+
+#### Enhancements
+
+ * Metric aggregation optimizations.
+
+ GitHub issue: [rabbitmq/rabbitmq-management-agent#84](https://github.com/rabbitmq/rabbitmq-management-agent/pull/84)
+
+
+### MQTT Plugin
+
+#### Enhancements
+
+ * Throughput improvements ranging from 14 to 60 percent depending on workload.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2168](https://github.com/rabbitmq/rabbitmq-server/issues/2168)
+
+ * Throughput improvements and reduced CPU usage but slightly higher per connection RAM footprint.
+
+ GitHub issues: [rabbitmq/rabbitmq-mqtt#216](https://github.com/rabbitmq/rabbitmq-mqtt/pull/216), [rabbitmq/rabbitmq-mqtt#217](https://github.com/rabbitmq/rabbitmq-mqtt/pull/217)
+
+#### Bug Fixes
+
+ * Client ID tracker could produce an empty Raft log segment if node is killed at a particular moment in time,
+ which would prevent the node from successfully recovering the log after restart.
+
+ GitHub issues: [rabbitmq/discussions#4](https://github.com/rabbitmq/discussions/issues/4), [rabbitmq/ra#138](https://github.com/rabbitmq/ra/pull/138)
+
+ * Last Will messages that use QoS 2 will now be downgraded to QoS 1 just like with "regular" published messages.
+
+ GitHub issue: [rabbitmq/rabbitmq-mqtt#214](https://github.com/rabbitmq/rabbitmq-mqtt/issues/214)
+
+
+### AMQP 1.0 Plugin
+
+#### Enhancements
+
+ * Throughput improvements.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2168](https://github.com/rabbitmq/rabbitmq-server/issues/2168)
+
+
+### STOMP Plugin
+
+#### Enhancements
+
+ * Throughput improvements.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2168](https://github.com/rabbitmq/rabbitmq-server/issues/2168)
+
+
+### Web STOMP Plugin
+
+#### Enhancements
+
+ * Clients now can authenticate using an x.509 (TLS) certificate.
+
+ GitHub issue: [rabbitmq/rabbitmq-web-stomp#116](https://github.com/rabbitmq/rabbitmq-web-stomp/issues/116)
+
+
+### Prometheus Plugin
+
+#### Bug Fixes
+
+ * A small number of queue metrics were not read from the metric store correctly.
+
+ GitHub issue: [rabbitmq/rabbitmq-prometheus#19](https://github.com/rabbitmq/rabbitmq-prometheus/issues/19)
+
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker,
+not the plugins or the client libraries. Please download the archive named `rabbitmq-server-3.8.2.tar.xz`.
diff --git a/release-notes/3.8.20.md b/release-notes/3.8.20.md
new file mode 100644
index 0000000000..0cf318177b
--- /dev/null
+++ b/release-notes/3.8.20.md
@@ -0,0 +1,124 @@
+## RabbitMQ 3.8.20
+
+RabbitMQ `3.8.20` is a maintenance release.
+
+### Obtaining Packages
+
+RabbitMQ releases are distributed via [GitHub](https://github.com/rabbitmq/rabbitmq-server/releases), [Cloudsmith](https://cloudsmith.io/~rabbitmq/repos/),
+and [PackageCloud](https://packagecloud.io/rabbitmq).
+
+### Erlang/OTP Compatibility Notes
+
+This release [requires Erlang 23.2](https://www.rabbitmq.com/which-erlang.html) and [supports Erlang 24](https://blog.rabbitmq.com/posts/2021/03/erlang-24-support-roadmap/).
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision modern Erlang versions.
+
+
+## Upgrade and Compatibility Notes
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades and
+[RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+If upgrading from a `3.7.x` release, see [3.8.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.8.0)
+upgrade and compatibility notes first.
+
+If upgrading from a `3.6.x` or older [release series](https://www.rabbitmq.com/versions.html), first upgrade
+to [`3.7.27`](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.7.27) and then to this version.
+
+
+## Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users)
+and [RabbitMQ community Slack](https://rabbitmq-slack.herokuapp.com/).
+
+
+## Changes Worth Mentioning
+
+Release notes are kept under [rabbitmq-server/release-notes](https://github.com/rabbitmq/rabbitmq-server/tree/v3.8.x/release-notes).
+Contributors are encouraged to update them together with their changes. This helps with release automation and a more
+consistent release schedule.
+
+### Core Server
+
+#### Bug Fixes
+
+ * Nodes failed to start when hostname contained non-ASCII (broader Unicode) characters.
+
+ GitHub issue: [#3239](https://github.com/rabbitmq/rabbitmq-server/pull/3239)
+
+ * TLS information delivered in [Proxy protocol](https://www.rabbitmq.com/networking.html#proxy-protocol) header
+ is now attached to connection metrics as if it was provided by a non-proxying client.
+
+ GitHub issue: [#3175](https://github.com/rabbitmq/rabbitmq-server/pull/3175) contributed by @prefiks
+
+
+### CLI Tools
+
+#### Bug Fixes
+
+ * `rabbitmq-upgrade drain` and `rabbitmq-upgrade revive` now log at `warning` and `info`
+   levels instead of `alert` (which can show up as `error`).
+
+ GitHub issue: [#3266](https://github.com/rabbitmq/rabbitmq-server/pull/3266)
+
+
+### Management Plugin
+
+#### Bug Fixes
+
+ * HTTP API now includes a `WWW-Authenticate` header in 401 responses to requests
+   that contain invalid credentials (see the example after this list).
+
+ GitHub issue: [#3181](https://github.com/rabbitmq/rabbitmq-server/issues/3181)
+
+ * Queue name is now validated for length according to the AMQP 0-9-1 spec.
+
+ GitHub issue: [#3202](https://github.com/rabbitmq/rabbitmq-server/pull/3202)
+
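+A quick way to observe this change is to issue a request with bad credentials and inspect the
+response headers. A sketch, assuming a default management listener on `localhost:15672`:
+
+``` sh
+# -i prints response headers; a 401 response should now include a WWW-Authenticate header
+curl -i -u baduser:badpass http://localhost:15672/api/overview
+```
+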
+
+### Shovel Plugin
+
+#### Bug Fixes
+
+ * Multiple Shovels could be started in some cases involving node restarts or failures.
+
+ GitHub issue: [#3260](https://github.com/rabbitmq/rabbitmq-server/issues/3260)
+
+
+### Federation Plugin
+
+#### Bug Fixes
+
+ * Multiple Federation links could be started in some cases involving node restarts or failures.
+
+ GitHub issue: [#3260](https://github.com/rabbitmq/rabbitmq-server/issues/3260)
+
+
+### RabbitMQ Erlang Client
+
+#### Bug Fixes
+
+ * New releases of the client are again published to Hex.pm.
+
+ GitHub issue: [#3247](https://github.com/rabbitmq/rabbitmq-server/pull/3247)
+
+ * `connection_timeout` was adjusted to avoid a confusing warning.
+
+ GitHub issue: [#3232](https://github.com/rabbitmq/rabbitmq-server/pull/3232) contributed by @britto
+
+ * Corrected a typo in direct connection net tick time adjustment.
+
+ GitHub issue: [#3233](https://github.com/rabbitmq/rabbitmq-server/pull/3233) contributed by @britto
+
+
+
+## Dependency Upgrades
+
+No dependency changes in this release.
+
+
+## Source Code Archives
+
+To obtain source code of the entire distribution, please download the archive named `rabbitmq-server-3.8.20.tar.xz`
+instead of the source tarball produced by GitHub.
diff --git a/release-notes/3.8.21.md b/release-notes/3.8.21.md
new file mode 100644
index 0000000000..ac63aeceb7
--- /dev/null
+++ b/release-notes/3.8.21.md
@@ -0,0 +1,62 @@
+## RabbitMQ 3.8.21
+
+RabbitMQ `3.8.21` is a maintenance release that addresses a regression introduced in `3.8.17`.
+All users are recommended to upgrade to this release.
+
+### Obtaining Packages
+
+RabbitMQ releases are distributed via [GitHub](https://github.com/rabbitmq/rabbitmq-server/releases), [Cloudsmith](https://cloudsmith.io/~rabbitmq/repos/),
+and [PackageCloud](https://packagecloud.io/rabbitmq).
+
+### Erlang/OTP Compatibility Notes
+
+This release [requires Erlang 23.2](https://www.rabbitmq.com/which-erlang.html) and [supports Erlang 24](https://blog.rabbitmq.com/posts/2021/03/erlang-24-support-roadmap/).
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision modern Erlang versions.
+
+
+## Upgrade and Compatibility Notes
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades and
+[RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+If upgrading from a `3.7.x` release, see [3.8.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.8.0)
+upgrade and compatibility notes first.
+
+If upgrading from a `3.6.x` or older [release series](https://www.rabbitmq.com/versions.html), first upgrade
+to [`3.7.27`](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.7.27) and then to this version.
+
+
+## Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users)
+and [RabbitMQ community Slack](https://rabbitmq-slack.herokuapp.com/).
+
+
+## Changes Worth Mentioning
+
+Release notes are kept under [rabbitmq-server/release-notes](https://github.com/rabbitmq/rabbitmq-server/tree/v3.8.x/release-notes).
+Contributors are encouraged to update them together with their changes. This helps with release automation and a more
+consistent release schedule.
+
+### Core Server
+
+#### Bug Fixes
+
+ * Queues that had messages with per-message TTL in them could fail to recover their indices
+ after a restart.
+
+ GitHub issues: [#3284](https://github.com/rabbitmq/rabbitmq-server/pull/3284), [#3272](https://github.com/rabbitmq/rabbitmq-server/discussions/3272)
+
+
+
+## Dependency Upgrades
+
+No dependency changes in this release.
+
+
+## Source Code Archives
+
+To obtain source code of the entire distribution, please download the archive named `rabbitmq-server-3.8.21.tar.xz`
+instead of the source tarball produced by GitHub.
diff --git a/release-notes/3.8.22.md b/release-notes/3.8.22.md
new file mode 100644
index 0000000000..45037b3e52
--- /dev/null
+++ b/release-notes/3.8.22.md
@@ -0,0 +1,83 @@
+## RabbitMQ 3.8.22
+
+RabbitMQ `3.8.22` is a maintenance release.
+All users are recommended to upgrade to this release.
+
+### Obtaining Packages
+
+RabbitMQ releases are distributed via [GitHub](https://github.com/rabbitmq/rabbitmq-server/releases), [Cloudsmith](https://cloudsmith.io/~rabbitmq/repos/),
+and [PackageCloud](https://packagecloud.io/rabbitmq).
+
+### Erlang/OTP Compatibility Notes
+
+This release [requires Erlang 23.2](https://www.rabbitmq.com/which-erlang.html) and [supports Erlang 24](https://blog.rabbitmq.com/posts/2021/03/erlang-24-support-roadmap/).
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision modern Erlang versions.
+
+
+## Upgrade and Compatibility Notes
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades and
+[RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+If upgrading from a `3.7.x` release, see [3.8.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.8.0)
+upgrade and compatibility notes first.
+
+If upgrading from a `3.6.x` or older [release series](https://www.rabbitmq.com/versions.html), first upgrade
+to [`3.7.27`](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.7.27) and then to this version.
+
+
+## Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users)
+and [RabbitMQ community Slack](https://rabbitmq-slack.herokuapp.com/).
+
+
+## Changes Worth Mentioning
+
+Release notes are kept under [rabbitmq-server/release-notes](https://github.com/rabbitmq/rabbitmq-server/tree/v3.8.x/release-notes).
+Contributors are encouraged to update them together with their changes. This helps with release automation and a more
+consistent release schedule.
+
+
+### Core Server
+
+#### Bug Fixes
+
+ * Virtual host metadata (description, tags) was not imported from definitions.
+
+ GitHub issue: [#3333](https://github.com/rabbitmq/rabbitmq-server/pull/3333)
+
+
+### AWS Peer Discovery Plugin
+
+#### Enhancements
+
+ * AWS API calls are now retried multiple times.
+
+ Contributed by AWS.
+
+ GitHub issue: [#3329](https://github.com/rabbitmq/rabbitmq-server/pull/3329)
+
+
+### Management Plugin
+
+#### Enhancements
+
+ * `PUT /api/vhosts/{name}` now can update metadata (tags and descriptions) for existing
+ virtual hosts.
+
+ GitHub issue: [#3319](https://github.com/rabbitmq/rabbitmq-server/pull/3319)
+
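+A sketch of such an update, assuming the request body accepts the same fields used when creating
+a virtual host (`description` and `tags`):
+
+``` sh
+# update metadata of an existing virtual host (credentials, host and field values are illustrative)
+curl -u guest:guest -H "content-type: application/json" \
+     -X PUT http://localhost:15672/api/vhosts/my-vhost \
+     -d '{"description": "Team A development vhost", "tags": "team-a,dev"}'
+```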
+
+
+## Dependency Upgrades
+
+No dependency changes in this release.
+
+
+## Source Code Archives
+
+To obtain source code of the entire distribution, please download the archive named `rabbitmq-server-3.8.22.tar.xz`
+instead of the source tarball produced by GitHub.
diff --git a/release-notes/3.8.23.md b/release-notes/3.8.23.md
new file mode 100644
index 0000000000..8e4ac5d7cd
--- /dev/null
+++ b/release-notes/3.8.23.md
@@ -0,0 +1,72 @@
+## RabbitMQ 3.8.23
+
+RabbitMQ `3.8.23` is a maintenance release.
+All users are recommended to upgrade to this release.
+
+### Obtaining Packages
+
+RabbitMQ releases are distributed via [GitHub](https://github.com/rabbitmq/rabbitmq-server/releases), [Cloudsmith](https://cloudsmith.io/~rabbitmq/repos/),
+and [PackageCloud](https://packagecloud.io/rabbitmq).
+
+### Erlang/OTP Compatibility Notes
+
+This release [requires Erlang 23.2](https://www.rabbitmq.com/which-erlang.html) and [supports Erlang 24](https://blog.rabbitmq.com/posts/2021/03/erlang-24-support-roadmap/).
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision modern Erlang versions.
+
+
+## Upgrade and Compatibility Notes
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades and
+[RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+If upgrading from a `3.7.x` release, see [3.8.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.8.0)
+upgrade and compatibility notes first.
+
+If upgrading from a `3.6.x` or older [release series](https://www.rabbitmq.com/versions.html), first upgrade
+to [`3.7.27`](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.7.27) and then to this version.
+
+
+## Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users)
+and [RabbitMQ community Slack](https://rabbitmq-slack.herokuapp.com/).
+
+
+## Changes Worth Mentioning
+
+Release notes are kept under [rabbitmq-server/release-notes](https://github.com/rabbitmq/rabbitmq-server/tree/v3.8.x/release-notes).
+Contributors are encouraged to update them together with their changes. This helps with release automation and a more
+consistent release schedule.
+
+
+### Core Server
+
+#### Bug Fixes
+
+ * TLS information delivered in [Proxy protocol](https://www.rabbitmq.com/networking.html#proxy-protocol) header is now attached to connection metrics as if it was provided by a non-proxying client.
+
+ GitHub issue: [#3175](https://github.com/rabbitmq/rabbitmq-server/pull/3175) contributed by @prefiks, sponsored by CloudAMQP
+
+ * Classic queue shutdown now uses a much higher timeout (up to 10 minutes instead of 30 seconds).
+
+ In environments with many queues (especially mirrored queues) and many consumers this means that
+ the chance of queue indices rebuilding after node restart is now substantially lower.
+
+ GitHub issue: [#3409](https://github.com/rabbitmq/rabbitmq-server/pull/3409)
+
+ * Quorum queues no longer leak memory and disk space when a consumer is repeatedly added and cancelled on an empty queue.
+
+ GitHub issue: [#3445](https://github.com/rabbitmq/rabbitmq-server/issues/3445)
+
+
+## Dependency Upgrades
+
+ * observer_cli has been upgraded from `1.6.2` to `1.7.1`
+
+
+## Source Code Archives
+
+To obtain source code of the entire distribution, please download the archive named `rabbitmq-server-3.8.23.tar.xz`
+instead of the source tarball produced by GitHub.
diff --git a/release-notes/3.8.24.md b/release-notes/3.8.24.md
new file mode 100644
index 0000000000..eefc4c305b
--- /dev/null
+++ b/release-notes/3.8.24.md
@@ -0,0 +1,139 @@
+## RabbitMQ 3.8.24
+
+RabbitMQ `3.8.24` is a maintenance release.
+All users are recommended to upgrade to this release.
+
+### Obtaining Packages
+
+RabbitMQ releases are distributed via [GitHub](https://github.com/rabbitmq/rabbitmq-server/releases), [Cloudsmith](https://cloudsmith.io/~rabbitmq/repos/),
+and [PackageCloud](https://packagecloud.io/rabbitmq).
+
+### Erlang/OTP Compatibility Notes
+
+This release [requires Erlang 23.2](https://www.rabbitmq.com/which-erlang.html) and [supports Erlang 24](https://blog.rabbitmq.com/posts/2021/03/erlang-24-support-roadmap/).
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision modern Erlang versions.
+
+
+## Upgrade and Compatibility Notes
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades and
+[RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+If upgrading from a `3.7.x` release, see [3.8.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.8.0)
+upgrade and compatibility notes first.
+
+If upgrading from a `3.6.x` or older [release series](https://www.rabbitmq.com/versions.html), first upgrade
+to [`3.7.27`](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.7.27) and then to this version.
+
+
+## Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users)
+and [RabbitMQ community Slack](https://rabbitmq-slack.herokuapp.com/).
+
+
+## Changes Worth Mentioning
+
+Release notes are kept under [rabbitmq-server/release-notes](https://github.com/rabbitmq/rabbitmq-server/tree/v3.8.x/release-notes).
+Contributors are encouraged to update them together with their changes. This helps with release automation and a more
+consistent release schedule.
+
+
+### Core Server
+
+#### Bug Fixes
+
+ * Fixed an issue where the node monitor could report a false network partition when
+   one of the cluster members was restarted.
+
+ GitHub issue: [#3631](https://github.com/rabbitmq/rabbitmq-server/pull/3631)
+
+ * `rabbitmq-diagnostics memory_breakdown` failed to read memory of connection
+   reader, writer and channel processes.
+
+ GitHub issue: [#3570](https://github.com/rabbitmq/rabbitmq-server/issues/3570)
+
+ * Queue leader rebalancing now produces less log output.
+
+ GitHub issue: [#3643](https://github.com/rabbitmq/rabbitmq-server/pull/3643)
+
+ * Removed some redundant code.
+
+ Contributed by @Ayanda-D
+
+   GitHub issue: [#3506](https://github.com/rabbitmq/rabbitmq-server/pull/3506)
+
+ * Corrected grammar in an informational log message.
+
+ Contributed by @tuxiqae
+
+ GitHub issue: [#3680](https://github.com/rabbitmq/rabbitmq-server/pull/3680)
+
+#### Enhancements
+
+ * `cluster_formation.target_cluster_size_hint` is a new configuration setting that can be used to specify the expected initial cluster size.
+   This can be used by features, plugins or tools that expect a certain minimum number of cluster nodes
+   to join during initial cluster formation (see the example below).
+
+ GitHub issue: [#3635](https://github.com/rabbitmq/rabbitmq-server/pull/3635)
+
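+A minimal `rabbitmq.conf` sketch (the value of `3` is purely illustrative):
+
+``` ini
+# tell cluster formation that three nodes are expected to join the initial cluster
+cluster_formation.target_cluster_size_hint = 3
+```
+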
+
+### Prometheus Plugin
+
+#### Bug Fixes
+
+ * TLS-enabled Prometheus endpoint listener port was not correctly stored internally.
+
+ GitHub issue: [#2975](https://github.com/rabbitmq/rabbitmq-server/issues/2975)
+
+
+### Management Plugin
+
+#### Bug Fixes
+
+ * Persistent message count is now displayed correctly on individual queue pages.
+
+ GitHub issue: [#3598](https://github.com/rabbitmq/rabbitmq-server/pull/3598)
+
+ * When setting topic permissions, the list of exchanges in the UI now honors the
+ currently selected virtual host.
+
+ Contributed by @LuisCusihuaman.
+
+ GitHub issues: [#3545](https://github.com/rabbitmq/rabbitmq-server/pull/3545), [#3689](https://github.com/rabbitmq/rabbitmq-server/pull/3689)
+
+
+### AWS Peer Discovery Plugin
+
+#### Enhancements
+
+ * The plugin now logs more details for failed AWS API requests.
+
+ Contributed by @tvhong-amazon (AWS)
+
+ GitHub issue: [#3579](https://github.com/rabbitmq/rabbitmq-server/pull/3579)
+
+
+### Consistent Hashing Exchange Plugin
+
+#### Bug Fixes
+
+ * Corrected deletion of duplicate bindings between an exchange and a queue.
+
+ Contributed by @FalconerTC
+
+ GitHub issue: [#3594](https://github.com/rabbitmq/rabbitmq-server/pull/3594)
+
+
+## Dependency Upgrades
+
+ None in this release.
+
+
+## Source Code Archives
+
+To obtain source code of the entire distribution, please download the archive named `rabbitmq-server-3.8.24.tar.xz`
+instead of the source tarball produced by GitHub.
+
diff --git a/release-notes/3.8.25.md b/release-notes/3.8.25.md
new file mode 100644
index 0000000000..892f35bfe1
--- /dev/null
+++ b/release-notes/3.8.25.md
@@ -0,0 +1,67 @@
+## RabbitMQ 3.8.25
+
+RabbitMQ `3.8.25` is a maintenance release.
+All users are recommended to upgrade to this release.
+
+### Obtaining Packages
+
+RabbitMQ releases are distributed via [GitHub](https://github.com/rabbitmq/rabbitmq-server/releases), [Cloudsmith](https://cloudsmith.io/~rabbitmq/repos/),
+and [PackageCloud](https://packagecloud.io/rabbitmq).
+
+### Erlang/OTP Compatibility Notes
+
+This release [requires Erlang 23.2](https://www.rabbitmq.com/which-erlang.html) and [supports Erlang 24](https://blog.rabbitmq.com/posts/2021/03/erlang-24-support-roadmap/).
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision modern Erlang versions.
+
+
+## Upgrade and Compatibility Notes
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades and
+[RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+If upgrading from a `3.7.x` release, see [3.8.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.8.0)
+upgrade and compatibility notes first.
+
+If upgrading from a `3.6.x` or older [release series](https://www.rabbitmq.com/versions.html), first upgrade
+to [`3.7.27`](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.7.27) and then to this version.
+
+
+## Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users)
+and [RabbitMQ community Slack](https://rabbitmq-slack.herokuapp.com/).
+
+
+## Changes Worth Mentioning
+
+Release notes are kept under [rabbitmq-server/release-notes](https://github.com/rabbitmq/rabbitmq-server/tree/v3.8.x/release-notes).
+Contributors are encouraged to update them together with their changes. This helps with release automation and a more
+consistent release schedule.
+
+
+### Prometheus Plugin
+
+#### Enhancements
+
+ * New Prometheus metrics for alarms:
+ * `rabbitmq_alarms_file_descriptor_limit` 1|0
+ * `rabbitmq_alarms_free_disk_space_watermark` 1|0
+ * `rabbitmq_alarms_memory_used_watermark` 1|0
+
+ While some of the alarms have cluster-wide effect, these metrics are node-local.
+
+ GitHub issue: [#2653](https://github.com/rabbitmq/rabbitmq-server/pull/2653)
+
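+These values can be checked by scraping the node in question. A sketch, assuming the plugin
+listens on its default port (`15692`):
+
+``` sh
+# each alarm metric reports 1 or 0 depending on whether the alarm is in effect on this node
+curl -s http://localhost:15692/metrics | grep '^rabbitmq_alarms_'
+```
+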
+
+## Dependency Upgrades
+
+ None in this release.
+
+
+## Source Code Archives
+
+To obtain source code of the entire distribution, please download the archive named `rabbitmq-server-3.8.25.tar.xz`
+instead of the source tarball produced by GitHub.
+
diff --git a/release-notes/3.8.26.md b/release-notes/3.8.26.md
new file mode 100644
index 0000000000..eadaf7cae8
--- /dev/null
+++ b/release-notes/3.8.26.md
@@ -0,0 +1,64 @@
+## RabbitMQ 3.8.26
+
+RabbitMQ `3.8.26` is a maintenance release.
+All users are recommended to upgrade to this release.
+
+### Obtaining Packages
+
+RabbitMQ releases are distributed via [GitHub](https://github.com/rabbitmq/rabbitmq-server/releases), [Cloudsmith](https://cloudsmith.io/~rabbitmq/repos/),
+and [PackageCloud](https://packagecloud.io/rabbitmq).
+
+### Erlang/OTP Compatibility Notes
+
+This release [requires Erlang 23.2](https://www.rabbitmq.com/which-erlang.html) and [supports Erlang 24](https://blog.rabbitmq.com/posts/2021/03/erlang-24-support-roadmap/).
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision modern Erlang versions.
+
+
+## Upgrade and Compatibility Notes
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades and
+[RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+If upgrading from a `3.7.x` release, see [3.8.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.8.0)
+upgrade and compatibility notes first.
+
+If upgrading from a `3.6.x` or older [release series](https://www.rabbitmq.com/versions.html), first upgrade
+to [`3.7.27`](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.7.27) and then to this version.
+
+
+## Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users)
+and [RabbitMQ community Slack](https://rabbitmq-slack.herokuapp.com/).
+
+
+## Changes Worth Mentioning
+
+Release notes are kept under [rabbitmq-server/release-notes](https://github.com/rabbitmq/rabbitmq-server/tree/v3.8.x/release-notes).
+Contributors are encouraged to update them together with their changes. This helps with release automation and a more
+consistent release schedule.
+
+
+### OAuth 2 Plugin
+
+#### Bug Fixes
+
+ * Signing keys specified in `rabbitmq.conf` were not translated correctly,
+ resulting in exceptions during permission checks.
+
+ GitHub issue: [#3759](https://github.com/rabbitmq/rabbitmq-server/pull/3759)
+
+
+
+## Dependency Upgrades
+
+ None in this release.
+
+
+## Source Code Archives
+
+To obtain source code of the entire distribution, please download the archive named `rabbitmq-server-3.8.26.tar.xz`
+instead of the source tarball produced by GitHub.
+
diff --git a/release-notes/3.8.3.md b/release-notes/3.8.3.md
new file mode 100644
index 0000000000..b45cfc6522
--- /dev/null
+++ b/release-notes/3.8.3.md
@@ -0,0 +1,305 @@
+## RabbitMQ 3.8.3
+
+RabbitMQ `3.8.3` is a maintenance release.
+
+
+### Erlang/OTP Compatibility Notes
+
+This release [**requires Erlang/OTP 21.3**](https://www.rabbitmq.com/which-erlang.html) or later.
+`22.x` series is recommended.
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains what package repositories and tools can be used to provision latest patch versions of Erlang `21.3.x` and `22.x`.
+
+
+### Compatibility Notes
+
+#### Upgrading to Erlang 21.x or Later Versions
+
+When upgrading to this release from `3.7.6` or an older version, extra care has to be taken.
+
+Since CLI tools from RabbitMQ releases older than 3.7.7 will fail on Erlang 21 or later, RabbitMQ **must be upgraded at the same time as Erlang**.
+Alternatively the node can be upgraded to `3.7.18` first, then Erlang 21.x or 22.x, then to RabbitMQ 3.8.x.
+
+#### Upgrade Doc Guides and Change Log
+
+See [3.8.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.8.0) upgrade and compatibility notes first if upgrading from an earlier release.
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades and [RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+### Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+
+## Changes
+
+
+### Core Server
+
+#### Bug Fixes
+
+ * Reduced scheduled GC activity in the connection socket writer to one run per 1 GiB of data transferred, with an option to change the value or disable scheduled runs entirely.
+   More frequent binary heap GC runs seem unnecessary on Erlang 22.
+
+ Contributed by Aleksander Nycz.
+
+ GitHub issue: [rabbitmq/rabbitmq-common#343](https://github.com/rabbitmq/rabbitmq-common/pull/343)
+
+ * Eliminated an inefficiency in recovery of quorum queues with a backlog of messages.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2260](https://github.com/rabbitmq/rabbitmq-server/pull/2260)
+
+ * In a case where a node hosting a quorum queue replica went offline and was removed from the cluster,
+ and later came back, quorum queues could enter a loop of Raft leader elections.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2255](https://github.com/rabbitmq/rabbitmq-server/pull/2255)
+
+ * Quorum queues with dead lettering configured could fail to recover.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2256](https://github.com/rabbitmq/rabbitmq-server/pull/2256)
+
+ * The node now can recover even if the virtual host recovery terms file was corrupted.
+
+ Contributed by @tomyouyou.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2195](https://github.com/rabbitmq/rabbitmq-server/pull/2195)
+
+ * Autoheal could fail to finish if one of its state transitions initiated by a remote node timed out.
+
+ Contributed by @tomyouyou.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2209](https://github.com/rabbitmq/rabbitmq-server/pull/2209)
+
+ * Syslog client is now started even when Syslog logging is configured only for some log sinks.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2044](https://github.com/rabbitmq/rabbitmq-server/issues/2044)
+
+ * Policies that quorum queues ignored were still listed as applied to them.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2210](https://github.com/rabbitmq/rabbitmq-server/pull/2210)
+
+ * If a quorum queue leader rebalancing operation timed out, CLI tools failed with an exception instead of a sensible internal API response.
+
+ Contributed by Gabriele Santomaggio.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2214](https://github.com/rabbitmq/rabbitmq-server/issues/2214)
+
+ * Handle timeout error on the rebalance function.
+
+ Contributed by Gabriele Santomaggio.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2218](https://github.com/rabbitmq/rabbitmq-server/pull/2218)
+
+ * Handle and raise protocol error for absent queues assumed to be alive.
+
+ Contributed by Ayanda Dube.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2233](https://github.com/rabbitmq/rabbitmq-server/pull/2233)
+
+#### Enhancements
+
+ * Some Proxy protocol errors are now logged at debug level.
+ This reduces log noise in environments where TCP load balancers and proxies perform health checks by opening a TCP connection but never sending any data.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2204](https://github.com/rabbitmq/rabbitmq-server/pull/2204)
+
+ * Quorum queue deletion operation no longer supports the "if unused" and "if empty" options.
+   They are typically used for transient queues and don't make much sense for quorum ones.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2211](https://github.com/rabbitmq/rabbitmq-server/pull/2211)
+
+ * Do not treat applications that do not depend on rabbit as plugins.
+ This is especially important for applications that should not be stopped before rabbit is stopped.
+
+ GitHub Issue: [rabbitmq/rabbitmq-server#2212](https://github.com/rabbitmq/rabbitmq-server/pull/2212)
+
+ * RabbitMQ nodes will now gracefully shutdown when receiving a `SIGTERM` signal.
+ Previously the runtime would invoke a default handler that terminates the VM giving RabbitMQ no chance to execute its shutdown steps.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2222](https://github.com/rabbitmq/rabbitmq-server/issues/2222)
+
+ * Every cluster now features a persistent internal cluster ID that can be used by core features or plugins.
+   Unlike the human-readable cluster name, the value cannot be overridden by the user.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2226](https://github.com/rabbitmq/rabbitmq-server/pull/2226)
+
+ * Sped up execution of boot steps by a factor of 2N, where N is the number of attributes per step.
+
+ Contributed by Ayanda Dube.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2246](https://github.com/rabbitmq/rabbitmq-server/pull/2246)
+
+
+### CLI Tools
+
+#### Bug Fixes
+
+ * `rabbitmq-diagnostics status` failed to display the results when executed against a node that had its high memory watermark set as an absolute value (using `vm_memory_high_watermark.absolute`).
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#395](https://github.com/rabbitmq/rabbitmq-cli/issues/395)
+
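+For reference, an absolute watermark is configured in `rabbitmq.conf` like this (the value is illustrative):
+
+``` ini
+# trigger the high memory watermark alarm once the node uses more than about 2 GB of memory
+vm_memory_high_watermark.absolute = 2GB
+```
+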
+#### Enhancements
+
+ * New health checks that can be used to determine if it's a good moment to shut down a node for an upgrade.
+
+ ``` sh
+ # Exits with a non-zero code if target node hosts leader replica of at least one queue
+ # that has out-of-sync mirror.
+ rabbitmq-diagnostics check_if_node_is_mirror_sync_critical
+
+ # Exits with a non-zero code if one or more quorum queues will lose online quorum
+ # should target node be shut down
+ rabbitmq-diagnostics check_if_node_is_quorum_critical
+ ```
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#389](https://github.com/rabbitmq/rabbitmq-cli/issues/389)
+
+
+### Management and Management Agent Plugins
+
+#### Bug Fixes
+
+ * Consumer section on individual pages was unintentionally hidden.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#758](https://github.com/rabbitmq/rabbitmq-management/issues/758)
+
+ * Fixed the queue-type select element by adding an `unsafe-inline` CSP policy.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#769](https://github.com/rabbitmq/rabbitmq-management/pull/769)
+
+#### Enhancements
+
+ * An undocumented "automagic login" feature on the login form was removed.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#748](https://github.com/rabbitmq/rabbitmq-management/pull/748)
+
+ * A new `POST /login` endpoint can be used by custom management UI login forms to authenticate the user and set the cookie.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#764](https://github.com/rabbitmq/rabbitmq-management/issues/764)
+
+ * A new `POST /rebalance/queues` endpoint is the HTTP API equivalent of `rabbitmq-queues rebalance` (see the examples after this list).
+
+ GitHub issue: [rabbitmq/rabbitmq-management#782](https://github.com/rabbitmq/rabbitmq-management/issues/782)
+
+ * Warning about a missing `handle.exe` in `PATH` on Windows is now only logged every 10 minutes.
+
+ GitHub issue: [rabbitmq/rabbitmq-management-agent#90](https://github.com/rabbitmq/rabbitmq-management-agent/issues/90)
+
+ * `rabbitmqadmin declare queue` now supports a new `queue_type` parameter to simplify declaration of quorum queues.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#761](https://github.com/rabbitmq/rabbitmq-management/issues/761)
+
+ * HTTP API request log entries now include the acting user.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#760](https://github.com/rabbitmq/rabbitmq-management/issues/760)
+
+ * [Content Security Policy headers](https://developer.mozilla.org/en-US/docs/Web/HTTP/CSP) are now also set for static assets such as JavaScript files.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#767](https://github.com/rabbitmq/rabbitmq-management/issues/767)
+
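+The new HTTP API endpoints and the `rabbitmqadmin` parameter mentioned above could be exercised
+roughly as follows. This is a sketch: the paths are taken verbatim from the items above, while the
+login form field names, credentials and queue name are assumptions.
+
+``` sh
+# authenticate against the new login endpoint (form field names are assumed)
+curl -i -X POST -d "username=guest&password=guest" http://localhost:15672/login
+
+# trigger a queue leader rebalance, the HTTP API equivalent of `rabbitmq-queues rebalance`
+curl -u guest:guest -X POST http://localhost:15672/rebalance/queues
+
+# declare a quorum queue using the new queue_type parameter
+rabbitmqadmin declare queue name=orders queue_type=quorum durable=true
+```
+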
+
+### Prometheus Plugin
+
+#### Enhancements
+
+ * Add option to aggregate metrics for channels, queues & connections. Metrics are now aggregated by default (safe by default).
+   This new behaviour can be disabled by setting `prometheus.return_per_object_metrics = true` (see the example below).
+
+ GitHub issue: [rabbitmq/rabbitmq-prometheus#28](https://github.com/rabbitmq/rabbitmq-prometheus/issues/28)
+
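+A one-line `rabbitmq.conf` sketch for operators who prefer the previous, per-object behaviour:
+
+``` ini
+# return per-queue, per-channel and per-connection metrics instead of aggregated ones
+prometheus.return_per_object_metrics = true
+```
+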
+
+### Kubernetes Peer Discovery Plugin
+
+#### Enhancements
+
+ * The plugin will now notify [Kubernetes API](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.16/#event-v1-core) of node startup and peer stop/unavailability events.
+
+ Contributed by Gabriele Santomaggio.
+
+ GitHub issue: [rabbitmq/rabbitmq-peer-discovery-k8s#61](https://github.com/rabbitmq/rabbitmq-peer-discovery-k8s/pull/61)
+
+
+### Etcd Peer Discovery Plugin
+
+#### Bug Fixes
+
+ * Only run healthcheck when backend is configured.
+
+ GitHub issue: [rabbitmq/rabbitmq-peer-discovery-etcd#21](https://github.com/rabbitmq/rabbitmq-peer-discovery-etcd/pull/21)
+
+
+### Federation Plugin
+
+#### Bug Fixes
+
+ * Use vhost to delete federated exchange.
+
+ [Reported by Brett Janer](https://groups.google.com/forum/#!topic/rabbitmq-users/nfulekZc_OQ/discussion).
+
+ GitHub issue: [rabbitmq/rabbitmq-federation#99](https://github.com/rabbitmq/rabbitmq-federation/pull/99)
+
+#### Enhancements
+
+ * "Command" operations such as binding propagation now use a separate channel for all links, preventing latency spikes for asynchronous operations (such as message publishing) (a head-of-line blocking problem).
+
+ Contributed by Grigory Starinkin.
+
+ GitHub issue: [rabbitmq/rabbitmq-federation#97](https://github.com/rabbitmq/rabbitmq-federation/pull/97)
+
+
+### Auth Backend OAuth 2 Plugin
+
+#### Enhancements
+
+ * Additional scopes can be fetched from a predefined JWT token field.
+ Those scopes will be combined with the standard scopes field.
+
+ Contributed by @papugamichal.
+
+ GitHub issue: [rabbitmq/rabbitmq-auth-backend-oauth2#41](https://github.com/rabbitmq/rabbitmq-auth-backend-oauth2/pull/41)
+
+
+### Trust Store Plugin
+
+#### Enhancements
+
+ * HTTPS certificate provider will no longer terminate if the upstream service response contains invalid JSON.
+
+ GitHub issue: [rabbitmq/rabbitmq-trust-store#73](https://github.com/rabbitmq/rabbitmq-trust-store/issues/73)
+
+
+### MQTT Plugin
+
+#### Enhancements
+
+ * Avoid blocking when registering or unregistering a client ID.
+
+ GitHub issue: [rabbitmq/rabbitmq-mqtt#226](https://github.com/rabbitmq/rabbitmq-mqtt/pull/226)
+
+
+### AMQP 1.0 Client Plugin
+
+#### Enhancements
+
+ * Handle heartbeat in `close_sent/2`.
+
+ GitHub issue: [rabbitmq/rabbitmq-amqp1.0-client#44](https://github.com/rabbitmq/rabbitmq-amqp1.0-client/pull/44)
+
+
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker, not the plugins or the client libraries.
+Please download the archive named `rabbitmq-server-3.8.3.tar.xz`.
+
+
+
+## Dependency upgrades
+
+ * observer-cli was upgraded [from v1.5.2 to v1.5.3](https://github.com/zhongwencool/observer_cli/compare/1.5.2...1.5.3)
+ * prometheus.erl was upgraded [from v4.4.0 to v4.5.0](https://github.com/deadtrickster/prometheus.erl/compare/v4.4.0...v4.5.0)
+ * ra was upgraded [from v1.0.5 to v1.0.8](https://github.com/rabbitmq/ra/compare/v1.0.5...v1.0.8)
diff --git a/release-notes/3.8.4.md b/release-notes/3.8.4.md
new file mode 100644
index 0000000000..13e8d55492
--- /dev/null
+++ b/release-notes/3.8.4.md
@@ -0,0 +1,413 @@
+## RabbitMQ 3.8.4
+
+RabbitMQ `3.8.4` is a maintenance release. It focuses on bug fixes and usability
+improvements. The etcd [peer discovery](https://www.rabbitmq.com/cluster-formation.html) plugin includes potentially breaking changes which are covered in detail below.
+
+This is the first release to be [fully compatible](https://groups.google.com/forum/#!topic/rabbitmq-users/wlPIWz3UYHQ) with [Erlang 23](http://blog.erlang.org/OTP-23-Highlights/).
+
+### Erlang/OTP Compatibility Notes
+
+This release [**requires Erlang/OTP 21.3**](https://www.rabbitmq.com/which-erlang.html) or later.
+`22.3` or `23.0` releases are recommended.
+
+This is the first release to officially support Erlang 23. Compared to `3.8.3`, which
+generally works as expected on Erlang 23, the [Prometheus monitoring](https://www.rabbitmq.com/prometheus.html) plugin had to
+be adapted to support a memory allocator metric format that has changed compared to
+earlier Erlang releases.
+
+There is one **remaining known Erlang 23 incompatibility**: `rabbitmq-diagnostics observer` **will fail** on Erlang 23.
+This will be addressed in a future release.
+The root cause is the same memory allocator metric change.
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision latest patch versions of Erlang `22.3.x`.
+
+
+## Upgrade and Compatibility Notes
+
+### Raft Segment File Format Change
+
+In earlier `3.8.x` versions, Raft log segment files were unintentionally limited to 4 GiB,
+which can be problematic for environments that use quorum queues and medium to large message
+payloads.
+
+This releases removes the limitation by changing the segment file on-disk format.
+While **no action is required** for upgrading to `3.8.4`, operators should keep in mind
+that segment files produced by `3.8.4` nodes won't be readable by earlier versions.
+In other words, this release **does not support downgrading** to earlier `3.8.x` versions.
+
+### Custom Prefixless Node Name
+
+Environments that override node name using `NODENAME` in `rabbitmq-env.conf` may have to
+also define `RABBITMQ_NODENAME` in the same file for CLI tools to pick the value up:
+
+``` ini
+# how node name is typically overridden in rabbitmq-env.conf[.bat] files
+NODENAME=rabbit@custom.hostname
+# for CLI tools in 3.8.4, otherwise not necessary
+RABBITMQ_NODENAME=rabbit@custom.hostname
+```
+
+This behaviour change compared to earlier `3.8.x` versions was not intentional and was
+[corrected for `3.8.5`](https://github.com/rabbitmq/rabbitmq-cli/pull/422).
+
+Alternatively, it is possible to provide target node name using a command line flag:
+
+``` shell
+rabbitmq-diagnostics -n rabbit@custom.hostname status
+```
+
+### Upgrade Doc Guides and Change Log
+
+See [3.8.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.8.0) upgrade and compatibility notes first if upgrading from an earlier release.
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades and [RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+### Upgrading to Erlang 21.x or Later Versions
+
+When upgrading to this release from `3.7.6` or an older version, extra care has to be taken.
+
+Since CLI tools from RabbitMQ releases older than 3.7.7 will fail on Erlang 21 or later, RabbitMQ **must be upgraded at the same time as Erlang**.
+Alternatively the node can be upgraded to `3.7.18` first, then Erlang 22.x or 23.x, then RabbitMQ to the most recent
+3.8.x release.
+
+### Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+
+## Changes Worth Mentioning
+
+### Core Server
+
+#### Bug Fixes
+
+ * A node recovering a large disk backlog did not throttle its recovery activity, potentially
+ using large amounts of memory at peak during recovery.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2254](https://github.com/rabbitmq/rabbitmq-server/issues/2254)
+
+ * Raft log segment files were unintentionally limited to 4 GiB in size, which could result
+ in failing log flush operations in environments where medium and large messages were published to
+ quorum queues.
+
+ This problem is addressed by a change to the on disk format of Raft log segment files.
+ The change is irreversible, meaning that **there is no downgrade path** from `3.8.4` to earlier releases.
+
+ GitHub issue: [rabbitmq/ra#176](https://github.com/rabbitmq/ra/pull/176)
+
+ * A reached delivery limit on a quorum queue could result in an exception for polling (`basic.get`) consumers.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2330](https://github.com/rabbitmq/rabbitmq-server/pull/2330)
+
+ * A publish to a quorum queue that happened to not have an elected leader failed with an exception.
+ Such operations are generally expected to be retried by channels.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2295](https://github.com/rabbitmq/rabbitmq-server/pull/2295)
+
+ * Separate authN and authZ backends did not behave as expected when `rabbitmq_auth_backend_cache` was enabled.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2322](https://github.com/rabbitmq/rabbitmq-server/issues/2322)
+
+ * Handle a condition where epmd connection was closed during node registration.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2332](https://github.com/rabbitmq/rabbitmq-server/pull/2332)
+
+ * Registering a quorum queue consumer did not result in a `consumer.created` event like it would
+ for classic queues.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2341](https://github.com/rabbitmq/rabbitmq-server/issues/2341)
+
+#### Enhancements
+
+ * Reduced peak memory use of quorum queues that go idle (not under constant load).
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2272](https://github.com/rabbitmq/rabbitmq-server/pull/2272)
+
+ * Change default configuration values for quorum queues to improve publisher back pressure when under heavy load.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2334](https://github.com/rabbitmq/rabbitmq-server/pull/2334)
+
+ * Definition import is now partially concurrent, improving CPU utilisation and reducing the amount of time
+ taken to import a large definition file.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2224](https://github.com/rabbitmq/rabbitmq-server/issues/2224)
+
+ * Improved configuration merging for cases when multiple files are used, e.g. `rabbitmq.conf` plus
+ `advanced.config` or even multiple `.conf` files (the latter is an experimental feature).
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2277](https://github.com/rabbitmq/rabbitmq-server/pull/2277)
+
+ * More peer discovery errors are retried.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#1627](https://github.com/rabbitmq/rabbitmq-server/issues/1627)
+
+ * Allow product name, version and MOTD to be overwritten via environment variables.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2247](https://github.com/rabbitmq/rabbitmq-server/pull/2247)
+
+ * Improve display of product name and version.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2326](https://github.com/rabbitmq/rabbitmq-server/pull/2326)
+
+ * Make RabbitMQ a regular Erlang/OTP application.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2180](https://github.com/rabbitmq/rabbitmq-server/issues/2180)
+
+
+### CLI Tools
+
+#### Enhancements
+
+ * New `rabbitmq-upgrade` commands: `await_online_quorum_plus_one` and `await_online_synchronized_mirror`.
+ The commands will wait for a safe quorum of nodes (quorum queues) or a synchronised leader replica (classic mirrored queues)
+ to be or come online within a period of time.
+
+ ``` sh
+ # Exits with a non-zero exit code if there is not a quorum + 1 of nodes online for all
+ # quorum queues within 120 seconds (default timeout)
+ rabbitmq-upgrade await_online_quorum_plus_one
+ # Will wait for a quorum + 1 of nodes to be online for all quorum queues for 0 seconds...
+
+ # The timeout can be adjusted via the -t flag:
+ rabbitmq-upgrade await_online_quorum_plus_one -t 60
+ # Will wait for a quorum + 1 of nodes to be online for all quorum queues for 60 seconds...
+
+ # There is an equivalent command for classic mirrored queues
+ rabbitmq-upgrade await_online_synchronized_mirror
+ ```
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#409](https://github.com/rabbitmq/rabbitmq-cli/issues/409)
+
+ * New `rabbitmq-diagnostics` command: `os_env`
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#412](https://github.com/rabbitmq/rabbitmq-cli/pull/412)
+
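+For example (the node name is the same illustrative one used elsewhere in these notes):
+
+``` sh
+# inspect environment variables relevant to RabbitMQ as seen by the target node
+rabbitmq-diagnostics os_env
+
+# like other diagnostics commands, it accepts an explicit target node via -n
+rabbitmq-diagnostics -n rabbit@custom.hostname os_env
+```
+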
+
+### Prometheus Plugin
+
+#### Enhancements
+
+ * Compatibility with Erlang 23 memory allocator metric format.
+
+ GitHub issue: [rabbitmq/rabbitmq-prometheus#42](https://github.com/rabbitmq/rabbitmq-prometheus/issues/42)
+
+ * Node color labeling in Grafana dashboards now works out of the box with one more common naming scheme.
+
+ GitHub issue: [rabbitmq/rabbitmq-prometheus#33](https://github.com/rabbitmq/rabbitmq-prometheus/issues/33)
+
+ Contributed by @Aakcht.
+
+
+### Federation Plugin
+
+#### Bug Fixes
+
+ * Credential obfuscation now uses the same [shared secret](https://www.rabbitmq.com/clustering.html#erlang-cookie)
+ on all nodes to make sure that a link running on node A can be restarted by a command against node B.
+ Previously each node used its own generated secret for obfuscation, which means certain operations
+ on links originated on other nodes were not possible.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2351](https://github.com/rabbitmq/rabbitmq-server/pull/2351)
+
+#### Enhancements
+
+ * Internal resource cleanup is now more selective on abnormal link termination. A new upstream parameter, `resource-cleanup-mode`,
+   can be set to `"never"` to prevent internal queues from being deleted by exchange federation links on termination (see the example after this list).
+   Use this when data safety is more important than protection from resource leaks. It will then be up to the operator to make
+   sure such resources are periodically deleted or use a suitable [TTL](https://www.rabbitmq.com/ttl.html).
+
+ GitHub issue: [rabbitmq/rabbitmq-federation#105](https://github.com/rabbitmq/rabbitmq-federation/issues/105)
+
+ * SNI is now enabled by default for TLS-enabled connections.
+
+ Contributed by Carl Hörberg (CloudAMQP).
+
+ GitHub issue: [rabbitmq/rabbitmq-erlang-client#132](https://github.com/rabbitmq/rabbitmq-erlang-client/pull/132)
+
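+The `resource-cleanup-mode` parameter described above is set on the upstream definition. A sketch,
+with a made-up upstream name and URI:
+
+``` sh
+# define an exchange federation upstream that never deletes its internal resources
+# on abnormal link termination
+rabbitmqctl set_parameter federation-upstream my-upstream \
+  '{"uri": "amqp://remote.example.com", "resource-cleanup-mode": "never"}'
+```
+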
+
+### Shovel Plugin
+
+#### Bug Fixes
+
+ * Credential obfuscation now uses the same [shared secret](https://www.rabbitmq.com/clustering.html#erlang-cookie)
+ on all nodes to make sure that a shovel running on node A can be restarted by a command against node B.
+ Previously each node used its own generated secret for obfuscation, which means certain operations
+ on shovels originated on other nodes were not possible.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2351](https://github.com/rabbitmq/rabbitmq-server/pull/2351)
+
+ * Shovels are now correctly removed even when deletion commands are executed against a different node
+ (not the one on which the target Shovel has been created).
+
+ GitHub issue: [rabbitmq/rabbitmq-shovel#68](https://github.com/rabbitmq/rabbitmq-shovel/issues/68)
+
+ * More negative AMQP 1.0 endpoint connection responses are handled.
+
+ Contributed by Renaud @bougueil Mariana.
+
+ GitHub issue: [rabbitmq/rabbitmq-amqp1.0-client#48](https://github.com/rabbitmq/rabbitmq-amqp1.0-client/pull/48)
+
+ * Messages forwarded from an AMQP 0-9-1 source to an AMQP 1.0 destination now propagate more
+   message properties.
+
+ GitHub issue: [rabbitmq/rabbitmq-shovel#70](https://github.com/rabbitmq/rabbitmq-shovel/issues/70)
+
+ * Pre-3.7 `delete-after` Shovel property is now considered as well as the new `src-delete-after` property.
+
+ GitHub issue: [rabbitmq/rabbitmq-shovel#72](https://github.com/rabbitmq/rabbitmq-shovel/issues/72)
+
+#### Enhancements
+
+ * Revisited [Shovel documentation guides](https://www.rabbitmq.com/shovel.html).
+
+ * SNI is now enabled by default for TLS-enabled connections.
+
+ Contributed by Carl Hörberg (CloudAMQP).
+
+ GitHub issue: [rabbitmq/rabbitmq-erlang-client#132](https://github.com/rabbitmq/rabbitmq-erlang-client/pull/132)
+
+
+### Management Plugin
+
+#### Enhancements
+
+ * More frequent garbage collection by stats DB cache processes in environments where HTTP API queries produced
+ large data sets.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#788](https://github.com/rabbitmq/rabbitmq-management/pull/788)
+
+ * `rabbitmqadmin` CLI switches now take precedence over config file values. This is more typical
+ for command line tools.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#804](https://github.com/rabbitmq/rabbitmq-management/issues/804)
+
+ * `rabbitmqadmin` binding deletion now defaults to the same property key value as binding creation.
+
+ Contributed by Ivan Kaloyanov.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#779](https://github.com/rabbitmq/rabbitmq-management/pull/779)
+
+#### Bug Fixes
+
+ * TLS settings of this plugin now supersede those of RabbitMQ core listeners. In more practical terms,
+ it is now possible to use a separate set of certificate/private key pairs for management UI and
+ the HTTP API.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#800](https://github.com/rabbitmq/rabbitmq-management/issues/800)
+
+ * Queue page failed to render with a `TypeError` when stats were disabled.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#787](https://github.com/rabbitmq/rabbitmq-management/issues/787)
+
+ * `rabbitmqadmin import` mangled non-ASCII characters.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#785](https://github.com/rabbitmq/rabbitmq-management/issues/785)
+
+ * `rabbitmqadmin`'s pretty JSON format was not compatible with Python 3.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#797](https://github.com/rabbitmq/rabbitmq-management/pull/797)
+
+
+### etcd Peer Discovery Plugin
+
+This release effectively replaces the etcd-based peer discovery plugin with a new one which
+[uses etcd v3 API](https://github.com/rabbitmq/rabbitmq-peer-discovery-etcd/issues/22) and a different API client.
+etcd v2 API has been effectively deprecated and requires extra effort to enable with the latest etcd release,
+so this was **treated as a medium severity bug** and ships in a patch release.
+
+There are several new features that come with this new client. Existing already formed clusters won't have to
+change anything in the short term as [peer discovery only runs during cluster formation](https://www.rabbitmq.com/cluster-formation.html#peer-discovery-how-does-it-work).
+All configuration keys of the original plugin are still accepted by the plugin, and most will have
+the expected or semantically similar effect.
+
+When provisioning new clusters, etcd 3.4+ will now be required.
+
+#### Enhancements
+
+ * The plugin now uses an etcd v3 API client and v3 gRPC-based API. This includes using a forward-compatible API version,
+   a natively-provided endpoint for locking instead of reinventing one, and other improvements listed below.
+ It also means that **etcd 3.4+** is now required by the plugin.
+
+ Unless specified otherwise, all other configuration settings of the original plugin are still supported.
+
+ `rabbitmq-autocluster`-era environment variable-based configuration of the plugin has been removed. Use the config
+ file-based one instead.
+
+ `cluster_formation.etcd.scheme` configuration key won't fail validation but will no longer have any effect.
+ `cluster_formation.etcd.key_prefix` will be used but only after the hardcoded `/rabbitmq/discovery`
+ and `/rabbitmq/locks` prefixes. Having a predictable key prefix is more important in this version of the etcd API.
+
+ GitHub issue: [rabbitmq/rabbitmq-peer-discovery-etcd#22](https://github.com/rabbitmq/rabbitmq-peer-discovery-etcd/issues/22)
+
+ * Multiple etcd endpoints now can be specified as connection candidates. Previously used endpoint configuration keys,
+ `cluster_formation.etcd.host` and `cluster_formation.etcd.port`, are still supported and added to the final list of
+ endpoints.
+
+ GitHub issue: [rabbitmq/rabbitmq-peer-discovery-etcd#7](https://github.com/rabbitmq/rabbitmq-peer-discovery-etcd/issues/7)
+
+ * Support for TLS-enabled connections to etcd.
+
+ GitHub issue: [rabbitmq/rabbitmq-peer-discovery-etcd#24](https://github.com/rabbitmq/rabbitmq-peer-discovery-etcd/issues/24)
+
+ * Support for [etcd authentication](https://etcd.io/docs/v3.4.0/op-guide/authentication/).
+
+ GitHub issue: [rabbitmq/rabbitmq-peer-discovery-etcd#6](https://github.com/rabbitmq/rabbitmq-peer-discovery-etcd/issues/6)
+
+
+### Consul Peer Discovery Plugin
+
+#### Bug Fixes
+
+ * In case `CONSUL_PORT` is set to a blank string, the plugin will fall back to the default port value
+ instead of failing with an exception. Note that the use of environment variables over config file
+ keys is **highly discouraged** and will be removed in a future version.
+
+ GitHub issue: [rabbitmq/rabbitmq-peer-discovery-consul#41](https://github.com/rabbitmq/rabbitmq-peer-discovery-consul/issues/41)
+
+
+### LDAP Plugin
+
+#### Enhancements
+
+ * Setting `` to `0` will force a new connection to be opened for every operation. This helps work around
+ obscure issues likely caused by per-connection caching in some LDAP implementations.
+ Users of this setting are expected to also use the [rabbitmq-auth-backend-cache](https://github.com/rabbitmq/rabbitmq-auth-backend-cache) plugin.
+
+ GitHub issue: [rabbitmq/rabbitmq-auth-backend-ldap#120](https://github.com/rabbitmq/rabbitmq-auth-backend-ldap/pull/120)
+
+
+### OAuth 2.0 Plugin
+
+#### Enhancements
+
+ * When token key ID is missing in configuration, a warning will be logged to
+ make troubleshooting easier.
+
+ GitHub issue: [rabbitmq/rabbitmq-auth-backend-oauth2#44](https://github.com/rabbitmq/rabbitmq-auth-backend-oauth2/issues/44)
+
+
+### Trust Store Plugin
+
+#### Bug Fixes
+
+ * Some debug messages were unintentionally logged as errors.
+
+ Contributed by @mgrafl.
+
+ GitHub issue: [rabbitmq/rabbitmq-trust-store#75](https://github.com/rabbitmq/rabbitmq-trust-store/issues/75)
+
+
+## Dependency Upgrades
+
+ * New dependency: [eetcd](https://github.com/zhongwencool/eetcd/)
+ * ra was upgraded [from v1.0.8 to v1.1.2](https://github.com/rabbitmq/ra/compare/v1.0.8...v1.1.2)
+ * `credentials_obfuscation` was upgraded [from v1.0.0 to v2.0.0](https://github.com/rabbitmq/credentials-obfuscation/compare/v1.0.0...v2.0.0)
+ * Prometheus client was upgraded [from 4.5.0 to 4.6.0](https://github.com/deadtrickster/prometheus.erl/compare/v4.5.0...v4.6.0)
+
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker, not the plugins or the client libraries.
+Please download the archive named `rabbitmq-server-3.8.4.tar.xz`.
diff --git a/release-notes/3.8.5.md b/release-notes/3.8.5.md
new file mode 100644
index 0000000000..fc4457dc8b
--- /dev/null
+++ b/release-notes/3.8.5.md
@@ -0,0 +1,315 @@
+## RabbitMQ 3.8.5
+
+RabbitMQ `3.8.5` is a maintenance release.
+It focuses on bug fixes and usability improvements.
+
+This release is [compatible](https://groups.google.com/forum/#!topic/rabbitmq-users/wlPIWz3UYHQ) with [Erlang 23](http://blog.erlang.org/OTP-23-Highlights/).
+
+### Erlang/OTP Compatibility Notes
+
+This release [**requires Erlang/OTP 21.3**](https://www.rabbitmq.com/which-erlang.html) or later.
+`22.3` or `23.0` releases are recommended.
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision latest patch versions of Erlang `22.3.x`.
+
+
+## Upgrade and Compatibility Notes
+
+### Deprecations
+
+This release deprecates `rabbitmq-diagnostics node_health_check` (née `rabbitmqctl node_health_check`).
+See the section on CLI tools changes below to learn more.
+
+
+### Upgrade Doc Guides and Change Log
+
+See [3.8.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.8.0) upgrade and
+compatibility notes first if upgrading from an earlier release.
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades and
+[RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+### Upgrading to Erlang 21.x or Later Versions
+
+When upgrading to this release from `3.7.6` or an older version, extra care has to be taken.
+
+Since CLI tools from RabbitMQ releases older than 3.7.7 will fail on Erlang 21 or later,
+RabbitMQ **must be upgraded at the same time as Erlang**.
+
+Alternatively the node can be upgraded to `3.7.18` first, then Erlang 22.x or 23.x, then RabbitMQ to most recent
+3.8.x release.
+
+### Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+
+## Changes Worth Mentioning
+
+### Core Server
+
+#### Bug Fixes
+
+ * In `3.8.4`, [`RABBITMQ_CONF_ENV_FILE`](https://www.rabbitmq.com/configure.html#customise-environment) value was ignored on Windows.
+
+ GitHub issue: [rabbitmq/rabbitmq-common#392](https://github.com/rabbitmq/rabbitmq-common/issues/392)
+
+ * Message store shutdown now uses a much higher timeout (up to 10 minutes instead of 30 seconds).
+
+   In environments with large message backlogs this means that the chances of queue indices having to be rebuilt
+   after a node restart are now substantially lower.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2324](https://github.com/rabbitmq/rabbitmq-server/issues/2324)
+
+ * On Windows, `RABBITMQ_HOME` locations with brackets are supported again.
+
+ Contributed by @twjs76.
+
+ GitHub issues: [rabbitmq/rabbitmq-server#2371](https://github.com/rabbitmq/rabbitmq-server/pull/2371),
+ [rabbitmq/rabbitmq-server#1756](https://github.com/rabbitmq/rabbitmq-server/issues/1756)
+
+ * `RABBITMQ_LOG_BASE` was not respected in some installations.
+
+ GitHub issue: [rabbitmq/rabbitmq-server-release#131](https://github.com/rabbitmq/rabbitmq-server-release/issues/131)
+
+ * Setting `RABBITMQ_QUORUM_DIR` to a location outside of node's data directory for a new node resulted
+ in feature flags not being implicitly enabled on boot.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2317](https://github.com/rabbitmq/rabbitmq-server/issues/2317)
+
+ * `net_ticktime` and other Erlang distribution settings set in `rabbitmq.conf` were ignored because
+ distribution was started too early on node boot.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2377](https://github.com/rabbitmq/rabbitmq-server/issues/2377)
+
+#### Enhancements
+
+ * Raft data directory now can be configured using `ra.data_dir` configuration key:
+
+ ``` ini
+ ra.data_dir = /path/to/raft/data/directory
+ ```
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2354](https://github.com/rabbitmq/rabbitmq-server/issues/2354)
+
+ * User and permission management operations now produce clear log entries when they succeed,
+ not just when they fail. This helps auditing in environments that have stricter security and audit
+ requirements.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2363](https://github.com/rabbitmq/rabbitmq-server/issues/2363)
+
+
+### CLI Tools
+
+#### Bug Fixes
+
+ * CLI tools now take `NODENAME` into account as well as `RABBITMQ_NODENAME`, just like the server does.
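+
+   For example, either variable can be used to point CLI tools at a specific node; the node name below is a hypothetical placeholder:
+
+   ``` shell
+   NODENAME=rabbit@target-host rabbitmq-diagnostics status
+   ```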
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#421](https://github.com/rabbitmq/rabbitmq-cli/issues/421)
+
+ * `rabbitmq-diagnostics observer` is now compatible with Erlang 23.
+
+ GitHub issue: [zhongwencool/observer_cli#68](https://github.com/zhongwencool/observer_cli/issues/68)
+
+#### Deprecations
+
+ * `rabbitmq-diagnostics node_health_check` (née `rabbitmqctl node_health_check`) is **deprecated** and will be
+ removed in a future version.
+
+ `node_health_check` (as well as its HTTP API counterpart) is an opinionated, intrusive, aspirational attempt
+   at producing One True Health Check™ for RabbitMQ that was envisioned and added to CLI tools around 2015-2016.
+
+ It has proven to be too prone to false positives, can consume a prohibitive
+ amount of CPU and runtime scheduler resources, and tends to be too opinionated
+ for many technical operations teams.
+
+ A much more modular, pick-and-choose approach [has been adopted since](https://www.rabbitmq.com/monitoring.html#health-checks)
+ but this command has never been deprecated. Until now.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#426](https://github.com/rabbitmq/rabbitmq-cli/issues/426)
+
+
+### Management Plugin
+
+#### Bug Fixes
+
+ * [Definition import](https://www.rabbitmq.com/definitions.html) was not performed when `management.load_definitions = /path/to/definitions/file.json`
+ was used, although it was for its newer counterpart, `load_definitions = /path/to/definitions/file.json`.
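+
+   A minimal sketch of the two settings side by side, assuming a hypothetical path:
+
+   ``` ini
+   # older management plugin form (previously not honoured)
+   management.load_definitions = /path/to/definitions/file.json
+
+   # newer core server counterpart, which was unaffected
+   # load_definitions = /path/to/definitions/file.json
+   ```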
+
+ GitHub issue: [rabbitmq/rabbitmq-management#818](https://github.com/rabbitmq/rabbitmq-management/issues/818)
+
+ * jQuery was upgraded to `3.5.1`.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#815](https://github.com/rabbitmq/rabbitmq-management/issues/815)
+
+
+### Consistent Hashing Exchange
+
+#### Bug Fixes
+
+ * Due to [later plugin activation](https://github.com/rabbitmq/rabbitmq-server/issues/2381) in `3.8.4`, durable
+ consistent hashing exchanges were skipped during topology recovery. In practice that meant these exchanges
+   did not route messages published to them.
+
+   The plugin now manually reconstructs the consistent hashing ring for every durable exchange of this type, and
+   routing works as expected.
+
+ Kudos to Rob Gerritsen and Raimond @rai69 Kuipers for helping with discovery of this issue.
+
+ GitHub issue: [rabbitmq/rabbitmq-consistent-hash-exchange#45](https://github.com/rabbitmq/rabbitmq-consistent-hash-exchange/issues/45)
+
+#### Enhancements
+
+ * `rabbitmq-diagnostics consistent_hash_exchange_ring_state <exchange>` is a new CLI command that makes it
+ easy to inspect consistent hashing ring state for an exchange.
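+
+   For example (the exchange name below is a hypothetical placeholder):
+
+   ``` shell
+   rabbitmq-diagnostics consistent_hash_exchange_ring_state my-hashing-exchange
+   ```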
+
+ GitHub issue: [rabbitmq/rabbitmq-consistent-hash-exchange#47](https://github.com/rabbitmq/rabbitmq-consistent-hash-exchange/pull/47)
+
+
+### MQTT Plugin
+
+#### Bug Fixes
+
+ * Client ID tracker operation timeout has been increased from the default of 5 seconds.
+
+ GitHub issue: [rabbitmq/rabbitmq-mqtt#231](https://github.com/rabbitmq/rabbitmq-mqtt/pull/231)
+
+
+### Web STOMP Plugin
+
+#### Enhancements
+
+ * More graceful handling of incorrect or unparseable data.
+
+ GitHub issue: [rabbitmq/rabbitmq-web-stomp#121](https://github.com/rabbitmq/rabbitmq-web-stomp/issues/121)
+
+
+### AWS Peer Discovery Plugin
+
+#### Enhancements
+
+ * AWS peer discovery backend can be specified as `aws`:
+
+ ``` ini
+ cluster_formation.peer_discovery_backend = aws
+ ```
+
+   as well as by the module name or the plugin name (`rabbitmq_peer_discovery_aws`):
+
+ ``` ini
+ cluster_formation.peer_discovery_backend = rabbit_peer_discovery_aws
+ ```
+
+ GitHub issue: [rabbitmq/rabbitmq-peer-discovery-aws#34](https://github.com/rabbitmq/rabbitmq-peer-discovery-aws/issues/34)
+
+
+### Consul Peer Discovery Plugin
+
+#### Enhancements
+
+ * Consul peer discovery backend can be specified as `consul`:
+
+ ``` ini
+ cluster_formation.peer_discovery_backend = consul
+ ```
+
+   as well as by the module name or the plugin name (`rabbitmq_peer_discovery_consul`):
+
+ ``` ini
+ cluster_formation.peer_discovery_backend = rabbit_peer_discovery_consul
+ ```
+
+ GitHub issue: [rabbitmq/rabbitmq-peer-discovery-consul#43](https://github.com/rabbitmq/rabbitmq-peer-discovery-consul/issues/43)
+
+
+### etcd Peer Discovery Plugin
+
+#### Bug Fixes
+
+ * Locking operation against `etcd` v3 API failed with a `case_clause`.
+
+ GitHub issue: [rabbitmq/rabbitmq-peer-discovery-etcd#30](https://github.com/rabbitmq/rabbitmq-peer-discovery-etcd/issues/30)
+
+#### Enhancements
+
+ * etcd peer discovery backend can be specified as `etcd`:
+
+ ``` ini
+ cluster_formation.peer_discovery_backend = etcd
+ ```
+
+   as well as by the module name or the plugin name (`rabbitmq_peer_discovery_etcd`):
+
+ ``` ini
+ cluster_formation.peer_discovery_backend = rabbit_peer_discovery_etcd
+ ```
+
+ GitHub issue: [rabbitmq/rabbitmq-peer-discovery-etcd#26](https://github.com/rabbitmq/rabbitmq-peer-discovery-etcd/issues/26)
+
+
+### Kubernetes Peer Discovery Plugin
+
+#### Enhancements
+
+ * Kubernetes peer discovery backend can be specified as `k8s` or `kubernetes`:
+
+ ``` ini
+ cluster_formation.peer_discovery_backend = k8s
+ ```
+
+   as well as by the module name or the plugin name (`rabbitmq_peer_discovery_k8s`):
+
+ ``` ini
+ cluster_formation.peer_discovery_backend = rabbit_peer_discovery_k8s
+ ```
+
+ GitHub issue: [rabbitmq/rabbitmq-peer-discovery-k8s#65](https://github.com/rabbitmq/rabbitmq-peer-discovery-k8s/issues/65)
+
+
+### Web STOMP Plugin
+
+#### Enhancements
+
+ * [Basic HTTP auth setting](https://www.rabbitmq.com/web-stomp.html) is now exposed to new style configuration:
+
+ ``` ini
+ web_stomp.use_http_auth = true
+ ```
+
+ GitHub issue: [rabbitmq/rabbitmq-web-stomp#119](https://github.com/rabbitmq/rabbitmq-web-stomp/issues/119)
+
+
+## Debian Package
+
+### Enhancements
+
+ * Invocations of `rabbitmqctl wait` in service management scripts now use a timeout.
+
+ Contributed by Nicolas @nicolasbock Bock.
+
+ GitHub issue: [rabbitmq/rabbitmq-server-release#129](https://github.com/rabbitmq/rabbitmq-server-release/pull/129)
+
+
+## RPM Package
+
+### Enhancements
+
+ * Invocations of `rabbitmqctl wait` in service management scripts now use a timeout.
+
+ Contributed by Nicolas @nicolasbock Bock.
+
+ GitHub issue: [rabbitmq/rabbitmq-server-release#129](https://github.com/rabbitmq/rabbitmq-server-release/pull/129)
+
+
+## Dependency Upgrades
+
+ * `recon` was upgraded [from 2.5.0 to 2.5.1](https://github.com/ferd/recon/compare/2.5.0...2.5.1)
+ * `observer_cli` was upgraded [from 1.5.3 to 1.5.4](https://github.com/zhongwencool/observer_cli/compare/1.5.3...1.5.4)
+
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker, not the plugins or the client libraries.
+Please download the archive named `rabbitmq-server-3.8.5.tar.xz`.
diff --git a/release-notes/3.8.6.md b/release-notes/3.8.6.md
new file mode 100644
index 0000000000..97cea05d40
--- /dev/null
+++ b/release-notes/3.8.6.md
@@ -0,0 +1,306 @@
+## RabbitMQ 3.8.6
+
+RabbitMQ `3.8.6` is a maintenance release.
+It focuses on bug fixes and usability improvements.
+
+### RabbitMQ Adopts the Mozilla Public License 2.0
+
+Starting with this release, core RabbitMQ server and all [tier 1 plugins](https://www.rabbitmq.com/plugins.html#plugin-tiers)
+are [relicensed](https://github.com/rabbitmq/rabbitmq-server/issues/2372) under the [Mozilla Public License 2.0](https://www.mozilla.org/en-US/MPL/2.0/)
+(previously used license: the Mozilla Public License 1.1).
+
+The permissiveness of the MPL 2.0 is largely the same as of the MPL 1.1.
+See the [MPL 2.0 FAQ](https://www.mozilla.org/en-US/MPL/2.0/FAQ/) and
+[MPL 2.0 Revision and Changes FAQ](https://www.mozilla.org/en-US/MPL/2.0/Revision-FAQ/) to learn more.
+
+### Erlang 23 Compatibility
+
+This release is [compatible](https://groups.google.com/forum/#!topic/rabbitmq-users/wlPIWz3UYHQ) with [Erlang 23](http://blog.erlang.org/OTP-23-Highlights/).
+
+### Erlang/OTP Compatibility Notes
+
+This release [**requires Erlang/OTP 21.3**](https://www.rabbitmq.com/which-erlang.html) or later.
+`22.3` or `23.0` releases are recommended.
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision latest patch versions of Erlang `22.3.x`.
+
+
+## Upgrade and Compatibility Notes
+
+### Deprecations
+
+This release removes HiPE precompilation support. HiPE has been deprecated and
+mostly unmaintained since Erlang 22, and will be replaced with a JIT [in Erlang 24](http://erlang.org/pipermail/erlang-questions/2020-June/099645.html)
+in 2021.
+
+``` shell
+rabbitmqctl hipe_compile
+```
+
+and
+
+``` ini
+hipe_compile = true
+```
+
+are both retained for backwards compatibility but they won't have any effect.
+
+
+### Upgrade Doc Guides and Change Log
+
+See [3.8.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.8.0) upgrade and
+compatibility notes first if upgrading from an earlier release.
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades and
+[RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+### Upgrading to Erlang 21.x or Later Versions
+
+When upgrading to this release from `3.7.6` or an older version, extra care has to be taken.
+
+Since CLI tools from RabbitMQ releases older than 3.7.7 will fail on Erlang 21 or later,
+RabbitMQ **must be upgraded at the same time as Erlang**.
+
+Alternatively the node can be upgraded to `3.7.18` first, then Erlang 22.x or 23.x, then RabbitMQ to most recent
+3.8.x release.
+
+### Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+
+## Changes Worth Mentioning
+
+### Core Server
+
+#### License Change
+
+RabbitMQ has been [relicensed](https://github.com/rabbitmq/rabbitmq-server/issues/2372) under the [Mozilla Public License 2.0](https://www.mozilla.org/en-US/MPL/2.0/)
+(previously used license: the Mozilla Public License 1.1).
+
+The permissiveness of the MPL 2.0 is largely the same as of the MPL 1.1.
+See the [MPL 2.0 FAQ](https://www.mozilla.org/en-US/MPL/2.0/FAQ/) and
+[MPL 2.0 Revision and Changes FAQ](https://www.mozilla.org/en-US/MPL/2.0/Revision-FAQ/) to learn more.
+
+#### Deprecations
+
+ * Removed HiPE pre-compilation support.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2392](https://github.com/rabbitmq/rabbitmq-server/pull/2392)
+
+#### Bug Fixes
+
+ * Definition import on node boot (via the **built-in `load_definitions` configuration setting**) is now performed
+ after plugins are enabled.
+
+ This resolves a long-standing chicken-and-egg dependency problem that prevented definitions
+ that depend on plugins being functional from being successfully imported during node boot.
+
+ Note that definition import via the management plugin (`management.load_definitions`) is still
+ performed during management plugin activation time and therefore **can not offer any guarantees**
+ as some plugins may be enabled after the management one.
+
+ Some affected features include
+
+ * [federation upstreams](https://www.rabbitmq.com/federation.html#getting-started)
+ * [dynamic shovels](https://www.rabbitmq.com/shovel-dynamic.html)
+ * exchanges of types provided by plugins such as [rabbitmq-consistent-hash-exchange](https://github.com/rabbitmq/rabbitmq-consistent-hash-exchange)
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2384](https://github.com/rabbitmq/rabbitmq-server/issues/2384)
+
+ * Similarly, client connection listeners (both TCP and TLS-enabled) are now started after
+ plugin activation. This makes certain configurations that use the trust store plugin
+ behave as expected.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2405](https://github.com/rabbitmq/rabbitmq-server/issues/2405)
+
+ * Environment variables prefixed with `RABBITMQ_` were ignored when used in the `rabbitmq-env.conf` file,
+ while their prefixless counterparts worked as expected.
+
+ GitHub issue: [rabbitmq/rabbitmq-common#401](https://github.com/rabbitmq/rabbitmq-common/issues/401)
+
+#### Enhancements
+
+ * `rabbitmq.conf` config parser now accepts lines that only consist of whitespace.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2393](https://github.com/rabbitmq/rabbitmq-server/pull/2393)
+
+
+### CLI Tools
+
+#### Deprecations
+
+ * `rabbitmqctl hipe_compile` is now a no-op. HiPE support has been deprecated since Erlang 22 and
+ [will be removed in Erlang 24](http://erlang.org/pipermail/erlang-questions/2020-June/099645.html).
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#433](https://github.com/rabbitmq/rabbitmq-cli/pull/433)
+
+ * The use of the `` environment variable to configure the [shared secret](https://www.rabbitmq.com/cli.html#erlang-cookie) is now deprecated.
+ Use the [local file](https://www.rabbitmq.com/cli.html#cookie-file-locations) option instead or, if the value absolutely must be provided on the command line,
+ the `--erlang-cookie [value]` switch.
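+
+   For example, a sketch of the command line switch mentioned above (the cookie value is a placeholder):
+
+   ``` shell
+   rabbitmqctl status --erlang-cookie "cookie-value"
+   ```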
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#443](https://github.com/rabbitmq/rabbitmq-cli/issues/443)
+
+#### Bug Fixes
+
+ * `rabbitmqctl export_definitions` incorrectly serialised runtime parameter values.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#435](https://github.com/rabbitmq/rabbitmq-cli/issues/435)
+
+ * `CTL_ERL_ARGS` environment variable was unintentionally ignored starting with version `3.8.4`.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2394](https://github.com/rabbitmq/rabbitmq-server/issues/2394)
+
+ * `[command] --help` and `help [command]` output had minor unintentional inconsistencies.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#437](https://github.com/rabbitmq/rabbitmq-cli/pull/437)
+
+#### Enhancements
+
+ * `rabbitmq-queues check_if_node_is_quorum_critical` and `rabbitmq-queues check_if_node_is_mirror_sync_critical` checks
+ have been adjusted. They now special case single node clusters as the check does not make much sense
+ in those environments.
+
+ `rabbitmq-queues check_if_node_is_mirror_sync_critical` now also excludes exclusive queues from
+ consideration as they, even if matched by a policy that enables classic queue mirroring,
+ would not actually be mirrored.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#432](https://github.com/rabbitmq/rabbitmq-cli/issues/432)
+
+ * `rabbitmq-diagnostics status` now includes the total amount of memory used by the node in its
+   regular output. It was previously only available when the `--formatter=json` switch was used.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#449](https://github.com/rabbitmq/rabbitmq-cli/pull/449)
+
+ * `rabbitmq-diagnostics erlang_cookie_sources` is a new command that helps with troubleshooting
+ CLI tool authentication to nodes by listing relevant environment information about the
+ [shared secret](https://www.rabbitmq.com/cli.html#erlang-cookie). The actual secret
+ **is not displayed** for obvious security reasons.
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#446](https://github.com/rabbitmq/rabbitmq-cli/pull/446)
+
+ * `rabbitmqctl eval_file` is a new command that evaluates Erlang code expressions from a local file
+   on the target node:
+
+ ``` shell
+ rabbitmqctl eval_file /path/to/code_snippet.escript
+ ```
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#438](https://github.com/rabbitmq/rabbitmq-cli/issues/438)
+
+ * `rabbitmqctl eval` now accepts code snippets from standard input:
+
+ ``` shell
+ rabbitmqctl eval <<EOF
+ io:format("~p~n", [
+ rabbit:product_info()
+ ]).
+   EOF
+   ```
+
+ ``` shell
+ rabbitmqctl eval < code_snippet.escript
+ ```
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#438](https://github.com/rabbitmq/rabbitmq-cli/issues/438)
+
+ * `rabbitmq-diagnostics resolve_hostname` is a new command that resolves a hostname either locally
+ or on the node, and returns the resolved IPv4 or IPv6 addresses.
+
+ The command can help quickly detect hostname resolution irregularities on a node, including
+ the effects of [Erlang inetrc file](https://erlang.org/doc/apps/erts/inet_cfg.html) settings.
+ Hostname resolution issues can cause CLI tools, cluster formation and peer reconnection to
+ not work as expected.
+
+ Some examples:
+
+ ``` shell
+ rabbitmq-diagnostics resolve_hostname "google.com" --address-family ipv4
+
+ rabbitmq-diagnostics resolve_hostname "google.com" --address-family ipv6 --offline
+ ```
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#431](https://github.com/rabbitmq/rabbitmq-cli/pull/431)
+
+ * All CLI tools now provide an `autocomplete [prefix]` command that lists command completion
+ options. It is intended to be used to implement autocompletion of command names in shells.
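+
+   For example, to list completions for command names that start with `list` (a prefix chosen purely for illustration):
+
+   ``` shell
+   rabbitmqctl autocomplete list
+   ```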
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#439](https://github.com/rabbitmq/rabbitmq-cli/issues/439)
+
+
+### Management Plugin
+
+#### Bug Fixes
+
+ * The plugin could send the `authorization` header as `null` in some cases before a successful login.
+
+ Contributed by @furkhat.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#833](https://github.com/rabbitmq/rabbitmq-management/pull/833)
+
+#### Enhancements
+
+ * The UI now uses more human-friendly information units when displaying client network traffic rate values.
+
+   Contributed by @seadog007.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#827](https://github.com/rabbitmq/rabbitmq-management/pull/827)
+
+
+### Federation Plugin
+
+#### Bug Fixes
+
+ * In environments where `rabbitmq_federation.pgroup_name_cluster_id` is not set or set to `undefined`,
+   links will treat such values as `false` (the default) instead of failing with an exception.
+
+ GitHub issue: [rabbitmq/rabbitmq-federation#109](https://github.com/rabbitmq/rabbitmq-federation/pull/109)
+
+
+### MQTT Plugin
+
+#### Bug Fixes
+
+ * The plugin could fail to accept new client connections with an exception in some cases.
+
+ GitHub issue: [rabbitmq/rabbitmq-mqtt#235](https://github.com/rabbitmq/rabbitmq-mqtt/pull/235)
+
+
+### Web STOMP Plugin
+
+#### Enhancements
+
+ * Certain Web Socket errors are now handled more gracefully.
+
+ GitHub issue: [rabbitmq/rabbitmq-web-stomp#123](https://github.com/rabbitmq/rabbitmq-web-stomp/pull/123)
+
+
+### Web MQTT Plugin
+
+#### Enhancements
+
+ * Certain Web Socket errors are now handled more gracefully.
+
+ GitHub issue: [rabbitmq/rabbitmq-web-mqtt#64](https://github.com/rabbitmq/rabbitmq-web-mqtt/issues/64)
+
+
+### AuthN/AuthZ Backend HTTP Plugin
+
+#### Bug Fixes
+
+ * Virtual host access checks failed in certain complex configurations.
+
+ GitHub issue: [rabbitmq/rabbitmq-auth-backend-http#100](https://github.com/rabbitmq/rabbitmq-auth-backend-http/issues/100)
+
+
+## Dependency Upgrades
+
+ * `cuttlefish` was upgraded [from 2.2.0 to 2.3.0](https://github.com/Kyorai/cuttlefish/compare/v2.2.0...v2.3.0)
+ * `credentials_obfuscation` was upgraded [from 2.0.0 to 2.1.1](https://github.com/rabbitmq/credentials-obfuscation/compare/v2.0.0...v2.1.1)
+
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker, not the plugins or the client libraries.
+Please download the archive named `rabbitmq-server-3.8.6.tar.xz`.
diff --git a/release-notes/3.8.7.md b/release-notes/3.8.7.md
new file mode 100644
index 0000000000..918ce95ea1
--- /dev/null
+++ b/release-notes/3.8.7.md
@@ -0,0 +1,113 @@
+## RabbitMQ 3.8.7
+
+RabbitMQ `3.8.7` is a maintenance release that patches
+a security vulnerability.
+
+RabbitMQ Core team would like to thank Ofir Hamam and Tomer Hadad at Ernst & Young's Hacktics Advanced Security Center
+for researching and responsibly disclosing the vulnerability addressed in this release.
+
+### Erlang/OTP Compatibility Notes
+
+This release [**requires Erlang/OTP 21.3**](https://www.rabbitmq.com/which-erlang.html) or later.
+`22.3` or `23.0` releases are recommended.
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision latest patch versions of Erlang `22.3.x`.
+
+
+### Upgrade Doc Guides and Change Log
+
+See [3.8.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.8.0) upgrade and
+compatibility notes first if upgrading from an earlier release.
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades and
+[RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+### Upgrading to Erlang 21.x or Later Versions
+
+When upgrading to this release from `3.7.6` or an older version, extra care has to be taken.
+
+Since CLI tools from RabbitMQ releases older than 3.7.7 will fail on Erlang 21 or later,
+RabbitMQ **must be upgraded at the same time as Erlang**.
+
+Alternatively the node can be upgraded to `3.7.18` first, then Erlang 22.x or 23.x, then RabbitMQ to most recent
+3.8.x release.
+
+### Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Changes Worth Mentioning
+
+### Core Server
+
+#### Bug Fixes
+
+ * Addressed a Windows-specific binary planting security vulnerability [CVE-2020-5419](https://tanzu.vmware.com/security/cve-2020-5419) that allowed for arbitrary code execution.
+ The vulnerability requires the attacker to have local access and elevated privileges,
+ and cannot be executed remotely.
+
+ [CVSS score](https://www.first.org/cvss/calculator/3.0#CVSS:3.0/AV:L/AC:L/PR:H/UI:N/S:U/C:H/I:H/A:H): `6.7` (medium severity).
+
+ This vulnerability was researched and [responsibly disclosed](https://www.rabbitmq.com/contact.html#security) by
+ Ofir Hamam and Tomer Hadad at Ernst & Young's Hacktics Advanced Security Center.
+
+ * In a mixed version cluster, virtual host limits were incorrectly reported for yet-to-be-upgraded nodes.
+
+ Contributed by @mnxumalo.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2430](https://github.com/rabbitmq/rabbitmq-server/pull/2430)
+
+
+### CLI Tools
+
+#### Bug Fixes
+
+ * Definition export using `rabbitmqctl export_definitions` exported [optional queue arguments](https://www.rabbitmq.com/queues.html#optional-arguments) as blank.
+ Export performed via the HTTP API was not affected by this problem.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2427](https://github.com/rabbitmq/rabbitmq-server/issues/2427)
+
+ * Invoking `rabbitmqctl` (or other tools) without any arguments produced help output that was inconsistent
+   with that of `rabbitmqctl help` in line spacing.
+
+
+### Federation Plugin
+
+#### Bug Fixes
+
+ * Links in some environments upgraded from earlier `3.8.x` versions could run into a data coercion exception
+ when connection credentials were unencrypted.
+
+ GitHub issue: [rabbitmq/rabbitmq-federation#112](https://github.com/rabbitmq/rabbitmq-federation/pull/112)
+
+
+### Shovel Plugin
+
+#### Bug Fixes
+
+ * Shovels where the source is an AMQP 1.0 endpoint now gracefully handle link detachment
+   when the remote end sets the `closed` attribute to `false`.
+
+ Contributed by @tstorck.
+
+ GitHub issue: [rabbitmq/rabbitmq-amqp1.0-client#56](https://github.com/rabbitmq/rabbitmq-amqp1.0-client/pull/56)
+
+ * Removed some debug logging that was unintentionally polluting standard output even when
+ debug logging was not enabled.
+
+ Contributed by @sircinek.
+
+ GitHub issue: [rabbitmq/rabbitmq-amqp1.0-client#54](https://github.com/rabbitmq/rabbitmq-amqp1.0-client/pull/54)
+
+
+## Dependency Upgrades
+
+ * `credentials_obfuscation` was upgraded [from 2.1.1 to 2.2.0](https://github.com/rabbitmq/credentials-obfuscation/compare/v2.1.1...v2.2.0)
+
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker, not the plugins or the client libraries.
+Please download the archive named `rabbitmq-server-3.8.7.tar.xz`.
diff --git a/release-notes/3.8.8.md b/release-notes/3.8.8.md
new file mode 100644
index 0000000000..3267c3b79f
--- /dev/null
+++ b/release-notes/3.8.8.md
@@ -0,0 +1,153 @@
+## RabbitMQ 3.8.8
+
+RabbitMQ `3.8.8` is a maintenance release.
+It focuses on bug fixes and usability improvements.
+
+This release introduces a [new node operation mode](https://github.com/rabbitmq/rabbitmq-server/issues/2321), called *maintenance mode*.
+This feature is useful when performing rolling upgrades and is covered
+in more detail below.
+
+This is the [last RabbitMQ release to support Erlang 21.3](https://groups.google.com/forum/#!topic/rabbitmq-users/v3K5nZNsfwM).
+
+### Erlang 23 Compatibility
+
+This release is [compatible](https://groups.google.com/forum/#!topic/rabbitmq-users/wlPIWz3UYHQ) with [Erlang 23](http://blog.erlang.org/OTP-23-Highlights/).
+
+### Erlang/OTP Compatibility Notes
+
+This release [**requires Erlang/OTP 21.3**](https://www.rabbitmq.com/which-erlang.html) or later.
+`22.3` or `23.0` releases are recommended.
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision latest patch versions of Erlang `22.3.x`.
+
+
+## Upgrade and Compatibility Notes
+
+### Upgrade Doc Guides and Change Log
+
+See [3.8.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.8.0) upgrade and
+compatibility notes first if upgrading from an earlier release.
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades and
+[RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+### Upgrading to Erlang 22.x or Later Versions
+
+When upgrading to this release from `3.7.15` or an older version, extra care has to be taken.
+
+Because older RabbitMQ CLI tools can be incompatible with Erlang 22+ releases,
+RabbitMQ **must be upgraded at the same time as Erlang**.
+
+Alternatively the node can be upgraded to `3.7.18` on Erlang 21.3 first,
+then Erlang 22.x or 23.x, then RabbitMQ to most recent 3.8.x release.
+
+### Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Changes Worth Mentioning
+
+### Core Server
+
+#### Enhancements
+
+ * Maintenance mode. This is a [new operation mode for RabbitMQ nodes](https://github.com/rabbitmq/rabbitmq-server/issues/2321). The mode is explicitly turned
+   on and off by the operator using a set of new CLI commands (see below). For mixed-version cluster
+ compatibility, this feature must be [enabled using a feature flag](https://www.rabbitmq.com/feature-flags.html).
+
+   When a node is put into maintenance mode in preparation for a shutdown, it will not be available for
+   serving client traffic and will try to transfer as many of its responsibilities as is practically possible
+   and safe.
+
+ Currently this involves the following steps:
+
+ * Suspend all client connection listeners (no new client connections will be accepted)
+ * Close all existing client connections: applications are expected to reconnect to other nodes and recover
+ * Transfer primary replicas of all classic mirrored queues hosted on the target node
+ * Transfer primary replicas of all quorum queues hosted on the target node, and prevent them from participating
+ in the subsequently triggered Raft elections
+ * Mark the node as down for maintenance
+ * At this point, a node shutdown will be least disruptive as the node has already transferred most of its
+ responsibilities
+
+ A node in maintenance mode will not be considered for new primary queue replica placement,
+ regardless of queue type and the [queue leader locator policy](https://www.rabbitmq.com/ha.html#leader-migration-data-locality) used.
+
+ A node in maintenance mode is expected to be shut down, upgraded or reconfigured, and restarted in a short
+ period of time. Nodes are not expected to be running in this mode for long periods of time.
+
+   A node in maintenance mode can be revived, that is, brought back into its regular operational state.
+   When a node is revived or restarted (e.g. after an upgrade), it will again accept client connections
+ and be considered for primary queue replica placements. It will not recover previous client connections
+ as RabbitMQ never initiates connections to clients, but clients will be able to reconnect to it.
+
+ This feature is expected to evolve based on the feedback from the community, the field and the
+ RabbitMQ core team at VMware. For example, it can be adjusted based on the learnings when
+ adding upgrades support to the [RabbitMQ Cluster Operator for Kubernetes](https://www.rabbitmq.com/kubernetes/operator/operator-overview.html).
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2321](https://github.com/rabbitmq/rabbitmq-server/issues/2321)
+
+
+### CLI
+
+#### Enhancements
+
+ * `rabbitmq-upgrade drain` is a new command that puts a node in maintenance mode (see above).
+ `rabbitmq-upgrade revive` is its counterpart that "revives" a node by putting it into the regular
+ operating mode.
+
+ `rabbitmqctl suspend_listeners` is a new command that suspends all [client connection listeners](https://www.rabbitmq.com/networking.html#ports)
+ (ports) on the node. This means that existing connections won't be affected but no new connections
+ will be accepted. This can be useful during upgrades, in particular using the
+ [Blue/Green deployment strategy](https://www.rabbitmq.com/blue-green-upgrade.html).
+
+ `rabbitmqctl resume_listeners` resumes all client listeners so that they accept new client connections
+ again.
+
+ All these commands will have an effect only if the `maintenance_mode_status` flag is enabled.
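+
+   A sketch of a typical upgrade sequence using these commands, assuming the feature flag still has to be enabled:
+
+   ``` shell
+   # one-time step per cluster
+   rabbitmqctl enable_feature_flag maintenance_mode_status
+
+   # before shutting the node down for an upgrade
+   rabbitmq-upgrade drain
+
+   # ... stop, upgrade and restart the node ...
+
+   # bring the node back into regular operation
+   rabbitmq-upgrade revive
+   ```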
+
+ GitHub issue: [rabbitmq/rabbitmq-cli#419](https://github.com/rabbitmq/rabbitmq-cli/pull/419)
+
+
+### MQTT Plugin
+
+#### Bug Fixes
+
+ * Abrupt client connection termination could result in an Erlang process leak. This resource leak
+ affects RabbitMQ versions between `3.8.5` and `3.8.7`, inclusive.
+
+ GitHub issue: [rabbitmq/rabbitmq-mqtt#239](https://github.com/rabbitmq/rabbitmq-mqtt/pull/239)
+
+
+### Management Plugin
+
+#### Bug Fixes
+
+ * A node that had a lot of on-disk data and constrained resources could spend a very long time
+ activating the plugin while queue primary replicas were busy recovering the data.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2437](https://github.com/rabbitmq/rabbitmq-server/issues/2437)
+
+
+### OAuth 2 AuthN/AuthZ Plugin
+
+#### Enhancements
+
+ * Simplified scope merging code specific to Keycloak.
+
+ Contributed by Simon @SimonAlling Alling.
+
+ GitHub issue: [rabbitmq/rabbitmq-auth-backend-oauth2#52](https://github.com/rabbitmq/rabbitmq-auth-backend-oauth2/pull/52)
+
+
+## Dependency Upgrades
+
+ * `cuttlefish` was upgraded [from 2.3.0 to 2.4.1](https://github.com/Kyorai/cuttlefish/compare/v2.3.0...v2.4.1)
+
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker, not the plugins or the client libraries.
+Please download the archive named `rabbitmq-server-3.8.8.tar.xz`.
diff --git a/release-notes/3.8.9.md b/release-notes/3.8.9.md
new file mode 100644
index 0000000000..11fcad0394
--- /dev/null
+++ b/release-notes/3.8.9.md
@@ -0,0 +1,135 @@
+## RabbitMQ 3.8.9
+
+RabbitMQ `3.8.9` is a maintenance release.
+It focuses on bug fixes and usability improvements.
+
+This release [**no longer supports** Erlang 21.3](https://groups.google.com/forum/#!topic/rabbitmq-users/v3K5nZNsfwM).
+
+### Erlang 23 Compatibility
+
+This release is [compatible](https://groups.google.com/forum/#!topic/rabbitmq-users/wlPIWz3UYHQ) with [Erlang 23](http://blog.erlang.org/OTP-23-Highlights/).
+
+### Erlang/OTP Compatibility Notes
+
+This release [**requires Erlang/OTP 22.0**](https://www.rabbitmq.com/which-erlang.html) or later.
+`22.3` or `23.0` releases are recommended.
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains
+what package repositories and tools can be used to provision latest patch versions of Erlang `22.3.x`.
+
+
+## Upgrade and Compatibility Notes
+
+### Upgrade Doc Guides and Change Log
+
+See [3.8.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.8.0) upgrade and
+compatibility notes first if upgrading from an earlier release series (e.g. `3.7.x`).
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for general documentation on upgrades and
+[RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+### Upgrading to Erlang 22.x or Later Versions
+
+When upgrading to this release from `3.7.15` or an older version, extra care has to be taken.
+
+Because older RabbitMQ CLI tools can be incompatible with Erlang 22+ releases,
+RabbitMQ **must be upgraded at the same time as Erlang**.
+
+Alternatively the node can be upgraded to `3.7.18` on Erlang 21.3 first,
+then Erlang 22.x or 23.x, then RabbitMQ to most recent 3.8.x release.
+
+### Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+## Changes Worth Mentioning
+
+### Core Server
+
+#### Bug Fixes
+
+ * Starting with `3.8.0`, an unintentionally introduced assertion could prevent classic mirrored queue
+ mirrors from starting successfully in case the primary replica changed concurrently with their startup.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2449](https://github.com/rabbitmq/rabbitmq-server/pull/2449)
+
+#### Enhancements
+
+ * Raft implementation's [failure detector](https://github.com/rabbitmq/aten) default polling interval has been increased from 1s to 5s.
+ The previously used default results in too frequent leader elections in networks with high packet loss
+ (say, double digit percent).
+
+ This has no correctness or safety ramifications, however, automated tests that assert on Raft properties
+ or expect a very fast leader re-election may have to set it back so that peer failures are detected quicker.
+
+ GitHub issue: [rabbitmq/rabbitmq-server#2450](https://github.com/rabbitmq/rabbitmq-server/pull/2450)
+
+
+### Management Plugin
+
+#### Bug Fixes
+
+ * The now deprecated intrusive health check endpoint, `GET /api/healthchecks/node`, reported failures in the response
+   but always set the status to `200 OK`. It now responds with a `500` status in case of a failure.
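+
+   A quick way to observe the status code change, assuming default credentials and the default management listener port:
+
+   ``` shell
+   curl -s -o /dev/null -w "%{http_code}\n" -u guest:guest http://localhost:15672/api/healthchecks/node
+   ```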
+
+ [RabbitMQ Monitoring guide](https://www.rabbitmq.com/monitoring.html#health-checks) explains why
+ this health check is no longer recommended and what the alternatives are.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#840](https://github.com/rabbitmq/rabbitmq-management/issues/840)
+
+#### Enhancements
+
+ * When a client double-encodes a request body as JSON, the plugin now tries to detect that and produces a more
+   helpful `400` response instead of a `500`.
+
+ GitHub issue: [rabbitmq/rabbitmq-management#839](https://github.com/rabbitmq/rabbitmq-management/issues/839)
+
+
+### Federation Plugin
+
+#### Bug Fixes
+
+ * When a classic mirrored queue with a replica on every cluster node (this is [highly discouraged](https://www.rabbitmq.com/ha.html#replication-factor)) was federated,
+ multiple links could be started by the plugin, potentially making message transfers concurrent
+ and affecting ordering.
+
+ GitHub issue: [rabbitmq/rabbitmq-federation#114](https://github.com/rabbitmq/rabbitmq-federation/issues/114)
+
+#### Enhancements
+
+ * It is now possible to federate exchanges in different virtual hosts in the same cluster for more than one hop.
+ For example, federate messages from an exchange in virtual host B to C, while B itself is federated from
+ virtual host A. Previously such messages would have been filtered by the message cycle detection algorithm
+ which assumed that exchanges are only federated across clusters.
+
+ GitHub issue: [rabbitmq/rabbitmq-federation#116](https://github.com/rabbitmq/rabbitmq-federation/issues/116)
+
+
+### Kubernetes Peer Discovery Plugin
+
+#### Bug Fixes
+
+ * Kubernetes API events registered by this plugin mixed up reason and message fields.
+
+ Contributed by Gabriele Santomaggio.
+
+ GitHub issue: [rabbitmq/rabbitmq-peer-discovery-k8s#70](https://github.com/rabbitmq/rabbitmq-peer-discovery-k8s/pull/70)
+
+
+### OAuth 2 AuthN/AuthZ Plugin
+
+#### Enhancements
+
+ * Improved compatibility with Azure AD. This plugin can now be configured to fetch scopes from a non-standard
+ field in the decoded token value (other than the standard `scopes` field).
+
+ Contributed by @Robert-Gustafsson.
+
+ GitHub issue: [rabbitmq/rabbitmq-auth-backend-oauth2#53](https://github.com/rabbitmq/rabbitmq-auth-backend-oauth2/pull/53)
+
+
+## Source code archives
+
+**Warning**: The source code archive provided by GitHub only contains the source of the broker, not the plugins or the client libraries.
+Please download the archive named `rabbitmq-server-3.8.9.tar.xz`.
diff --git a/release-notes/3.9.0.md b/release-notes/3.9.0.md
new file mode 100644
index 0000000000..1c80f00ef2
--- /dev/null
+++ b/release-notes/3.9.0.md
@@ -0,0 +1,242 @@
+The most important changes are [Streams](https://blog.rabbitmq.com/posts/2021/07/rabbitmq-streams-overview/), support for [Erlang 24](https://blog.rabbitmq.com/posts/2021/03/erlang-24-support-roadmap/), and improved integration with our [RabbitMQ Kubernetes Operator](https://blog.rabbitmq.com/posts/2020/11/rabbitmq-kubernetes-operator-reaches-1-0/). Other changes worth mentioning are JSON logging, per-protocol and queue type message rates as Prometheus metrics and faster initial cluster formation on Kubernetes.
+
+Find blog posts that expand on the previously mentioned topics: https://blog.rabbitmq.com/tags/v3.9.x/
+
+
+
+## Highlights
+
+
+### Streams
+
+[Streams](http://rabbitmq.com/streams.html) are a new persistent and replicated data structure ("queue type") in RabbitMQ which models an append-only log with non-destructive consumer semantics.
+
+They can be used as a regular AMQP 0.9.1 queue or through a new [binary protocol plugin](http://rabbitmq.com/stream.html) and associated client(s).
+
+Streams enable messaging patterns that were not possible or practical before, for example, repeatable non-destructive consumption and mass fanouts to a very large number of consumers.
+
+
+### Erlang 24 Support
+
+[Erlang 24](https://blog.rabbitmq.com/posts/2021/03/erlang-24-support-roadmap/) provides 20%-50% higher throughput for many workloads tested by the RabbitMQ core team and community members. Supporting the latest runtime version also allowed RabbitMQ to replace a few external dependencies with libraries now provided by the runtime.
+
+Erlang 24 is now used by default in the [community RabbitMQ Docker image](https://github.com/docker-library/rabbitmq).
+
+
+### Kubernetes
+
+[RabbitMQ Cluster Operator](https://www.rabbitmq.com/kubernetes/operator/operator-overview.html) automates provisioning, management, and operations of RabbitMQ clusters running on Kubernetes. It is [open source](https://github.com/rabbitmq/cluster-operator) and is developed by the RabbitMQ Core team at VMware, Inc. This Operator is not specific to 3.9 and can be used with latest 3.8.x release series.
+
+[Messaging Topology Operator](https://www.rabbitmq.com/kubernetes/operator/operator-overview.html#topology-operator) makes it possible to define RabbitMQ resources (virtual hosts, users, permissions, topologies, policies, etc) as Kubernetes objects. This Operator is also [open source](https://github.com/rabbitmq/messaging-topology-operator) and can be used with latest 3.8.x release series.
+
+
+### Logging in JSON
+
+[Switch from Lager to the new Erlang Logger API for logging](https://github.com/rabbitmq/rabbitmq-server/pull/2861) introduces an option for structured logging in JSON. This is relevant for the Prometheus-based alerting feature that we have been investing in this year: [Notify me when RabbitMQ has a problem](https://blog.rabbitmq.com/posts/2021/05/alerting/).
+
+
+
+## Obtaining Packages
+
+RabbitMQ releases are distributed via [GitHub](https://github.com/rabbitmq/rabbitmq-server/releases), [Cloudsmith](https://cloudsmith.io/~rabbitmq/repos/), and [PackageCloud](https://packagecloud.io/rabbitmq).
+
+
+### Erlang/OTP Compatibility Notes
+
+This release [requires Erlang 23.2](https://www.rabbitmq.com/which-erlang.html) or later. Erlang 24 is recommended as it [offers 20%-50% throughput improvements](https://blog.rabbitmq.com/posts/2021/03/erlang-24-support-roadmap/) for many workloads.
+
+[Provisioning Latest Erlang Releases](https://www.rabbitmq.com/which-erlang.html#erlang-repositories) explains what package repositories and tools can be used to provision latest patch versions of Erlang 23.3.x and 24.x.
+
+
+
+## Upgrading to 3.9
+
+See the [Upgrading guide](https://www.rabbitmq.com/upgrade.html) for documentation on upgrades and [RabbitMQ change log](https://www.rabbitmq.com/changelog.html) for release notes of other releases.
+
+RabbitMQ 3.9.0 nodes can run alongside `3.8.x` nodes. However, both `3.8.16+` and `3.9.x` require Erlang 23.2 or later (and support 24). `3.9.x`-specific features can only be made available when all nodes in the cluster upgrade to 3.9.0 or any other patch release in the series.
+
+While operating in mixed version mode, some aspects of the system may not behave as expected. The list of known behavior changes is covered below. Once all nodes are upgraded to 3.9.0, these irregularities will go away.
+
+Mixed version clusters are a mechanism that allows rolling upgrade and are not meant to be run for extended periods of time (no more than a few hours).
+
+
+### Quorum Queue Declaration
+
+In a mixed version cluster, declaration of new quorum queues will fail if executed on a node in the version minority. For example, in a five node cluster with two nodes running 3.8, the three 3.9 nodes are in the version majority and the remaining are in the version minority.
+
+Existing quorum queues will operate just like before.
+
+
+### Classic Queue Metrics
+
+HTTP API responses will provide a [limited number of metrics](https://github.com/rabbitmq/rabbitmq-server/issues/3080) or metrics for a limited number of queues (for operations such as `GET /api/queues/{vhost}`) when operating in mixed version mode.
+
+
+
+## Compatibility Notes
+
+
+### Client Library Compatibility
+
+Client libraries that were compatible with RabbitMQ `3.8.x` will be compatible with `3.9.0`.
+
+
+### Upgrading to Erlang 24.x
+
+RabbitMQ `3.8.16` and later require Erlang `23.2` and support Erlang `24` (recommended). Users of RabbitMQ versions that [do not support Erlang 23.2](https://www.rabbitmq.com/which-erlang.html) should first upgrade to the latest `3.8.x` patch release on Erlang 23.2, then upgrade to `3.9.0`, and ideally Erlang 24.
+
+
+### Getting Help
+
+Any questions about this release, upgrades or RabbitMQ in general are welcome on the [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users).
+
+
+
+## Changes Worth Mentioning
+
+Release notes are kept under [rabbitmq-server/release-notes](https://github.com/rabbitmq/rabbitmq-server/tree/v3.9.x/release-notes).
+Contributors are encouraged to update them together with their changes. This helps with release automation and more
+consistent release schedule.
+
+
+### Core Server
+
+#### Enhancements
+
+ * [Streams](http://rabbitmq.com/streams.html), a new durable replicated data structure that allows for repeatable (non-destructive) consumption. RabbitMQ 3.9 introduces a new binary protocol for working with streams. While streams can be used by AMQP 0-9-1 clients, in most cases it will be used via a [RabbitMQ stream binary protocol](http://rabbitmq.com/stream.html) client. The new protocol exposes all the operations specific to this queue type and is heavily optimized for them, resulting in much higher read and write throughput.
+
+   [Java](https://github.com/rabbitmq/rabbitmq-stream-java-client) and [Go](https://github.com/rabbitmq/rabbitmq-stream-go-client) clients for the stream protocol are available.
+
+ * Structural logging in JSON. Nodes can now log in JSON format.
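+
+   A minimal configuration sketch; `log.file.formatter` is assumed to be the relevant `rabbitmq.conf` key:
+
+   ``` ini
+   log.file.formatter = json
+   ```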
+
+ GitHub issue: [#2861](https://github.com/rabbitmq/rabbitmq-server/pull/2861)
+
+ * [Peer discovery](https://www.rabbitmq.com/cluster-formation.html) no longer uses randomized startup delay to ensure only one node starts as a seed one during initial (parallel) cluster formation. Instead it relies on an internal runtime locking library.
+
+ GitHub issue: [#3075](https://github.com/rabbitmq/rabbitmq-server/pull/3075)
+
+ * Binding recovery on node startup is now more efficient. This means in clusters with a lot of queues and/or bindings, nodes start quicker.
+
+ GitHub issue: [#3137](https://github.com/rabbitmq/rabbitmq-server/pull/3137)
+
+ * Classic mirrored queue replicas are now started synchronously.
+
+ GitHub issue: [#2751](https://github.com/rabbitmq/rabbitmq-server/pull/2751)
+
+ * `journald` notifications are now performed using a library and not an external tool (`socat`)
+
+ GitHub issue: [#2940](https://github.com/rabbitmq/rabbitmq-server/pull/2940)
+
+ * Startup banner now provides more information, some of which can be customised.
+
+ GitHub issues: [#2247](https://github.com/rabbitmq/rabbitmq-server/pull/2247), [#2777](https://github.com/rabbitmq/rabbitmq-server/pull/2777)
+
+ * Per-protocol and queue type message rates are now available as Prometheus metrics, as well as Streams-specific errors. This also addresses a long-standing 3.8.x counters issue for most metrics of this type, as captured in [#2783](https://github.com/rabbitmq/rabbitmq-server/issues/2783).
+
+ GitHub issues: [#3127](https://github.com/rabbitmq/rabbitmq-server/pull/3127), [#3136](https://github.com/rabbitmq/rabbitmq-server/pull/3136), [#3157](https://github.com/rabbitmq/rabbitmq-server/pull/3157)
+
+#### Internal API and Other Changes
+
+* The standard runtime `logger` module is now used for logging instead of Lager, a 3rd party dependency. In modern Erlang versions, it offers a feature set and efficiency at least comparable to Lager's.
+
+ Users who relied on Lager-specific configuration, e.g. custom extensions or sinks, would have to rebuild them on top of the standard Erlang `logger` module.
+
+ GitHub issue: [#2861](https://github.com/rabbitmq/rabbitmq-server/pull/2861)
+
+* Different Raft-based features now can have different defaults and settings, with more configurability coming at a later point. For example, client ID tracking in the MQTT plugin can use settings for lower memory footprint and more frequent compaction while stream and quorum queues use settings more optimal for higher throughput on the same node.
+
+ GitHub issue: [#2909](https://github.com/rabbitmq/rabbitmq-server/pull/2909)
+
+* Continuous integration of open source RabbitMQ has switched to Bazel, GitHub Actions and [BuildBuddy](https://buildbuddy.io), resulting in much faster and incremental test runs.
+
+ [Bazel support for Erlang](https://github.com/rabbitmq/bazel-erlang) is a new project open sourced by the RabbitMQ Core team as a result.
+
+* Process group membership now uses `pg`.
+
+ GitHub issue: [#2850](https://github.com/rabbitmq/rabbitmq-server/pull/2850)
+
+#### Bug Fixes
+
+ * `powershell.exe` now can run without a profile
+
+ GitHub issue: [#2201](https://github.com/rabbitmq/rabbitmq-server/pull/2201)
+
+ * Queue index recovery now can continue after a forced node shutdown during recovery (e.g. killed)
+
+ GitHub issue: [#3099](https://github.com/rabbitmq/rabbitmq-server/pull/3099)
+
+ * Correct an issue where the `num_acceptors.ssl` configuration value was applied incorrectly
+
+ GitHub issue: [#3180](https://github.com/rabbitmq/rabbitmq-server/pull/3180)
+
+ * Corrected an issue where the `--tags` flag was ignored if `--description` was not specified in the `rabbitmqctl add_vhost` command
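+
+   For example, an invocation like the following (virtual host name and tag are placeholders) now applies the tags even though `--description` is omitted:
+
+   ``` shell
+   rabbitmqctl add_vhost "vhost-1" --tags "qa"
+   ```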
+
+
+### Kubernetes
+
+#### Enhancements
+
+ * [RabbitMQ Cluster Operator](https://www.rabbitmq.com/kubernetes/operator/operator-overview.html) automates provisioning, management, and operations of RabbitMQ clusters running on Kubernetes.
+
+ * [Messaging Topology Operator](https://www.rabbitmq.com/kubernetes/operator/operator-overview.html#topology-operator) makes it possible to define RabbitMQ resources (virtual hosts, users, permissions, topologies, policies, etc) as Kubernetes objects.
+
+ * Kubernetes [peer discovery](https://www.rabbitmq.com/cluster-formation.html) mechanism no longer uses randomized delays to avoid the inherent race condition during [initial cluster formation](https://www.rabbitmq.com/cluster-formation.html#initial-formation-race-condition). Instead it relies on an internal distributed locking mechanism available in modern Erlang releases.
+
+   While randomized startup delay was reasonably effective at ensuring only one cluster was formed even during parallel node startup, it by definition assumes that, with some low probability, more than one node can still start as a seed node and be joined by different groups of cluster peers. The locking mechanism does not have this downside. Now that RabbitMQ requires Erlang 23.2, the internal locking library becomes an option for peer discovery.
+
+ `cluster_formation.randomized_startup_delay_range.min` and `cluster_formation.randomized_startup_delay_range.max` configuration keys will no longer have any effect.
+
+ GitHub issue: [#3075](https://github.com/rabbitmq/rabbitmq-server/pull/3075)
+
+
+### AWS Peer Discovery Plugin
+
+#### Enhancements
+
+ * [AWS peer discovery mechanism](https://www.rabbitmq.com/cluster-formation.html#peer-discovery-aws) now supports [Instance Metadata Service v2](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/configuring-instance-metadata-service.html). In case it is not available, requests will fall back to the original metadata service endpoint.
+
+ The v2 endpoint offers substantial security improvements and is one of the [AWS best practices](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-standards-fsbp-controls.html#ec2-8-remediation).
+
+ Contributed by @thuandb (AWS).
+
+ GitHub issue: [#2952](https://github.com/rabbitmq/rabbitmq-server/pull/2952)
+
+
+### Federation Plugin
+
+#### Enhancements
+
+ * Quorum queues now can be federated.
+
+ GitHub issue: [#2804](https://github.com/rabbitmq/rabbitmq-server/pull/2804)
+
+
+### Management Plugin
+
+#### Bug Fixes
+
+ * Management UI now honors queue name length limits
+
+ GitHub issue: [#3202](https://github.com/rabbitmq/rabbitmq-server/pull/3202)
+
+ * Management UI now sets a correct www-authenticate response header on authentication failures
+
+ GitHub issue: [#3205](https://github.com/rabbitmq/rabbitmq-server/pull/3205)
+
+
+### Dependency Changes
+
+* cowboy has been upgraded from `2.6.1` to `2.8.0`
+* cowlib has been upgraded from `2.7.0` to `2.9.1`
+* lager has been removed
+* jsx has been upgraded from `2.9.0` to `3.1.0`
+* observer_cli has been upgraded from `1.5.2` to `1.6.2`
+* osiris had been introduced at `0.1.0`
+* prometheus has been upgraded from `4.6.0` to `4.8.1`
+* ra has been upgraded from `1.1.9` to `2.0.0`
+* ranch has been upgraded from `1.7.1` to `2.0.0`
+* recon has been upgraded from `2.5.0` to `2.5.1`
+* seshat had been introduced at `0.1.0`
+* socat has been removed
+* syslog has been upgraded from `3.4.5` to `4.0.0`
+* systemd had been introduced at `0.6.1`
diff --git a/release-notes/3.9.1.md b/release-notes/3.9.1.md
new file mode 100644
index 0000000000..841bef16c9
--- /dev/null
+++ b/release-notes/3.9.1.md
@@ -0,0 +1,70 @@
+RabbitMQ `3.9.1` is a maintenance release in the `3.9.x` release series. [This is what changed since v3.9.0](https://github.com/rabbitmq/rabbitmq-server/compare/v3.9.0...v3.9.1).
+
+Please refer to the **Upgrading to 3.9** section from [v3.9.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.9.0) if you are upgrading from a version prior to 3.9.0.
+
+This release requires at least Erlang 23.2, and supports the latest Erlang 24 version, 24.0.5 at the time of release. [RabbitMQ and Erlang/OTP Compatibility Matrix](https://www.rabbitmq.com/which-erlang.html) has more details on Erlang version requirements for RabbitMQ.
+
+
+
+## Changes Worth Mentioning
+
+Release notes are kept under [rabbitmq-server/release-notes](https://github.com/rabbitmq/rabbitmq-server/tree/v3.9.x/release-notes).
+Contributors are encouraged to update them together with their changes. This helps with release automation and more
+consistent release schedule.
+
+### Core Server
+
+#### Bug Fixes
+
+ * Clients that used global QoS prefetch (deprecated as of 3.9.0) ran into an exception when acknowledging deliveries.
+
+ GitHub issue: [#3230](https://github.com/rabbitmq/rabbitmq-server/pull/3230)
+
+ * Improved resiliency of stream coordinator in certain conditions.
+
+ GitHub issues: [#3176](https://github.com/rabbitmq/rabbitmq-server/pull/3176), [#3170](https://github.com/rabbitmq/rabbitmq-server/pull/3170)
+
+ * Nodes failed to start when hostname contained non-ASCII (broader Unicode) characters.
+
+ GitHub issue: [#3239](https://github.com/rabbitmq/rabbitmq-server/pull/3239)
+
+
+### CLI Tools
+
+#### Bug Fixes
+
+ * `rabbitmq-diagnostics stream_status` failed with an exception when the `--tracking` option was used.
+
+ GitHub issue: [#3229](https://github.com/rabbitmq/rabbitmq-server/pull/3229)
+
+ * `rabbitmq-diagnostics stream_status` used an outdated documentation guide link.
+
+ GitHub issue: [#3231](https://github.com/rabbitmq/rabbitmq-server/pull/3231)
+
+
+### RabbitMQ Erlang Client
+
+#### Bug Fixes
+
+ * New releases of the client are again published to Hex.pm.
+
+ GitHub issue: [#3247](https://github.com/rabbitmq/rabbitmq-server/pull/3247)
+
+ * `connection_timeout` was adjusted to avoid a confusing warning.
+
+ GitHub issue: [#3232](https://github.com/rabbitmq/rabbitmq-server/pull/3232) contributed by @britto
+
+ * Corrected a typo in direct connection net tick time adjustment.
+
+ GitHub issue: [#3233](https://github.com/rabbitmq/rabbitmq-server/pull/3233) contributed by @britto
+
+
+## Dependency Upgrades
+
+No dependency changes in this release.
+
+
+## Source Code Archives
+
+To obtain source code of the entire distribution, please download the archive named `rabbitmq-server-3.9.1.tar.xz`
+instead of the source tarball produced by GitHub.
diff --git a/release-notes/3.9.10.md b/release-notes/3.9.10.md
new file mode 100644
index 0000000000..b3eba3292d
--- /dev/null
+++ b/release-notes/3.9.10.md
@@ -0,0 +1,55 @@
+RabbitMQ `3.9.10` is a maintenance release in the `3.9.x` release series.
+
+Please refer to the **Upgrading to 3.9** section from [v3.9.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.9.0) if upgrading from a version prior to 3.9.0.
+
+This release requires at least Erlang 23.2, and supports Erlang 24. [RabbitMQ and Erlang/OTP Compatibility Matrix](https://www.rabbitmq.com/which-erlang.html) has more details on Erlang version requirements for RabbitMQ.
+
+
+
+## Changes Worth Mentioning
+
+Release notes are kept under [rabbitmq-server/release-notes](https://github.com/rabbitmq/rabbitmq-server/tree/v3.9.x/release-notes).
+Contributors are encouraged to update them together with their changes. This helps with release automation and a more consistent release schedule.
+
+### Core Server
+
+#### Bug Fixes
+
+ * The stream coordinator is now more defensive against rapid declaration and deletion cycles.
+
+ GitHub issue: [#3731](https://github.com/rabbitmq/rabbitmq-server/pull/3731)
+
+#### Enhancements
+
+ * Several inter-node communication listener settings are now exposed to `rabbitmq.conf`:
+
+ ``` ini
+ # this port range is used by default
+ distribution.listener.port_range.min = 25675
+ distribution.listener.port_range.max = 25675
+ # instead of listening on all interfaces
+ distribution.listener.interface = 192.168.0.1
+ ```
+
+ GitHub issue: [#3739](https://github.com/rabbitmq/rabbitmq-server/issues/3739)
+
+
+### OAuth 2 Plugin
+
+#### Bug Fixes
+
+ * Signing keys specified in `rabbitmq.conf` were not translated correctly,
+ resulting in exceptions during permission checks.
+
+ GitHub issue: [#3759](https://github.com/rabbitmq/rabbitmq-server/pull/3759)
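+
+   For context, a signing key is typically declared in `rabbitmq.conf` along the following lines. This is a sketch only;
+   the key name and path below are placeholders, not values from this release:
+
+   ``` ini
+   # hypothetical example of an OAuth 2 signing key declaration
+   auth_oauth2.resource_server_id = my-rabbitmq
+   auth_oauth2.signing_keys.key-1 = /path/to/signing_key.pem
+   ```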
+
+
+## Dependency Upgrades
+
+* Ra was [upgraded to `2.0.3`](https://github.com/rabbitmq/ra/compare/v2.0.2...v2.0.3)
+
+
+## Source Code Archives
+
+To obtain source code of the entire distribution, please download the archive named `rabbitmq-server-3.9.10.tar.xz`
+instead of the source tarball produced by GitHub.
diff --git a/release-notes/3.9.11.md b/release-notes/3.9.11.md
new file mode 100644
index 0000000000..a897704d33
--- /dev/null
+++ b/release-notes/3.9.11.md
@@ -0,0 +1,85 @@
+RabbitMQ `3.9.11` is a maintenance release in the `3.9.x` release series.
+
+Please refer to the **Upgrading to 3.9** section from [v3.9.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.9.0) if upgrading from a version prior to 3.9.0.
+
+This release requires at least Erlang 23.2, and supports Erlang 24. [RabbitMQ and Erlang/OTP Compatibility Matrix](https://www.rabbitmq.com/which-erlang.html) has more details on Erlang version requirements for RabbitMQ.
+
+
+
+## Changes Worth Mentioning
+
+Release notes are kept under [rabbitmq-server/release-notes](https://github.com/rabbitmq/rabbitmq-server/tree/v3.9.x/release-notes).
+Contributors are encouraged to update them together with their changes. This helps with release automation and a more consistent release schedule.
+
+### Core Server
+
+#### Bug Fixes
+
+ * When a TLS-enabled listener failed to stop, it logged all of its settings,
+ which could contain sensitive values.
+
+ GitHub issue: [#3803](https://github.com/rabbitmq/rabbitmq-server/issues/3803)
+
+#### Enhancements
+
+ * Quorum queues now store commands for enqueued messages in a more compact format
+ on disk (some derivative data has been dropped).
+
+ GitHub issue: [#3804](https://github.com/rabbitmq/rabbitmq-server/pull/3804)
+
+ * `queue.declare-ok` response to AMQP 0-9-1 clients operating on a stream could return a ready-for-delivery
+ message count value that was out of sync with the stream leader replica.
+
+ GitHub issue: [#3814](https://github.com/rabbitmq/rabbitmq-server/pull/3814)
+
+ * Classic queues now deliver messages more efficiently on channels that had their global QoS changed
+ during consumer operation.
+
+ Contributed by @tomyouyou.
+
+ GitHub issue: [#3805](https://github.com/rabbitmq/rabbitmq-server/pull/3805)
+
+
+### Prometheus Plugin
+
+#### Enhancements
+
+ * New optional metrics provided by the `GET /metrics/detailed` endpoint. These metrics are **cluster-wide**,
+ and therefore must not be aggregated.
+
+ GitHub issue: [#3779](https://github.com/rabbitmq/rabbitmq-server/pull/3779)
+
+
+### Management Plugin
+
+#### Bug Fixes
+
+ * A help tip on the policies page was misplaced.
+
+ Contributed by @johanrhodin
+
+ GitHub issue: [#3825](https://github.com/rabbitmq/rabbitmq-server/pull/3825)
+
+
+### Management Agent Plugin
+
+#### Enhancements
+
+ * Disabling the plugin will stop metric collection performed periodically by
+ queues, streams, connections, et cetera.
+
+ Contributed by @tomyouyou.
+
+ GitHub issue: [#3800](https://github.com/rabbitmq/rabbitmq-server/pull/3800)
+
+
+
+## Dependency Upgrades
+
+None in this release.
+
+
+## Source Code Archives
+
+To obtain source code of the entire distribution, please download the archive named `rabbitmq-server-3.9.11.tar.xz`
+instead of the source tarball produced by GitHub.
diff --git a/release-notes/3.9.2.md b/release-notes/3.9.2.md
new file mode 100644
index 0000000000..3b090731d6
--- /dev/null
+++ b/release-notes/3.9.2.md
@@ -0,0 +1,49 @@
+RabbitMQ `3.9.2` is a maintenance release in the `3.9.x` release series. [This is what changed since v3.9.1](https://github.com/rabbitmq/rabbitmq-server/compare/v3.9.1...v3.9.2).
+
+Please refer to the **Upgrading to 3.9** section from [v3.9.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.9.0) if you are upgrading from a version prior to 3.9.0.
+
+This release requires at least Erlang 23.2, and supports the latest Erlang 24 version, 24.0.5 at the time of release. [RabbitMQ and Erlang/OTP Compatibility Matrix](https://www.rabbitmq.com/which-erlang.html) has more details on Erlang version requirements for RabbitMQ.
+
+
+## Changes Worth Mentioning
+
+Release notes are kept under [rabbitmq-server/release-notes](https://github.com/rabbitmq/rabbitmq-server/tree/v3.9.x/release-notes).
+Contributors are encouraged to update them together with their changes. This helps with release automation and a more
+consistent release schedule.
+
+### CLI Tools
+
+#### Bug Fixes
+
+ * `rabbitmq-upgrade drain` and `rabbitmq-upgrade revive` now log at the `warning` and `info` levels instead of `alert`.
+
+ GitHub issue: [#3266](https://github.com/rabbitmq/rabbitmq-server/pull/3266)
+
+
+### Shovel Plugin
+
+#### Bug Fixes
+
+ * Multiple Shovels could be started in some cases involving node restarts or failures.
+
+ GitHub issue: [#3260](https://github.com/rabbitmq/rabbitmq-server/issues/3260)
+
+
+### Federation Plugin
+
+#### Bug Fixes
+
+ * Multiple Federation links could be started in some cases involving node restarts or failures.
+
+ GitHub issue: [#3260](https://github.com/rabbitmq/rabbitmq-server/issues/3260)
+
+
+
+## Dependency Upgrades
+
+ * Osiris has been [updated to 1.1.0](https://github.com/rabbitmq/osiris/compare/v1.0.0...v1.1.0)
+
+
+## Source Code Archives
+
+To obtain source code of the entire distribution, please download the archive named `rabbitmq-server-3.9.2.tar.xz` instead of the source tarball produced by GitHub.
diff --git a/release-notes/3.9.3.md b/release-notes/3.9.3.md
new file mode 100644
index 0000000000..c26adab40d
--- /dev/null
+++ b/release-notes/3.9.3.md
@@ -0,0 +1,51 @@
+RabbitMQ `3.9.3` is a maintenance release in the `3.9.x` release series that addresses a regression introduced in `3.9.0`.
+All users are recommended to upgrade to this release.
+
+[This is what changed since v3.9.2](https://github.com/rabbitmq/rabbitmq-server/compare/v3.9.2...v3.9.3).
+
+Please refer to the **Upgrading to 3.9** section from [v3.9.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.9.0) if you are upgrading from a version prior to 3.9.0.
+
+This release requires at least Erlang 23.2, and supports the latest Erlang 24 version, 24.0.5 at the time of release. [RabbitMQ and Erlang/OTP Compatibility Matrix](https://www.rabbitmq.com/which-erlang.html) has more details on Erlang version requirements for RabbitMQ.
+
+
+## Changes Worth Mentioning
+
+Release notes are kept under [rabbitmq-server/release-notes](https://github.com/rabbitmq/rabbitmq-server/tree/v3.9.x/release-notes).
+Contributors are encouraged to update them together with their changes. This helps with release automation and a more
+consistent release schedule.
+
+### Core Server
+
+#### Bug Fixes
+
+ * Queues that had messages with per-message TTL in them could fail to recover their indices after a restart.
+
+ GitHub issues: [#3284](https://github.com/rabbitmq/rabbitmq-server/pull/3284), [#3272](https://github.com/rabbitmq/rabbitmq-server/discussions/3272)
+
+ * JSON logging could fail with an exception when a logged event included an epoch-based timestamp value.
+
+ GitHub issue: [#3281](https://github.com/rabbitmq/rabbitmq-server/pull/3281)
+
+ * JSON logging now uses integers (as opposed to floats) to represent epoch-based timestamps.
+
+ GitHub issue: [#3282](https://github.com/rabbitmq/rabbitmq-server/pull/3282)
+
+#### Enhancements
+
+ * RabbitMQ application no longer reports the `stopping` event to systemd. This was counterproductive when the application was stopped but the runtime (Erlang VM) was kept running,
+ e.g. via `rabbitmqctl stop_app` or by the pause minority partition handling strategy.
+
+ Now systemd service monitoring will recognize a node as stopped only when the runtime is stopped, which is usually the behavior operators expect.
+
+ GitHub issues: [#3289](https://github.com/rabbitmq/rabbitmq-server/issues/3289), [#3280](https://github.com/rabbitmq/rabbitmq-server/pull/3280)
+
+
+
+## Dependency Upgrades
+
+No dependency changes in this release.
+
+
+## Source Code Archives
+
+To obtain source code of the entire distribution, please download the archive named `rabbitmq-server-3.9.3.tar.xz` instead of the source tarball produced by GitHub.
diff --git a/release-notes/3.9.4.md b/release-notes/3.9.4.md
new file mode 100644
index 0000000000..8ffad5e054
--- /dev/null
+++ b/release-notes/3.9.4.md
@@ -0,0 +1,81 @@
+RabbitMQ `3.9.4` is a maintenance release in the `3.9.x` release series.
+
+Please refer to the **Upgrading to 3.9** section from [v3.9.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.9.0) if upgrading from a version prior to 3.9.0.
+
+This release requires at least Erlang 23.2, and supports the latest Erlang 24 version, 24.0.5 at the time of release. [RabbitMQ and Erlang/OTP Compatibility Matrix](https://www.rabbitmq.com/which-erlang.html) has more details on Erlang version requirements for RabbitMQ.
+
+
+## Changes Worth Mentioning
+
+Release notes are kept under [rabbitmq-server/release-notes](https://github.com/rabbitmq/rabbitmq-server/tree/v3.9.x/release-notes).
+Contributors are encouraged to update them together with their changes. This helps with release automation and a more
+consistent release schedule.
+
+### Core Server
+
+#### Enhancements
+
+ * Nodes will now use four more environment variables, if set: `RABBITMQ_DEFAULT_USER` (overrides `default_user` in `rabbitmq.conf`), `RABBITMQ_DEFAULT_PASS` (overrides `default_pass`), `RABBITMQ_DEFAULT_VHOST` (overrides `default_vhost`) and `RABBITMQ_ERLANG_COOKIE` (sets [shared authentication secret value](https://www.rabbitmq.com/clustering.html#erlang-cookie)).
+ These variables **are not recommended for production use** but can be the only realistic option in some environments, such as service containers, ECS, and so on.
+ Most users should continue using `rabbitmq.conf` and a securely generated local cookie file (see the sketch below).
+
+ GitHub issue: [#3299](https://github.com/rabbitmq/rabbitmq-server/pull/3299)
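+
+ For reference, a minimal sketch of the `rabbitmq.conf` keys these variables override (all values below are placeholders):
+
+ ``` ini
+ # equivalents of RABBITMQ_DEFAULT_USER, RABBITMQ_DEFAULT_PASS and RABBITMQ_DEFAULT_VHOST
+ default_user = app-user
+ default_pass = use-a-securely-generated-value
+ default_vhost = production
+ ```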
+
+ * Definitions now can be imported from different sources, including those provided by plugins. The original local filesystem source is still supported in a backwards-compatible way.
+
+ The following `rabbitmq.conf` example uses a local file as the source:
+
+ ``` ini
+ # equivalent to the classic load_definitions configuration key
+ definitions.import_backend = local_filesystem
+
+ definitions.local.path = /path/to/definitions.json
+ ```
+
+ This `rabbitmq.conf` example uses a local directory with definition files:
+
+ ``` ini
+ # equivalent to the classic load_definitions configuration key
+ definitions.import_backend = local_filesystem
+
+ definitions.local.path = /path/to/rabbitmq/definitions.d
+ ```
+
+ In this example config file, definitions are loaded from a URL accessible over HTTPS:
+
+ ``` ini
+ # downloads definitions over HTTPS
+ definitions.import_backend = https
+
+ definitions.https.url = https://rabbitmq.eng.megacorp.local/env-1/definitions.json
+
+ # client-side TLS options for definition import
+ definitions.tls.versions.1 = tlsv1.2
+ definitions.tls.log_level = error
+ ```
+
+ GitHub issue: [#3249](https://github.com/rabbitmq/rabbitmq-server/issues/3249)
+
+
+### Prometheus Plugin
+
+#### Enhancements
+
+ * New Prometheus metrics for alarms:
+ * `rabbitmq_alarms_file_descriptor_limit` 1|0
+ * `rabbitmq_alarms_free_disk_space_watermark` 1|0
+ * `rabbitmq_alarms_memory_used_watermark` 1|0
+
+ While some of the alarms have cluster-wide effect, these metrics are node-local.
+
+ GitHub issue: [#2653](https://github.com/rabbitmq/rabbitmq-server/pull/2653)
+
+
+## Dependency Upgrades
+
+No dependency changes in this release.
+
+
+## Source Code Archives
+
+To obtain source code of the entire distribution, please download the archive named `rabbitmq-server-3.9.4.tar.xz` instead of the source tarball produced by GitHub.
diff --git a/release-notes/3.9.5.md b/release-notes/3.9.5.md
new file mode 100644
index 0000000000..7be0956170
--- /dev/null
+++ b/release-notes/3.9.5.md
@@ -0,0 +1,56 @@
+RabbitMQ `3.9.5` is a maintenance release in the `3.9.x` release series.
+
+Please refer to the **Upgrading to 3.9** section from [v3.9.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.9.0) if upgrading from a version prior to 3.9.0.
+
+This release requires at least Erlang 23.2, and supports the latest Erlang 24 version, 24.0.5 at the time of release. [RabbitMQ and Erlang/OTP Compatibility Matrix](https://www.rabbitmq.com/which-erlang.html) has more details on Erlang version requirements for RabbitMQ.
+
+
+## Changes Worth Mentioning
+
+Release notes are kept under [rabbitmq-server/release-notes](https://github.com/rabbitmq/rabbitmq-server/tree/v3.9.x/release-notes).
+Contributors are encouraged to update them together with their changes. This helps with release automation and a more
+consistent release schedule.
+
+### Core Server
+
+#### Bug Fixes
+
+ * Virtual host metadata (description, tags) was not imported from definitions.
+
+ GitHub issue: [#3333](https://github.com/rabbitmq/rabbitmq-server/pull/3333)
+
+ * Reduced unnecessary debug logging from streams.
+
+ GitHub issue: [#3279](https://github.com/rabbitmq/rabbitmq-server/pull/3279)
+
+
+### AWS Peer Discovery Plugin
+
+#### Enhancements
+
+ * AWS API calls are now retried multiple times.
+
+ Contributed by AWS.
+
+ GitHub issue: [#3329](https://github.com/rabbitmq/rabbitmq-server/pull/3329)
+
+
+### Management Plugin
+
+#### Enhancements
+
+ * `PUT /api/vhosts/{name}` now can update metadata (tags and descriptions) for existing
+ virtual hosts.
+
+ GitHub issue: [#3319](https://github.com/rabbitmq/rabbitmq-server/pull/3319)
+
+
+
+## Dependency Upgrades
+
+No dependency changes in this release.
+
+
+## Source Code Archives
+
+To obtain source code of the entire distribution, please download the archive named `rabbitmq-server-3.9.5.tar.xz` instead of the source tarball produced by GitHub.
diff --git a/release-notes/3.9.6.md b/release-notes/3.9.6.md
new file mode 100644
index 0000000000..8f55b0e080
--- /dev/null
+++ b/release-notes/3.9.6.md
@@ -0,0 +1,69 @@
+RabbitMQ `3.9.6` is a maintenance release in the `3.9.x` release series.
+
+Please refer to the **Upgrading to 3.9** section from [v3.9.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.9.0) if upgrading from a version prior to 3.9.0.
+
+This release requires at least Erlang 23.2, and supports the latest Erlang 24 version, 24.0.5 at the time of release. [RabbitMQ and Erlang/OTP Compatibility Matrix](https://www.rabbitmq.com/which-erlang.html) has more details on Erlang version requirements for RabbitMQ.
+
+
+
+## Changes Worth Mentioning
+
+Release notes are kept under [rabbitmq-server/release-notes](https://github.com/rabbitmq/rabbitmq-server/tree/v3.9.x/release-notes).
+Contributors are encouraged to update them together with their changes. This helps with release automation and a more consistent release schedule.
+
+
+### Core Server
+
+#### Bug Fixes
+
+ * TLS information delivered in the [Proxy protocol](https://www.rabbitmq.com/networking.html#proxy-protocol) header is now attached to connection metrics as if it were provided by a non-proxying client.
+
+ GitHub issues: [#3175](https://github.com/rabbitmq/rabbitmq-server/pull/3175), [#3371](https://github.com/rabbitmq/rabbitmq-server/pull/3371) contributed by @prefiks, sponsored by CloudAMQP
+
+ * `max_message_size` had an off-by-one error in the validator.
+
+ GitHub issue: [#3398](https://github.com/rabbitmq/rabbitmq-server/pull/3398)
+
+ * `mirroring_sync_batch_size` was incorrectly validated as if it represented batch size in bytes.
+ It represents batch size in number of messages, so the default hard cap is now 1M (an impractically high value).
+
+ GitHub issue: [#3398](https://github.com/rabbitmq/rabbitmq-server/pull/3398)
+
+### Stream Plugin
+
+#### Bug Fixes
+
+ * Offset parameters were not stored correctly in some cases.
+
+ GitHub issue: [#3360](https://github.com/rabbitmq/rabbitmq-server/pull/3360), contributed by @korsmakolnikov
+
+ * Partitions list order is now stable.
+
+ GitHub issue: [#3423](https://github.com/rabbitmq/rabbitmq-server/pull/3423)
+
+ * When stream clients close connections abruptly, publisher and consumer metrics are now cleaned up correctly.
+
+ GitHub issue: [#3340](https://github.com/rabbitmq/rabbitmq-server/pull/3340)
+
+### Management Plugin
+
+#### Enhancements
+
+ * Stream publishers are now listed on the individual stream page.
+
+ GitHub issue: [#3389](https://github.com/rabbitmq/rabbitmq-server/issues/3389)
+
+ * Counters have been added to the tiles of several sections on detail pages.
+
+ GitHub issue: [#3422](https://github.com/rabbitmq/rabbitmq-server/pull/3422)
+
+
+## Dependency Upgrades
+
+ * Osiris was [upgraded to `1.2.0`](https://github.com/rabbitmq/osiris/compare/v1.1.0...v1.2.0)
+ * Ranch was [upgraded to `2.1.0`](https://github.com/ninenines/ranch/compare/2.0.0...2.1.0)
+
+
+## Source Code Archives
+
+To obtain source code of the entire distribution, please download the archive named `rabbitmq-server-3.9.6.tar.xz` instead of the source tarball produced by GitHub.
diff --git a/release-notes/3.9.7.md b/release-notes/3.9.7.md
new file mode 100644
index 0000000000..410c5ef619
--- /dev/null
+++ b/release-notes/3.9.7.md
@@ -0,0 +1,64 @@
+RabbitMQ `3.9.7` is a maintenance release in the `3.9.x` release series.
+
+Please refer to the **Upgrading to 3.9** section from [v3.9.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.9.0) if upgrading from a version prior to 3.9.0.
+
+This release requires at least Erlang 23.2, and supports the latest Erlang 24 version, 24.0.5 at the time of release. [RabbitMQ and Erlang/OTP Compatibility Matrix](https://www.rabbitmq.com/which-erlang.html) has more details on Erlang version requirements for RabbitMQ.
+
+
+
+## Changes Worth Mentioning
+
+Release notes are kept under [rabbitmq-server/release-notes](https://github.com/rabbitmq/rabbitmq-server/tree/v3.9.x/release-notes).
+Contributors are encouraged to update them together with their changes. This helps with release automation and a more consistent release schedule.
+
+### All Components
+
+ * All bytecode is now compiled using the `+deterministic` compiler flag. This should eliminate the capture of some irrelevant build environment attributes in produced artifacts, improve consistency between builds, and reduce the file level diff between release artifacts.
+
+ GitHub issue: [#3442](https://github.com/rabbitmq/rabbitmq-server/pull/3442)
+
+
+### Core Server
+
+#### Enhancements
+
+ * Classic queue shutdown now uses a much higher timeout (up to 10 minutes instead of 30 seconds).
+
+ In environments with many queues (especially mirrored queues) and many consumers, this means that
+ the chance of queue indices rebuilding after node restart is now substantially lower.
+
+ GitHub issue: [#3409](https://github.com/rabbitmq/rabbitmq-server/pull/3409)
+
+#### Bug Fixes
+
+* Quorum queues no longer leak memory and disk space when a consumer is repeatedly added and cancelled on an empty queue.
+
+ GitHub issue: [#3445](https://github.com/rabbitmq/rabbitmq-server/issues/3445)
+
+
+### Prometheus Plugin
+
+#### Enhancements
+
+ * More configurability for metrics exposed via the Prometheus endpoint.
+
+ GitHub issue: [#3421](https://github.com/rabbitmq/rabbitmq-server/pull/3421)
+
+
+### Shovel Plugin
+
+#### Bug Fixes
+
+ * Shovel URIs could be logged with credentials in some scenarios.
+
+ GitHub issue: [#3476](https://github.com/rabbitmq/rabbitmq-server/pull/3476), contributed by @thuandb (AWS)
+
+
+## Dependency Upgrades
+
+ * observer_cli has been upgraded from `1.6.2` to `1.7.1`
+
+
+## Source Code Archives
+
+To obtain source code of the entire distribution, please download the archive named `rabbitmq-server-3.9.7.tar.xz` instead of the source tarball produced by GitHub.
diff --git a/release-notes/3.9.8.md b/release-notes/3.9.8.md
new file mode 100644
index 0000000000..90433c04d4
--- /dev/null
+++ b/release-notes/3.9.8.md
@@ -0,0 +1,83 @@
+RabbitMQ `3.9.8` is a maintenance release in the `3.9.x` release series.
+
+Please refer to the **Upgrading to 3.9** section from [v3.9.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.9.0) if upgrading from a version prior to 3.9.0.
+
+This release requires at least Erlang 23.2, and supports the latest Erlang 24 version, 24.1.2 at the time of release. [RabbitMQ and Erlang/OTP Compatibility Matrix](https://www.rabbitmq.com/which-erlang.html) has more details on Erlang version requirements for RabbitMQ.
+
+
+
+## Changes Worth Mentioning
+
+Release notes are kept under [rabbitmq-server/release-notes](https://github.com/rabbitmq/rabbitmq-server/tree/v3.9.x/release-notes).
+Contributors are encouraged to update them together with their changes. This helps with release automation and a more consistent release schedule.
+
+### Core Server
+
+#### Bug Fixes
+
+* When the mandatory flag was used for publishing to classic queues,
+  but publisher confirms were not, channel memory usage would grow indefinitely.
+
+ GitHub issue: [#3560](https://github.com/rabbitmq/rabbitmq-server/issues/3560)
+
+* `rabbitmq-diagnostics memory_breakdown` failed to read memory of connection
+ reader, writer and channel processes.
+
+ GitHub issue: [#3570](https://github.com/rabbitmq/rabbitmq-server/issues/3570)
+
+* In some environments, stream replicas advertised IP addresses that could not be reached by cluster peers
+  (e.g. IP addresses behind a NAT in a Docker deployment). RabbitMQ node hostnames are now advertised as well
+ so that other peers can resolve them to get an externally visible IP address.
+
+ GitHub issue: [rabbitmq/osiris#53](https://github.com/rabbitmq/osiris/issues/53)
+
+
+### Prometheus Plugin
+
+#### Enhancements
+
+ * More data is exposed via the `GET /metrics/detailed` endpoint.
+
+ GitHub issue: [#3520](https://github.com/rabbitmq/rabbitmq-server/pull/3520)
+
+
+### Management Plugin
+
+#### Bug Fixes
+
+ * When setting topic permissions, the list of exchanges in the UI now honors the
+ currently selected virtual host.
+
+ Contributed by @LuisCusihuaman.
+
+ GitHub issue: [#3545](https://github.com/rabbitmq/rabbitmq-server/pull/3545)
+
+
+### AWS Peer Discovery Plugin
+
+#### Enhancements
+
+ * The plugin now logs more details for failed AWS API requests.
+
+ Contributed by @tvhong-amazon (AWS)
+
+ GitHub issue: [#3579](https://github.com/rabbitmq/rabbitmq-server/pull/3579)
+
+
+### Web STOMP Plugin
+
+#### Enhancements
+
+ * STOMP-over-WebSockets connections now can consume from [streams](https://rabbitmq.com/streams.html).
+
+ GitHub issue: [#3509](https://github.com/rabbitmq/rabbitmq-server/pull/3509)
+
+
+## Dependency Upgrades
+
+* Osiris was [upgraded to `1.2.2`](https://github.com/rabbitmq/osiris/compare/v1.2.0...v1.2.2)
+
+
+## Source Code Archives
+
+To obtain source code of the entire distribution, please download the archive named `rabbitmq-server-3.9.8.tar.xz` instead of the source tarball produced by GitHub.
diff --git a/release-notes/3.9.9.md b/release-notes/3.9.9.md
new file mode 100644
index 0000000000..7b9c74d76e
--- /dev/null
+++ b/release-notes/3.9.9.md
@@ -0,0 +1,93 @@
+RabbitMQ `3.9.9` is a maintenance release in the `3.9.x` release series.
+
+Please refer to the **Upgrading to 3.9** section from [v3.9.0 release notes](https://github.com/rabbitmq/rabbitmq-server/releases/tag/v3.9.0) if upgrading from a version prior to 3.9.0.
+
+This release requires at least Erlang 23.2, and supports the latest Erlang 24 version, 24.1.2 at the time of release. [RabbitMQ and Erlang/OTP Compatibility Matrix](https://www.rabbitmq.com/which-erlang.html) has more details on Erlang version requirements for RabbitMQ.
+
+
+
+## Changes Worth Mentioning
+
+Release notes are kept under [rabbitmq-server/release-notes](https://github.com/rabbitmq/rabbitmq-server/tree/v3.9.x/release-notes).
+Contributors are encouraged to update them together with their changes. This helps with release automation and a more consistent release schedule.
+
+### Core Server
+
+#### Bug Fixes
+
+ * Fixed an issue where the node monitor could produce a false network partition when
+ one of the cluster members was restarted
+
+ GitHub issue: [#3631](https://github.com/rabbitmq/rabbitmq-server/pull/3631)
+
+ * Message store resiliency improvements
+
+ GitHub issue: [#3615](https://github.com/rabbitmq/rabbitmq-server/pull/3615)
+
+ * Reduced log noise in certain scenarios where a new queue leader is elected
+
+ GitHub issue: [#3628](https://github.com/rabbitmq/rabbitmq-server/pull/3628)
+
+ * Queue leader rebalancing now logs less
+
+ GitHub issue: [#3643](https://github.com/rabbitmq/rabbitmq-server/pull/3643)
+
+#### Enhancements
+
+ * `cluster_formation.target_cluster_size_hint` is a new configuration setting that can be used to specify the expected initial cluster size.
+ This can be used by features, plugins or tools that expect a certain minimum number of cluster nodes
+ to join during initial cluster formation (see the example below).
+
+ GitHub issue: [#3635](https://github.com/rabbitmq/rabbitmq-server/pull/3635)
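+
+ A minimal `rabbitmq.conf` sketch (the value `3` below is a placeholder, not a default):
+
+ ``` ini
+ # hypothetical example: the initial cluster is expected to have three nodes
+ cluster_formation.target_cluster_size_hint = 3
+ ```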
+
+
+### Prometheus Plugin
+
+#### Enhancements
+
+ * Metric rendering efficiency improvements (mostly latency)
+
+ GitHub issue: [#3587](https://github.com/rabbitmq/rabbitmq-server/pull/3587)
+
+#### Bug Fixes
+
+ * TLS-enabled Prometheus endpoint listener port was not correctly stored internally
+
+ GitHub issue: [#2975](https://github.com/rabbitmq/rabbitmq-server/issues/2975)
+
+
+### Management Plugin
+
+#### Bug Fixes
+
+ * Persistent message count is now displayed correctly on individual queue pages
+
+ GitHub issue: [#3598](https://github.com/rabbitmq/rabbitmq-server/pull/3598)
+
+ * Restore compatibility with IE 11
+
+ GitHub issue: [#3689](https://github.com/rabbitmq/rabbitmq-server/pull/3689)
+
+
+### Consistent Hashing Exchange Plugin
+
+#### Bug Fixes
+
+ * Corrected deletion of duplicate bindings between an exchange and a queue
+
+ Contributed by @FalconerTC
+
+ GitHub issue: [#3594](https://github.com/rabbitmq/rabbitmq-server/pull/3594)
+
+
+## Dependency Upgrades
+
+* Ra was [upgraded to `2.0.2`](https://github.com/rabbitmq/ra/compare/v2.0.0...v2.0.2)
+
+* Osiris was [upgraded to `1.2.3`](https://github.com/rabbitmq/osiris/compare/v1.2.2...v1.2.3)
+
+
+## Source Code Archives
+
+To obtain source code of the entire distribution, please download the archive named `rabbitmq-server-3.9.9.tar.xz`
+instead of the source tarball produced by GitHub.
diff --git a/release-notes/README-1.1.0-alpha.txt b/release-notes/README-1.1.0-alpha.txt
new file mode 100644
index 0000000000..72fb04a225
--- /dev/null
+++ b/release-notes/README-1.1.0-alpha.txt
@@ -0,0 +1,84 @@
+Release : RabbitMQ 1.1.0-alpha
+
+Release Highlights
+==================
+
+RabbitMQ server
+---------------
+- support for clustering and load balancing
+- near-linear scaling across multiple nodes on fanout
+- various performance improvements
+- more resilient startup sequence
+- improved error reporting and logging, particularly during startup
+- improved test coverage
+- improved protocol conformance, particularly in the area of
+ connection and channel management
+- fixed a number of race conditions in corner cases of the protocol
+- made several parameters, such as tcp buffer size, more easily
+ adjustable
+- supervision tree for most of the processes, for improved resilience
+- better support for building under Windows
+- new rabbitmqctl commands, for clustering, broker status, and more
+- improved rabbitmqctl success and failure reporting
+- improved documentation for build, install, administration
+- tested against latest Erlang/OTP release - R11B-5
+
+Java client
+-----------
+- support for clustering and load balancing
+- improved and better documented API
+- improved error handling and reporting
+- new QueuingConsumer class for easy asynchronous message processing
+- restructured internals
+- fixed several race conditions and deadlocks, and some other bugs
+- support for build under Java 6 and under Windows
+
+Packaging
+---------
+- more resilient and easier configurable startup scripts
+- fixed several bugs in Debian packaging
+- RabbitMQ now runs as a separate user
+- new Fedora and Suse RPM packages
+- new Debian and RPM packages for Java client
+- "binary" Java client packages for Java >=1.5 and <1.5
+- streamlined packaging process
+
+
+Upgrading
+=========
+
+If you are upgrading from an earlier release of RabbitMQ, note that
+RabbitMQ's internal database schema has changed, and hence the
+MNESIA_BASE directory should be cleared before starting the new
+server. This wipes out any configuration information and persisted
+messages.
+
+The upgrade process will be much improved in future releases, to the
+point where a running RabbitMQ cluster can be upgraded without service
+interruption. Meanwhile, if you need assistance in migration please
+contact the RabbitMQ team at rabbitmq-sales@pivotal.io.
+
+
+Credits
+=======
+
+We would like to thank the following individuals for submitting bug
+reports and feedback that we incorporated into this release:
+
+Antonio Cordova
+Carl Bourne
+David Pollack
+David MacIver
+Francesco Cesarini
+Gerald Loeffler
+Hunter Morris
+Jason Pellerin
+Jeff Rogers
+Martin Logan
+Matt Darling
+Michael Newton
+Neil Bartlett
+Norbert Klamann
+Robert Greig
+Wannes Sels
+Warren Pfeffer
diff --git a/release-notes/README-1.1.1.txt b/release-notes/README-1.1.1.txt
new file mode 100644
index 0000000000..4728c1b734
--- /dev/null
+++ b/release-notes/README-1.1.1.txt
@@ -0,0 +1,61 @@
+Release: RabbitMQ 1.1.1
+Status : beta
+
+Release Highlights
+==================
+
+RabbitMQ server
+---------------
+- improved interoperability with Qpid M1 clients
+- fixed a bug in persistent message re-delivery that caused RabbitMQ
+ to fail when attempting to re-deliver messages after a restart
+- fixed a performance problem that caused throughput to drop
+ significantly for larger message sizes
+- fixed a bug in amqqueue:stat_all/0 that caused it to fail
+- refactored some internals in order to support additional transports
+ more easily
+
+Java client
+-----------
+- improved interoperability with Qpid M1 Java server
+- changed threading model to stop clients from exiting when there are
+ open AMQP connections
+- extended API to allow setting of frameMax and channelMax
+
+Packaging
+---------
+- included main test suite runner in source distribution
+- dropped version status (i.e. alpha, beta, etc) from file and dir
+ names
+- renamed server erlang package dir to "rabbitmq_server-<version>", to
+ comply with Erlang/OTP conventions
+
+
+Upgrading
+=========
+
+Upgrading to this release from RabbitMQ 1.1.0 requires no special
+steps at the server end. There have been some minor changes to the
+Java client API. Most client code is unlikely to be affected by these,
+and is easily changed if it is.
+
+If you are upgrading from an earlier release of RabbitMQ, note that
+RabbitMQ's internal database schema has changed, and hence the
+MNESIA_BASE directory should be cleared before starting the new
+server. This wipes out any configuration information and persisted
+messages.
+
+
+Credits
+=======
+
+We would like to thank the following individuals for submitting bug
+reports and feedback that we incorporated into this release:
+
+Ben Hood
+James Wolstenholme
+Jeff Rogers
+Maximillian Dornseif
+Michael Arnoldus
+Steve Jenson
+Tanmay Goel
diff --git a/release-notes/README-1.2.0.txt b/release-notes/README-1.2.0.txt
new file mode 100644
index 0000000000..fea8e5b203
--- /dev/null
+++ b/release-notes/README-1.2.0.txt
@@ -0,0 +1,63 @@
+Release: RabbitMQ 1.2.0
+Status : beta
+
+Release Highlights
+==================
+
+RabbitMQ server
+---------------
+- introduced internal flow control to prevent performance drops when
+ running a server near capacity
+- simplified cluster configuration and added "automatic" clustering
+- made rabbitmqctl command line syntax less confusing
+- fixed a couple of race conditions that could result in a client
+ receiving unexpected sequences of command replies and messages
+- refactored internals to make it easier to construct handlers for
+ transports other than raw TCP/IP
+
+Java client
+-----------
+- fixed a race condition between invocation of a Consumer's
+ handle{Consume,Cancel}Ok and handleDelivery methods; the former are
+ now called inside the connection's thread, just like the latter
+
+Packaging
+---------
+- simplified rabbitmqctl invocation under Debian
+- moved default location of the log and mnesia dirs under Windows to
+ sub directories of the RABBITMQ_BASE directory
+- changed startup scripts to allow the rabbitmq_server package to
+ reside outside the OTP library directory
+
+
+Upgrading
+=========
+
+Under Windows the default location of the mnesia directory has changed
+from %RABBITMQ_BASE% to %RABBITMQ_BASE%\db. If you have an existing
+installation that uses the old default location and you would like to
+retain the server state (including persisted messages) then just move
+the *.DAT, *.DCD, *.DCL and *.LOG files from that directory to the new
+location.
+
+There have been some minor changes to the Java client API. Most client
+code is unlikely to be affected by these, and is easily changed if it
+is.
+
+If you are upgrading from RabbitMQ-1.0.0 note that RabbitMQ's internal
+database schema has changed, and hence the MNESIA_BASE directory
+should be cleared before starting the new server. This wipes out any
+configuration information and persisted messages.
+
+
+Credits
+=======
+
+We would like to thank the following individuals for submitting bug
+reports and feedback that we incorporated into this release:
+
+Ben Hood
+Emmanuel Okyere
+Holger Hoffstätte
+Jodi Moran
+Robert Greig
diff --git a/release-notes/README-1.3.0.txt b/release-notes/README-1.3.0.txt
new file mode 100644
index 0000000000..18418a6fed
--- /dev/null
+++ b/release-notes/README-1.3.0.txt
@@ -0,0 +1,96 @@
+Release: RabbitMQ 1.3.0
+Status : beta
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+- eliminate a number of race conditions that could result in message
+ loss and other incorrect or unusual behaviour
+- eliminate duplication of messages when bindings overlap
+- prevent unbounded memory usage when topic exchanges encounter
+ messages with highly variable routing keys
+- redesigned persister so it works properly in a clustered broker
+- fix a couple of bugs that could cause persisted messages to stick
+ around forever, resulting in an unbounded memory usage
+- prevent performance drop under high load
+- do not requeue messages on tx.rollback
+- fix bug in heartbeat logic that could result in a connection
+ remaining alive even though the client had stopped sending any data
+- correct handling of queue.bind with empty routing key and queue name
+- complain about acks with an unknown delivery tag
+- prevent sending of zero-length content body frames
+
+enhancements
+- improve error reporting for various framing-related errors
+- improve rabbitmq-multi robustness and error reporting
+- identify log locations in startup message
+- keep log file contents on server restart
+- support QPid's extended field types
+- improve performance, particularly for persistent messaging
+- re-architect internals to eliminate some code duplication, reduce
+ component dependencies, and construct cleaner internal APIs
+
+Java client
+-----------
+bug fixes
+- eliminate edge case that could result in stuck AMQConnection.close
+- use linear timers to prevent heartbeat timeouts on system clock
+ adjustment, which happens in some virtualisation platforms
+- eliminate a race condition that could result in an exception when
+ establishing a connection
+
+enhancements
+- add SSL support
+- improve error reporting for various framing-related errors
+- add new FileProducer/Consumer example
+- make MulticastMain example more versatile, with improved command
+ line options
+- improve performance
+
+packaging
+---------
+bug fixes
+- fix broken 'rabbitmqctl -n' on Debian
+- fix broken removal of the rabbitmq-server Debian package
+- fix broken Erlang library installation on 64bit RPM-based systems
+- fix failure of server shutdown when started at boot time on Debian
+- fix various problems with RPMs
+
+improvements
+- better compliance with debian packaging policies
+
+
+Upgrading
+=========
+
+Care must be taken when upgrading a server that contains persisted
+messages. The persister log format has changed between RabbitMQ-1.2.0
+and this release. When RabbitMQ-1.3.0 first starts following an
+upgrade it will move the existing persister log to a backup file -
+check the log files for details. Thus the previously persisted
+messages are not lost, but neither are they replayed. Therefore it is
+recommended that the upgrade is performed only when there are no
+important persistent messages remaining.
+
+
+Credits
+=======
+
+We would like to thank the following individuals for submitting bug
+reports and feedback that we incorporated into this release:
+
+Andrew Munn
+Barry Pederson
+Ben Hood
+David Pollak
+Emmanuel Okyere
+Joe Lee
+John Leuner
+Matt Darling
+Michael Arnoldus
+Nick Levine
+Tom Samplonius
+Willem van Heemstra
diff --git a/release-notes/README-1.4.0.txt b/release-notes/README-1.4.0.txt
new file mode 100644
index 0000000000..0127bfa467
--- /dev/null
+++ b/release-notes/README-1.4.0.txt
@@ -0,0 +1,108 @@
+Release: RabbitMQ 1.4.0
+Status : beta
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+- maintain message order on persister replay
+- do not throw away work on connection.close
+- eliminate possibility of generating duplicate message ids when
+ broker is restarted
+- deal with race conditions during queue creation that could result in
+ a queue being reported as 'not_found' when it did in fact exist, and
+ the converse, or multiple queue processes being created per queue.
+- suppress a few errors that would be logged in various connection
+ shutdown scenarios but turn out to be harmless
+- ensure preservation of content properties during persistence - this
+ was working fine for the AMQP transport, but not the STOMP gateway
+- fix various other small bugs
+
+enhancements
+- improve performance of queue creation
+- add explanatory text to errors returned to the client and reported
+ in the logs
+- rationalise severities of logged errors, and log information
+ allowing the correlation of log events with connections
+- return 'connection_forced' error to clients on broker shutdown
+- supervise queue processes
+- improve/refactor internal APIs to assist in creation of extensions
+- add type signature for all exported functions, and type check code
+ with dialyzer
+- generate AMQP codec from JSON representation of protocol spec
+
+Java client
+-----------
+bug fixes
+- completely revamp connection and channel closure handling, resolving
+ a number of race conditions and other bugs in the process and resulting
+ in a cleaner, more comprehensive and consistent API
+- correct a few minor errors in the javadocs
+
+enhancements
+- generate AMQP codec from JSON representation of protocol spec
+
+building & packaging
+--------------------
+bug fixes
+- only source /etc/default/rabbitmq in init.d scripts, thus stopping
+ it from overriding env var settings
+- pay attention to NODE* env vars in rabbitmq-multi script, thus
+ allowing users to easily start multiple nodes with non-default
+ settings for the node name, ip and port.
+- make rpm update work
+
+enhancements
+- on Windows, place the server logs and db in a dir under the per-user
+ %APPDATA% directory
+- display names of nodes started by rabbitmq-multi
+- migrate from cvs to hg, and split into separate repositories for
+ server, java client, etc.
+- clean up and refactor Makefiles
+- avoid hanging for 30s when an invalid database schema is detected
+
+
+Upgrading
+=========
+
+The database schema has changed in this version of RabbitMQ. If you
+attempt to start RabbitMQ-1.4.0 over top of a previous installation,
+it will fail, citing "schema_integrity_check_failed". To correct this,
+delete your mnesia directory (on most platforms,
+/var/lib/rabbitmq/mnesia) and restart the server. Note that this will
+destroy all your durable exchanges and queues, and all your persisted
+messages!
+
+Care must be taken when upgrading a server that contains persisted
+messages. The persister log format has changed between RabbitMQ-1.3.0
+and this release. When RabbitMQ-1.4.0 first starts following an
+upgrade it will move the existing persister log to a backup file -
+check the log files for details. Thus the previously persisted
+messages are not lost, but neither are they replayed. Therefore it is
+recommended that the upgrade is performed only when there are no
+important persistent messages remaining.
+
+Due to a bug in the rpm packaging, which has now been fixed, users
+with an existing rpm-based installation of the RabbitMQ server should
+first remove the rabbitmq-server package ('rpm -e rabbitmq-server')
+before proceeding with the normal upgrade.
+
+
+Credits
+=======
+
+We would like to thank the following individuals for submitting bug
+reports and feedback that we incorporated into this release:
+
+Andrew Statsenko
+David Corcoran
+Dmitriy Samovskiy
+Holger Hoffstaette
+John Leuner
+Kyle Salasko
+Lars Bachmann
+Michael Arnoldus
+Petr Sturc
+Sean Treadway
diff --git a/release-notes/README-1.5.0.txt b/release-notes/README-1.5.0.txt
new file mode 100644
index 0000000000..22d51be400
--- /dev/null
+++ b/release-notes/README-1.5.0.txt
@@ -0,0 +1,143 @@
+Release: RabbitMQ 1.5.0
+Status : beta
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+- support running on top of the latest Erlang/OTP release (R12B-5)
+- maintain effect visibility guarantees in cross-node routing
+- reduce likelihood of timeouts when channels interact with a large
+ number of queues
+- graceful handling of some corner cases in abrupt client disconnect
+
+enhancements
+- remove tickets and realms
+- improve scalability of queue and binding creation and deletion
+- add 'queue.unbind' command to protocol
+- disable Nagle for more consistent latency
+- throttle producers with 'channel.flow' when running low on memory
+ Note that this feature is disabled by default; please see
+ https://www.rabbitmq.com/admin-guide.html#memsup for details.
+- remove a few spurious errors in the logs
+- show the actual listening IP & port in logs
+- improve rabbitmqctl:
+ - add a few useful info commands
+ - add a 'reopen_logs' command to assist in log management
+ - add a '-q' flag to suppress informational messages and thus
+ facilitate post-processing of the output
+ - write errors to stderr instead of stdout
+
+Java client
+-----------
+bug fixes
+- eliminate several race conditions in connection and channel closure
+ that could result in deadlock
+- always respond to a server's 'connection.close' and 'channel.close'
+- prevent interference between internal locking on channels and
+ application-level locking
+
+enhancements
+- remove tickets and realms
+- support 'queue.unbind'
+- disable Nagle for more consistent latency
+- react to server-issued 'channel.flow' by (un)blocking producers
+- add channel.abort method to unconditionally and idempotently close a
+ channel
+- complete the set of channel and connection close and abort methods
+
+.net client
+-----------
+bug fixes
+- eliminate "Cannot access a disposed object" exception in connection
+ closure and disposal
+- correct heartbeat logic to prevent spurious timeouts when not idling
+
+enhancements
+- remove tickets and realms
+- support 'queue.unbind'
+- disable Nagle for more consistent latency
+- react to server-issued 'channel.flow' by (un)blocking producers
+- add IModel.abort method to unconditionally and idempotently close a
+ channel
+- complete the set of channel and connection close and abort methods
+
+building & packaging
+--------------------
+bug fixes
+- correct locations of libraries et al on 64bit rpm systems
+
+enhancements
+- detect upgrade from pre-1.5.0 and warn/ask user (under debian & rpm)
+- comply with debian and rpm packaging policies and guidelines
+- prevent accidental executing of scripts as non-root user under
+ debian & rpm
+- read /etc/default/rabbitmq in scripts on Unix-y systems, with env
+ vars taking precedence over vars set in that file and the defaults
+ used by the scripts
+- prefix env vars with 'RABBITMQ_'
+- allow script execution from anywhere (not just the scripts' dir)
+- add script & instructions to start RabbitMQ as a Windows service
+- add 'status' command to init.d script under debian & rpm
+- automatic log rotation under debian & rpm
+- use simplejson.py instead of json.py in code generation, thus
+ allowing use of Python 2.6
+
+
+Upgrading
+=========
+
+The database schema has changed between RabbitMQ-1.4.0 and this
+release. When the RabbitMQ server detects the presence of an old
+database, it moves it to a backup location, creates a fresh, empty
+database, and logs a warning.
+
+If your RabbitMQ installation contains important data, such as user
+accounts, durable exchanges and queues, or persistent messages, then
+we recommend you contact rabbitmq-sales@pivotal.io for assistance with the
+upgrade.
+
+
+Credits
+=======
+
+We would like to thank the following individuals for submitting bug
+reports and feedback that we incorporated into this release:
+
+Alister Morton
+Aman Gupta
+Andrius Norkaitis
+Barry Pedersen
+Benjamin Black
+Benjamin Polidore
+Brian Sullivan
+David Corcoran
+Dmitriy Samovskiy
+Edwin Fine
+Eran Sandler
+Esteve Fernandez
+Ezra Zygmuntowicz
+Ferret
+Gavin Bong
+Geoffrey Anderson
+Holger Hoffstätte
+Jacek Korycki
+John Leuner
+Jonatan Kallus
+Jonathan McGee
+Kyle Sampson
+Leo Martins
+Maarten Engelen
+Nathan Woodhull
+Nigel Verdon
+Paul Jones
+Pete Kay
+Peter Kieltyka
+Sarah Jelinek
+Sean Treadway
+Steve Jenson
+Terry Jones
+Vadim Zaliva
+Valentino Volonghi
diff --git a/release-notes/README-1.5.1.txt b/release-notes/README-1.5.1.txt
new file mode 100644
index 0000000000..006eef4d98
--- /dev/null
+++ b/release-notes/README-1.5.1.txt
@@ -0,0 +1,81 @@
+Release: RabbitMQ 1.5.1
+Status : final
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+- handle race condition between routing and queue deletion that could
+ cause errors on message publication
+- the default settings for RABBITMQ_SERVER_ERL_ARGS were not taken
+ into account, resulting in decreased network performance
+- add workaround for the Erlang/OTP bug OTP-7025, which caused errors
+ in topic routing in Erlang/OTP versions older than R12B-1
+- display the nodes of queue and connection processes in rabbitmqctl's
+ list_{queues,connections} command. Previously we displayed pids,
+ which was broken and not particularly useful.
+
+enhancements
+- enable 'channel.flow'-based producer throttling by default on Linux
+- include stack traces in error reports in rabbit.log
+- speed up rabbitmqctl and rabbitmq-multi
+
+Java client
+-----------
+no significant changes
+
+.net client
+-----------
+bug fixes
+- handle race condition in client-initiated connection closure that
+ could result in an OperationInterruptedException
+
+enhancements
+- re-enable heartbeating by default
+
+building & packaging
+--------------------
+bug fixes
+- fix bug that caused removal of RPMs to be incomplete
+- produce separate RPMs for SuSE-like systems to resolve various
+ incompatibilities
+- rename BUILD to README in order to prevent build failures on systems
+ with case-insensitive file naming (such as OS X)
+
+enhancements
+- minor tweaks in Debian and RPM packaging for better compliance with
+ packaging guidelines
+- better handling of errors encountered during code generation
+
+
+Upgrading
+=========
+
+No special precautions are necessary when upgrading from
+RabbitMQ-1.5.0.
+
+When upgrading from earlier releases, note that the database schema
+has changed in RabbitMQ-1.5.x. When the RabbitMQ server detects the
+presence of an old database, it moves it to a backup location, creates
+a fresh, empty database, and logs a warning.
+
+If your RabbitMQ installation contains important data, such as user
+accounts, durable exchanges and queues, or persistent messages, then
+we recommend you contact rabbitmq-sales@pivotal.io for assistance with the
+upgrade.
+
+
+Credits
+=======
+
+We would like to thank the following individuals for submitting bug
+reports and feedback that we incorporated into this release:
+
+Aymerick Jéhanne
+Chuck Remes
+Ezra Zygmuntowicz
+Glenn Robuck
+Mathias Gug
+Michael Barker
diff --git a/release-notes/README-1.5.2.txt b/release-notes/README-1.5.2.txt
new file mode 100644
index 0000000000..22acf3b74d
--- /dev/null
+++ b/release-notes/README-1.5.2.txt
@@ -0,0 +1,79 @@
+Release: RabbitMQ 1.5.2
+Status : final
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+- prevent the shell from attempting to perform path expansion on vars
+ in the startup scripts, which was resulting in errors when starting
+ rabbit on some systems.
+- make guid generation independent of persister, thus preventing
+ timeouts when the persister is busy
+- get scripts to source configuration from /etc/rabbitmq/rabbitmq.conf
+ rather than /etc/default/rabbitmq, since the latter is reserved for
+ init.d scripts.
+
+Java client
+-----------
+bug fixes
+- eliminate race condition in server-initiated channel closure that
+ could lead to deadlock
+
+.net client
+-----------
+bug fixes
+- eliminate race condition in server-initiated channel closure that
+ could lead to deadlock
+
+building & packaging
+--------------------
+enhancements
+- minor tweaks in Debian and RPM packaging for better compliance with
+ packaging guidelines
+- place wrapper scripts for rabbitmq-server and rabbitmq-multi
+ alongside the rabbitmqctl wrapper in /usr/sbin
+- do not start the server by default on RPM-based systems, in order to
+ comply with common practice and guidelines
+- suppress stdout in logrotate scripts, to keep cron et al happy
+
+Upgrading
+=========
+
+The place from which the server startup and control scripts source
+configuration information on Unix systems has changed from
+/etc/default/rabbitmq to /etc/rabbitmq/rabbitmq.conf. If you have been
+using the former, just move the file to the latter location. The
+/etc/default/rabbitmq file (/etc/sysconfig/rabbitmq on RPM-based
+systems) is still being sourced by the init.d script, but it should
+only contain settings directly affecting the behaviour of the init.d
+script, such as NODE_COUNT.
+
+When upgrading from releases earlier than RabbitMQ-1.5.x, note that
+the database schema has changed. When the RabbitMQ server detects the
+presence of an old database, it moves it to a backup location, creates
+a fresh, empty database, and logs a warning.
+
+If your RabbitMQ installation contains important data, such as user
+accounts, durable exchanges and queues, or persistent messages, then
+we recommend you contact rabbitmq-sales@pivotal.io for assistance with the
+upgrade.
+
+
+Credits
+=======
+
+We would like to thank the following individuals for submitting bug
+reports and feedback that we incorporated into this release:
+
+Billy Chasen
+Charl Matthee
+Christopher Hoover
+Darien Kindlund
+Dmitriy Samovskiy
+Jason Williams
+Mathias Gug
+Peter Lemenkov
+Phil Stubbings
diff --git a/release-notes/README-1.5.3.txt b/release-notes/README-1.5.3.txt
new file mode 100644
index 0000000000..996cd057f3
--- /dev/null
+++ b/release-notes/README-1.5.3.txt
@@ -0,0 +1,79 @@
+Release: RabbitMQ 1.5.3
+Status : final
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+- prevent the shell from attempting to perform path expansion on vars
+ in the startup scripts, which was resulting in errors when starting
+ rabbit on some systems.
+- make guid generation independent of persister, thus preventing
+ timeouts when the persister is busy
+- get scripts to source configuration from /etc/rabbitmq/rabbitmq.conf
+ rather than /etc/default/rabbitmq, since the latter is reserved for
+ init.d scripts.
+
+Java client
+-----------
+bug fixes
+- eliminate race condition in server-initiated channel closure that
+ could lead to deadlock
+
+.net client
+-----------
+bug fixes
+- eliminate race condition in server-initiated channel closure that
+ could lead to deadlock
+
+building & packaging
+--------------------
+enhancements
+- minor tweaks in Debian and RPM packaging for better compliance with
+ packaging guidelines
+- place wrapper scripts for rabbitmq-server and rabbitmq-multi
+ alongside the rabbitmqctl wrapper in /usr/sbin
+- do not start the server by default on RPM-based systems, in order to
+ comply with common practice and guidelines
+- suppress stdout in logrotate scripts, to keep cron et al happy
+
+Upgrading
+=========
+
+The place from which the server startup and control scripts source
+configuration information on Unix systems has changed from
+/etc/default/rabbitmq to /etc/rabbitmq/rabbitmq.conf. If you have been
+using the former, just move the file to the latter location. The
+/etc/default/rabbitmq file (/etc/sysconfig/rabbitmq on RPM-based
+systems) is still being sourced by the init.d script, but it should
+only contain settings directly affecting the behaviour of the init.d
+script, such as NODE_COUNT.
+
+When upgrading from releases earlier than RabbitMQ-1.5.x, note that
+the database schema has changed. When the RabbitMQ server detects the
+presence of an old database, it moves it to a backup location, creates
+a fresh, empty database, and logs a warning.
+
+If your RabbitMQ installation contains important data, such as user
+accounts, durable exchanges and queues, or persistent messages, then
+we recommend you contact rabbitmq-sales@pivotal.io for assistance with the
+upgrade.
+
+
+Credits
+=======
+
+We would like to thank the following individuals for submitting bug
+reports and feedback that we incorporated into this release:
+
+Billy Chasen
+Charl Matthee
+Christopher Hoover
+Darien Kindlund
+Dmitriy Samovskiy
+Jason Williams
+Mathias Gug
+Peter Lemenkov
+Phil Stubbings
diff --git a/release-notes/README-1.5.4.txt b/release-notes/README-1.5.4.txt
new file mode 100644
index 0000000000..47d52cccae
--- /dev/null
+++ b/release-notes/README-1.5.4.txt
@@ -0,0 +1,81 @@
+Release: RabbitMQ 1.5.4
+Status : final
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+- starting a RabbitMQ instance that contains a large number
+ (thousands) of durable exchanges, queues or bindings now takes just a
+ few seconds instead of several minutes.
+- on Windows, rabbitmq-multi.bat can now start RabbitMQ even when the
+ path to the startup script contains spaces, whereas previously that
+ would fail.
+- on Windows, the rabbitmqctl.bat and rabbitmq-multi.bat scripts now
+ report errors correctly instead of swallowing them.
+
+enhancements
+- make the default settings of the various env vars which can be set
+ in rabbitmq.conf visible to that script, thus permitting more
+ advanced manipulation of the settings than was previously possible.
+- permit configuration of rabbitmqctl's Erlang start parameters by
+ sourcing rabbitmq.conf from the script and adding some env vars.
+- on Windows, rabbitmq-server.bat and rabbitmq-multi.bat can now be
+ configured with the RABBITMQ_{SERVER,MULTI}_{ERL,START}_ARGS env
+ vars.
+
+Java client
+-----------
+no changes
+
+.net client
+-----------
+no changes
+
+building & packaging
+--------------------
+bug fixes
+- correct paths in 64-bit RPMs; the paths got broken in the 1.5.3
+ release, preventing the server from starting
+- in the Debian and RPM packages, set the current working dir of the
+ various scripts to /var/lib/rabbitmq instead of /. The latter was
+ preventing crash dumps from being written.
+- fix BSD incompatibility in 'make srcdist'
+
+enhancements
+- minor tweaks in Debian and RPM packaging for better compliance with
+ packaging guidelines
+
+
+Upgrading
+=========
+
+When upgrading from releases earlier than 1.5.3, note that the place
+from which the server startup and control scripts source configuration
+information on Unix systems has changed from /etc/default/rabbitmq to
+/etc/rabbitmq/rabbitmq.conf. If you have been using the former, just
+move the file to the latter location. The /etc/default/rabbitmq file
+(/etc/sysconfig/rabbitmq on RPM-based systems) is still being sourced
+by the init.d script, but it should only contain settings directly
+affecting the behaviour of the init.d script, such as NODE_COUNT.
+
+When upgrading from releases earlier than 1.5.x, note that the
+database schema has changed. When the RabbitMQ server detects the
+presence of an old database, it moves it to a backup location, creates
+a fresh, empty database, and logs a warning. If your RabbitMQ
+installation contains important data, such as user accounts, durable
+exchanges and queues, or persistent messages, then we recommend you
+contact rabbitmq-sales@pivotal.io for assistance with the upgrade.
+
+
+Credits
+=======
+
+We would like to thank the following individuals for submitting bug
+reports and feedback that we incorporated into this release:
+
+Alex Clemesha
+Aymerick Jehanne
+John Leuner
diff --git a/release-notes/README-1.5.5.txt b/release-notes/README-1.5.5.txt
new file mode 100644
index 0000000000..0e2fe4ed67
--- /dev/null
+++ b/release-notes/README-1.5.5.txt
@@ -0,0 +1,91 @@
+Release: RabbitMQ 1.5.5
+Status : final
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+- in a clustered setup, bindings to durable queues are now correctly
+ recovered when a queue's node restarts.
+- node failure in a clustered setup could trigger premature exchange
+ auto-deletion
+- the cluster config file name was inadvertently changed from
+ rabbitmq_cluster.config to cluster.config in release 1.5.4. It has
+ now been changed back.
+- when attempting to delete a non-existing exchange, return 404 (not
+ found), as defined by the spec, rather than 541 (internal error)
+- correct some type specs to keep dialyzer happy
+
+enhancements
+- display the node name and database dir on startup
+
+Java client
+-----------
+bug fixes
+- correct semantics of connection.tune's channel-max parameter - it
+ determines the range of usable channel numbers - from 1 to
+ channel-max, inclusive. Previously the highest channel number we
+ allowed was channel-max - 1.
+- correct misleading javadoc for GetResponse.getMessageCount().
+
+enhancements
+- improve error reporting
+
+.net client
+-----------
+bug fixes
+- correct semantics of connection.tune's channel-max parameter - it
+ determines the range of usable channel numbers - from 1 to
+ channel-max, inclusive. Previously the highest channel number we
+ allowed was channel-max - 1.
+
+building & packaging
+--------------------
+bug fixes
+- work around absence of escript in path on some Fedora/EPEL
+ installations
+- make build work with python 2.4, which Fedora/EPEL 5 is on
+- work around possible bug in Debian packaging of Erlang OTP R13,
+ which misses a dependency on os-mon in erlang-nox
+
+enhancements
+- minor tweaks in RPM packaging for better compliance with packaging
+ guidelines
+
+
+Upgrading
+=========
+
+When upgrading from releases earlier than 1.5.3, note that the place
+from which the server startup and control scripts source configuration
+information on Unix systems has changed from /etc/default/rabbitmq to
+/etc/rabbitmq/rabbitmq.conf. If you have been using the former, just
+move the file to the latter location. The /etc/default/rabbitmq file
+(/etc/sysconfig/rabbitmq on RPM-based systems) is still being sourced
+by the init.d script, but it should only contain settings directly
+affecting the behaviour of the init.d script, such as NODE_COUNT.
+
+When upgrading from releases earlier than 1.5.x, note that the
+database schema has changed. When the RabbitMQ server detects the
+presence of an old database, it moves it to a backup location, creates
+a fresh, empty database, and logs a warning. If your RabbitMQ
+installation contains important data, such as user accounts, durable
+exchanges and queues, or persistent messages, then we recommend you
+contact rabbitmq-sales@pivotal.io for assistance with the upgrade.
+
+
+Credits
+=======
+
+We would like to thank the following individuals for submitting bug
+reports and feedback that we incorporated into this release:
+
+Aaron Cline
+Bradford Cross
+John Leuner
+Levi Greenspan
+Peter Lemenkov
+Rob Golkosky
+Steve Marah
diff --git a/release-notes/README-1.6.0.txt b/release-notes/README-1.6.0.txt
new file mode 100644
index 0000000000..7f224630c5
--- /dev/null
+++ b/release-notes/README-1.6.0.txt
@@ -0,0 +1,86 @@
+Release: RabbitMQ 1.6.0
+Status : final
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+- eliminate potential memory leak of transactionally published
+ messages in certain system errors
+- prevent possible starvation of some consumers on channels that get
+ temporarily blocked due to backlogs
+- do not send more messages to backlogged channels when accepting a
+ new consumer
+- prevent possible message reordering in the event of temporary node
+ failure in a clustered setup
+- return 'not_found' error in all cases of 'queue.unbind' attempting
+ to remove a non-existing binding
+
+enhancements
+- implement AMQP's basic.qos channel prefetch count limiting (see the
+ sketch after this list)
+- implement AMQP 0-9/0-9-1's headers exchange type
+- introduce a permissions system which allows fine-grained access
+ control on resources involved in AMQP operations. See
+ https://www.rabbitmq.com/admin-guide.html#access-control for details
+- introduce 'alternate exchanges' to handle messages which are
+ otherwise unroutable. See
+ https://www.rabbitmq.com/extensions.html#alternate-exchange for
+ details
+- improve performance and stability under high load
+- reduce memory consumption
+- prefix all mnesia tables with "rabbit_" in order to prevent name
+ clashes with other Erlang applications, in particular ejabberd
+- handle rabbitmqctl commands with higher priority, thus ensuring that
+ answers are returned promptly even under high load
+- reduce severity of "connection closed abruptly" log event from
+ 'error' to 'warning'
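+
+For illustration, a minimal consumer using the basic.qos prefetch limit
+mentioned in the list above. This is a sketch in terms of the
+present-day Java client API (com.rabbitmq.client); the exact method
+names post-date this release, and the queue name "work" is arbitrary:
+
+    import com.rabbitmq.client.Channel;
+    import com.rabbitmq.client.Connection;
+    import com.rabbitmq.client.ConnectionFactory;
+
+    public class PrefetchDemo {
+        public static void main(String[] args) throws Exception {
+            ConnectionFactory factory = new ConnectionFactory();
+            factory.setHost("localhost");
+            Connection conn = factory.newConnection();
+            Channel ch = conn.createChannel();
+            // Ask the broker to push at most 10 unacknowledged messages
+            // to this channel's consumers at any one time.
+            ch.basicQos(10);
+            ch.queueDeclare("work", true, false, false, null);
+            ch.basicConsume("work", false, (consumerTag, delivery) -> {
+                // ... process delivery.getBody() ...
+                ch.basicAck(delivery.getEnvelope().getDeliveryTag(), false);
+            }, consumerTag -> { });
+            // The connection is left open so the consumer keeps running.
+        }
+    }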
+
+Java client
+-----------
+enhancements
+- support extended list of table field types
+
+.net client
+-----------
+bug fixes
+- make unit tests work under .NET 2.0
+
+enhancements
+- rename public fields to avoid name clashes with properties that trip
+ up tools like Powershell
+- suppress inclusion of spec comments in generated code due to
+ licensing issues
+- generate strong named (i.e. signed) assemblies. See the bottom of
+ https://www.rabbitmq.com/dotnet.html for details
+
+building & packaging
+--------------------
+enhancements
+- introduce wrapper scripts in macports, as in the Debian and RPM
+ packaging, which ensure the real scripts are run as the right
+ ('rabbitmq') user
+- remove build-time dependency on mnesia
+- trim Debian Erlang package dependencies
+- auto-generate the module list in rabbit.app, thus ensuring it is
+ always up to date
+
+Upgrading
+=========
+
+When upgrading from releases earlier than 1.5.3, note that the place
+from which the server startup and control scripts source configuration
+information on Unix systems has changed from /etc/default/rabbitmq to
+/etc/rabbitmq/rabbitmq.conf. If you have been using the former, just
+move the file to the latter location. The /etc/default/rabbitmq file
+(/etc/sysconfig/rabbitmq on RPM-based systems) is still being sourced
+by the init.d script, but it should only contain settings directly
+affecting the behaviour of the init.d script, such as NODE_COUNT.
+
+The database schema has changed. When the RabbitMQ server detects the
+presence of an old database, it moves it to a backup location, creates
+a fresh, empty database, and logs a warning. If your RabbitMQ
+installation contains important data, such as user accounts, durable
+exchanges and queues, or persistent messages, then we recommend you
+contact rabbitmq-sales@pivotal.io for assistance with the upgrade.
diff --git a/release-notes/README-1.7.0.txt b/release-notes/README-1.7.0.txt
new file mode 100644
index 0000000000..738ae128f8
--- /dev/null
+++ b/release-notes/README-1.7.0.txt
@@ -0,0 +1,105 @@
+Release: RabbitMQ 1.7.0
+Status : beta
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+- prevent timeouts of rabbitmqctl when the server is busy
+- prevent load avg calculation from failing under high load, which
+ could cause connection establishment to break
+- remove channel closing timeout since it can cause a protocol
+ violation
+- prevent client disconnects from sometimes resulting in enormous
+ error log entries and causing considerable CPU and memory pressure
+
+enhancements
+- support SSL natively - see <https://www.rabbitmq.com/ssl.html>
+- add a plugin mechanism to provide a framework for developing rabbit
+ extensions and managing their installation - see
+ <https://www.rabbitmq.com/plugin-development.html>
+- support configuration via erlang config file, which has fewer
+ escaping and formatting requirements than the other configuration
+ mechanisms - see <https://www.rabbitmq.com/install.html#configfile>
+- display diagnostics when rabbitmqctl fails with a badrpc error,
+ making it easier to track down the cause
+- improve queue hibernation logic to reduce system load in
+ pathological scenarios, like invocations of 'rabbitmqctl
+ list_queues' at one second intervals
+- increase consumer throughput under high load
+- improve performance of channel and connection termination
+- escape output of all rabbitmqctl commands
+- react to memory pressure more quickly
+- more graceful handling of some rare error conditions during
+ connection establishment, preventing spurious error log entries
+- display location of application descriptor on startup, to make it
+ easy to locate rabbit installations
+- in 'rabbitmqctl list_connections', display the connection state by
+ default, and no longer show absent usernames as 'none', thus
+ avoiding possible confusion
+- add hook mechanism for altering/augmenting broker behaviour
+- add cute banner :)
+
+Java client
+-----------
+bug fixes
+- work around Java Hotspot bug that could cause channel number
+ allocation to return null
+
+enhancements
+- disable heartbeats by default
+- add queuePurge to API
+- make content properties (deep) cloneable
+
+.net client
+-----------
+bug fixes
+- fix a number of race conditions in the Subscription close/shutdown
+ logic, making it safe to close Subscriptions from any thread
+- allow SharedQueues (and thus QueuingBasicConsumer and Subscription)
+ to drain messages after close
+
+enhancements
+- disable heartbeats by default
+- make content properties (deep) cloneable
+- add some more AmqpTcpEndpoint constructors to API
+
+building & packaging
+--------------------
+bug fixes
+- prevent purging of server debian and RPM packages from failing due
+ to epmd still running
+- fix escaping/quoting corner cases in server control wrapper scripts
+- in RPM packages, eliminate spurious removal of server startup from
+ all run levels during upgrade
+
+enhancements
+- install server Erlang app in RabbitMQ-specific directory tree
+ (e.g. /usr/lib/rabbitmq) rather than the Erlang/OTP tree. This
+ allows OTP to be upgraded w/o breaking rabbit.
+- package .net client in Windows installer
+- include .net client in complete windows bundle
+- switch .net client build from nant to msbuild and include VS
+ solution
+- update complete windows bundle from Erlang/OTP R11B5 to R12B5
+- make installation work under MacPorts 1.8.0
+- make server buildable under freebsd
+- permit configuration of server startup log locations in
+ /etc/default/rabbitmq
+- improve formatting of man pages
+- do not stomp on RABBITMQ_* environment variables in server Makefile
+
+Upgrading
+=========
+The database schema has not changed since the 1.6.0 release, so user
+accounts, durable exchanges and queues, and persistent messages will
+all be retained during the upgrade.
+
+If, however, you are upgrading from a release prior to 1.6.0, when the
+RabbitMQ server detects the presence of an old database, it moves it
+to a backup location, creates a fresh, empty database, and logs a
+warning. If your RabbitMQ installation contains important data then we
+recommend you contact rabbitmq-sales@pivotal.io for assistance with the
+upgrade.
diff --git a/release-notes/README-1.7.1.txt b/release-notes/README-1.7.1.txt
new file mode 100644
index 0000000000..e2b0835498
--- /dev/null
+++ b/release-notes/README-1.7.1.txt
@@ -0,0 +1,123 @@
+Release: RabbitMQ 1.7.1
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+- correct various quoting errors in the Windows scripts that caused
+ them to fail
+- ensure that stalled ssl negotiations do not block further ssl
+ connection acceptance
+- prohibit the (re)declaration of queues that reside on a node that is
+ currently stopped, thus preventing message loss or duplication when
+ that node recovers
+- eliminate race condition in queue auto-deletion, ensuring that it
+ has completed before channel/connection closure completes
+- ensure that ack processing cannot stall under heavy load when using
+ basic.qos
+- make plug-ins and config files work when running as a Windows
+ service
+- write crash dumps to a sensible location
+ (%APPDATA%\RabbitMQ\erl_crash.dump by default) when running as a
+ Windows service
+- get the Windows service to use Erlang/OTP R12B-5 by default, since
+ that, rather than R11B-5, is what we ship in the Windows bundle
+- correct formatting of plug-in activation errors
+- make column order of 'rabbitmqctl list_bindings' match the
+ documentation
+- do not escape spaces in rabbitmqctl output
+- prevent vars declared in Windows scripts from polluting the
+ environment
+- clean up properly when the rabbit Erlang application is stopped,
+ thus making it more well-behaved and easier to embed
+
+enhancements
+- make the various scripts work with complete short node names
+- improve memory monitoring and producer throttling. See the updated
+ documentation at https://www.rabbitmq.com/extensions.html#memsup.
+- make tcp_listeners configurable via the rabbitmq.config file
+- use the base64 module instead of ssl_base64 if we can, since the
+ latter is sometimes missing from Erlang installations
+- display pids instead of just nodes in 'rabbitmqctl list_connections'
+ and 'rabbitmqctl list_queues', to aid troubleshooting
+- add capability to display the transmitted client_properties in
+ 'rabbitmqctl list_connections'
+- extend codec with array type ('A')
+- add proper headers to auto-generated code
+
+Java client
+-----------
+bug fixes
+- eliminate race in connection establishment that could cause errors
+ to be reported in different ways
+- fix quoting in runjava.bat to stop it from tripping over exotic
+ Windows paths
+
+enhancements
+- enforce codec size limits, specifically on AMQP's shortstr type,
+ thus preventing the creation of invalid AMQP protocol frames
+- add support for basic.recover in the API
+- name threads, to aid troubleshooting
+- allow applications to adjust socket configuration, e.g. buffer sizes
+- extend codec with array type ('A')
+- throw a more informative exception (UnknownChannelException) when
+ receiving a frame for an unknown channel
+- add proper headers to auto-generated code
+
+.net client
+-----------
+bug fixes
+- close connections on app domain unload, thus preventing spurious
+ errors and possible connection leaks when the client is run in
+ certain app containers, e.g. IIS
+- close socket on ssl upgrade error, thus plugging a socket leak
+- resolve various bugs in the ssl negotiation code that cause it to
+ fail on .Net proper (though not mono)
+
+enhancements
+- improve performance by introducing I/O buffering
+- permit ssl connections that do not validate the server certificate
+- improve standard display of BrokerUnreachableException
+- make SharedQueue implement IEnumerable and allow multiple concurrent
+ enumerators per instance
+- switch the code gen to the BSD-licensed version of the AMQP spec
+- extend codec with array type ('A')
+- add proper headers to auto-generated code
+
+building & packaging
+--------------------
+bug fixes
+- stop Debian package purge from failing after plug-in (de)activation
+- when upgrading the rpm package, do not remove rabbit from any
+ runlevels
+- fix error handling in rabbit.app generation, ensuring that errors
+ are reported rather than written to the generated file
+- during Debian package removal, only kill epmd if it was started by
+ the rabbitmq user, in order to reduce the likelihood of interference
+ with other Erlang applications
+- resolve minor incompatibility with some versions of 'echo' that
+ could result in spurious '-e's appearing in script error messages
+
+enhancements
+- make MacPorts package work on Snow Leopard
+- streamline dependencies in MacPorts package
+- automate generation of MacPorts package and create a RabbitMQ
+ MacPorts repository - see https://www.rabbitmq.com/macports.html
+- mirror downloads onto Amazon Cloudfront, for better availability and
+ download speed
+- allow 'rabbitmq' user to execute the various wrapper scripts
+
+Upgrading
+=========
+The database schema has not changed since the 1.6.0 release, so user
+accounts, durable exchanges and queues, and persistent messages will
+all be retained during the upgrade.
+
+If, however, you are upgrading from a release prior to 1.6.0, when the
+RabbitMQ server detects the presence of an old database, it moves it
+to a backup location, creates a fresh, empty database, and logs a
+warning. If your RabbitMQ installation contains important data then we
+recommend you contact rabbitmq-sales@pivotal.io for assistance with the
+upgrade.
diff --git a/release-notes/README-1.7.2.txt b/release-notes/README-1.7.2.txt
new file mode 100644
index 0000000000..a4c66001fd
--- /dev/null
+++ b/release-notes/README-1.7.2.txt
@@ -0,0 +1,75 @@
+Release: RabbitMQ 1.7.2
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+- fix a number of problems with memory monitoring under Windows,
+ including compatibility issues with versions of Erlang/OTP older
+ than R13, and 64-bit versions of Windows. See the updated
+ documentation at https://www.rabbitmq.com/extensions.html#memsup
+- correct various path escaping bugs under Windows that could result
+ in RabbitMQ failing to start
+- make 'rabbitmq-multi start_all <n>' work again for n>1
+- issuing a basic.qos when there are outstanding acks can no longer
+ result in a higher limit than requested
+- enforce codec size limits, thus preventing the server from sending
+ invalid AMQP frames
+
+enhancements
+- add rabbitmqctl list_channels and list_consumers commands, and add
+ exclusivity information to list_queues. Also introduce a
+ close_connection command to allow an administrator to selectively
+ terminate client connections. See the updated admin guide at
+ https://www.rabbitmq.com/admin-guide.html for details on these new
+ features.
+- remove the explicit setting of TCP buffer sizes in the server, thus
+ allowing auto-(re)sizing to occur. This generally results in
+ substantially improved throughput over high-latency links, and makes
+ manual fine-tuning easier.
+- introduce declarative boot sequencing, which allows plugins to be
+ started at arbitrary chosen points during the sequence
+
+Java client
+-----------
+bug fixes
+- ensure that QueuingConsumer throws a ShutdownSignalException in
+ *all* consuming threads, not just one
+- fix race conditions in 'tracer' tool that could cause it to fail
+
+enhancements
+- make exception stack traces more meaningful
+- allow overriding of several RpcClient methods, for easier extension
+ and re-use
+- improve performance of channel creation for high channel counts
+- improve performance of 'tracer' tool
+- add option to 'tracer' tool to suppress content bodies, which is
+ useful for tracing connections carrying a high data volume
+- better exception reporting in 'tracer' tool
+
+.net client
+-----------
+enhancements
+- improve performance of channel creation for high channel counts
+
+building & packaging
+--------------------
+bug fixes
+- under macports, ensure env var settings are passed to the various
+ startup and control scripts
+
+
+Upgrading
+=========
+The database schema has not changed since the 1.6.0 release, so user
+accounts, durable exchanges and queues, and persistent messages will
+all be retained during the upgrade.
+
+If, however, you are upgrading from a release prior to 1.6.0, when the
+RabbitMQ server detects the presence of an old database, it moves it
+to a backup location, creates a fresh, empty database, and logs a
+warning. If your RabbitMQ installation contains important data then we
+recommend you contact rabbitmq-sales@pivotal.io for assistance with the
+upgrade.
diff --git a/release-notes/README-1.8.0.txt b/release-notes/README-1.8.0.txt
new file mode 100644
index 0000000000..344e558acc
--- /dev/null
+++ b/release-notes/README-1.8.0.txt
@@ -0,0 +1,208 @@
+Release: RabbitMQ 1.8.0
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+- ensure that a change in host name does not prevent RabbitMQ from
+ being restarted.
+- ensure that durable exclusive queues do not survive a restart of the
+ broker.
+- fix a race condition that could occur when concurrently declaring
+ exclusive queues.
+- ensure that queues being recovered by a node in a cluster cannot be
+ accessed via other nodes until the queue is fully initialised.
+- prevent bursts of declarations or deletions of queues or exchanges
+ from exhausting mnesia's transactional capacity.
+- prevent bursts of connections from exhausting TCP backlog buffers.
+- various corrections to documentation to correct discrepancies
+ between the website, the man pages, and the commands' usage outputs.
+
+enhancements
+------------
+- introduce a pluggable exchange type API permitting plugins to the
+ broker to define new exchange types which can then be used by
+ clients.
+- introduce a backing queue API permitting plugins to the broker to
+ define new ways in which messages can be stored.
+- several semantic changes to bring the behaviour in line with the AMQP
+ 0-9-1 spec:
+ + honour many of the queue exclusivity requirements for AMQP 0-9-1,
+ such as queue redeclaration, basic.get, queue.bind and
+ queue.unbind.
+ + honour exchange and queue equivalence requirements for AMQP 0-9-1,
+ especially for queue and exchange redeclaration.
+ + ensure that exclusive queues are synchronously deleted before the
+ connection fully closes.
+ + permit durable queues to be bound to transient exchanges.
+ + rigorously detect invalid and reused delivery-tags in basic.ack and
+ raise exceptions when they are encountered
+ + queue.purge now does not remove unacknowledged messages.
+- require clients to respond to channel.flow messages within 10
+ seconds to avoid an exception being raised and more rigorously deal
+ with clients that disobey channel.flow messages. See
+ https://www.rabbitmq.com/extensions.html#memsup
+- the server now supports the client sending channel.flow messages to
+ temporarily halt the flow of deliveries to the client.
+- optimise cross-node routing of messages in a cluster scenario whilst
+ maintaining visibility guarantees.
+- ensure that clients who present invalid credentials cannot flood the
+ broker with requests.
+- drop support for versions of Erlang older than R12B-3.
+- ensure that the minimum number of frames is used to deliver
+ messages, regardless of incoming and outgoing frame sizes.
+- display the current version of Erlang when booting Rabbit, and
+ ensure the version is sufficiently youthful.
+- work around some name resolver issues, especially under Windows.
+- introduce a Pacemaker OCF script (and then fix it, thanks to patches
+ by Florian Haas) to permit RabbitMQ to be used in basic
+ active/passive HA scenarios (see
+ https://www.rabbitmq.com/pacemaker.html).
+
+
+java client
+-----------
+bug fixes
+- fix a race condition when closing channels which could lead to the
+ same channel being closed twice.
+- MulticastMain could calculate negative rates, due to integer
+ wrapping.
+- be consistent about naming conventions.
+
+enhancements
+- Java client is now available via Maven Central.
+- redesign the ConnectionFactory to be more idiomatic.
+- expose server properties in connection.start.
+- allow additional client properties to be set in connection.start_ok.
+- attempt to infer authentication failures and construct appropriate
+ exceptions.
+- MulticastMain now logs returned publishes.
+
+
+.net client
+-----------
+bug fixes
+- prevent memory leak due to DomainUnload event handler.
+- improvements to catching connections which are timing out.
+- ensure explicitly numbered closed channels return their channel
+ number to the pool correctly.
+- removed artificial limitation on maximum incoming message size.
+
+enhancements
+- expose server properties in connection.start.
+- allow additional client properties to be set in connection.start_ok.
+- attempt to infer authentication failures and construct appropriate
+ exceptions.
+
+
+code generation
+---------------
+enhancements
+- permit multiple specifications to easily be combined and merged.
+- permit any number of different "actions" in code generation.
+
+
+building & packaging
+--------------------
+bug fixes
+- stop the INSTALL file from being installed in the wrong place by the
+ Debian packages.
+
+enhancements
+- source rpm (.src.rpm) packages are now available
+- rpm packages are now noarch, matching the debs
+
+
+Upgrading
+=========
+The database schema and the format in which persistent messages are
+stored have both changed since the last release (1.7.2). When
+starting, the RabbitMQ server will detect the existence of an old
+database and will move it to a backup location, before creating a
+fresh, empty database, and will log a warning. If your RabbitMQ
+installation contains important data then we recommend you contact
+rabbitmq-sales@pivotal.io for assistance with the upgrade.
+
+
+Important notes on the AMQP 0-9-1 semantic changes
+==================================================
+
+This release incorporates a number of semantic changes to the broker
+behaviour which bring the broker more in-line with the AMQP 0-9-1
+specification. We don't think any of these changes are going to be a
+big problem for anyone, and will probably be irrelevant for most
+people. In almost all cases they're tightening up or tidying up edge
+cases where the 0-8 spec was incomplete or specified something
+unhelpful. However, it's probably worth reading the list below to make
+absolutely sure you're not depending on any of our existing weird
+behaviour.
+
+
+Reuse of delivery tags
+----------------------
+
+In previous versions of RabbitMQ, you could ack the same message with
+the same delivery tag multiple times. In 1.8.0 this will cause a
+not-found exception. Note that if a message is redelivered for any
+reason it will get a new delivery tag so you can ack it again.
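+
+For illustration, a sketch in terms of the present-day Java client API
+(the method names post-date 1.8.0, and the queue name is arbitrary):
+acknowledging a delivery tag once is fine; repeating the ack for the
+same tag is what now raises the not-found exception.
+
+    import java.nio.charset.StandardCharsets;
+
+    import com.rabbitmq.client.Channel;
+    import com.rabbitmq.client.Connection;
+    import com.rabbitmq.client.ConnectionFactory;
+    import com.rabbitmq.client.GetResponse;
+
+    public class AckOnceDemo {
+        public static void main(String[] args) throws Exception {
+            ConnectionFactory factory = new ConnectionFactory();
+            factory.setHost("localhost");
+            Connection conn = factory.newConnection();
+            Channel ch = conn.createChannel();
+            ch.queueDeclare("demo", false, false, false, null);
+            ch.basicPublish("", "demo", null,
+                            "hello".getBytes(StandardCharsets.UTF_8));
+            GetResponse msg = ch.basicGet("demo", false); // manual acks
+            if (msg != null) {
+                long tag = msg.getEnvelope().getDeliveryTag();
+                ch.basicAck(tag, false);   // first ack: accepted
+                // ch.basicAck(tag, false); // a second ack of the same tag
+                //                          // now closes the channel with a
+                //                          // not-found exception
+            }
+            conn.close();
+        }
+    }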
+
+
+Exchange equivalence
+--------------------
+
+In previous versions of RabbitMQ you could actively declare an
+exchange with one set of durable and auto-delete parameters, then
+actively declare it again with different parameters and get the same
+exchange back. This now causes a precondition_failed exception, as it
+would if the type does not match. Note that with the old behaviour the
+exchange did not actually change to match the new parameters; you just
+got back something that was not what you asked for.
+
+In previous versions, when passively declaring an exchange, the type
+parameter was checked (but not the durable and auto-delete
+parameters). Now only the name is checked. Passive declaration cannot
+create an exchange, and exchanges are only identified by their
+name. Therefore it does not make sense to require the other parameters
+of exchange.declare to match the exchange declaration in the passive
+case.
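+
+For illustration, a sketch in terms of the present-day Java client API
+(the method names post-date 1.8.0, and the exchange name is arbitrary):
+
+    import com.rabbitmq.client.Channel;
+    import com.rabbitmq.client.Connection;
+    import com.rabbitmq.client.ConnectionFactory;
+
+    public class ExchangeEquivalenceDemo {
+        public static void main(String[] args) throws Exception {
+            ConnectionFactory factory = new ConnectionFactory();
+            factory.setHost("localhost");
+            Connection conn = factory.newConnection();
+            Channel ch = conn.createChannel();
+            // First active declaration: a durable fanout exchange.
+            ch.exchangeDeclare("events", "fanout", true);
+            // Redeclaring with identical parameters succeeds.
+            ch.exchangeDeclare("events", "fanout", true);
+            // Redeclaring with different parameters (durable=false here)
+            // now closes the channel with precondition_failed, just as a
+            // mismatched type always has:
+            // ch.exchangeDeclare("events", "fanout", false);
+            conn.close();
+        }
+    }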
+
+
+Queue equivalence
+-----------------
+
+Similarly, when actively redeclaring a queue you could vary the
+durable and auto-delete parameters and get back a queue which did not
+match what you asked for. Again, this now causes a
+precondition_failed exception. Likewise, passive declaration of
+queues only needs to match on the queue name, not any other
+parameters.
+
+
+Purging unacknowledged messages
+-------------------------------
+
+When queue.purge is called, messages which had been sent but not
+acknowledged used to be purged. Now they are not. This makes much more
+sense as consumers from a queue may have no idea whether or not a
+queue has been purged by some other client.
+
+
+Binding durable queues to transient exchanges
+---------------------------------------------
+
+This used not to be permitted. Now it is. The binding is considered
+transient.
+
+
+Queue exclusivity enforcement
+-----------------------------
+
+In previous versions of RabbitMQ, an exclusive queue could still be
+accessed by other connections for (un)binding or basic.get. This is now
+not permitted.
+
+Also, an exclusive queue would continue to exist for a short time after
+the connection was closed. It's now deleted while the connection is
+being closed (assuming that's happening in an orderly manner).
diff --git a/release-notes/README-1.8.1.txt b/release-notes/README-1.8.1.txt
new file mode 100644
index 0000000000..382d758a36
--- /dev/null
+++ b/release-notes/README-1.8.1.txt
@@ -0,0 +1,69 @@
+Release: RabbitMQ 1.8.1
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+ - unbinding from an auto-delete exchange produced an error
+ - the message count reported when declaring a queue was incorrect under rare
+ conditions
+ - it was possible for a channel.close_ok message to get lost in rare
+ circumstances
+
+enhancements
+------------
+ - clustering produces better error messages when clustering fails
+ - the AMQP 0.8 specification permitted a rare case of deadlock while closing
+ channels and connections. AMQP 0.9.1 forbids this condition and RabbitMQ now
+ implements the correction
+ - the AMQP basic.recover method is now synchronous by default - the
+ asynchronous version is still available, but deprecated
+ - the AMQP basic.recover method is now permitted in transacted channels, where
+ this was previously forbidden
+ - maximum AMQP frame size is specified more rigorously in AMQP 0.9.1 - RabbitMQ
+ now enforces the negotiated maximum frame size
+ - AMQP 0.9.1 guidance on error constants is now followed more closely and
+ 0.9.1 error codes are produced in more situations
+ - SSL compatibility under R14A has been improved
+
+java client
+-----------
+enhancements
+ - the API can now report on channel flow events
+ - better handling of unsolicited messages and unknown consumer tags, by adding
+ a default consumer
+ - documentation enhancements around the use of AMQConnection
+
+.net client
+-----------
+enhancements
+ - better handling of unsolicited messages and unknown consumer tags, by adding
+ a default consumer
+ - documentation enhancements around the use of ConnectionFactory
+
+building & packaging
+--------------------
+bug fixes
+ - fix permission errors for commandline utilities in MacPorts
+
+enhancements
+ - compiles under Erlang R14A
+ - builds using GNU Make 3.80 - previously version 3.81 was required
+ - error output when using old versions of GNU Make has been added
+ - builds under RHEL5 and distributions with xmlto version 0.0.18
+ - better type-checking, making use of recent features in Dialyzer
+
+Upgrading
+=========
+The database schema has not changed since version 1.8.0, so user accounts,
+durable exchanges and queues, and persistent messages will all be retained
+during the upgrade.
+
+If, however, you are upgrading from a release prior to 1.8.0, when the
+RabbitMQ server detects the presence of an old database, it moves it to a
+backup location, creates a fresh, empty database, and logs a warning. If
+your RabbitMQ installation contains important data then we recommend you
+contact rabbitmq-sales@pivotal.io for assistance with the upgrade.
+
diff --git a/release-notes/README-2.0.0.txt b/release-notes/README-2.0.0.txt
new file mode 100644
index 0000000000..8128240016
--- /dev/null
+++ b/release-notes/README-2.0.0.txt
@@ -0,0 +1,98 @@
+Release: RabbitMQ 2.0.0
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+- correct file descriptor limits on Windows, preventing crashes due to
+ exceeding the limit
+- fix bug that resulted in 'rabbitmqctl status' reporting disk nodes
+ as ram nodes
+- tx.commit no longer fails when participating queues are deleted
+ during the lifetime of the transaction
+- more robust logic to detect changes in the data layout between
+ rabbit versions and thus guard against upgrade failures
+
+enhancements
+- new persister
+ - the volume of messages rabbit can hold on to is bounded by disk
+ space (in previous versions it was bounded by memory)
+ - rabbit optimises memory usage by paging messages out to / in from
+ disk as needed
+ - consistently high performance regardless of retained message
+ volume (previous versions would slow down considerably as the
+ persisted message volume grew)
+ - consistently fast startup regardless of volume of persisted data
+ (previous versions would require time proportional to the amount
+ of data)
+ - better performance for concurrent transactions (in previous
+ versions the rate at which queues could handle transactions
+ involving persistent messages was fixed)
+- implement AMQP 0-9-1, in addition to 0-8 - see
+ https://www.rabbitmq.com/specification.html
+- instrumentation for asynchronous statistics events, emitting more
+ stats than currently available and laying the foundation for
+ monitoring a busy broker without crippling performance -
+ see https://blog.rabbitmq.com/posts/2010/08/management-monitoring-and-statistics/
+- more effective flow control mechanism that does not require
+ cooperation from clients and reacts quickly to prevent the broker
+ from exhausting memory - see https://www.rabbitmq.com/extensions.html#memsup
+- implement basic.reject - see
+https://blog.rabbitmq.com/posts/2010/08/well-ill-let-you-go-basicreject-in-rabbitmq/
+- simplify plugin activation by moving it into the server startup
+- permit upgrades of Erlang w/o breaking rabbit servers with activated
+ plugins
+- introduce support for queue leases - see https://www.rabbitmq.com/extensions.html#queue-leases
+- improve the setting of permissions, making it easier to use and
+ introducing a way to grant no permissions at all - see https://www.rabbitmq.com/admin-guide.html#management
+- delete exclusive queues synchronously on server-initiated connection
+ close (rather than just client-initiated)
+
+java client
+-----------
+bug fixes
+- prevent spurious timeout exceptions that theoretically could have
+ arisen due to spurious thread wake-ups
+
+enhancements
+- switch to AMQP 0-9-1 - see
+ https://www.rabbitmq.com/specification.html
+
+.net client
+-----------
+bug fixes
+- fix bug that caused incorrect responses to server-issued
+ channel.flow commands, which in turn resulted in connections getting
+ closed with an error.
+- make SSL connections work in Mono
+
+enhancements
+- implement AMQP 0-9-1, in addition to 0-8 and 0-9 - see
+ https://www.rabbitmq.com/specification.html
+- simplify the Subscription class and make it more versatile
+- improve documentation
+
+building & packaging
+--------------------
+bug fixes
+- correct location of rabbitmq.config file under macports - it now
+ lives in /opt/local/etc/rabbitmq/
+- "make docs_all" now works on more platforms
+
+enhancements
+- portable, binary plugin releases to simplify plugin installation -
+ see https://www.rabbitmq.com/plugins.html
+- automatically create plugin directory, to streamline plugin
+ installation
+
+Upgrading
+=========
+The database schema and the format in which persistent messages are
+stored have both changed since the last release (1.8.1). When
+starting, the RabbitMQ server will detect the existence of an old
+database and will move it to a backup location, before creating a
+fresh, empty database, and will log a warning. If your RabbitMQ
+installation contains important data then we recommend you contact
+rabbitmq-sales@pivotal.io for assistance with the upgrade.
diff --git a/release-notes/README-2.1.0.txt b/release-notes/README-2.1.0.txt
new file mode 100644
index 0000000000..0a5e70d45c
--- /dev/null
+++ b/release-notes/README-2.1.0.txt
@@ -0,0 +1,53 @@
+Release: RabbitMQ 2.1.0
+
+Release Highlights
+==================
+
+server
+------
+enhancements
+ - detects incorrect nodename in rabbitmq_multi
+ - extend supported timeout types for queue lease, see
+ https://www.rabbitmq.com/extensions.html#queue-leases
+ - print plugin versions on startup
+ - extend permissions system - add 'is_admin' field; useful for
+ the management plugin
+ - queue.declare and queue.delete should always work quickly, even
+ if the broker is busy
+
+bug fixes
+ - the 'client' permission scope wasn't working correctly
+ - in the presence of the 'verify_peer' option the broker will no
+ longer accept self-signed ssl certificates
+ - fixed sasl logging to terminal
+ - fixed 'rabbitmq_multi stop_all' on freebsd
+ - fixed race condition which might result in a message being lost when
+ the broker is quitting
+ - fixed race condition in heartbeat handling, which could result
+ in a connection being dropped without logging the reason for that
+
+java client
+-----------
+enhancements
+ - basic.consume 'filter' argument is now called 'arguments'
+ - dropped Channel.queuePurge/2 method
+ - added --help flag to MulticastMain
+
+.net client
+-----------
+enhancements
+ - basic.consume 'filter' argument is now called 'arguments'
+
+bug fixes
+ - fixed race condition in synchronous basic.recover
+ - codegen was generating incorrect code for nowait parameter
+
+
+Upgrading
+=========
+The database schema has changed since the last release (2.0.0). When
+starting, the RabbitMQ server will detect the existence of an old
+database and will move it to a backup location, before creating a
+fresh, empty database, and will log a warning. If your RabbitMQ
+installation contains important data then we recommend you contact
+rabbitmq-sales@pivotal.io for assistance with the upgrade.
diff --git a/release-notes/README-2.1.1.txt b/release-notes/README-2.1.1.txt
new file mode 100644
index 0000000000..4203f8905a
--- /dev/null
+++ b/release-notes/README-2.1.1.txt
@@ -0,0 +1,60 @@
+Release: RabbitMQ 2.1.1
+
+Release Highlights
+==================
+
+server
+------
+enhancements
+ - add exchange to exchange bindings. See
+ www.rabbitmq.com/extensions.html#exchange-bindings. Blog post forthcoming.
+ - reduce disk use when creating and deleting queues
+ - faster connection termination and queue deletion for connections
+ that use exclusive queues
+ - miscellaneous persister performance improvements
+ - extend queue leases on declaration
+ - add 'client_flow' channel info item for 'rabbitmqctl list_channels'
+ - add SSL information for 'rabbitmqctl list_connections'
+ - enforce restrictions regarding the default exchange
+ - add version information to database - for future upgrades
+ - better memory detection on AIX
+
+bug fixes
+ - fix a bug that could kill rabbit after a queue.purge
+ - fix a bug which could cause 'rabbitmqctl list_connections' to crash
+ some of the connection handlers
+ - reduce per-queue memory back to expected levels
+ - don't ignore channel.flow when there were no consumers
+ - fix some bugs that caused too few or too many stats to be emitted
+
+java client
+-----------
+bug fixes
+ - eliminate the possibility of deadlock when opening channels at the
+ same times as others are being closed
+ - move heartbeat sender into a separate thread to ensure that missing
+ heartbeats are detected promptly in all cases
+
+.net client
+-----------
+enhancements
+ - added a means to detect when channel.flow is active
+
+building & packaging
+--------------------
+enhancements
+ - better use of dialyzer: report more warnings
+ - better dependency handling in server build, reducing rebuilds
+
+
+Upgrading
+=========
+The database schema has not changed since version 2.1.0, so user accounts,
+durable exchanges and queues, and persistent messages will all be retained
+during the upgrade.
+
+If, however, you are upgrading from a release prior to 2.1.0, when the
+RabbitMQ server detects the presence of an old database, it moves it to a
+backup location, creates a fresh, empty database, and logs a warning. If
+your RabbitMQ installation contains important data then we recommend you
+contact rabbitmq-sales@pivotal.io for assistance with the upgrade.
diff --git a/release-notes/README-2.2.0.txt b/release-notes/README-2.2.0.txt
new file mode 100644
index 0000000000..88231a0f20
--- /dev/null
+++ b/release-notes/README-2.2.0.txt
@@ -0,0 +1,106 @@
+Release: RabbitMQ 2.2.0
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+- fix issue that causes cross-cluster communication to deadlock after
+ sustained cluster activity
+- fix queue memory leak when using the management plugin or other
+ consumers of queue statistics
+- brokers started with rabbitmq_multi.bat are now restartable
+- clustering reset no longer destroys installed plugins
+- fix race condition between queue declaration and connection
+ termination that causes spurious noproc errors to appear in the log
+- fix memory leak when long-running channels consume and cancel on
+ many queues
+- queue.declare and exchange.declare raise precondition_failed rather
+ than not_allowed when attempting to redeclare a queue or exchange
+ with parameters different than those currently known to the broker
+
+enhancements
+- automatic, lossless upgrade to new versions of RabbitMQ
+ (when not clustered)
+- support per-queue message TTL (sketched after this list). See:
+ https://www.rabbitmq.com/extensions.html#queue-ttl
+- the volume of pending acks is now bounded by disk space rather
+ than by memory
+- store passwords as hashes
+- allow server properties to be configured in the RabbitMQ config file
+- SSL connections are listed as such by rabbitmqctl
+- simplify permission configuration by removing the client
+ permission scope
+- improve performance of message routing
+- removed support for basic.recover with requeue=false
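+
+For illustration, declaring a queue with the per-queue message TTL
+mentioned above. This is a sketch in terms of the present-day Java
+client API; the queue name and the 60 second TTL are arbitrary, while
+the "x-message-ttl" argument (in milliseconds) is the documented
+extension:
+
+    import java.util.HashMap;
+    import java.util.Map;
+
+    import com.rabbitmq.client.Channel;
+    import com.rabbitmq.client.Connection;
+    import com.rabbitmq.client.ConnectionFactory;
+
+    public class QueueTtlDemo {
+        public static void main(String[] args) throws Exception {
+            ConnectionFactory factory = new ConnectionFactory();
+            factory.setHost("localhost");
+            Connection conn = factory.newConnection();
+            Channel ch = conn.createChannel();
+            // Messages left in this queue for more than 60 seconds
+            // are discarded by the broker.
+            Map<String, Object> queueArgs = new HashMap<>();
+            queueArgs.put("x-message-ttl", 60000);
+            ch.queueDeclare("events.with-ttl", true, false, false, queueArgs);
+            conn.close();
+        }
+    }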
+
+java client
+-----------
+enhancements
+- 'noAck' argument renamed to 'autoAck'
+- add PossibleAuthenticationFailureException and
+ ProtocolVersionMismatchException to match up with the .net client.
+
+.net client
+-----------
+bug fixes
+- fix race condition that can cause spurious SocketErrors to be thrown
+ during connection.close
+- fix WCF support to use 'amq.direct' exchange instead of default
+ exchange
+
+management plugin
+-----------------
+bug fixes
+- fix issue preventing user authentication when using Safari
+- backing queue stats now display correctly
+
+enhancements
+- the management plugin is now fully cluster-aware
+- show detailed incoming/outgoing message rates per channel, exchange
+ and queue
+- show active/idle state for channels and queues
+- show node uptime, rabbit version, erlang version and total queued
+ messages
+- add tab completion to rabbitmqadmin
+
+STOMP plugin
+------------
+enhancements
+- overhaul the destination selection process to use only the
+ 'destination' header
+- add support for /queue and /topic destinations
+- remove support for custom 'routing_key' and 'exchange' headers and
+ introduce /exchange/<name>/<key> destination type
+- the order of SEND and SUBSCRIBE frames is no longer important
+- STOMP listeners show up as such in the management plugin
+
+build and packaging
+-------------------
+bug fixes
+- remove build-time dependency on OTP source to allow users to
+ build without the OTP source present
+- eliminate all valid dialyzer errors
+
+enhancements
+- include pre-compiled man pages in the MacPorts distribution,
+ drastically reducing the number of dependencies required.
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply
+install the new version. All configuration and persistent message data
+is retained.
+
+To upgrade a non-clustered RabbitMQ from release 2.1.0, first upgrade
+to 2.1.1 (which retains all data), and then to the current version as
+described above.
+
+To upgrade a clustered RabbitMQ or from releases prior to 2.1.0, if
+the RabbitMQ installation does not contain any important data then
+simply install the new version. RabbitMQ will move the existing data
+to a backup location before creating a fresh, empty database. A
+warning is recorded in the logs. If your RabbitMQ installation
+contains important data then we recommend you contact
+rabbitmq-sales@pivotal.io for assistance with the upgrade.
diff --git a/release-notes/README-2.3.0.txt b/release-notes/README-2.3.0.txt
new file mode 100644
index 0000000000..54de49e4c4
--- /dev/null
+++ b/release-notes/README-2.3.0.txt
@@ -0,0 +1,163 @@
+Release: RabbitMQ 2.3.0
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+- prevent message store from deleting open files, which led to eaccess
+ errors on Windows and a potential disk space leak
+- fix various bugs in delegate that led to poor cluster performance
+ and to nodes blocking when other nodes are down
+- ensure regular flushes of queue index data to disk resulting in better
+ data retention in the event of a broker failure
+- prevent queues from hibernating indefinitely on startup under memory
+ pressure
+- prevent message store in-memory cache from becoming too large
+- prevent infinite loop after certain types of queue process crash,
+ and prevent such a crash during queue deletion on Erlang R12B3
+- make SASL PLAIN parser more robust
+- fix startup scripts to work on Solaris 10
+- prevent delivery of large messages to consumers from blocking deliveries
+ on other channels
+- basic.recover affects prefetch count
+- prevent channel crash on basic.recover to a deleted queue
+- correct serialisation of PIDs in clusters, without which the
+ management plug-in failed to display some detailed stats
+- prevent potential crash of queues in clusters in the event of
+ improbable ordering of events upon the death of a channel
+- add missing failure diagnostics on rabbitmqctl list_consumers
+- fix truncated failure diagnostics for rabbitmqctl under Windows
+
+enhancements
+- add confirm mode - an extension to the AMQP 0-9-1 spec allowing
+ clients to receive streaming receipt confirmations for the messages
+ they publish. See
+ https://www.rabbitmq.com/extensions.html#confirms for more
+ information, and the sketch after this list.
+- add a basic.nack method. See
+ https://www.rabbitmq.com/extensions.html#negative-acknowledgements
+- add an unforgeable user-id header. See
+ https://www.rabbitmq.com/extensions.html#validated-user-id
+- pluggable SASL authentication mechanisms, and a new plugin
+ to authenticate using SSL (see below)
+- pluggable authentication / authorisation backends, and a new plugin
+ to authenticate and authorise using LDAP (see below)
+- internal exchanges (cannot be published to directly,
+ typically used with exchange-to-exchange bindings)
+- users can be made unable to log in with a password
+- IPv6 support. RabbitMQ will listen on IPv4 and IPv6 by default.
+- list SSL algorithm information in rabbitmqctl
+- improved diagnostic error messages in common startup error cases
+- allow node name to be specified without a host
+- persister optimisation - eliminate unnecessary pending actions upon
+ queue deletion (pseudo pipeline flush)
+- improve pluggable exchange type API to allow better handling of race
+ conditions
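+
+For illustration, a publisher using the confirm mode extension listed
+above. This is a sketch in terms of the present-day Java client API;
+the queue name is arbitrary:
+
+    import java.nio.charset.StandardCharsets;
+
+    import com.rabbitmq.client.Channel;
+    import com.rabbitmq.client.Connection;
+    import com.rabbitmq.client.ConnectionFactory;
+    import com.rabbitmq.client.MessageProperties;
+
+    public class ConfirmsDemo {
+        public static void main(String[] args) throws Exception {
+            ConnectionFactory factory = new ConnectionFactory();
+            factory.setHost("localhost");
+            Connection conn = factory.newConnection();
+            Channel ch = conn.createChannel();
+            ch.confirmSelect();  // put the channel into confirm mode
+            ch.queueDeclare("jobs", true, false, false, null);
+            ch.basicPublish("", "jobs",
+                            MessageProperties.PERSISTENT_TEXT_PLAIN,
+                            "payload".getBytes(StandardCharsets.UTF_8));
+            // Block until the broker confirms everything published so far
+            // on this channel, or fail if anything was nacked.
+            ch.waitForConfirmsOrDie(5000);
+            conn.close();
+        }
+    }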
+
+
+java client
+-----------
+bug fixes
+- fix for compilation under Java 1.5
+- remove support for Java 1.4
+
+enhancements
+- confirm mode
+- pluggable SASL authentication mechanisms
+- include generated source in Maven source bundle
+
+
+.net client
+-----------
+bug fixes
+- noAck set correctly in Subscription class
+
+enhancements
+- confirm mode
+- pluggable SASL authentication mechanisms
+- API tidied up to more closely resemble that of the Java client
+- distribute XML documentation with binary release
+
+
+management plugin
+-----------------
+bug fixes
+- race condition that can lead to stats db failing on queue deletion
+- closing connections on remote cluster nodes
+- fix web UI memory leaks in Chrome
+- mitigate web UI memory leaks in all browsers
+
+enhancements
+- command line tool rabbitmqadmin can display overview statistics and
+ filter columns
+- context-sensitive help
+- web UI state is persistent
+- display statistics for confirms
+- API: empty fields can be omitted on PUT
+- no longer depends on the crypto application, simplifying installation
+ for some users
+
+
+STOMP plugin
+------------
+bug fixes
+- plug channel leak on UNSUBSCRIBE
+- fix breakage of SEND after UNSUBSCRIBE
+- gracefully handle SUBSCRIBE to non-existent exchange
+- correct semantics of UNSUBSCRIBE receipts
+
+enhancements
+- updates to support the draft STOMP 1.1 spec
+- major refactoring to use OTP behaviours
+- enhanced and fixed examples
+- IPv6 support
+
+
+build and packaging
+-------------------
+
+enhancements
+- Windows bundle now includes Erlang R14B01
+
+
+shovel plugin
+-------------
+bug fixes
+- close client connections properly if failure occurs during startup
+
+enhancements
+- allow specification of heartbeat, frame_max and channel_max in
+ connection URI
+
+
+ssl authentication mechanism plugin
+-----------------------------------
+Experimental plugin allowing clients to authenticate with the SASL
+EXTERNAL mechanism and client SSL certificates. A password is not
+required.
+
+
+ldap authentication backend plugin
+----------------------------------
+Experimental plugin allowing the authentication / authorisation
+database to be hosted in an LDAP server.
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply
+install the new version. All configuration and persistent message data
+is retained.
+
+To upgrade a non-clustered RabbitMQ from release 2.1.0, first upgrade
+to 2.1.1 (which retains all data), and then to the current version as
+described above.
+
+To upgrade a clustered RabbitMQ or from releases prior to 2.1.0, if
+the RabbitMQ installation does not contain any important data then
+simply install the new version. RabbitMQ will move the existing data
+to a backup location before creating a fresh, empty database. A
+warning is recorded in the logs. If your RabbitMQ installation
+contains important data then we recommend you contact
+rabbitmq-sales@pivotal.io for assistance with the upgrade.
diff --git a/release-notes/README-2.3.1.txt b/release-notes/README-2.3.1.txt
new file mode 100644
index 0000000000..7d62429044
--- /dev/null
+++ b/release-notes/README-2.3.1.txt
@@ -0,0 +1,36 @@
+Release: RabbitMQ 2.3.1
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+- fix critical bug causing queue processes to sometimes crash when
+ using transactions or confirms
+- improve error message when failing to declare a queue or exchange due
+ to argument equivalence
+
+java client
+-----------
+bug fixes
+- fix race condition closing a channel
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply
+install the new version. All configuration and persistent message data
+is retained.
+
+To upgrade a non-clustered RabbitMQ from release 2.1.0, first upgrade
+to 2.1.1 (which retains all data), and then to the current version as
+described above.
+
+To upgrade a clustered RabbitMQ or from releases prior to 2.1.0, if
+the RabbitMQ installation does not contain any important data then
+simply install the new version. RabbitMQ will move the existing data
+to a backup location before creating a fresh, empty database. A
+warning is recorded in the logs. If your RabbitMQ installation
+contains important data then we recommend you contact
+rabbitmq-sales@pivotal.io for assistance with the upgrade.
diff --git a/release-notes/README-2.4.0.txt b/release-notes/README-2.4.0.txt
new file mode 100644
index 0000000000..0724be09d4
--- /dev/null
+++ b/release-notes/README-2.4.0.txt
@@ -0,0 +1,143 @@
+Release: RabbitMQ 2.4.0
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+- in a cluster, don't fail with an internal-error when re-declaring a
+ queue on a connection to a node other than the queue's "home" node
+- in a cluster, report a not-found error instead of internal-error
+ when attempting to re-declare a durable queue whose node is
+ unavailable
+- do not ignore the RABBITMQ_LOG_BASE variable on Windows
+- fix a bug causing SSL connections to die on Erlang prior to R14
+ when using "rabbitmqctl list_connections" with the SSL options
+- various minor fixes
+
+enhancements
+- greatly speed up routing for topic exchanges with many bindings
+- propagate memory alarms across cluster, thus reacting better to
+ memory pressure on individual nodes.
+- sender-selected distribution (i.e. add support for the CC and BCC
+ headers). See
+ https://www.rabbitmq.com/extensions.html#sender-selected-distribution
+ for more information, and the sketch after this list.
+- server-side consumer cancellation notifications. See
+ https://www.rabbitmq.com/extensions.html#consumer-cancel-notify
+ for more information.
+- have the server present its AMQP extensions in a "capabilities"
+ field in server-properties. See
+ https://www.rabbitmq.com/extensions.html#capabilities
+ for more information.
+- determine file descriptor limits accurately on Windows, usually
+ resulting in much higher limits than previously, which allows more
+ connections and improves performance
+- indicate in the logs when the file descriptor limit has been reached
+ (causing the server to not accept any further connections)
+- allow SASL mechanisms to veto themselves based on socket type
+- rename rabbitmq.conf to rabbitmq-env.conf, to avoid confusion with
+ rabbitmq.config
+- improve performance of publisher confirms
+- various other minor enhancements and performance improvements
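+
+For illustration, a publish that uses the sender-selected distribution
+(CC header) feature listed above. This is a sketch in terms of the
+present-day Java client API; the exchange and routing keys are
+arbitrary, while the "CC" header itself is the documented extension:
+
+    import java.nio.charset.StandardCharsets;
+    import java.util.Arrays;
+    import java.util.HashMap;
+    import java.util.Map;
+
+    import com.rabbitmq.client.AMQP;
+    import com.rabbitmq.client.Channel;
+    import com.rabbitmq.client.Connection;
+    import com.rabbitmq.client.ConnectionFactory;
+
+    public class SenderSelectedDemo {
+        public static void main(String[] args) throws Exception {
+            ConnectionFactory factory = new ConnectionFactory();
+            factory.setHost("localhost");
+            Connection conn = factory.newConnection();
+            Channel ch = conn.createChannel();
+            // One publish is routed with the routing key given to
+            // basicPublish plus every key listed in the CC header.
+            Map<String, Object> headers = new HashMap<>();
+            headers.put("CC", Arrays.asList("audit", "metrics"));
+            AMQP.BasicProperties props = new AMQP.BasicProperties.Builder()
+                    .headers(headers).build();
+            ch.basicPublish("amq.direct", "billing", props,
+                            "event".getBytes(StandardCharsets.UTF_8));
+            conn.close();
+        }
+    }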
+
+
+java client
+-----------
+bug fixes
+- prevent stack overflow when connections have large numbers of channels
+- do not require a working reverse DNS when establishing connections
+
+enhancements
+- ConnectionFactory accepts a connection timeout parameter
+- allow prioritisation of SASL mechanisms
+- support for server-side consumer cancellation notifications
+- have the client present its AMQP extensions in a "capabilities"
+ field in client-properties
+- rename ReturnListener.handleBasicReturn to handleReturn
+
+
+.net client
+-----------
+bug fixes
+- WCF bindings specified in configuration files are no longer ignored
+
+enhancements
+- support for server-side consumer cancellation notifications
+- have the client present its AMQP extensions in a "capabilities"
+ field in client-properties
+- support IPv6
+
+
+management plugin
+-----------------
+bug fixes
+- hide passwords in the web UI
+- fix rabbitmqadmin's handling of Unicode strings
+
+enhancements
+- present the managed socket and open file counts and respective limits
+- better memory usage reporting for hibernating queues
+- better support for serving the web interface through a proxy
+- allow users to choose which node a queue is declared on
+- show memory alarm states for nodes
+- show statistics for basic.returns
+- publish/receive messages via HTTP; this is intended for testing /
+ learning / debugging, not as a general solution for HTTP messaging
+
+
+STOMP plugin
+------------
+bug fixes
+- prevent crash when publishing from STOMP, but subscribing from
+ non-STOMP
+- correctly process publishes spanning multiple network packets
+- do not crash when publishing with undefined headers
+- receipts for SEND frames wait on confirms
+- do not issue a DISCONNECT with receipt when a clean shutdown has
+ *not* occurred
+
+enhancements
+- add documentation. See https://www.rabbitmq.com/stomp.html
+- significant performance improvements
+- extend flow control so that back pressure is exerted through the STOMP
+ gateway, preventing STOMP clients from overloading the server
+- support for the "persistent" header
+- support for multiple NACK
+
+
+SSL authentication mechanism plugin
+-----------------------------------
+enhancements
+- only offer this mechanism on SSL connections
+
+
+build and packaging
+-------------------
+enhancements
+- Windows installer for the broker
+- remove the rabbitmq-multi script in order to simplify startup and
+ improve error reporting
+- add the "cond-restart" and "try-restart" options to the init script
+- specify runlevels in the rabbitmq-server.init script
+- make the java client jar an OSGi bundle
+- Debian package only depends on erlang-nox
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply
+install the new version. All configuration and persistent message data
+is retained.
+
+To upgrade a non-clustered RabbitMQ from release 2.1.0, first upgrade
+to 2.1.1 (which retains all data), and then to the current version as
+described above.
+
+To upgrade a clustered RabbitMQ or from releases prior to 2.1.0, if
+the RabbitMQ installation does not contain any important data then
+simply install the new version. RabbitMQ will move the existing data
+to a backup location before creating a fresh, empty database. A
+warning is recorded in the logs. If your RabbitMQ installation
+contains important data then we recommend you contact
+rabbitmq-sales@pivotal.io for assistance with the upgrade.
diff --git a/release-notes/README-2.4.1.txt b/release-notes/README-2.4.1.txt
new file mode 100644
index 0000000000..bacad447d9
--- /dev/null
+++ b/release-notes/README-2.4.1.txt
@@ -0,0 +1,87 @@
+Release: RabbitMQ 2.4.1
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+- fix breakage of upgrades when durable queues are present or
+ following a non-clean shutdown
+- prevent "rabbitmqctl wait" from waiting forever in certain
+ circumstances
+- the broker can be run on Erlang R12B-3 again
+- some other small bug fixes
+
+enhancements
+- upgrades in clusters. See
+ https://www.rabbitmq.com/clustering.html#upgrading
+- improve memory usage when dealing with persistent messages waiting
+ on acks from consumers
+- better error reporting for some startup problems
+- add timestamp to events published to the amq.rabbit.log exchange
+
+
+java client
+-----------
+enhancements
+- remove dependency on javax.security.sasl, thus improving
+ compatibility with Android and WebSphere
+
+
+.net client
+-----------
+bug fixes
+- the client can be built on .NET 2.0 again
+
+
+management plugin
+-----------------
+bug fixes
+- fix issue that would cause non-admin users to be repeatedly prompted
+ for their password when viewing the queues page
+
+
+STOMP plugin
+------------
+bug fixes
+- the plugin works on Erlang R12 again
+
+
+SSL authentication mechanism plugin
+-----------------------------------
+bug fixes
+- accept SSL certificates with fields containing characters
+ outside ASN.1 PrintableString (e.g. underscores)
+
+
+build and packaging
+-------------------
+bug fixes
+- the OCF script works correctly when specifying an alternative
+ configuration file
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply
+install the new version. All configuration and persistent message data
+is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install
+the new version on all the nodes and follow these instructions:
+ https://www.rabbitmq.com/clustering.html#upgrading
+All configuration and persistent message data is retained.
+
+To upgrade a non-clustered RabbitMQ from release 2.1.0, first upgrade
+to 2.1.1 (which retains all data), and then to the current version as
+described above.
+
+To upgrade a clustered RabbitMQ prior to 2.1.1 or a stand-alone broker
+from releases prior to 2.1.0, if the RabbitMQ installation does not
+contain any important data then simply install the new
+version. RabbitMQ will move the existing data to a backup location
+before creating a fresh, empty database. A warning is recorded in the
+logs. If your RabbitMQ installation contains important data then we
+recommend you contact rabbitmq-sales@pivotal.io for assistance with the
+upgrade.
diff --git a/release-notes/README-2.5.0.txt b/release-notes/README-2.5.0.txt
new file mode 100644
index 0000000000..c6495020c4
--- /dev/null
+++ b/release-notes/README-2.5.0.txt
@@ -0,0 +1,135 @@
+Release: RabbitMQ 2.5.0
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+- reduce complexity of recovery, significantly improving startup times
+ when there are large numbers of exchanges or bindings
+- recover bindings between durable queues and non-durable exchanges
+ on restart of individual cluster nodes
+- do not read messages off disk in the x-message-ttl logic. This could
+ severely impact performance when many queues expired messages
+ near-simultaneously.
+- resolve a timer issue that could impact performance when under high
+ load and memory pressure
+- make source code compilable with latest Erlang release (R14B03)
+- assert x-message-ttl equivalence on queue redeclaration
+
+enhancements
+- tracing facility for incoming and outgoing messages - see
+ https://www.rabbitmq.com/firehose.html
+- optionally serialise events for exchange types
+- detect available memory on OpenBSD
+- add Windows service description
+- improve inbound network performance
+- improve routing performance
+- new rabbitmqctl commands:
+ report - comprehensive report of server status for support purposes
+ environment - display application environment (such as config vars)
+ cluster_status - display cluster status (formerly part of 'status')
+
+java client
+-----------
+bug fixes
+- compile under Java 1.5 (again)
+
+enhancements
+- experimental API employing command objects and builders. See
+ http://hg.rabbitmq.com/rabbitmq-java-client/file/default/test/src/com/rabbitmq/client/test/AMQBuilderApiTest.java
+ for some examples. Feedback welcome!
+
+.net client
+-----------
+bug fixes
+- make method id of 'exchange.unbind-ok' match definition in the
+ broker, so the client lib can recognise that command.
+- WCF bindings specified in configuration files are no longer ignored
+
+enhancements
+- allow larger than default message sizes in WCF
+- updated documentation
+
+management plugin
+-----------------
+bug fixes
+- handle race between queue creation/deletion and stats reporting that
+ could result in errors in the latter, particularly when there are
+ large numbers of queues and/or high churn
+- handle race when starting the management plug-in on multiple cluster
+ nodes, which in some rare (but quite reproducible) circumstances
+ could cause some of the brokers to crash
+- remove duplicate 'messages' entry from queue stats JSON
+- make binding arguments optional in the HTTP API for binding creation
+- correct error handling in the HTTP API for binding creation
+- prevent spurious failures of aliveness test
+
+enhancements
+- performance improvements which significantly reduce the cost of
+ stats reporting, allowing the management plug-in to cope with much
+ higher numbers of queues, bindings, etc.
+- issue an alert when a configured user cannot access any vhost or a
+ vhost has no users
+- allow choice of which stats/info items to return in the HTTP API
+- include protocol adapter and direct connections in API and UI
+- full STOMP SSL information displayed
+
+rabbitmq-mochiweb
+-----------------
+
+enhancements
+- more flexible configuration permitting different services to run on
+ different ports, SSL support and interface-based restrictions. See
+ https://www.rabbitmq.com/mochiweb.html for more details. Note that by
+ default the JSON-RPC channel plugin will now listen on port 55670.
+
+STOMP plugin
+------------
+enhancements
+- support connections over SSL
+bug fixes
+- correct spelling of 'heart-beat' header
+- don't drop messages if producer hangs up quickly
+
+build and packaging
+-------------------
+bug fixes
+- fix breakage in /etc/init.d/rabbitmq-server rotate-logs command
+
+enhancements
+- plug-in build system: support the declaration of inter-plugin
+ dependencies, making development of plugins much easier.
+ Inter-module dependencies are calculated automatically for all
+ plugins. Note that some plugins and applications have been
+ renamed for consistency, which may require changes to any existing
+ `rabbitmq.config` to match.
+- do not require access to www.docbook.org when building the server
+ w/o docbook installed
+- get rid of some warnings in the .net client build
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply
+install the new version. All configuration and persistent message data
+is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install
+the new version on all the nodes and follow these instructions:
+ https://www.rabbitmq.com/clustering.html#upgrading
+All configuration and persistent message data is retained.
+
+To upgrade a non-clustered RabbitMQ from release 2.1.0, first upgrade
+to 2.1.1 (which retains all data), and then to the current version as
+described above.
+
+To upgrade a clustered RabbitMQ prior to 2.1.1 or a stand-alone broker
+from releases prior to 2.1.0, if the RabbitMQ installation does not
+contain any important data then simply install the new
+version. RabbitMQ will move the existing data to a backup location
+before creating a fresh, empty database. A warning is recorded in the
+logs. If your RabbitMQ installation contains important data then we
+recommend you contact rabbitmq-sales@pivotal.io for assistance with the
+upgrade.
diff --git a/release-notes/README-2.5.1.txt b/release-notes/README-2.5.1.txt
new file mode 100644
index 0000000000..442055a6cd
--- /dev/null
+++ b/release-notes/README-2.5.1.txt
@@ -0,0 +1,34 @@
+Release: RabbitMQ 2.5.1
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+- fix bug preventing upgrades from 2.1.1 and 2.2.0.
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply
+install the new version. All configuration and persistent message data
+is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install
+the new version on all the nodes and follow these instructions:
+ https://www.rabbitmq.com/clustering.html#upgrading
+All configuration and persistent message data is retained.
+
+To upgrade a non-clustered RabbitMQ from release 2.1.0, first upgrade
+to 2.1.1 (which retains all data), and then to the current version as
+described above.
+
+To upgrade a clustered RabbitMQ prior to 2.1.1 or a stand-alone broker
+from releases prior to 2.1.0, if the RabbitMQ installation does not
+contain any important data then simply install the new
+version. RabbitMQ will move the existing data to a backup location
+before creating a fresh, empty database. A warning is recorded in the
+logs. If your RabbitMQ installation contains important data then we
+recommend you contact rabbitmq-sales@pivotal.io for assistance with the
+upgrade.
diff --git a/release-notes/README-2.6.0.txt b/release-notes/README-2.6.0.txt
new file mode 100644
index 0000000000..bcdb00cea6
--- /dev/null
+++ b/release-notes/README-2.6.0.txt
@@ -0,0 +1,181 @@
+Release: RabbitMQ 2.6.0
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+- upgrading from RabbitMQ 2.1.1 to any later release could break if
+ there were durable queues with persistent messages present
+- on very slow machines, starting rabbit via the supplied init scripts
+ could fail with a timeout
+- rabbit could fail to stop (when asked to do so) in the presence of
+ some plug-ins (e.g. shovel)
+- 'ram' nodes in a cluster could consume ever increasing amounts of
+ disk space
+- the presence of fast consumers on a queue could significantly delay
+ the addition of new consumers
+- when a client issued a tx.commit in one channel while simultaneously
+ deleting, in another channel, a durable queue with persistent messages
+ involved in that tx, rabbit could terminate with an error
+- when a client was using both basic.qos and channel.flow, the latter
+ would fail to re-enable message flow
+- when using 'confirm' mode, the deletion of queues could cause nacks
+ to be issued (incorrectly)
+- in extremely rare circumstances (never observed in the wild), a
+ queue with a per-queue message ttl could break during sudden changes
+ in rabbit memory usage
+
+enhancements
+- introduce active-active HA, with queues getting mirrored on nodes in
+ a cluster. See https://www.rabbitmq.com/ha.html
+- revamp the handling of AMQP's tx (transaction) class and clarify its
+ behaviour. See https://www.rabbitmq.com/specification.html#tx
+- replace the 'administrator' flag, as used by the management plugin,
+ with a more general 'user tags' mechanism. See
+ https://www.rabbitmq.com/man/rabbitmqctl.8.man.html#set_user_tags
+- do not require 'configure' permissions for passive queue/exchange
+ declaration
+- optimise message delivery on channels with a basic.qos
+ prefetch limit that are consuming from many queues
+- in 'rabbitmqctl list_channels', do not show the tx mode by default
+- when a cluster 'degrades' to only containing ram nodes - through
+ 'rabbitmqctl' actions or node failure - display/log a warning.
+- eliminate some spurious errors from the sasl log
+
+java client
+-----------
+enhancements
+- allow response timeouts to be specified in the {Json}RpcClient
+- introduce Channel.waitForConfirms() helper method, to make usage of
+ 'confirm' mode more convenient in common cases (see the example after
+ this list).
+- re-introduce default constructor for BasicProperties
+- cater for multiple listeners in all APIs
+- eradicate use of impl types in public APIs
+- make Tracer embeddable
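+
+A minimal sketch of the new helper (assumes an open
+com.rabbitmq.client.Channel "ch" and an existing queue named "jobs"):
+
+  ch.confirmSelect();            // put the channel into confirm mode
+  ch.basicPublish("", "jobs", null, "hello".getBytes());
+  // waitForConfirms() blocks until every message published since
+  // confirmSelect() is confirmed; returns false if any were nacked
+  boolean allAcked = ch.waitForConfirms();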
+
+.net client
+-----------
+enhancements
+- return the complete result of a QueueDeclare, rather than just the
+ queue name.
+- introduce IModel.WaitForConfirms() helper method, to make usage of
+ 'confirm' mode more convenient in common cases.
+- document 'confirms' in user guide
+
+management plugin
+-----------------
+bug fixes
+- listing/inspecting queues with exclusive consumers would trigger a
+ 500 error
+- lots of cookies would be created for recording implicit preferences
+- /api/aliveness-test could return a 500 error instead of 401
+- fix off-by-one error in used file descriptor count on some
+ platforms, and gracefully deal with absence of 'lsof' command
+
+enhancements
+- introduce a more advanced permissions model, allowing access to
+ information for monitoring purposes without the user needing to be a
+ rabbit administrator. See
+ https://www.rabbitmq.com/management.html#permissions
+- simplify changing the URL; shorter default URL
+- make the stats collection interval configurable, providing a way to
+ reduce the impact of stats collection on servers with many active
+ connections/channels/queues, and adjust the rate calculation
+ period. See
+ https://www.rabbitmq.com/management.html#statistics-interval
+- in a cluster, make the management stats db highly available; it
+ automatically fails over to a different node
+- get the management stats db to 'catch up' when it is started in a
+ cluster and there are existing nodes with queues etc
+- report file descriptor counts on more platforms
+- display message re-delivery rates
+- show (mochi)web listeners
+- handle encoding errors gracefully
+- add an extension mechanism - plug-ins for the management
+ plug-in. The first of these is rabbitmq-shovel-management, which
+ displays status information for the rabbitmq-shovel plugin
+- add fields for well-known arguments such as message TTL and alternate
+ exchange to queue and exchange forms
+
+
+mochiweb plugin
+---------------
+bug fixes
+- on slow machines a timeout could occur during startup
+
+enhancements
+- the '*' listener context no longer needs to be specified, thus
+ simplifying configuration
+
+auth-backend-ldap plugin
+------------------------
+enhancements
+- eliminate "undefined function" warning on startup
+
+shovel plugin
+-------------
+enhancements
+- support guaranteed delivery with 'confirm' mode
+- support the use of AMQP 0-9-1 methods in configuration
+
+STOMP plugin
+------------
+bug fixes
+- heartbeats were issued as a 0x0 byte instead of LF (0x0A)
+
+enhancements
+- provide a way to send & subscribe to existing AMQP queues
+- support temporary/reply queues
+- support durable subscriptions
+- set the default prefetch count for /queue destinations to
+ 'unlimited' instead of 1
+- optionally allow clients to omit the login & passcode in CONNECT
+ frames, using a configurable default user instead
+- optionally allow clients to omit the CONNECT frame altogether
+
+For more details on all the above see the STOMP plugin documentation
+at https://www.rabbitmq.com/stomp.html
+
+federation plugin
+-----------------
+First release of this plugin, which offers scalable publish /
+subscribe messaging across WANs and administrative domains. See
+http://hg.rabbitmq.com/rabbitmq-federation/file/default/README
+
+build and packaging
+-------------------
+bug fixes
+
+enhancements
+- make Windows Start menu entries more easily identifiable/searchable
+- stop producing the Windows bundle. The Windows installer has matured
+ sufficiently to take its place.
+- employ the same convention for plugin app source files as rebar
+- clean up some xref warnings in the plugin build
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply
+install the new version. All configuration and persistent message data
+is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install
+the new version on all the nodes and follow these instructions:
+ https://www.rabbitmq.com/clustering.html#upgrading
+All configuration and persistent message data is retained.
+
+To upgrade a non-clustered RabbitMQ from release 2.1.0, first upgrade
+to 2.1.1 (which retains all data), and then to the current version as
+described above.
+
+To upgrade a clustered RabbitMQ prior to 2.1.1 or a stand-alone broker
+from releases prior to 2.1.0, if the RabbitMQ installation does not
+contain any important data then simply install the new
+version. RabbitMQ will move the existing data to a backup location
+before creating a fresh, empty database. A warning is recorded in the
+logs. If your RabbitMQ installation contains important data then we
+recommend you contact rabbitmq-sales@pivotal.io for assistance with the
+upgrade.
diff --git a/release-notes/README-2.6.1.txt b/release-notes/README-2.6.1.txt
new file mode 100644
index 0000000000..c87e93be0f
--- /dev/null
+++ b/release-notes/README-2.6.1.txt
@@ -0,0 +1,59 @@
+Release: RabbitMQ 2.6.1
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+- the broker failed to (re)start on reboot on systems that keep
+ /var/run on a temporary file system, e.g. Ubuntu.
+- the Windows service failed to increase the Erlang process limit,
+ limiting the broker to a few thousand queues, connections and
+ channels.
+
+.net client
+-----------
+enhancements
+- add the "headers" exchange to RabbitMQ.Client.ExchangeType
+
+management plugin
+-----------------
+bug fixes
+- on a busy broker, /api/nodes could fail with a timeout, affecting
+ several management UI pages.
+
+topology visualiser
+-------------------
+First official release. See
+https://www.rabbitmq.com/plugins.html#rabbitmq_management_visualiser
+
+STOMP plugin
+------------
+enhancements
+- trim whitespace from headers when speaking STOMP 1.0
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply
+install the new version. All configuration and persistent message data
+is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install
+the new version on all the nodes and follow these instructions:
+ https://www.rabbitmq.com/clustering.html#upgrading
+All configuration and persistent message data is retained.
+
+To upgrade a non-clustered RabbitMQ from release 2.1.0, first upgrade
+to 2.1.1 (which retains all data), and then to the current version as
+described above.
+
+To upgrade a clustered RabbitMQ prior to 2.1.1 or a stand-alone broker
+from releases prior to 2.1.0, if the RabbitMQ installation does not
+contain any important data then simply install the new
+version. RabbitMQ will move the existing data to a backup location
+before creating a fresh, empty database. A warning is recorded in the
+logs. If your RabbitMQ installation contains important data then we
+recommend you contact rabbitmq-sales@pivotal.io for assistance with the
+upgrade.
diff --git a/release-notes/README-2.7.0.txt b/release-notes/README-2.7.0.txt
new file mode 100644
index 0000000000..62da1f7b7a
--- /dev/null
+++ b/release-notes/README-2.7.0.txt
@@ -0,0 +1,142 @@
+Release: RabbitMQ 2.7.0
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+- acknowledgements were not properly handled on transaction rollback
+- could not declare a mirrored queue with a policy of "nodes" and an explicit
+ list of node names
+- queues created by different client libraries could look inequivalent to the
+ broker, though they had equivalent properties
+- queue process monitors were not removed correctly
+- server start up could hang when trying to contact other Erlang nodes in some
+ network configurations
+- on Windows some batch file variables might pass unescaped backslashes to the
+ broker, causing it to crash
+
+enhancements
+- messages re-queued (as a result of a consumer dying, for example) have their
+ original order preserved
+- in large queues under load, reduce length of time messages already on disk are
+ retained in memory
+- on platforms which support the High Performance Erlang Compiler (HiPE), the
+ server can optionally (re)compile selected modules on startup for increased
+ run-time performance; see https://www.rabbitmq.com/configure.html
+- the server automatically adapts to changes to virtual memory resources, and to
+ the memory high-watermark
+- the rabbit logs are appended to on restart; log rotation is simplified
+- improved synchronisation between rabbitmqctl and the server when stopping
+- non-query actions initiated by rabbitmqctl are logged
+- creating a connection is faster
+- shutdown is more efficient, especially when there are many queues to delete
+- concurrent message storage operations for many queues are more efficient
+- durable queues are faster on first use, and faster to recover
+- messages removed before being written to disk have the writes eliminated,
+ increasing message throughput under load
+- performance improvements to queues with large numbers of consumers with
+ low prefetch counts
+- internal flow control is more consistent
+- various other general performance improvements
+
+clients
+-------
+bug fixes
+- connection and channel closes in the clients had internal timeouts which
+ could expire prematurely and spoil the client's view of the channel state
+
+enhancements
+- clients accept a new "amqp" URI scheme, which can describe all of the
+  information required to connect to an AMQP server in one URI; see
+  https://www.rabbitmq.com/uri-spec.html and the example after this list
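+
+For example, a minimal sketch using the Java client (the host,
+credentials and vhost are made up; "%2f" is the URL-encoded default
+vhost "/"):
+
+  ConnectionFactory factory = new ConnectionFactory();
+  factory.setUri("amqp://myuser:mypass@broker.example.com:5672/%2f");
+  Connection conn = factory.newConnection();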
+
+erlang client
+-------------
+bug fixes
+- under some circumstances wait_for_confirms/1 could fail to return
+
+enhancements
+- a connection timeout value can be set for Erlang client connections
+- socket options may be specified on connection start
+
+java client
+-----------
+enhancements
+- consumer callbacks and channel operations are thread-safe; calls to channel
+  operations can be made safely from a Consumer method call; Consumer callback
+  work threads can be user-supplied
+- channel or connection errors that refer to another method frame provide the
+ method's AMQP name (if it has one) in the error message
+
+.net client
+-----------
+bug fixes
+- some client methods were not documented correctly
+
+plugins
+-------
+bug fixes
+- HTTP-based plugins did not shut down correctly when stopped independently of
+ the Erlang VM
+
+enhancements
+- plugins are included in the main rabbitmq-server release, simplifying server
+ configuration and upgrades; a new tool, rabbitmq-plugins, enables and
+ disables plugins; see https://www.rabbitmq.com/plugins.html
+- rabbitmq_federation is no longer considered experimental
+- new experimental plugin: rabbitmq_consistent_hash_exchange, useful for load
+ balancing very high message rates across multiple queues
+- new experimental plugin: rabbitmq_tracing, a management UI for the firehose
+
+management plugin
+-----------------
+bug fixes
+- queue details page failed to display on recent browsers (e.g. Firefox 6) for
+ High Availability queues
+
+enhancements
+- more detailed global memory statistics shown
+- "all configuration" is renamed to "definitions" to reduce confusion with
+ rabbitmq.config
+
+auth-backend-ldap plugin
+------------------------
+enhancements
+- the queries are extended to include attributes and allow pattern-matching
+
+mochiweb plugin
+---------------
+enhancements
+- the limit on upload size is increased to 100MB so that JSON-RPC channel can
+ publish larger messages
+
+STOMP adapter
+-------------
+bug fixes
+- the STOMP adapter could crash when exceeding the memory high watermark
+
+build and packaging
+-------------------
+bug fixes
+- on non-Windows platforms invoking rabbitmq as a daemon could leave standard
+ input and output streams permanently open
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply install
+the new version. All configuration and persistent message data is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install the new
+version on all the nodes and follow the instructions at
+https://www.rabbitmq.com/clustering.html#upgrading .
+
+To upgrade RabbitMQ from release 2.1.0, first upgrade to 2.1.1 (all data will be
+retained), and then to the current version as described above.
+
+When upgrading from RabbitMQ versions prior to 2.1.0, the existing data will be
+moved to a backup location and a fresh, empty database will be created. A
+warning is recorded in the logs. If your RabbitMQ installation contains
+important data then we recommend you contact support at rabbitmq.com for
+assistance with the upgrade.
diff --git a/release-notes/README-2.7.1.txt b/release-notes/README-2.7.1.txt
new file mode 100644
index 0000000000..1f56d1570c
--- /dev/null
+++ b/release-notes/README-2.7.1.txt
@@ -0,0 +1,105 @@
+Release: RabbitMQ 2.7.1
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+- long-running brokers could crash due to global unique identifiers not being
+ unique enough
+- leader election of mirrored queues could fail when using confirms
+- there was a slow memory leak in HA queues with persistent and confirmed
+ messages
+- when using HA queues with policy of 'nodes', leader replica didn't recover
+ properly
+- HA queues could fail when nodes were restarting frequently
+- broker sometimes hung when closing channels and connections from multiple
+  threads
+- queue equivalence check did not properly detect different arguments under
+ some circumstances
+- the broker sometimes hung when recovering queues on startup
+- 'rabbitmqctl list_connections' could return incomplete information
+- broker-generated queue names did not conform to AMQP syntax rules
+- a (harmless) warning was emitted when running under Erlang R15B
+
+enhancements
+- deletion of exchanges or queues with many bindings is more efficient
+- 'rabbitmqctl eval <expr>' evaluates arbitrary Erlang expressions in the
+  broker node (see the example after this list)
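+
+For example, the following (illustrative) invocation prints the name of
+the node the broker is running on:
+
+  rabbitmqctl eval 'node().'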
+
+java client
+-----------
+bug fixes
+- resources were not recovered if ConnectionFactory failed to connect
+- defaults for the ConnectionFactory class were not public
+- part of the Java client API was hidden, causing application build errors
+- interrupts were mishandled in the Java threading logic
+
+.net client
+-----------
+bug fixes
+- session autoclose could fail with AlreadyClosedException
+
+plugins
+-------
+bug fixes
+- consistent-hash-exchange mis-routed messages when handling multiple exchanges
+
+management plugin
+-----------------
+bug fixes
+- statistics database could remain down after nodes were restarted
+- broker could fail to start if clients attempt to connect before the
+ management plugin is fully started
+- management plugin could fail to start if there were strange permissions
+ in /proc
+- overview could sometimes crash when another node starts up or shuts down
+- HA mirror synchronisation could sometimes be misrepresented on the
+ management UI
+- encoding of underscore in URL properties was incomplete
+- management interface could break if there were html syntax characters in names
+- shovels were not displayed if they were in an undefined state
+
+enhancements
+- rate of change of queue lengths added to the management API and UI
+- improvements to shovel information formatting
+
+auth-backend-ldap plugin
+------------------------
+bug fixes
+- made compatible with Erlang R15B
+enhancements
+- accept a broader class of group objects on in_group filter
+
+STOMP adapter
+-------------
+bug fixes
+- duplicate headers were generated in some MESSAGE frames
+- temporary reply-to queues were not re-usable
+- made compatible with Erlang R15B
+
+build and packaging
+-------------------
+bug fixes
+- rabbitmq-server Mac OS X portfile was incorrectly built
+- maven bundle for Java client was not published to maven central
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply install
+the new version. All configuration and persistent message data is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install the new
+version on all the nodes and follow the instructions at
+https://www.rabbitmq.com/clustering.html#upgrading .
+
+To upgrade RabbitMQ from release 2.1.0, first upgrade to 2.1.1 (all data will be
+retained), and then to the current version as described above.
+
+When upgrading from RabbitMQ versions prior to 2.1.0, the existing data will be
+moved to a backup location and a fresh, empty database will be created. A
+warning is recorded in the logs. If your RabbitMQ installation contains
+important data then we recommend you contact <support at rabbitmq.com> for
+assistance with the upgrade.
diff --git a/release-notes/README-2.8.0.txt b/release-notes/README-2.8.0.txt
new file mode 100644
index 0000000000..ace510e875
--- /dev/null
+++ b/release-notes/README-2.8.0.txt
@@ -0,0 +1,202 @@
+Release: RabbitMQ 2.8.0
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+- reduce idle CPU usage when there are lots of mirrored queues
+- fix a rare bug which could cause the server to stop accepting connections
+- ensure starting a ram node when all disk nodes are down fails, instead
+ of creating a blank ram node
+- fix a race in mirrored queues where one node could run two mirrors
+- improve internal accounting of file descriptors; make it harder to hit the
+ limit unexpectedly
+- rabbitmqctl <unknown-action> fixed on R15B
+- fix race condition leading to monitoring other cluster nodes twice
+- leave the Erlang distributed system, not just Mnesia, when resetting
+- more consistent handling of RABBITMQ_* environment variables
+
+enhancements
+- dead lettering - queues can specify an exchange to which messages should be
+  redirected when they are rejected or expire (see the example after this
+  list)
+- internal flow control to limit memory use and make performance more
+ predictable if the server is overloaded
+- fsync after Mnesia transactions to ensure recently-created queues and
+  exchanges are not lost in the event of an unexpected shutdown
+- much more eager fsync when persistent messages are published without
+ confirms / transactions leading to far fewer messages lost in the event
+ of an unexpected shutdown
+- server no longer fails to start when a durable exchange is declared using
+ an exchange type plugin which is subsequently disabled. Instead the exchange
+ exists but routes no messages
+- better OpenBSD support (thanks to Piotr Sikora)
+- basic.reject and basic.nack now respect transactions
+- rabbitmq-echopid.bat introduced: allows obtaining the server PID on Windows
+- the start of logging configuration: initially just for per-connection logging
+- set SO_LINGER to 0 to prevent file descriptors being used by closed
+ connections
+- improve error reporting when AMQP ports are already used by non-RabbitMQ
+ brokers
+- improve error reporting when Mnesia times out waiting for tables
+- consistent naming of connections and channels across rabbitmqctl and the
+ management plugin
+- file descriptor statistics added to "rabbitmqctl status"
+- more robustness if rabbitmq-plugins cannot parse the enabled plugins file
+- don't start external cpu_sup process; we don't need it
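+
+For example, a minimal sketch of declaring a queue with a dead-letter
+exchange using the Java client (assumes an open Channel "ch"; the queue
+and exchange names are illustrative):
+
+  Map<String, Object> args = new HashMap<String, Object>();
+  args.put("x-dead-letter-exchange", "my-dlx");   // where dead messages go
+  args.put("x-dead-letter-routing-key", "dead");  // optional key override
+  ch.queueDeclare("work", true, false, false, args);
+
+Messages that are rejected (without requeue) or that expire in "work"
+are then republished to the "my-dlx" exchange.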
+
+performance improvements
+- consuming has smarter flow control, leading to performance improvements in
+ many cases
+- deleting queues with many bindings to topic exchanges is no longer
+ O(binding_count^2)
+- message ID generation is somewhat faster
+
+
+packaging
+---------
+bug fixes
+- debian: add build dependency on erlang-nox
+- debian / rpm: don't start the server with "su", fixing inability to
+ shut down seen on Ubuntu
+- macports: fix plugins showing version as "0.0.0"
+- macports: create configuration directory if it does not already exist
+- windows: INSTALL file now contains Windows-style line endings
+
+enhancements
+- generic unix tarball: by default locate all log / db / conf files within
+ the unpacked tarball, simplifying installation
+
+
+erlang client
+-------------
+bug fixes
+- fix "make documentation"
+
+enhancements
+- wait_for_confirms() can now take a timeout
+
+
+java client
+-----------
+bug fixes
+- fix memory leak when channels were closed on a connection that stays open
+- fix display of message headers and content in the tracer
+- fix hang in ConnectionFactory when the first frame from the server is never
+ received
+- fix NullPointerException at ConsumerDispatcher.java:91
+
+enhancements
+- waitForConfirms() can now take a timeout
+- allow use of Java arrays in AMQP arrays (e.g. for arguments and headers)
+- don't depend on org.apache.commons classes except for tests
+- fire channel shutdown listeners on connection shutdown
+- show fractional message rates in MulticastMain
+- show aggregated producer rates in MulticastMain
+
+
+.net client
+-----------
+bug fixes
+- don't try to close the socket more than once if a heartbeat is missed
+
+enhancements
+- WaitForConfirms() can now take a timeout
+
+
+management plugin
+-----------------
+bug fixes
+- fix overview page in MSIE
+- escape HTML entities properly in the web UI
+- fix incorrect display of mirrored queues as unsynchronised after database
+ failover
+- give sensible error if user tags field is missing
+- fix [Admin] etc links which were broken in some browsers
+- fix wrong date in "last updated" in the web UI
+
+enhancements
+- add separate form to update users
+- add option to import file of entity definitions at startup
+- publish messages from the queue details page of the web UI
+- make "exchange type" into a select box in the web UI
+- show the connection heartbeat more clearly in the web UI
+
+
+json-rpc plugin
+---------------
+bug fixes
+- fix memory leak in ETS
+
+
+rabbitmqadmin
+-------------
+bug fixes
+- fix "rabbitmqadmin get"
+- display array info-items correctly
+- allow specifying node for queue declaration
+
+enhancements
+- configuration file for connections (thanks to Massimo Paladin)
+
+
+federation plugin
+-----------------
+enhancements
+- allow the queue declared upstream to be mirrored
+
+
+STOMP plugin
+------------
+bug fixes
+- fix invalid MESSAGE Frames for reply-to temporary queues
+- fix non-UTF-8 durable topic subscription queue names
+- behave sensibly on death of the internal AMQP connection / channel
+- prevent an infinite loop when implicit connect is enabled with an invalid
+  username / password
+- allow more than one SSL handshake to happen at once
+
+enhancements
+- support client login via SSL certificate, similar to
+ rabbitmq_auth_mechanism_ssl for AMQP
+- performance improvement: don't declare a queue on every publish
+- support the server's new flow control mechanism
+- add "server" field to the "CONNECTED" frame
+
+
+auth backend LDAP plugin
+------------------------
+enhancements
+- optionally look up a user's DN after binding - useful for Microsoft
+ Active Directory
+- remove build time dependency on nmap
+- allow queries to run as the user we bound as
+
+
+auth mechanism SSL plugin
+-------------------------
+bug fixes
+- don't blow up if a certificate contains more than one Common Name
+
+enhancements
+- support obtaining the user name from the certificate's Distinguished Name
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply install
+the new version. All configuration and persistent message data is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install the new
+version on all the nodes and follow the instructions at
+https://www.rabbitmq.com/clustering.html#upgrading .
+
+To upgrade RabbitMQ from release 2.1.0, first upgrade to 2.1.1 (all data will be
+retained), and then to the current version as described above.
+
+When upgrading from RabbitMQ versions prior to 2.1.0, the existing data will be
+moved to a backup location and a fresh, empty database will be created. A
+warning is recorded in the logs. If your RabbitMQ installation contains
+important data then we recommend you contact support at rabbitmq.com for
+assistance with the upgrade.
diff --git a/release-notes/README-2.8.1.txt b/release-notes/README-2.8.1.txt
new file mode 100644
index 0000000000..73adfbd9b8
--- /dev/null
+++ b/release-notes/README-2.8.1.txt
@@ -0,0 +1,41 @@
+Release: RabbitMQ 2.8.1
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+- fixed cluster upgrades for RAM nodes
+
+
+packaging
+---------
+bug fixes
+- remove use of runuser for the SUSE RPM
+- package rabbitmq-echopid.bat on Windows
+
+
+management plugin
+-----------------
+bug fixes
+- fixed web UI for users with only the 'management' tag
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply install
+the new version. All configuration and persistent message data is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install the new
+version on all the nodes and follow the instructions at
+https://www.rabbitmq.com/clustering.html#upgrading .
+
+To upgrade RabbitMQ from release 2.1.0, first upgrade to 2.1.1 (all data will be
+retained), and then to the current version as described above.
+
+When upgrading from RabbitMQ versions prior to 2.1.0, the existing data will be
+moved to a backup location and a fresh, empty database will be created. A
+warning is recorded in the logs. If your RabbitMQ installation contains
+important data then we recommend you contact support at rabbitmq.com for
+assistance with the upgrade.
diff --git a/release-notes/README-2.8.2.txt b/release-notes/README-2.8.2.txt
new file mode 100644
index 0000000000..616d04b4ec
--- /dev/null
+++ b/release-notes/README-2.8.2.txt
@@ -0,0 +1,106 @@
+Release: RabbitMQ 2.8.2
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+- memory leak on basic.reject{requeue = false}
+- queue failure when dead-lettering a message which has been paged to disc
+- memory leak with dead-letter cycles and HA queues
+- possible message loss with dead-lettered messages and HA queues at broker
+ shutdown
+- HA mirror queue crash on basic.reject{requeue = false}
+- HA mirror queue crash with dead-letter cycle
+- messages might incorrectly not expire if consumed immediately and then
+ requeued
+- message acks might be lost if sent immediately before channel closure
+- dead-lettering a large number of messages at once was very slow
+- memory leak when queue references a DLX that does not exist
+- fix startup in FreeBSD jail without IPv6 support (thanks to Mattias Ahlbäck)
+- error logged by mirrored queue supervisor on shutdown
+
+enhancements
+- disc space monitoring and blocking, similar to the existing memory
+  monitoring (see the example after this list)
+- substantially improve performance publishing large messages
+- improve performance delivering small messages
+- improve performance of routing via the default exchange
+- allow x-message-ttl to be set to 0 (useful as an alternative to
+ immediate-mode publish)
+- ensure unacked messages have been requeued by the time channel.close_ok is
+ received
+- remove non-free RFC from source package (Debian bug #665793)
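+
+The limit is configured via the disk_free_limit setting in
+rabbitmq.config; for example, an illustrative fragment using the
+memory-relative form (block publishers when free disc space falls below
+the amount of RAM installed on the node):
+
+  [{rabbit, [{disk_free_limit, {mem_relative, 1.0}}]}].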
+
+
+packaging
+---------
+bug fixes
+- Debian: provide a mechanism to change the FD limit
+- Debian: don't delete the rabbitmq user when removing package (#663503,
+ #620799, #646175)
+- Debian: use lsb-base functions in init script (#663434)
+
+
+java client
+-----------
+bug fixes
+- RpcClient used the platform default encoding instead of UTF-8
+- waitForConfirms returned true even if basic.nack received
+
+enhancements
+- MulticastMain supports option to multi-ack every N messages
+
+
+erlang client
+-------------
+bug fixes
+- crash on shutdown of AMQP connection
+
+
+management plugin
+-----------------
+bug fixes
+- management plugin could miss queue deletion events in some crash scenarios
+- dependency on xmerl was not declared
+
+enhancements
+- show a large warning when running a cluster with mixed RabbitMQ / Erlang
+ versions
+
+
+federation plugin
+-----------------
+bug fixes
+- leak of direct connections when federation links failed to start up
+
+enhancements
+- prevent messages which have reached max_hops from being transmitted one
+ additional time, using a custom upstream exchange
+- link status reporting, similar to that provided by rabbitmq-shovel, from
+ the command line and management plugin
+
+
+STOMP plugin
+------------
+bug fixes
+- "destination" header on MESSAGE did not match that used on SEND
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply install
+the new version. All configuration and persistent message data is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install the new
+version on all the nodes and follow the instructions at
+https://www.rabbitmq.com/clustering.html#upgrading .
+
+To upgrade RabbitMQ from release 2.1.0, first upgrade to 2.1.1 (all data will be
+retained), and then to the current version as described above.
+
+When upgrading from RabbitMQ versions prior to 2.1.0, the existing data will be
+moved to a backup location and a fresh, empty database will be created. A
+warning is recorded in the logs. If your RabbitMQ installation contains
+important data then we recommend you contact support at rabbitmq.com for
+assistance with the upgrade.
diff --git a/release-notes/README-2.8.3.txt b/release-notes/README-2.8.3.txt
new file mode 100644
index 0000000000..e004151dec
--- /dev/null
+++ b/release-notes/README-2.8.3.txt
@@ -0,0 +1,89 @@
+Release: RabbitMQ 2.8.3
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+- several fixes to communication protocol underlying HA queues
+- memory leak deleting HA queues
+- rotating logs loaded the entire log file into memory
+- queues with many busy consumers could refuse to accept publishes until empty
+- stale transient queue information could be left behind when a node restarted
+ quickly
+- additional cluster nodes that started with insufficient disk space would
+ never accept connections even after disk space increased
+- rabbitmqctl displayed non-ASCII characters incorrectly in some error
+ messages
+- disk space monitoring on non-English versions of Windows did not work
+- RABBITMQ_PLUGINS_DIR could not be set on Windows
+
+enhancements
+- set default disk space limit to 1GB since many users were running into the
+ previous default limit of {mem_relative, 1.0} when running RabbitMQ for
+ the first time
+
+
+packaging
+---------
+bug fixes
+- Debian: uninstalling failed if broker was stopped
+- Debian: server process was not a child of init, which could lead to it
+  being shut down in certain situations
+
+enhancements
+- Debian: emit upstart events
+
+
+java client
+-----------
+enhancements
+- improved performance with SSL
+
+
+erlang client
+-------------
+bug fixes
+- code_change/3 did not return {ok, State} in many places, breaking
+  applications that use code reloading and the Erlang client
+- spurious function_clause error in logs when SSL connection closed abruptly
+ under R15B0x
+
+
+federation plugin
+-----------------
+enhancements
+- set default prefetch count to 1000, rather than unlimited, to ensure there
+ is flow control on federation links
+
+
+management plugin
+-----------------
+bug fixes
+- rare race condition that could cause management DB failover to fail
+
+
+STOMP plugin
+------------
+bug fixes
+- last message of a burst may not be received when flow control is active
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply install
+the new version. All configuration and persistent message data is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install the new
+version on all the nodes and follow the instructions at
+https://www.rabbitmq.com/clustering.html#upgrading .
+
+To upgrade RabbitMQ from release 2.1.0, first upgrade to 2.1.1 (all data will be
+retained), and then to the current version as described above.
+
+When upgrading from RabbitMQ versions prior to 2.1.0, the existing data will be
+moved to a backup location and a fresh, empty database will be created. A
+warning is recorded in the logs. If your RabbitMQ installation contains
+important data then we recommend you contact support at rabbitmq.com for
+assistance with the upgrade.
diff --git a/release-notes/README-2.8.4.txt b/release-notes/README-2.8.4.txt
new file mode 100644
index 0000000000..ee08042163
--- /dev/null
+++ b/release-notes/README-2.8.4.txt
@@ -0,0 +1,30 @@
+Release: RabbitMQ 2.8.4
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+- log file rotation when no suffix was specified could lead to the log file
+ growing rapidly
+- rabbitmq-plugins.bat did not work
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply install
+the new version. All configuration and persistent message data is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install the new
+version on all the nodes and follow the instructions at
+https://www.rabbitmq.com/clustering.html#upgrading .
+
+To upgrade RabbitMQ from release 2.1.0, first upgrade to 2.1.1 (all data will be
+retained), and then to the current version as described above.
+
+When upgrading from RabbitMQ versions prior to 2.1.0, the existing data will be
+moved to a backup location and a fresh, empty database will be created. A
+warning is recorded in the logs. If your RabbitMQ installation contains
+important data then we recommend you contact support at rabbitmq.com for
+assistance with the upgrade.
diff --git a/release-notes/README-2.8.5.txt b/release-notes/README-2.8.5.txt
new file mode 100644
index 0000000000..7cfad51e55
--- /dev/null
+++ b/release-notes/README-2.8.5.txt
@@ -0,0 +1,47 @@
+Release: RabbitMQ 2.8.5
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+- unnecessary CPU utilisation no longer occurs in the presence of large
+ numbers of idle HA queues
+- rapidly declaring and then deleting HA queues no longer crashes the master
+- fixed a race condition in handling node-down signals that could result in
+  HA queues failing to restart when bringing a follower replica (mirror) back online
+- channels no longer crash when detecting nodes that have gone offline
+- rabbitmqctl no longer garbles error messages when rendering non-ASCII
+ characters
+- the installer now places the .erlang.cookie file in %HOMEDRIVE%%HOMEPATH%
+ on Windows, so that %USERPROFILE% can be safely relocated
+
+STOMP plugin
+------------
+bug fixes
+- fixed a bug in the test suite that was failing to check for the expected
+ number of receipts before checking if a message had arrived successfully
+
+jsonrpc-channel plugin
+----------------------
+bug fixes
+- updated to support the latest versions of rfc4627 and Mochiweb
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply install
+the new version. All configuration and persistent message data is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install the new
+version on all the nodes and follow the instructions at
+https://www.rabbitmq.com/clustering.html#upgrading .
+
+To upgrade RabbitMQ from release 2.1.0, first upgrade to 2.1.1 (all data will be
+retained), and then to the current version as described above.
+
+When upgrading from RabbitMQ versions prior to 2.1.0, the existing data will be
+moved to a backup location and a fresh, empty database will be created. A
+warning is recorded in the logs. If your RabbitMQ installation contains
+important data then we recommend you contact support at rabbitmq.com for
+assistance with the upgrade.
diff --git a/release-notes/README-2.8.6.txt b/release-notes/README-2.8.6.txt
new file mode 100644
index 0000000000..ea2f234756
--- /dev/null
+++ b/release-notes/README-2.8.6.txt
@@ -0,0 +1,64 @@
+Release: RabbitMQ 2.8.6
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+- ensure shutdown of mirrored queue nodes is recorded correctly
+- removed unsupported plugins added in 2.8.5 (old-federation, sockjs-erlang,
+ cowboy, web-stomp and web-stomp-examples)
+- removing RAM nodes from a cluster no longer leads to inconsistent state
+ on disk nodes (which previously failed to notice the RAM nodes' departure)
+- reap TTL-expired messages promptly
+- correct reporting of the vm_memory_high_watermark
+- reduce likelihood of node name collision on Windows due to non-randomness
+ of %RANDOM%
+
+
+erlang client
+-------------
+bug fixes
+- correctly account for file handles consumed by outgoing network connections
+ when running as a plugin
+
+
+management plugin
+-----------------
+bug fixes
+- prevent publishing a message with non-binary content
+
+
+shovel plugin
+-------------
+bug fixes
+- guarantee that reconnect attempts continue if a failure occurs during
+ connection establishment
+
+
+federation plugin
+-----------------
+bug fixes
+- guarantee that links continue to attempt reconnecting if a failure occurs
+ during connection establishment
+- report status correctly in the event of unexpected failure
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply install
+the new version. All configuration and persistent message data is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install the new
+version on all the nodes and follow the instructions at
+https://www.rabbitmq.com/clustering.html#upgrading .
+
+To upgrade RabbitMQ from release 2.1.0, first upgrade to 2.1.1 (all data will be
+retained), and then to the current version as described above.
+
+When upgrading from RabbitMQ versions prior to 2.1.0, the existing data will be
+moved to a backup location and a fresh, empty database will be created. A
+warning is recorded in the logs. If your RabbitMQ installation contains
+important data then we recommend you contact support at rabbitmq.com for
+assistance with the upgrade.
diff --git a/release-notes/README-2.8.7.txt b/release-notes/README-2.8.7.txt
new file mode 100644
index 0000000000..eb6a9281a5
--- /dev/null
+++ b/release-notes/README-2.8.7.txt
@@ -0,0 +1,70 @@
+Release: RabbitMQ 2.8.7
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+- fix race condition that could stop mirrored queue from sending further
+ confirms, and cause it to leak memory
+- fix bug that prevented confirms from mirrored queues when x-message-ttl
+ was set to zero
+- fix mirror synchronisation detection logic in mirrored queues
+- fix possible deadlock during broker shutdown
+- fix resource leak when declaring many short-lived mirrored queues with
+ different names
+- fix DOS vulnerability possible by malicious SSL clients
+- make disk free space reporting more intelligible
+
+performance improvements
+- reduce unnecessary fsync operations when deleting non-durable resources
+- mirror nodes of mirrored queues now pro-actively persist acks and messages on
+ a timer with a sensible interval
+
+
+packaging
+---------
+bug fixes
+- ensure source packages can be built without network access
+
+
+erlang client
+-------------
+bug fixes
+- ensure management plugin is notified when connections fail as soon as
+ they are opened
+
+enhancements
+- offer configuration flag for ipv4 / ipv6 preference
+
+
+management plugin
+-----------------
+bug fixes
+- prevent management plugin from crashing nodes when failing over
+
+
+STOMP plugin
+------------
+bug fixes
+- fix bug that caused alarms (e.g. disk free space checking) to turn off
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply install
+the new version. All configuration and persistent message data is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install the new
+version on all the nodes and follow the instructions at
+https://www.rabbitmq.com/clustering.html#upgrading .
+
+To upgrade RabbitMQ from release 2.1.0, first upgrade to 2.1.1 (all data will be
+retained), and then to the current version as described above.
+
+When upgrading from RabbitMQ versions prior to 2.1.0, the existing data will be
+moved to a backup location and a fresh, empty database will be created. A
+warning is recorded in the logs. If your RabbitMQ installation contains
+important data then we recommend you contact support at rabbitmq.com for
+assistance with the upgrade.
diff --git a/release-notes/README-3.0.0.txt b/release-notes/README-3.0.0.txt
new file mode 100644
index 0000000000..f28b5b8408
--- /dev/null
+++ b/release-notes/README-3.0.0.txt
@@ -0,0 +1,244 @@
+Release: RabbitMQ 3.0.0
+
+server
+------
+bug fixes
+25195 prevent mirror being promoted before it has received all
+ messages from dying master, which subsequently caused it to crash
+25202 fix race at mirror startup causing process leak and other problems
+25216 prevent excessive memory use when recovering large durable queues
+25226 prevent mirrors leaking memory when using persistence and confirms
+25260 ensure messages that expire while the broker is stopped are
+ dead-lettered correctly
+25198, 25185, 25200 ensure "redelivered" flag set correctly on HA
+ queue failover
+25053 ensure statistics event emission does not prevent fsync
+25215 ensure rapid declaration of many mirrored queues does not overload
+ Mnesia
+25000 don't write a PID file when -detached is specified as it will
+ not be right
+25263 fix channel number in error message if rabbit_writer crashes
+25295, 25297 fix a couple of small memory leaks in rabbit_channel when
+ one channel sees many queues deleted
+
+enhancements
+24991 correctly enforce AMQP frame_max. Note that some buggy clients
+ do not frame messages correctly; as of 3.0.0 these clients
+      will be rejected if they attempt to send a message larger than
+ frame_max (by default 128k).
+24908 allow queue mirroring to be defined by broker-wide policy, not
+ queue declaration, and add "exactly" mode
+24196 dynamic runtime configuration mechanism (parameters)
+24914 rewrite of rabbitmqctl clustering commands for greater user
+ friendliness
+24915 background GC for idle processes, preventing excessive memory use
+19376 support per-message TTL
+25110 provide more detailed statistics on broker memory use (reported
+ in 'rabbitmqctl status')
+25227 provide information if a network partition has occurred
+ (reported in 'rabbitmqctl cluster_status')
+24792 improve plugin startup procedure:
+ - "rabbitmqctl wait" will not return until plugins have started
+ - Windows service users no longer need to reinstall service
+ after enabling plugins
+24971 enable heartbeats by default
+24719 ensure broker starts and rabbitmq-plugins works when plugin
+ dependencies are missing
+25164 give better error message when attempting to use a queue which
+ is on a down node
+25086 improve error reporting for framing errors
+25262 sort the output of rabbitmqctl list_*
+23935 reverse DNS lookups for connection info items (disabled by
+ default, see the 'reverse_dns_lookup' configuration item)
+25193 expose count of non-blocked consumers as a queue info item
+24998 make memory / disk alarms easier to spot in log files
+24956 provide better log messages when heartbeat timeouts occur
+24919 check flags passed to rabbitmqctl match the subcommand
+25244 cope if clients set malformed "x-death" headers
+24867 reject attempts to declare queues with x-expires or
+ x-message-ttl greater than 2^32 milliseconds since this will
+ not work
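+
+As an illustrative sketch of item 24908 (mirroring controlled by policy), with
+a made-up policy name and pattern, and assuming rabbitmqctl set_policy syntax:
+
+    rabbitmqctl set_policy ha-two "^ha\." '{"ha-mode":"exactly","ha-params":2}'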
+
+feature removal
+23896 remove support for AMQP's "immediate" publish mode
+
+performance improvements
+25145 greatly improve performance of mirrored queues
+24974 improve performance of SSL when using HiPE compilation
+24888 improve performance of bulk dead-lettering
+
+
+packaging
+---------
+bug fixes
+21413 follow specifications better for init script on RPM distros
+
+
+management plugin
+-----------------
+bug fixes
+25048 ensure queue synchronisation is always shown correctly
+25149 move management port out of the ephemeral range (which could
+ result in the management plug-in failing to start). The new
+ default port is 15672, with a redirect in place from 55672.
+25220 allow bindings with arguments containing AMQP tables and arrays
+23225 correctly display network traffic statistics for connections
+ which do nothing but receive autoack messages
+25151 fix sorting by and selection of queue/exchange arguments
+
+enhancements
+25232 provide branded login screen and logout button
+24830 allow argument types to be selected in the web UI
+24859 add global counts of various objects
+24949 more flexible top-level navigation in the web UI
+25218 make binding display on exchange/queue details page clearer
+25135 show abbreviated client properties in web UI connection list
+25259 simplify management listener configuration
+24916 make the statistics database hibernate when idle, thus reducing
+ memory use
+24932 improve responsiveness of the statistics database on a heavily
+ loaded broker
+25148 clearer error message when the management port is in use at startup
+24967 web UI help texts link to the website for more information
+24983 clearer error message when rabbitmqadmin is run against
+ an incompatible Python version
+25209 make rabbitmqadmin work when passed through 2to3 (thanks to Alan
+ Antonuk)
+
+
+federation plugin
+-----------------
+bug fixes
+24856 prevent bindings from propagating too far and leaking in the
+ presence of upstream cycles
+25166 eliminate delays in broker shut down on disturbed networks
+25022 don't crash on consumer cancellation notification
+
+enhancements
+23908 allow addition and removal of upstreams while the broker is running
+24826 allow federation of normal exchanges, remove "x-federation" exchange
+ type
+24695 specify upstreams using amqp:// URIs
+23903 allow passing through user-id from trusted upstreams
+25029 improve consistency of federation nomenclature
+25244 cope if clients set malformed "x-received-from" headers
+
+
+old-federation plugin
+---------------------
+24822 new (old) plugin: a backported version of the federation
+ plugin from 2.8.7.
+
+
+shovel plugin
+-------------
+bug fixes
+25166 eliminate delays in broker shut down on disturbed networks
+25022 don't crash on consumer cancellation notification
+
+enhancements
+25049 remove dependency on Erlando
+
+
+STOMP plugin
+------------
+bug fixes
+25045 don't drop subscriptions on consumer cancellation notification
+25067 prevent header without colon from breaking all subsequent headers
+24623 allow ACK to be sent after UNSUBSCRIBE
+
+enhancements
+25196 set AMQP reply-to header sensibly when using temporary queues
+25235 STOMP version 1.2 support
+25036 log various flavours of authentication failure distinctly
+25140 ensure all AMQP message properties are mapped to STOMP and
+ vice versa
+
+performance improvements
+24872 improve performance reading non-tiny messages from the socket
+24968 improve performance reading large messages with content-length
+ header
+
+
+MQTT plugin
+-----------
+25025 new plugin: implement Message Queue Telemetry Transport version 3.1
+
+
+Web-STOMP plugin
+----------------
+24468 new plugin: STOMP to the browser over websockets with SockJS
+ fallback
+
+
+JSON-RPC channel plugin
+-----------------------
+enhancements
+25282 ensure the plugin is dfsg-free
+25149 move JSON-RPC port out of the ephemeral range (to 15670)
+25259 simplify JSON-RPC listener configuration
+
+
+LDAP plugin
+-----------
+bug fixes
+25089 make {other_bind, as_user} work properly in non-AMQP contexts
+ (management, STOMP, etc.)
+
+enhancements
+25169 allow use of in_group test when the attribute to be checked is
+ not called "member"
+24677 substantially improve logging for debugging LDAP queries
+
+
+java client
+-------------
+bug fixes
+24910 prevent DefaultExceptionHandler.handleChannelKiller from
+ closing connection when it shouldn't
+
+enhancements
+24527 include an automated performance measuring tool (PerformanceMain)
+20709 eliminate Javadoc warnings
+
+
+.net client
+-------------
+bug fixes
+25255 ensure BaseConnection#Dispose does not hang
+
+enhancements
+23747 allow configuration of the underlying socket (thanks to Tomasz Zuber)
+25092 set first failed connection as the inner exception to
+ BrokerUnreachableException, thus improving exception display
+
+
+erlang client
+-------------
+bug fixes
+25108 fix connection supervision: amqp_connection_type_sup should not
+ be transient
+
+enhancements
+25057 provide functions to unregister handlers
+25034 remove some generically named -define()s from amqp_client.hrl,
+ which could clash with other applications
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply install
+the new version. All configuration and persistent message data is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install the new
+version on all the nodes and follow the instructions at
+https://www.rabbitmq.com/clustering.html#upgrading .
+
+To upgrade RabbitMQ from release 2.1.0, first upgrade to 2.1.1 (all data will be
+retained), and then to the current version as described above.
+
+When upgrading from RabbitMQ versions prior to 2.1.0, the existing data will be
+moved to a backup location and a fresh, empty database will be created. A
+warning is recorded in the logs. If your RabbitMQ installation contains
+important data then we recommend you contact support at rabbitmq.com for
+assistance with the upgrade.
diff --git a/release-notes/README-3.0.1.txt b/release-notes/README-3.0.1.txt
new file mode 100644
index 0000000000..167cf45871
--- /dev/null
+++ b/release-notes/README-3.0.1.txt
@@ -0,0 +1,66 @@
+Release: RabbitMQ 3.0.1
+
+server
+------
+bug fixes
+25305 fix leak when dead-lettering to an exchange which does not route to queues
+25309 fix {badmatch,false} when queue is deleted as mirroring policy changes
+25340 fix performance degradation when using small numbers of outstanding
+ confirms with mirrored queues
+25301 fix messages with per-message TTL not expiring under some circumstances
+25351 increase robustness when calculating memory use by plugins
+
+enhancements
+25335, 25330 allow mixed patch versions of RabbitMQ in a cluster
+
+
+management plugin
+-----------------
+bug fixes
+25348 fix web UI breakage for users with "management" tag
+25300 fix JSON encoding error listing non-AMQP connections
+25325 fix web UI links when filtering by virtual host
+25346 fix parameter and policy names missing from definitions export
+25326 [MSIE] fix policies listing page when more than 1 vhost exists
+25304 [MSIE] fix "Add policy" and "Add upstream" buttons
+25352 [MSIE] fix "Add user" and "Update user" buttons when a tag link has
+ been clicked
+25320 fix misleading error message when administrator has no permissions
+25321 fix invisible update of queue policy changes that had no effect
+
+
+web plugin support
+------------------
+bug fixes
+25318 issue well-formed Location header when redirecting to another port
+
+
+erlang client / shovel plugin / federation plugin
+-------------------------------------------------
+bug fixes
+25331 prevent deadlock when starting connection during application shutdown
+
+
+tracing plugin
+--------------
+bug fixes
+25341 fix failure of rabbitmqctl status when tracing plugin enabled
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply install
+the new version. All configuration and persistent message data is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install the new
+version on all the nodes and follow the instructions at
+https://www.rabbitmq.com/clustering.html#upgrading .
+
+To upgrade RabbitMQ from release 2.1.0, first upgrade to 2.1.1 (all data will be
+retained), and then to the current version as described above.
+
+When upgrading from RabbitMQ versions prior to 2.1.0, the existing data will be
+moved to a backup location and a fresh, empty database will be created. A
+warning is recorded in the logs. If your RabbitMQ installation contains
+important data then we recommend you contact support at rabbitmq.com for
+assistance with the upgrade.
diff --git a/release-notes/README-3.0.2.txt b/release-notes/README-3.0.2.txt
new file mode 100644
index 0000000000..fd9ffcc076
--- /dev/null
+++ b/release-notes/README-3.0.2.txt
@@ -0,0 +1,88 @@
+Release: RabbitMQ 3.0.2
+
+server
+------
+bug fixes
+25422 fix race causing queues to crash when stopping mirroring
+25353 fix issue preventing idle queues from hibernating sometimes
+25431 fix compilation on Erlang R16A
+25420 fix issue causing crash at startup if another node reports Mnesia
+ starting or stopping
+25360 fix race allowing channel commands to be sent after connection.close-ok
+25412 fix race allowing channel commands to be sent after server has closed
+ a channel
+25378 fix broken error reporting for rabbitmqctl
+
+
+STOMP plugin
+------------
+bug fixes
+25362 only add /reply-queue prefix in reply-to header when replying to a
+ temporary queue
+
+
+consistent hash exchange plugin
+-------------------------------
+bug fixes
+25403 clean up Mnesia resources correctly on exchange deletion
+
+
+management plugin
+-----------------
+bug fixes
+25401 fix error reporting for some broken policy declarations
+25413 prevent read access to /api/connections/<name>/channels for
+ non-monitoring users
+25335 fix rabbitmqadmin bash completion when extglob mode is switched off
+ in the shell
+
+
+management visualiser plugin
+----------------------------
+bug fixes
+25387 fix broken RabbitMQ logo
+
+
+shovel-management plugin
+------------------------
+bug fixes
+25410 fix breakage when shovel application is stopped manually
+
+
+web-STOMP plugin
+----------------
+bug fixes
+25359 prevent rabbitmqctl status from killing web-STOMP connections
+25357 update SockJS-Erlang to 0.3.4, fixing SockJS issue #41 (fix a traceback
+ when websocket is slow or busy)
+
+
+JSON-RPC plugin
+---------------
+bug fixes
+25347 fix hang of rabbitmqctl status when JSON-RPC plugin enabled
+
+
+.NET client
+-----------
+bug fixes
+25389 send 0-9-1 header rather than 0-9 header when AMQP 0-9-1 is selected
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply install
+the new version. All configuration and persistent message data is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install the new
+version on all the nodes and follow the instructions at
+https://www.rabbitmq.com/clustering.html#upgrading .
+
+To upgrade RabbitMQ from release 2.1.0, first upgrade to 2.1.1 (all data will be
+retained), and then to the current version as described above.
+
+When upgrading from RabbitMQ versions prior to 2.1.0, the existing data will be
+moved to a backup location and a fresh, empty database will be created. A
+warning is recorded in the logs. If your RabbitMQ installation contains
+important data then we recommend you contact support at rabbitmq.com for
+assistance with the upgrade.
diff --git a/release-notes/README-3.0.3.txt b/release-notes/README-3.0.3.txt
new file mode 100644
index 0000000000..4d19924602
--- /dev/null
+++ b/release-notes/README-3.0.3.txt
@@ -0,0 +1,55 @@
+Release: RabbitMQ 3.0.3
+
+server
+------
+bug fixes
+25457 fix connection failure to start reading again in rare circumstances
+ when coming out of flow control
+25419 ensure invocation of "rabbitmqctl stop_app" during server startup
+ on a fresh node does not leave a corrupted Mnesia schema
+25448 ensure messages expire immediately when reaching the head of a queue
+ after basic.get
+25456 ensure parameters and policies for a vhost are removed with that vhost
+25465 do not log spurious errors for connections that close very early
+25443 ensure "rabbitmqctl forget_cluster_node" removes durable queue records
+ for unmirrored queues on the forgotten node
+25435 declare dependency on xmerl in rabbit application
+
+
+Windows packaging
+-----------------
+bug fixes
+25453 make rabbitmq-plugins.bat take note of %RABBITMQ_SERVICENAME%
+
+
+management plugin
+-----------------
+bug fixes
+25472 clean up connection and channel records from nodes that have crashed
+25432 do not show 404 errors when rabbitmq_federation_management
+ is installed and rabbitmq_federation is not
+
+
+mqtt plugin
+-----------
+bug fixes
+25469 ensure the reader process hibernates when idle
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply install
+the new version. All configuration and persistent message data is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install the new
+version on all the nodes and follow the instructions at
+https://www.rabbitmq.com/clustering.html#upgrading .
+
+To upgrade RabbitMQ from release 2.1.0, first upgrade to 2.1.1 (all data will be
+retained), and then to the current version as described above.
+
+When upgrading from RabbitMQ versions prior to 2.1.0, the existing data will be
+moved to a backup location and a fresh, empty database will be created. A
+warning is recorded in the logs. If your RabbitMQ installation contains
+important data then we recommend you contact support at rabbitmq.com for
+assistance with the upgrade.
diff --git a/release-notes/README-3.0.4.txt b/release-notes/README-3.0.4.txt
new file mode 100644
index 0000000000..2e3e8af6f2
--- /dev/null
+++ b/release-notes/README-3.0.4.txt
@@ -0,0 +1,25 @@
+Release: RabbitMQ 3.0.4
+
+federation plugin
+-----------------
+bug fixes
+25483 prevent x-received-from header from leaking upstream credentials
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply install
+the new version. All configuration and persistent message data is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install the new
+version on all the nodes and follow the instructions at
+https://www.rabbitmq.com/clustering.html#upgrading .
+
+To upgrade RabbitMQ from release 2.1.0, first upgrade to 2.1.1 (all data will be
+retained), and then to the current version as described above.
+
+When upgrading from RabbitMQ versions prior to 2.1.0, the existing data will be
+moved to a backup location and a fresh, empty database will be created. A
+warning is recorded in the logs. If your RabbitMQ installation contains
+important data then we recommend you contact support at rabbitmq.com for
+assistance with the upgrade.
diff --git a/release-notes/README-3.1.0.txt b/release-notes/README-3.1.0.txt
new file mode 100644
index 0000000000..8559d80e90
--- /dev/null
+++ b/release-notes/README-3.1.0.txt
@@ -0,0 +1,173 @@
+Release: RabbitMQ 3.1.0
+
+server
+------
+bug fixes
+25524 fix memory leak in mirrored queue mirrors with many short-lived publishing
+ channels
+25518 fix handling of certain ASN.1 fields in SSL certificates
+25486 ensure we handle cross cluster resource alarms after a partition
+25490 limit frequency with which the server invokes "df" or "dir" to
+ measure disc use
+25491 ensure we detect partitions in an idle cluster
+25367 throttle in a more timely manner when reading large messages and
+ resource alarm goes off
+25535 correctly report failure when user with no password attempts
+ password-based login
+25463 fix logging of authentication mechanism on login failure
+25385 check equivalence for dead-lettering related arguments
+
+enhancements
+24407 manual eager synchronisation of mirrors
+25418 automatic eager synchronisation of mirrors by policy
+25358 cluster "autoheal" mode to automatically choose nodes to restart
+ when a partition has occurred
+25358 cluster "pause minority" mode to prefer partition tolerance over
+ availability
+19375 add x-max-length argument for queue length limits
+25247 allow "nodes" policy to change queue master
+25107 permit dead-letter cycles
+25415 return total number of consumers in queue.declare-ok, not "active" ones
+24980 cut down size of stdout banner, ensure everything goes to logs
+25474 ensure partitions warning clears without needing to stop the
+ winning partition
+25488 allow exchange types plugins to validate bindings
+25512 improve robustness and correctness of memory use detail reporting
+25501 allow plugins to implement new ha-modes
+25517 more cross-cluster compatibility checks at startup
+25513 support debugging for heavyweight gen_server2 state
+
+performance improvements
+25514 performance improvements in message store garbage collection
+25302 performance improvements in mirrors when consuming
+25311 performance improvements requeuing persistent messages
+25373 memory-use improvement while fetching messages
+25428 memory-use improvement in queue index
+25504 25327 performance improvements in dead lettering
+25363 25364 25365 25366 25368 25369 25371 25386 25388 25429
+ various minor performance improvements
+
+
+management plugin
+-----------------
+bug fixes
+25290 fix per-queue memory leak recording stats for mirrored queue mirrors
+25526 ensure single-object API queries support ?columns= in query string
+
+enhancements
+23378 retain historical statistics, plot charts, show statistics per vhost,
+ improve performance of overview page, ensure message counters are
+ monotonic
+24114 ignore incoming statistics if the statistics database is overwhelmed
+23625 performance improvements when listing many queues
+23382 filter lists in the web UI
+25408 allow specification of arguments when declaring with rabbitmqadmin
+23438 allow admin to specify a reason when closing a connection
+25322 add "--version" support to rabbitmqadmin
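+
+Sketch of items 25408 and 25322 from the command line; the queue name and
+argument values are examples only:
+
+    rabbitmqadmin --version
+    rabbitmqadmin declare queue name=events durable=true \
+        arguments='{"x-max-length": 100000}'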
+
+
+federation plugin
+-----------------
+bug fixes
+24223 handle basic.nack if sent by the server
+
+enhancements
+25406 allow specification of multiple URLs in an upstream for failover
+25433 allow choice of acknowledgement mode, permitting faster but less
+ reliable delivery
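+
+Sketch of item 25406, declaring an upstream with two URIs for failover (host
+names are hypothetical):
+
+    rabbitmqctl set_parameter federation-upstream my-upstream \
+        '{"uri": ["amqp://host-a", "amqp://host-b"]}'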
+
+
+old-federation plugin
+---------------------
+feature removal
+25484 remove the old-federation plugin which shipped with RabbitMQ 3.0.x
+
+
+shovel plugin
+-------------
+enhancements
+24850 support failover when running in a cluster
+
+
+Web-STOMP plugin
+----------------
+enhancements
+25333 update stomp.js library to support STOMP 1.1 (thanks to Jeff Mesnil)
+
+
+AMQP 1.0 plugin
+---------------
+25381 new plugin: initial support for AMQP 1.0
+
+
+STOMP plugin
+------------
+bug fixes
+25464 more graceful handling of connection abort while opening
+25466 don't crash when SENDing to /temp-queue
+
+
+MQTT plugin
+-----------
+bug fixes
+25464 more graceful handling of connection abort while opening
+25487 do not break "rabbitmqctl status" if MQTT plugin is enabled
+
+
+consistent hash exchange
+------------------------
+enhancements
+25392 allow hashing on something other than routing key
+
+
+packaging
+---------
+enhancements
+25271 new standalone release tarball for OS X
+25497 add extra architectures to the apt repository
+25519 allow debian packages to depend on esl-erlang
+25002 merge contents of private umbrella into public one and remove private
+25339 move rabbitmq-test to its own repository
+
+
+java client
+-----------
+bug fixes
+25509 ensure channel numbers do not get reused before the server has finished
+ with them
+
+enhancements
+25356 make Channel.waitForConfirms(long) throw exception if confirm mode is
+ not selected
+24675 add support for existing exchanges / queues to MulticastMain
+
+
+erlang client
+-------------
+bug fixes
+25502 fail reasonably when attempting to connect to a server that does not
+ speak AMQP 0-9-1
+
+enhancements
+25377 remove "there is no confirm handler" warnings
+25503 don't allow client to negotiate frame sizes > 64Mb since it does not
+ support them
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply install
+the new version. All configuration and persistent message data is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install the new
+version on all the nodes and follow the instructions at
+https://www.rabbitmq.com/clustering.html#upgrading .
+
+To upgrade RabbitMQ from release 2.1.0, first upgrade to 2.1.1 (all data will be
+retained), and then to the current version as described above.
+
+When upgrading from RabbitMQ versions prior to 2.1.0, the existing data will be
+moved to a backup location and a fresh, empty database will be created. A
+warning is recorded in the logs. If your RabbitMQ installation contains
+important data then we recommend you contact support at rabbitmq.com for
+assistance with the upgrade.
diff --git a/release-notes/README-3.1.1.txt b/release-notes/README-3.1.1.txt
new file mode 100644
index 0000000000..88b06eca48
--- /dev/null
+++ b/release-notes/README-3.1.1.txt
@@ -0,0 +1,64 @@
+Release: RabbitMQ 3.1.1
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+25545 relax validation of x-match binding to headers exchange for
+ compatibility with brokers < 3.1.0
+25561 fix bug in ack handling for transactional channels that could
+ cause queues to crash
+25560 fix race condition in cluster autoheal that could lead to nodes
+ failing to re-join the cluster
+25546 fix crash when setting a prefetch count multiple times on the
+ same channel
+25548 fix vhost validation when setting policies and/or parameters
+25549 fix x-expires handling after last consumer disconnects
+25555 tighten up validation of HA-related policies
+
+
+shovel plugin
+-------------
+bug fixes
+25542 fix handling of default reconnect_delay
+
+
+management plugin
+-----------------
+bug fixes
+25536 set auth header correctly when downloading definitions
+25543 set message_stats to the correct value when empty
+
+
+federation-management-plugin
+----------------------------
+bug fixes
+25556 allow multiple URIs to be specified against an upstream
+
+
+.NET client
+-------------
+bug fixes
+25558 fix a bug that could lead to duplicate channel IDs being allocated,
+ causing a connection failure
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply install
+the new version. All configuration and persistent message data is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install the new
+version on all the nodes and follow the instructions at
+https://www.rabbitmq.com/clustering.html#upgrading .
+
+To upgrade RabbitMQ from release 2.1.0, first upgrade to 2.1.1 (all data will be
+retained), and then to the current version as described above.
+
+When upgrading from RabbitMQ versions prior to 2.1.0, the existing data will be
+moved to a backup location and a fresh, empty database will be created. A
+warning is recorded in the logs. If your RabbitMQ installation contains
+important data then we recommend you contact support at rabbitmq.com for
+assistance with the upgrade.
diff --git a/release-notes/README-3.1.2.txt b/release-notes/README-3.1.2.txt
new file mode 100644
index 0000000000..622383eb38
--- /dev/null
+++ b/release-notes/README-3.1.2.txt
@@ -0,0 +1,90 @@
+Release: RabbitMQ 3.1.2
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+
+25636 fix startup failure when using SSL with Erlang/OTP R16B01
+25571 fix potential deadlock in application shutdown handling (since 2.1.0)
+25567 fix queue crash requeuing in-memory messages (since 2.7.0)
+25599 fix queue record leak of exclusive durable queues after forceful
+ shutdown (since 3.0.1)
+25576 fix bug in queue index where a broker crash between segment
+ deletion and journal truncation could prevent the broker from
+ subsequently starting (since 2.0.0)
+25615 fix duplicate mirrored queue mirrors starting on a single node (since 2.6.0)
+25588 ensure per-message-TTL is removed when messages are dead-lettered
+ (since 3.0.0)
+25575 fix bug handling empty rabbit_serial leading to startup failure
+ (since 1.7.0)
+25640 fix channel crash with a race between basic.ack and basic.cancel
+ when prefetch >= 1 (since 3.1.0)
+25638 fix leak affecting HA/mirrored queues (since 3.0.0)
+25611 improve stack traces when message store crash occurs
+25612 fix crashing processes when stopping node as part of a cluster
+ (since 2.4.0)
+
+
+stomp plugin
+-------------
+bug fixes
+25564 fix handling of reply-to for non-temporary queue destinations
+ (since 3.1.0)
+25566 allow unescaped colons in header values for STOMP 1.0 compatibility
+ (since 3.0.0)
+
+
+management plugin
+-----------------
+bug fixes
+25592 fix bug allowing unprivileged users to see stats for all vhosts
+ (since 3.1.0)
+25600 fix consumer record leak in the management database (since 2.2.0)
+25629 fix memory leak in the presence of long-lived channels and
+ short-lived queues (since 3.1.0)
+25580 fix bug preventing definitions file from loading if it contained
+ a policy from a non default vhost (since 3.1.1)
+
+
+LDAP plugin
+-----------
+bug fixes
+25573 fix garbled login failure errors (since 2.3.0)
+
+
+Java client
+-----------
+bug fixes
+25633 fix quoting and escaping in json parser, avoid a potentially
+ non-terminating loop and improve error handling (since 2.8.2)
+ (thanks to Bradley Peabody)
+
+
+Erlang client
+-------------
+bug fixes
+25521 fix negotiated frame-max handling, which was being ignored (since 2.0.0)
+25489 fix rpc client/server to ensure correlation-ids are valid UTF-8 strings
+ (since 2.0.0) (thanks to Daniel White)
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply install
+the new version. All configuration and persistent message data is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install the new
+version on all the nodes and follow the instructions at
+https://www.rabbitmq.com/clustering.html#upgrading .
+
+To upgrade RabbitMQ from release 2.1.0, first upgrade to 2.1.1 (all data will be
+retained), and then to the current version as described above.
+
+When upgrading from RabbitMQ versions prior to 2.1.0, the existing data will be
+moved to a backup location and a fresh, empty database will be created. A
+warning is recorded in the logs. If your RabbitMQ installation contains
+important data then we recommend you contact support at rabbitmq.com for
+assistance with the upgrade.
diff --git a/release-notes/README-3.1.3.txt b/release-notes/README-3.1.3.txt
new file mode 100644
index 0000000000..3d0163d3e0
--- /dev/null
+++ b/release-notes/README-3.1.3.txt
@@ -0,0 +1,12 @@
+Release: RabbitMQ 3.1.3
+
+Release Highlights
+==================
+
+management plugin
+------------------
+bug fixes
+
+25643 fix bug that led to incorrect reporting of accumulated
+ stats (since 3.1.2)
+
diff --git a/release-notes/README-3.1.4.txt b/release-notes/README-3.1.4.txt
new file mode 100644
index 0000000000..dc42d20f02
--- /dev/null
+++ b/release-notes/README-3.1.4.txt
@@ -0,0 +1,91 @@
+Release: RabbitMQ 3.1.4
+
+Security Fixes
+==============
+
+server
+------
+25686 ensure DLX declaration checks for publish permission (since 2.8.0)
+
+
+management plugin
+-----------------
+24803 update to a later version of Mochiweb that fixes a directory traversal
+ vulnerability allowing arbitrary file access on Windows (since 2.1.0)
+
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+25638 fix resource leak with mirrored queues when whole clusters stop
+ (since 3.0.0)
+25624 fix queue crash in mirrored queue handling of messages during promotion
+ (since 2.6.0)
+25615 25670 fix race conditions in mirrored queues when different cluster
+ nodes start and stop near-simultaneously (since 2.6.0)
+25617 fix corrupt_cluster_status_files error after abrupt node shutdown
+ (since 3.0.0)
+25645 fix mirrored queue sync failure in the presence of un-acked messages
+ not at the head of the queue (since 3.1.0)
+25640 fix race condition leading to channel crash with low prefetch count,
+ repeated basic.consume and basic.cancel (since 3.1.0)
+25625 fix memory leak of mirrored queue messages during promotion
+ (since 2.6.0)
+25649 allow hipe compilation on Erlang R16B01
+25659 allow offline cluster node removal with a node which is not second
+ from last (since 3.0.0)
+25648 make `rabbitmqctl join_cluster' idempotent (since 3.0.0)
+25651 improve `rabbitmqctl cluster_status' handling of partition info when
+ cluster nodes are in the process of stopping (since 3.1.0)
+25689 ensure launch of subprocesses to monitor disk space and file handles
+ works correctly when clink shell is installed on Windows (since 2.1.0)
+25594 fix rabbit_error_logger crash during failed startup (since 1.4.0)
+25631 fix bug in shutdown sequence that could lead to spurious
+ INTERNAL_ERRORs being sent to clients (since 3.1.0)
+
+
+erlang client
+-------------
+bug fixes
+25632 fix broken error handling in amqp_network_connection that could lead
+ to a crash during broker shutdown (since 2.4.0)
+25688 fix bug in challenge-response auth handling (since 2.3.0)
+
+enhancements
+25674 add amqp_rpc_{client,server}:start_link()
+
+
+STOMP plugin
+------------
+bug fixes
+25691 fix connection crash on consumer cancellation notification (since 3.0.0)
+
+
+build and packaging
+-------------------
+bug fixes
+25668 add ssl support to OS X standalone package
+25584 ensure that VERSION is set correctly when building src packages
+ (since 2.7.0)
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply install
+the new version. All configuration and persistent message data is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install the new
+version on all the nodes and follow the instructions at
+https://www.rabbitmq.com/clustering.html#upgrading .
+
+To upgrade RabbitMQ from release 2.1.0, first upgrade to 2.1.1 (all data will be
+retained), and then to the current version as described above.
+
+When upgrading from RabbitMQ versions prior to 2.1.0, the existing data will be
+moved to a backup location and a fresh, empty database will be created. A
+warning is recorded in the logs. If your RabbitMQ installation contains
+important data then we recommend you contact support at rabbitmq.com for
+assistance with the upgrade.
diff --git a/release-notes/README-3.1.5.txt b/release-notes/README-3.1.5.txt
new file mode 100644
index 0000000000..7e687f4426
--- /dev/null
+++ b/release-notes/README-3.1.5.txt
@@ -0,0 +1,39 @@
+Release: RabbitMQ 3.1.5
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+25713 fix crash in the delegate mechanism leading to various crashes, and
+ intra-cluster incompatibility between RabbitMQ 3.1.4 and other members
+ of the 3.1.x series (since 3.1.4)
+25700 25710 prevent (harmless) errors being logged when pausing in
+ pause_minority mode (since 3.1.0)
+
+
+LDAP plugin
+-----------
+bug fixes
+25703 prevent channel crash when attempting to retrieve LDAP attribute that
+ does not exist (since 2.7.0)
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply install
+the new version. All configuration and persistent message data is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install the new
+version on all the nodes and follow the instructions at
+https://www.rabbitmq.com/clustering.html#upgrading .
+
+To upgrade RabbitMQ from release 2.1.0, first upgrade to 2.1.1 (all data will be
+retained), and then to the current version as described above.
+
+When upgrading from RabbitMQ versions prior to 2.1.0, the existing data will be
+moved to a backup location and a fresh, empty database will be created. A
+warning is recorded in the logs. If your RabbitMQ installation contains
+important data then we recommend you contact support at rabbitmq.com for
+assistance with the upgrade.
diff --git a/release-notes/README-3.2.0.txt b/release-notes/README-3.2.0.txt
new file mode 100644
index 0000000000..1f25349b86
--- /dev/null
+++ b/release-notes/README-3.2.0.txt
@@ -0,0 +1,191 @@
+Release: RabbitMQ 3.2.0
+
+server
+------
+bug fixes
+25602 fix race condition that could cause mirrored queues to corrupt state
+ during promotion (since 2.6.0)
+25745 prevent HA queue from becoming masterless if multiple nodes shut down in
+ quick succession (since 2.6.0)
+25685 prevent race that leads to a masterless queue when a mirror and
+ previous leader replica start simultaneously (since 2.6.0)
+25815 ensure that persistent messages with expiration property timeout
+ correctly after broker restarts (since 3.0.0)
+25780 stop ram nodes from becoming disc nodes when started in isolation
+ (since 3.0.0)
+25404 prevent potential deadlocks during shutdown
+25822 prevent crash at startup when starting a clustered node hosting a
+ durable non-HA queue which had been bound to a transient exchange which
+ was deleted when the node was down (since 2.5.0)
+25390 tolerate corrupt queue index files with trailing zeroes during boot
+ (since 2.0.0)
+25704 remove possibility of "incompatible Erlang bytecode" failure in cluster
+ startup (since 3.1.0)
+25721 fix logging of config file location (since 3.1.0)
+25276 ensure queues declared as exclusive are not durable or mirrored
+ (since 2.6.0)
+25757 prevent error being logged when an exclusive queue owner disconnects
+ during declaration (since 3.0.0)
+25675 prevent crash when sending OTP status query to writer or heartbeater
+ processes (since 1.0.0)
+
+enhancements
+25553 support for federated queues
+25749 allow alternate and dead-letter exchanges, queue max length, expiry and
+ message TTL to be controlled by policy as well as AMQP arguments
+24094 report client authentication errors during connection establishment
+ explicitly using connection.close
+25191 inform clients when memory or disk alarms are set or cleared
+25572 allow policies to target queues or exchanges or both
+25726 make it harder to trigger the disk space alarm with default settings
+25597 offer greater control over threshold at which messages are paged to disk
+25716 allow missing exchanges & queues to be deleted and unbound without
+ generating an AMQP error
+25725 implement consumer priorities
+23958 backport OTP process supervision infrastructure improvements
+25733 relax type constraints of header exchanges
+25809 add support for specifying a SSL verify_fun name in the config file
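+
+Sketch of items 25749 and 25572, applying length, TTL and dead-letter settings
+to queues via a policy (names and values are examples only):
+
+    rabbitmqctl set_policy --apply-to queues limits "^bulk\." \
+        '{"max-length": 100000, "message-ttl": 60000, "dead-letter-exchange": "dlx"}'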
+
+
+building & packaging
+--------------------
+enhancements
+20384 add sample configuration file
+
+dependency change
+25581 require at least Erlang version R13B03 for broker and plugins
+
+feature removal
+25455 remove RabbitMQ-maintained MacPorts repository
+
+
+management plugin
+-----------------
+bug fixes
+25601 report on queue lengths and data rates in a more timely fashion
+ (since 3.1.0)
+25676 display chart times in the local time zone rather than UTC (since 3.1.0)
+25770 prevent over-enthusiastic caching of web UI templates (since 2.1.0)
+
+enhancements
+25063 support arrays in web interface for arguments, policies and headers
+25598 display queue paging information
+25711 improve handling of defaults in config file by rabbitmqadmin (thanks to
+ Simon Lundström)
+25747 de-emphasise internal federation queues and exchanges
+25778 introduce 'policymaker' tag, permitting policy & parameter operations
+ without being full administrator
+25616 more readable number formatting in graph labels
+25641 permit turning tracing on/off using the HTTP API
+25811 add support for web UI authentication via the initial URI
+25792 optimise monitoring of file descriptors on OS X
+
+
+LDAP plugin
+-----------
+enhancements
+25479 support boolean operators in queries
+25724 avoid setting a probably non-existent host in the default configuration
+
+
+federation plugin
+-----------------
+bug fixes
+25707 prevent upstream queues from being deleted, thus preventing deletion
+ upon policy change (since 3.0.0)
+
+enhancements
+25554 allow federation policy to specify a single upstream instead of an
+ upstream-set
+25797 various performance enhancements
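+
+Sketch of item 25554, pointing a policy at a single named upstream rather than
+an upstream-set (names are examples only):
+
+    rabbitmqctl set_policy federate-logs "^logs\." \
+        '{"federation-upstream": "my-upstream"}'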
+
+
+Web-STOMP plugin
+----------------
+enhancements
+25699 support for implicit subscriptions
+
+
+AMQP 1.0 plugin
+---------------
+bug fixes
+25404 prevent potential deadlocks during shutdown (since 3.1.0)
+
+enhancements
+25539 make the default virtual host configurable
+
+
+STOMP plugin
+------------
+bug fixes
+25692 prevent potential deadlocks during shutdown (since 2.3.0)
+25789 prevent incomplete TCP connection attempts from leaking processes
+ (since 2.3.0)
+
+enhancements
+25539 make the default virtual host configurable
+
+
+MQTT plugin
+-----------
+bug fixes
+25577 ensure resumed subscriptions become active immediately after reconnecting
+25744 correct client shutdown sequence in the event of failed startup
+
+
+jsonrpc-channel plugin
+----------------------
+bug fixes
+25776 fix dependencies that prevented the plugin from running (since 3.1.4)
+
+
+java client
+-----------
+bug fixes
+25708 prevent deadlock when calling blocking operations in handleCancel
+ (since 1.0.0)
+
+enhancements
+25736 added routing key to performance testing tool parameters
+25767 rename the performance testing tool (formerly MulticastMain)
+
+
+.net client
+-----------
+enhancements
+25552 make better use of generic types
+25595 consumer cancellation is now exposed as an event
+
+
+erlang client
+-------------
+bug fixes
+25682 prevent potential deadlocks during shutdown
+25743 prevent failures due to connection string lookup errors in protocols
+ other than AMQP 0-9-1 / 0-8 (since 2.8.1)
+25794 prevent startup error when using SSL on versions of Erlang from R16B01
+25677 prevent crash when reconsuming asynchronously with the same tag
+ (since 2.6.0)
+
+enhancements
+25520, 25804 optimise network performance (thanks to Jesper Louis Andersen)
+25782 support connection_timeout in AMQP URI
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply install
+the new version. All configuration and persistent message data is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install the new
+version on all the nodes and follow the instructions at
+https://www.rabbitmq.com/clustering.html#upgrading .
+
+To upgrade RabbitMQ from release 2.1.0, first upgrade to 2.1.1 (all data will be
+retained), and then to the current version as described above.
+
+When upgrading from RabbitMQ versions prior to 2.1.0, the existing data will be
+moved to a backup location and a fresh, empty database will be created. A
+warning is recorded in the logs. If your RabbitMQ installation contains
+important data then we recommend you contact support at rabbitmq.com for
+assistance with the upgrade.
diff --git a/release-notes/README-3.2.1.txt b/release-notes/README-3.2.1.txt
new file mode 100644
index 0000000000..3add3d1bf8
--- /dev/null
+++ b/release-notes/README-3.2.1.txt
@@ -0,0 +1,55 @@
+Release: RabbitMQ 3.2.1
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+25849 fix crash with {down_from_gm,down_from_gm} with multiply-mirrored queues
+ (since 3.2.0)
+25846 fix queue crashes when changing multiple HA policies simultaneously
+ (since 3.0.0)
+25618 ensure a mirrored queue mirror which crashes does not cause the leader replica to
+ crash (since 2.6.0)
+25838 prevent crashes due to timeouts when calling into the limiter
+ (since 3.1.0)
+25842 treat 32 bit Unix platforms as limited to 2GB not 4GB address space
+ (since 1.7.1)
+25845 reduce default heartbeat from 600s to 580s for better compatibility
+ with common load balancer configurations (since 3.0.0)
+25826 fix incorrect placement of file sync which could theoretically
+ corrupt files when written just before a crash (since 3.0.0)
+
+
+building / packaging
+--------------------
+bug fixes
+25835 ship a useful README in /usr/share/doc for Debian and RPM (since 1.0.0)
+
+
+management plugin
+-----------------
+bug fixes
+25811 fix web UI authentication via the initial URI for browsers which are
+ not Chrome (since 3.2.0)
+25861 fix web UI login when user name or password contains '%' (since 3.2.0)
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply install
+the new version. All configuration and persistent message data is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install the new
+version on all the nodes and follow the instructions at
+https://www.rabbitmq.com/clustering.html#upgrading .
+
+To upgrade RabbitMQ from release 2.1.0, first upgrade to 2.1.1 (all data will be
+retained), and then to the current version as described above.
+
+When upgrading from RabbitMQ versions prior to 2.1.0, the existing data will be
+moved to a backup location and a fresh, empty database will be created. A
+warning is recorded in the logs. If your RabbitMQ installation contains
+important data then we recommend you contact support at rabbitmq.com for
+assistance with the upgrade.
diff --git a/release-notes/README-3.2.2.txt b/release-notes/README-3.2.2.txt
new file mode 100644
index 0000000000..eb76c4f243
--- /dev/null
+++ b/release-notes/README-3.2.2.txt
@@ -0,0 +1,60 @@
+Release: RabbitMQ 3.2.2
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+25873 prevent possibility of deadlock when mirrors start up (since 2.6.0)
+25867 ensure automatic synchronisation does not fail when a policy change
+ causes new mirrors to start and the leader replica to change simultaneously
+ (since 3.1.0)
+25870 prevent the worker pool from running out of processes due to processes
+ crashing (since 1.8.0)
+25899 prevent race leading to cluster upgrade failure when multiple nodes
+ attempt secondary upgrade simultaneously (since 2.4.1)
+25912 correct reporting of flow control when connections become idle
+ (since 2.8.0)
+
+
+LDAP plugin
+-----------
+bug fixes
+25863 prevent channels crashing during broker shutdown (since 2.3.0)
+
+
+management plugin
+-----------------
+bug fixes
+25872 prevent empty queues from showing length as '?' after going idle
+ (since 3.1.0)
+25889 ensure GET /api/overview uses consistent data types when server is idle
+ (since 2.1.0)
+25920 prevent rabbitmqadmin failure when no home directory is set (since 3.2.0)
+
+
+MQTT plugin
+-----------
+bug fixes
+25887 prevent possible error in the presence of multiple client IDs
+25905 fix handling of acks from the broker with the 'multiple' flag set
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply install
+the new version. All configuration and persistent message data is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install the new
+version on all the nodes and follow the instructions at
+https://www.rabbitmq.com/clustering.html#upgrading .
+
+To upgrade RabbitMQ from release 2.1.0, first upgrade to 2.1.1 (all data will be
+retained), and then to the current version as described above.
+
+When upgrading from RabbitMQ versions prior to 2.1.0, the existing data will be
+moved to a backup location and a fresh, empty database will be created. A
+warning is recorded in the logs. If your RabbitMQ installation contains
+important data then we recommend you contact support at rabbitmq.com for
+assistance with the upgrade.
diff --git a/release-notes/README-3.2.3.txt b/release-notes/README-3.2.3.txt
new file mode 100644
index 0000000000..6b7b095fc5
--- /dev/null
+++ b/release-notes/README-3.2.3.txt
@@ -0,0 +1,64 @@
+Release: RabbitMQ 3.2.3
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+25936 stem leak when queues with active consumers terminate (since 3.2.0)
+25928 fix cosmetic error when sending connection.close-ok after client
+ already closed the connection (since 1.0.0)
+25965 limit messages to ~2GB to prevent "Absurdly large distribution output
+ data buffer" VM crash (since 1.0.0)
+24927 avoid broker being overwhelmed while logging benign messages starting with
+ "Discarding messages" (since 1.0.0)
+25952 prevent "Absurdly large distribution output data buffer" VM crash when
+ sending many/large messages to a mirrored queue (since 2.6.0)
+25925 remove extraneous service parameters when installing on windows
+ (since 1.5.0)
+25929 prevent error being logged when connection is closed while it is still
+ being opened (since 1.0.0)
+
+
+federation plugin
+-----------------
+bug fixes
+25945 ensure federated queues correctly stop federating messages when channels
+ close or crash without cancellation from consumers (since 3.2.0)
+25971 prevent crash of federated mirrored queues on deletion (since 3.2.0)
+25956 prevent federation of the queues used internally by federated exchanges
+ (since 3.2.0)
+25949 prevent unnecessary CPU use when ACKs are not in use (since 2.6.0)
+
+
+shovel plugin
+-----------------
+bug fixes
+25934 remove ordering constraint on configuration items (since 2.0.0)
+25949 prevent unnecessary CPU use when ACKs are not in use (since 2.0.0)
+
+
+LDAP plugin
+-----------
+bug fixes
+25914 fix use of dn_lookup_attribute configuration on OpenLDAP (since 2.8.0)
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply install
+the new version. All configuration and persistent message data is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install the new
+version on all the nodes and follow the instructions at
+https://www.rabbitmq.com/clustering.html#upgrading .
+
+To upgrade RabbitMQ from release 2.1.0, first upgrade to 2.1.1 (all data will be
+retained), and then to the current version as described above.
+
+When upgrading from RabbitMQ versions prior to 2.1.0, the existing data will be
+moved to a backup location and a fresh, empty database will be created. A
+warning is recorded in the logs. If your RabbitMQ installation contains
+important data then we recommend you contact support at rabbitmq.com for
+assistance with the upgrade.
diff --git a/release-notes/README-3.2.4.txt b/release-notes/README-3.2.4.txt
new file mode 100644
index 0000000000..71510cb62f
--- /dev/null
+++ b/release-notes/README-3.2.4.txt
@@ -0,0 +1,87 @@
+Release: RabbitMQ 3.2.4
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+26014 prevent 541 internal error removing a nonexistent binding from a topic
+ exchange (since 3.2.0)
+25762 fix failure to delete virtual host if a queue in the virtual host is
+ deleted concurrently (since 1.0.0)
+26013 ensure connection.blocked is sent in all circumstances it should be
+ (since 3.2.0)
+26006, 26038 ensure autoheal does not hang if a node is manually stopped
+ during autoheal (since 3.1.0)
+26000 prevent crash of mirrored supervisor in some circumstances after a
+ network partition is healed (since 2.6.0)
+25972 fix syntax error in example configuration file (since 3.2.0)
+
+
+management plugin
+-----------------
+bug fixes
+24476 prevent statistics database from vanishing after a network partition is
+ healed (since 2.8.3)
+25983 prevent "node statistics not available" error when mochiweb is
+ configured with an explicit list of SSL ciphers (since 2.6.0)
+
+
+federation plugin
+-----------------
+bug fixes
+25998 ensure upstreams which are timing out establishing network connections
+ can be deleted in a timely fashion (since 3.0.0)
+
+
+shovel plugin
+-------------
+bug fixes
+25996 ensure shovels which are timing out establishing network connections
+ do not block broker shutdown (since 2.3.0)
+
+
+STOMP plugin
+------------
+bug fixes
+26028 prevent potential deadlocks during shutdown (since 2.3.0)
+
+
+MQTT plugin
+-----------
+bug fixes
+25982 ensure messages published with QOS=1 are persistent (since 3.1.0)
+
+
+Erlang client
+-------------
+bug fixes
+26041 prevent rare, fake "541 internal error" reported client-side when
+ shutting down connections (since 2.1.1)
+
+
+.NET client
+-----------
+bug fixes
+26016 ensure SSL connection establishment times out if necessary (since 1.0.0)
+26047 ensure IModel.ConfirmSelect() is idempotent (since 2.3.0)
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply install
+the new version. All configuration and persistent message data is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install the new
+version on all the nodes and follow the instructions at
+https://www.rabbitmq.com/clustering.html#upgrading .
+
+To upgrade RabbitMQ from release 2.1.0, first upgrade to 2.1.1 (all data will be
+retained), and then to the current version as described above.
+
+When upgrading from RabbitMQ versions prior to 2.1.0, the existing data will be
+moved to a backup location and a fresh, empty database will be created. A
+warning is recorded in the logs. If your RabbitMQ installation contains
+important data then we recommend you contact support at rabbitmq.com for
+assistance with the upgrade.
diff --git a/release-notes/README-3.3.0.txt b/release-notes/README-3.3.0.txt
new file mode 100644
index 0000000000..6787082deb
--- /dev/null
+++ b/release-notes/README-3.3.0.txt
@@ -0,0 +1,244 @@
+Release: RabbitMQ 3.3.0
+
+Security Fixes / Changes
+========================
+
+server
+------
+25603 prevent access using the default guest/guest credentials except via
+ localhost (since 1.0.0)
+
+
+LDAP plugin
+-----------
+26052 do not allow use of password "" when connecting to servers which
+ permit unauthenticated bind (since 2.3.0)
+
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+26069 ensure that memory use is bounded when logging crash of large
+ processes (since 1.0.0)
+25589 ensure that a queue which needs to reduce its RAM usage but is not
+ receiving any publishes still pages out messages quickly (since 2.0.0)
+25991 fix topic routing in the face of multiple bindings that differ only
+ in arguments (e.g. those created by the federation plugin) (since 2.4.0)
+26027 ensure autoheal does not hang winner node if 'rabbitmqctl stop_app'
+ issued on other node during healing (since 3.1.0)
+26043 ensure autoheal does not crash if multiple autoheal requests occur
+ in quick succession and the leader makes different decisions for each
+ (since 3.1.0)
+26088 fix failure to delete virtual host if a queue in the virtual host is
+ deleted concurrently (since 1.0.0) (incorrectly reported fixed in 3.2.4)
+25374 interpret AMQP field type 'b' as signed byte rather than unsigned, for
+ compatibility with our errata, and the majority of clients (since 1.0.0)
+26058 prevent inaccurate (sometimes negative) report of memory use by
+ plugins (since 3.1.0)
+26063 prevent spurious rabbit_node_monitor error messages when pausing in
+ pause_minority mode (since 3.2.4)
+
+enhancements
+25888 give busy queues a bias towards delivering more messages than they
+ accept, ensuring they tend to become empty rather than huge
+26070 automatically reconsume when mirrored queues fail over (and
+ introduce x-cancel-on-ha-failover argument for the old behaviour)
+25882 provide 'consumer utilisation' metric to help determine if consumers
+ are being held back by low prefetch counts
+26039 ensure rabbitmqctl's formatting of process IDs is more shell-script
+ friendly
+25654 allow use of separate modules for authentication and authorisation
+25722 explicitly set Erlang distribution port in all circumstances
+25910 add process identification information to process dictionary to aid in
+ debugging
+25922 log reason why a node considered another node to be down
+25860 enforce the rule that object names must be valid UTF-8 strings
+25836 prevent deletion of amq.* built-in exchanges, and mark appropriate ones
+ as internal
+25817 allow plugins to rewrite AMQP methods
+25979 announce cluster-id when clients connect
+26042 allow log_levels config item to silence mirrored queue events
+26065 ensure config file location is logged even if config file is absent
+22525 allow server-wide configuration of channel_max
+25627 show current alarms in 'rabbitmqctl status'
+
+performance improvements
+25957 change semantics of basic.qos global flag in order to allow for
+ greatly enhanced prefetch performance
+ (see https://www.rabbitmq.com/consumer-prefetch.html)
+25827,25853 substantially improve performance stopping and starting brokers
+ with many durable queues
+26001 improve performance of persistent confirmed messages on spinning disks
+24408 improve performance of mandatory publication (very substantially when
+ publishing across clusters)
+25939,25942,25943 improve performance reading and writing AMQP (especially
+ reading large messages)
+
+feature removal
+25962 remove support for client-sent channel.flow method; basic.qos is
+ superior in all cases
+25985 remove support for 'impersonator' tag
+
+
+building & packaging
+--------------------
+bug fixes
+25989 put the example config file in the correct directory for generic
+ Unix and standalone Mac, and put a copy in the AppData directory
+ on Windows (since 3.2.0).
+26093 Debian: remove obsolete DM-Upload-Allowed field (thanks to Thomas
+ Goirand)
+26094 Debian: ensure package can be built multiple times and remove
+ files from debdiff that should not be there (thanks to Thomas Goirand)
+
+enhancements
+25844 switch standalone Mac release to 64 bit architecture
+
+
+management plugin
+-----------------
+bug fixes
+25868 ensure connections in flow control for a long time still show 'flow'
+ and do not transition to 'blocked' (since 2.8.0)
+
+enhancements
+24829 provide information about flow control status of internal components,
+ to help find bottlenecks
+25876 provide regex option when filtering lists (with thanks to Joseph Weeks)
+
+feature removal
+25720 remove the redirect from 2.x-era port 55672 to 15672
+
+
+federation plugin
+-----------------
+enhancements
+23906 implement cycle detection for messages forwarded over federation
+25985 remove requirement to configure local-username
+26042 allow log_levels config item to silence federation events
+25979 replace local-nodename with (non-federation-specific) cluster-id
+25902 preserve original routing key when forwarding messages via
+ queue federation
+25904 tidy up nomenclature in federation status / management
+
+
+shovel plugin
+-------------
+enhancements
+24851 introduce "dynamic" shovels, controlled by parameters in a similar way
+ to federation (see https://www.rabbitmq.com/shovel-dynamic.html)
+25890 make shovel status rather more informative
+25894 introduce "auto-delete" dynamic shovels
+25935 allow dynamic shovels to record routing information similarly to
+ federation
+26045 default prefetch-count to 1000 rather than unlimited
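+
+Sketch of item 24851, declaring a dynamic shovel as a runtime parameter (queue
+names and the destination URI are examples only):
+
+    rabbitmqctl set_parameter shovel my-shovel \
+        '{"src-uri": "amqp://", "src-queue": "source",
+          "dest-uri": "amqp://remote-host", "dest-queue": "destination"}'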
+
+
+LDAP plugin
+-----------
+enhancements
+25785 add 'dn_lookup_bind' to allow lookup of a user's DN before binding
+25839 allow specification of SSL options for (e.g.) presenting client
+ certificates when connecting to an LDAP server
+26022 support timeouts when connecting to an LDAP server
+25570 replace 'as_user_no_password' error with something which makes more sense
+
+
+STOMP plugin
+------------
+bug fixes
+26010 remove examples for the headers exchange that haven't worked since 2010
+
+
+MQTT plugin
+-----------
+bug fixes
+25941 ensure keepalives are implemented completely (since xxx)
+
+enhancements
+26067 initial support for MQTT 3.1.1 draft
+25877 support specifying vhost at the time of connection
+
+
+Web-STOMP plugin
+----------------
+bug fixes
+25896 ensure examples set content-type (since 3.0.0)
+
+enhancements
+25828 upgrade cowboy to get sec-websocket-protocol support
+25913 support SSL (with thanks to James Shiell)
+
+
+JSON-RPC channel plugin
+-----------------------
+feature removal
+26029 remove JSON-RPC channel plugin from the official release
+
+
+java client
+-----------
+enhancements
+14587 support automatically reconnecting to server(s) if connection is
+ interrupted
+26008 make it easier to start a Connection with a custom ExceptionHandler
+25833 allow specifying a per-ConnectionFactory ExecutorService
+25999 handle running in a security-restricted context (e.g. Google App engine)
+25663 improve type safety of ShutdownSignalException "reason" property
+26068 improve clarity of AlreadyClosedException reason
+26015 make Envelope.toString() do something useful
+
+
+.net client
+-----------
+bug fixes
+25911 ensure Subscriptions are informed if a channel closes (since 1.4.0)
+25374 interpret AMQP field type 'b' as signed byte rather than unsigned, for
+ compatibility with our errata, and the majority of other clients
+ (since 1.0.0)
+25046 ensure timeout establishing connection does not throw
+ System.NotSupportedException (since 1.0.0)
+25278 ensure timeout establishing connection does not throw
+ ProtocolVersionMismatchException (since 2.0.0)
+26071 ensure attempted use of a closed channel leads to
+ AlreadyClosedException (since 1.0.0)
+25082 ensure EventingConsumer sets supertype model field (since 1.0.0)
+
+enhancements
+25895 support for SSL cert selection and validation callbacks (thanks to
+ "pooleja")
+26068 improve clarity of AlreadyClosedException reason
+
+
+erlang client
+-------------
+bug fixes
+25374 interpret AMQP field type 'b' as signed byte rather than unsigned, for
+ compatibility with our errata, and the majority of other clients
+ (since 1.0.0)
+26050 add missing handle_server_cancel/2 to amqp_gen_consumer_spec.hrl
+
+enhancements
+25985 do not require direct connections to specify a username
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply install
+the new version. All configuration and persistent message data is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install the new
+version on all the nodes and follow the instructions at
+https://www.rabbitmq.com/clustering.html#upgrading .
+
+To upgrade RabbitMQ from release 2.1.0, first upgrade to 2.1.1 (all data will be
+retained), and then to the current version as described above.
+
+When upgrading from RabbitMQ versions prior to 2.1.0, the existing data will be
+moved to a backup location and a fresh, empty database will be created. A
+warning is recorded in the logs. If your RabbitMQ installation contains
+important data then we recommend you contact support at rabbitmq.com for
+assistance with the upgrade.
diff --git a/release-notes/README-3.3.1.txt b/release-notes/README-3.3.1.txt
new file mode 100644
index 0000000000..276bbf96eb
--- /dev/null
+++ b/release-notes/README-3.3.1.txt
@@ -0,0 +1,107 @@
+Release: RabbitMQ 3.3.1
+
+Security Fixes
+==============
+
+MQTT plugin
+-----------
+26109 prevent potential DOS attack on SSL handshake failure (since 3.0.0)
+
+
+shovel plugin
+-------------
+26100 prevent dynamic shovels from allowing policymaker users to access vhosts
+ they should not be able to (since 3.3.0)
+
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+26084 fix race condition causing queue mirrors to occasionally not be promoted
+ on clean leader replica shutdown (since 3.0.0)
+26115 prevent badmatch crash on mirror queue init during rapid restarts
+ (since 3.2.0)
+26117 prevent mirror being incorrectly considered dead when added at the same
+ time as another mirror dies (since 3.2.0)
+26118 prevent mirror queue crash if a queue hibernates before it has fully
+ started (since 3.2.2)
+26125 prevent possible deadlock when mirror becomes synchronised
+ simultaneously with another mirror starting (since 3.1.0)
+26103 ensure dead-letter cycle detection works when dead-lettering messages due
+ to exceeding the queue's max-length (since 3.1.0)
+26123 ensure worker pool state does not become corrupt if a worker process
+ crashes while idle (since 1.8.0)
+25855 ensure disk monitor does not crash the broker on platforms where
+      intermediate OS processes may be killed, such as Google Compute Engine
+ (since 2.8.2)
+26096 clarify rabbitmqctl diagnostic messages (since 3.3.0)
+26102 prevent cosmetic GM crash on clean shutdown (since 2.8.7)
+26104 fix format of queue synchronisation log messages (since 3.3.0)
+26114 ensure crash report shrinking does not shrink reports too much
+ (since 3.3.0)
+
+enhancements
+26098 bring back 'impersonator' tag removed in 3.3.0
+26113 add a capability to allow clients to detect the new qos semantics
+ introduced in 3.3.0 without requiring an explicit version check
+
+
+management plugin
+-----------------
+bug fixes
+26140 prevent malformed message being created when publishing with priority
+ or timestamp properties set (since 2.4.0)
+26110 ensure statistics database GC works in a timely manner when the number
+ of objects tracked grows rapidly (since 3.1.0)
+26124 prevent "" being added as the last element of an array when adding
+ an array to queue or exchange arguments via the web UI (since 3.2.0)
+26127 ensure that statistics database startup does not block broker startup
+ for O(queues) time (since 2.8.0)
+26134 improve diagnostics when failing to count used FDs on {Free,Open,Net}BSD
+ (since 2.8.0)
+
+
+shovel-management plugin
+------------------------
+bug fixes
+26105 allow adding dynamic shovels when there are multiple virtual hosts
+ and show correct UI to users with policymaker and monitoring tags
+ (since 3.3.0)
+
+
+STOMP plugin
+------------
+bug fixes
+26061 reject publishes to destination "", rather than creating a server-named
+ queue (since 2.0.0)
+
+
+Java client
+-----------
+bug fixes
+26111 prevent connection crash on recovery when a connection consumes from
+ many queues (since 3.3.0)
+26099 clarify AlreadyClosedException.toString() in the case when the
+ connection closed for a non-AMQP reason (since 3.3.0)
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply install
+the new version. All configuration and persistent message data is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install the new
+version on all the nodes and follow the instructions at
+https://www.rabbitmq.com/clustering.html#upgrading .
+
+To upgrade RabbitMQ from release 2.1.0, first upgrade to 2.1.1 (all data will be
+retained), and then to the current version as described above.
+
+When upgrading from RabbitMQ versions prior to 2.1.0, the existing data will be
+moved to a backup location and a fresh, empty database will be created. A
+warning is recorded in the logs. If your RabbitMQ installation contains
+important data then we recommend you contact support at rabbitmq.com for
+assistance with the upgrade.
diff --git a/release-notes/README-3.3.2.txt b/release-notes/README-3.3.2.txt
new file mode 100644
index 0000000000..7d5a7725f1
--- /dev/null
+++ b/release-notes/README-3.3.2.txt
@@ -0,0 +1,118 @@
+Release: RabbitMQ 3.3.2
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+26180 prevent certain operations (including queue creation and deletion)
+ blocking until a connection closes when the socket limit is reached
+ (since 2.7.0)
+26227 fix incorrect log message about config file location when running as
+ a Windows service, changing RABBITMQ_CONFIG_FILE and not reinstalling
+ the service (since 3.3.0)
+26172 ensure mirror queue does not hang if the GM process crashes at queue
+ startup (since 2.6.0)
+26178 prevent error logger crash in rare circumstances (since 3.3.0)
+26184 prevent small log messages being needlessly truncated (since 3.3.0)
+26226 ensure rabbitmqctl status does not crash if invoked while Mnesia is
+ starting or stopping (since 3.0.0)
+26200 fix garbled SSL log messages (since 3.0.3)
+26203 prevent spurious log message if mirror queue crashes early (since 3.2.2)
+
+
+management plugin
+-----------------
+bug fixes
+26197 fix garbled error message if importing JSON definitions file with invalid
+ input (since 2.1.0)
+26209 ensure reasons for authentication failure are always logged (since 2.1.0)
+
+enhancements
+25376 add documentation on the JSON schema returned by GET queries
+
+
+shovel plugin
+-------------
+bug fixes
+26219 fix creation of dynamic shovels using direct connection URLs through
+ rabbitmqctl (since 3.3.1)
+26176 prevent deadlock deleting virtual host with active dynamic shovel on
+ single core machine (since 3.3.0)
+
+
+federation plugin
+-----------------
+bug fixes
+26176 prevent deadlock deleting virtual host with active federation link on
+ single core machine (since 3.0.0)
+
+
+shovel-management plugin
+------------------------
+bug fixes
+26165 ensure the status of static shovels is correctly shown (since 3.3.1)
+
+
+LDAP plugin
+-----------
+bug fixes
+26190 fix crash when LDAP uses SSL and nothing else does (since 2.3.0)
+
+
+auth-mechanism-ssl plugin
+-------------------------
+bug fixes
+25550 allow use of both certificate and password based authentication at the
+ same time (since 2.3.0)
+
+
+MQTT plugin
+-----------
+bug fixes
+26194 prevent hang on broker shutdown when there are active MQTT connections
+ (since 3.0.0)
+26189 fix connection crash on shutdown if the connection starts very early
+ (since 3.0.0)
+
+
+STOMP plugin
+------------
+bug fixes
+25550 allow use of both certificate and password based authentication at the
+ same time (since 2.3.0)
+
+
+Java client
+-----------
+bug fixes
+26187 ensure network recovery delay is used when recovering from all types of
+ exception (since 3.3.0)
+26188 ensure TopologyRecoveryException includes cause's message (since 3.3.0)
+26196 fix Javadoc for ConnectionFactory.setSocketConfigurator()
+
+
+Erlang client
+-------------
+bug fixes
+26160 declare xmerl application dependency (since 3.3.0)
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply install
+the new version. All configuration and persistent message data is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install the new
+version on all the nodes and follow the instructions at
+https://www.rabbitmq.com/clustering.html#upgrading .
+
+To upgrade RabbitMQ from release 2.1.0, first upgrade to 2.1.1 (all data will be
+retained), and then to the current version as described above.
+
+When upgrading from RabbitMQ versions prior to 2.1.0, the existing data will be
+moved to a backup location and a fresh, empty database will be created. A
+warning is recorded in the logs. If your RabbitMQ installation contains
+important data then we recommend you contact support at rabbitmq.com for
+assistance with the upgrade.
diff --git a/release-notes/README-3.3.3.txt b/release-notes/README-3.3.3.txt
new file mode 100644
index 0000000000..fff7d6a7b0
--- /dev/null
+++ b/release-notes/README-3.3.3.txt
@@ -0,0 +1,46 @@
+Release: RabbitMQ 3.3.3
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+26236 prevent log files from being silenced if certain processes crash
+ (since 3.3.2)
+26241 fix disk space monitor crash when using {mem_relative, Ratio}
+ configuration (since 3.2.0)
+24759 run shell scripts with '-e' (since 1.0.0)
+
+
+STOMP plugin
+------------
+bug fixes
+26238 fix queue leak on subscription to /exchange/<name>/<binding> when the
+ exchange does not exist (since 2.0.0)
+
+
+Java client
+-----------
+bug fixes
+26232 ensure channel shutdown listeners are not lost on connection recovery
+ (since 3.3.0)
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply install
+the new version. All configuration and persistent message data is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install the new
+version on all the nodes and follow the instructions at
+https://www.rabbitmq.com/clustering.html#upgrading .
+
+To upgrade RabbitMQ from release 2.1.0, first upgrade to 2.1.1 (all data will be
+retained), and then to the current version as described above.
+
+When upgrading from RabbitMQ versions prior to 2.1.0, the existing data will be
+moved to a backup location and a fresh, empty database will be created. A
+warning is recorded in the logs. If your RabbitMQ installation contains
+important data then we recommend you contact support at rabbitmq.com for
+assistance with the upgrade.
diff --git a/release-notes/README-3.3.4.txt b/release-notes/README-3.3.4.txt
new file mode 100644
index 0000000000..e237ae77d5
--- /dev/null
+++ b/release-notes/README-3.3.4.txt
@@ -0,0 +1,46 @@
+Release: RabbitMQ 3.3.4
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+26258 fix startup failure on systems with non-GNU readlink when started from a
+ symlink (e.g. Mac Homebrew) (since 3.3.3)
+26247 fix startup failure when inet_dist_listen_min / inet_dist_listen_max set
+ (since 3.3.3)
+26253 prevent unclear error message when config file completely empty
+ (since 3.3.0)
+
+
+STOMP plugin
+------------
+bug fixes
+26246 don't log a crash when client misses heartbeat (since 2.3.0)
+
+
+web-STOMP plugin
+----------------
+bug fixes
+26250 fix crash when Web-STOMP is the only SSL user in the broker on Erlang
+ R16B03 or later (since 3.0.0)
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply install
+the new version. All configuration and persistent message data is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install the new
+version on all the nodes and follow the instructions at
+https://www.rabbitmq.com/clustering.html#upgrading .
+
+To upgrade RabbitMQ from release 2.1.0, first upgrade to 2.1.1 (all data will be
+retained), and then to the current version as described above.
+
+When upgrading from RabbitMQ versions prior to 2.1.0, the existing data will be
+moved to a backup location and a fresh, empty database will be created. A
+warning is recorded in the logs. If your RabbitMQ installation contains
+important data then we recommend you contact support at rabbitmq.com for
+assistance with the upgrade.
diff --git a/release-notes/README-3.3.5.txt b/release-notes/README-3.3.5.txt
new file mode 100644
index 0000000000..e2480a7e12
--- /dev/null
+++ b/release-notes/README-3.3.5.txt
@@ -0,0 +1,99 @@
+Release: RabbitMQ 3.3.5
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+25921 prevent long delays in publishing after a node goes down and network
+ connections to it time out (since 2.8.3)
+26225 26293 greatly reduce the length of time between pause_minority mode
+ detecting a minority and refusing to accept further publishes
+ (since 3.1.0)
+26313 do not allow clients to override server-configured channel_max
+ (since 3.3.0)
+26159 prevent failure to start if memory monitor cannot determine total
+ system memory (since 1.7.1)
+26290 correctly read /proc/meminfo on Linux even if rows do not contain
+ colons (issue with certain vendor kernels) (since 1.7.1)
+
+enhancements
+26311 provide a mechanism for diagnosing stuck processes
+
+
+building & packaging
+--------------------
+bug fixes
+26322 add loopback_users to the sample configuration file (since 3.3.0)
+
+
+management plugin
+-----------------
+bug fixes
+26072 provide unminimised versions of all bundled Javascript libraries.
+ Fixes Debian bug #736781. (since 2.1.0)
+
+
+management visualiser plugin
+----------------------------
+bug fixes
+26072 provide unminimised versions of all bundled Javascript libraries.
+ Fixes Debian bug #736781. (since 2.1.0)
+
+
+federation plugin
+-----------------
+bug fixes
+26272 ensure changes to cluster name are picked up promptly and thus fix
+ cycle detection on cluster name change (since 3.3.0)
+26292 ensure that federation links apply the defined reconnect delay under
+ all circumstances (since 2.6.0)
+26299 fix leak when shrinking upstream-set immediately after federation
+ starts (since 3.0.0)
+
+
+shovel plugin
+-------------
+bug fixes
+26318 prevent dynamic shovel crash using add-forwarding-headers=true
+ without setting dest-queue or dest-exchange-key (since 3.3.0)
+26292 ensure that shovel workers apply the defined reconnect delay under
+ all circumstances (since 2.0.0)
+26328 prevent dynamic shovels from failing over to the new node whenever
+ a node comes up in a cluster (since 3.3.0)
+
+
+MQTT plugin
+-----------
+bug fixes
+26270 improve error messages on TLS/TCP connection failure (since 3.0.0)
+26281 log cleanly closed MQTT connections as we do for AMQP (since 3.0.0)
+
+
+AMQP 1.0 plugin
+---------------
+bug fixes
+26288 fix handling of the symbol type in message content (as used in
+ content_type and content_encoding) (since 3.1.0)
+26288 (also) fix mapping of AMQP 1.0 ttl and creation_time fields to
+ AMQP 0-9-1 timestamp and expiration fields (since 3.1.0)
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply install
+the new version. All configuration and persistent message data is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install the new
+version on all the nodes and follow the instructions at
+https://www.rabbitmq.com/clustering.html#upgrading .
+
+To upgrade RabbitMQ from release 2.1.0, first upgrade to 2.1.1 (all data will be
+retained), and then to the current version as described above.
+
+When upgrading from RabbitMQ versions prior to 2.1.0, the existing data will be
+moved to a backup location and a fresh, empty database will be created. A
+warning is recorded in the logs. If your RabbitMQ installation contains
+important data then we recommend you contact support at rabbitmq.com for
+assistance with the upgrade.
diff --git a/release-notes/README-3.4.0.txt b/release-notes/README-3.4.0.txt
new file mode 100644
index 0000000000..df04486cd9
--- /dev/null
+++ b/release-notes/README-3.4.0.txt
@@ -0,0 +1,269 @@
+Release: RabbitMQ 3.4.0
+
+Security Fixes
+==============
+
+management plugin
+-----------------
+26414 do not trust X-Forwarded-For header when enforcing 'loopback_users'
+
+various
+-------
+26419 disable SSLv3 by default to prevent the POODLE attack
+
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+26354 prevent force_event_refresh message from killing connections that have
+ not fully started (since 3.3.0)
+26347 ensure bindings are deleted when deleting queue records as part
+ of rabbitmqctl forget_cluster_node (since 3.0.3)
+26341 add assertions to prevent silent failure from DETS errors in
+ rabbit_recovery_terms (since 3.3.0)
+26171 prevent crash in rare conditions in gm:find_member_or_blank/2 during
+ mirror startup (since 2.6.0)
+26368 prevent autoheal from hanging when loser shuts down before the winner
+ learns it is the winner (since 3.1.0)
+25850 prevent excessive binary memory use when accepting or delivering
+ large messages at high speed (since 1.0.0)
+26230 ensure exchanges and queues are federated appropriately when
+      created while a policy exists to make them so but the plugin is
+      not enabled (since 3.0.0)
+26389 prevent consumer utilisation getting stuck at 0% after busy queue
+ goes idle (since 3.3.0)
+26370 prevent "rabbitmqctl cluster_status" from breaking the database
+ if invoked at the wrong point during first startup (since 3.0.0)
+26295 ensure "rabbitmqctl wait" waits for plugins to start
+26336 fix logging when cluster auto-config fails (since 3.0.0)
+26338 log enotconn as 'connection_closed_abruptly', not an internal
+ error (since 1.0.0)
+26343 fix warning about missing behaviour_info/1 in supervisor2 with
+ older Erlang (since 3.2.0)
+26363 ensure cluster auto-config does not try to cluster with nodes
+ which have had "rabbitmqctl stop_app" invoked (since 3.0.0)
+26378 fix compilation warnings about conflicting behaviours (since 2.6.0)
+26386 ensure broker starts even if vhost pointed to by default_vhost
+ config item has been deleted (since 1.0.0)
+26404 prevent queue synchronisation from hanging if there is a very
+ short partition just as it starts (since 3.1.0)
+
+enhancements
+21446 allow crashing queue processes to recover, using persistent data
+ if present
+25813 provide fast direct route for RPC replies
+ (see https://www.rabbitmq.com/direct-reply-to.html)
+24926 allow plugins to be enabled / disabled without restarting the server
+25884 add argument and policy to limit queue length in bytes
+26150 prevent clean leader replica shutdown from promoting unsynchronised
+ mirrors and thus losing messages; add ha-promote-on-shutdown
+ to configure
+26151 make queues located on down cluster nodes visible in "rabbitmqctl
+ list_queues"
+26213 prevent undefined behaviour during partial partitions by
+ promoting them to full ones
+26254 allow "rabbitmqctl forget_cluster_node" to promote mirrored queue mirrors
+ that are down and thus recover from loss of a node containing masters
+ after it was the last node to stop
+26256 add "rabbitmqctl force_boot" command to allow administrator to
+ override RabbitMQ's idea of the last node to shut down
+26307 add messages_{ready,unacknowledged}_ram / messages_ram /
+ messages_persistent queue info keys
+25666 / 26339 add message_bytes / message_bytes_{ready,unacknowledged,
+ ram,persistent} queue info keys
+25214 improve robustness in the face of stray messages from Mnesia after
+ partitions
+25279 make SSL handshake timeout configurable
+25678 make mnesia table loading timeout configurable
+26148 add username and vhost to amqp_error log messages
+26169 add username / connection information to firehose trace messages
+26242 improve clarity of rabbitmqctl error messages when stop_app has been
+ invoked
+26225 provide connection age in rabbitmqctl list_connections
+25446 add "rabbitmq-plugins set" subcommand
+25824 / 26398 provide a summary of binary memory use
+26397 split out memory used by mirrors vs masters / unmirrored in the
+ memory summary
+26401 split out memory used by connection readers / writers / channels / other
+ in the memory summary
+26192 improve usability of "rabbitmqctl remove_cluster_node --offline" by
+ not requiring the user to start a node with RABBITMQ_NODE_ONLY
+18626 add RABBITMQ_USE_LONGNAME (with thanks to Marcos Diez)
+26204 allow message TTL and queue expiry times above (2^32)-1 milliseconds
+26211 fix use of type specifications deprecated in Erlang 17
+26366 improve error messages when queue / exchange equivalence checks fail
+26387 use new strange way to determine OTP minor version number
+26394 add cluster heartbeat messages at a faster rate than net_ticktime
+26406 add environment for plugins and non-RabbitMQ apps to "rabbitmqctl
+ environment"
+25848 warn if RABBITMQ_SERVER_ERL_ARGS set in a way that will lead to
+ poor performance
+25454 warn if rabbitmq-plugins and rabbitmq-server disagree on the
+ location of the enabled_plugins file
+26221 improve performance of queue.declare{nowait=true}
+
+
+building & packaging
+--------------------
+enhancements
+26344 ensure missing config file is correctly logged in .deb / RPM
+ (since 3.3.0)
+26154 switch standalone OS X build to use Erlang 17.1
+26040 add missing BuildRequires to RPM spec (since 1.8.0)
+26411 fix warning on Debian build clean due to deleted files
+
+
+management plugin
+-----------------
+enhancements
+26107 provide (and default to) mode where we maintain message rates
+ only per object (queue, exchange etc) not per object
+ pair (queue->channel etc) to save memory
+26174 improve responsiveness of management API under load
+25329 maintain history and draw charts for some per-node stats
+ (memory, disk space etc)
+25470 provide UI to show / hide series in charts in the web UI
+26382 provide UI to show / hide columns in the web UI
+26225 provide connection age in connection list
+25824 provide a summary of binary memory use
+26151 make queues located on down cluster nodes visible in queue list
+23724 provide API to list all consumers
+26340 redesigned, more concise interface for queue / exchange /
+ policy arguments
+26193 display locations of configuration, database and logs in management
+26193 detect and warn on mismatched net_ticktime setting
+26235 show enabled plugins in management
+25984 switch to HTML5 local storage where available, ensure
+ multiple web UIs on same host do not share login
+26358 support setting message properties with "rabbitmqadmin publish"
+26390 ensure all charts have the same time range
+26391 make "rabbitmqadmin list" restrict to a default set of columns
+
+bug fixes
+26399 ensure statistics do not depend on erlang:now/0 being in sync with
+ os:timestamp/0 (since 3.2.0)
+
+
+shovel plugin
+-------------
+enhancements
+26239 allow dynamic shovels to set message properties like static
+ ones do, and allow static shovels to use add_forward_headers like
+ dynamic ones do
+
+
+LDAP plugin
+-----------
+enhancements
+26275 support LDAP connections using StartTLS (requires Erlang R16B03 or later)
+
+
+tracing plugin
+--------------
+enhancements
+26357 add milliseconds to timestamps
+
+
+STOMP plugin
+------------
+enhancements
+26306 add flow control for message deliveries through STOMP; greatly reduces
+ memory use when slow consumers without prefetch-count connect to a
+ large / fast moving queue
+26243 ensure all stomp-named queues are named "stomp-*"
+26266 support "requeue" header on NACK frames
+
+
+MQTT plugin
+-----------
+enhancements
+26330 add flow control for message deliveries through MQTT; greatly reduces
+ memory use when slow consumers without prefetch-count connect to a
+ large / fast moving queue
+
+bug fixes
+26356 fix incorrect reporting of MQTT protocol version when using MQTT 3.1.1
+
+
+Web-STOMP plugin
+----------------
+enhancements
+26392 don't depend on the SockJS CDN
+
+
+java client
+-----------
+enhancements
+26402 provide a means to configure the time given to slow consumers
+ to continue consuming internally queued messages after the
+ connection closes
+26359 add listeners for queue name changes during recovery
+26207 add APIs to make methods easier to use in nowait mode
+26121 add --randomRoutingKey flag to PerfTest
+26091 add --consumerRate flag to PerfTest
+26348 make ConnectionFactory's networkRecoveryInterval property into a long
+
+bug fixes
+26364 clean up client-side references to auto-deleted queues in the
+ common case (since 3.3.0)
+26374 limit size of WorkPool queues, thus prevent slow consumer with no
+ prefetch limit from consuming unbounded memory (since 2.7.0)
+26413 prevent duplicate connection recovery listeners from being
+ registered (since 3.3.0)
+
+dependency change
+26095 drop support for Java 1.5
+
+licencing change
+24543 make the Java client additionally available under the ASL2
+
+
+.net client
+-----------
+enhancements
+26130 automatic connection recovery similar to that of the Java client
+26208 add APIs to make methods easier to use in nowait mode
+26324 introduce an interface for ConnectionFactory
+26334 set up stream timeouts as early as possible (thanks to John Oliver)
+26199 allow IO and heartbeat to be background threads
+25525 allow Subscription class to set explicit consumer tag
+26097 add support for nack / reject in Subscription
+26122 remove unnecessary lock in Subscription
+
+feature removal
+26131 / 26132 remove support for versions of AMQP prior to 0-9-1
+26133 remove redirect following
+
+
+erlang client
+-------------
+enhancements
+26166 allow default ssl options to be provided in the configuration file
+
+bug fixes
+26418 ensure writer death is detected in direct connections (since 3.2.0)
+ (with thanks to Christopher Faulet)
+26346 ensure amqp_rpc_client uses exclusive, autodelete response
+ queues (since 1.3.0)
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply install
+the new version. All configuration and persistent message data is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install the new
+version on all the nodes and follow the instructions at
+https://www.rabbitmq.com/clustering.html#upgrading .
+
+To upgrade RabbitMQ from release 2.1.0, first upgrade to 2.1.1 (all data will be
+retained), and then to the current version as described above.
+
+When upgrading from RabbitMQ versions prior to 2.1.0, the existing data will be
+moved to a backup location and a fresh, empty database will be created. A
+warning is recorded in the logs. If your RabbitMQ installation contains
+important data then we recommend you contact support at rabbitmq.com for
+assistance with the upgrade.
diff --git a/release-notes/README-3.4.1.txt b/release-notes/README-3.4.1.txt
new file mode 100644
index 0000000000..b742c67295
--- /dev/null
+++ b/release-notes/README-3.4.1.txt
@@ -0,0 +1,69 @@
+Release: RabbitMQ 3.4.1
+
+Security Fixes
+==============
+
+management plugin
+-----------------
+26437 prevent /api/* from returning text/html error messages which could
+ act as an XSS vector (since 2.1.0)
+26433 fix response-splitting vulnerability in /api/downloads (since 2.1.0)
+(thanks to Atholl Stewart for finding the above)
+
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+26425 ensure RABBITMQ_USE_LONGNAME / USE_LONGNAME is picked up correctly from
+ rabbitmq-env.conf (since 3.4.0)
+
+enhancements
+26429 add log messages when plugins are enabled or disabled at runtime
+
+
+management plugin
+-----------------
+bug fixes
+26431 fix web UI breakage when queue listing contains exclusive queues
+ (since 3.4.0)
+26438 fix internal server error when requesting permissions for a user or
+ vhost which does not exist (since 2.1.0)
+
+
+Java client
+-----------
+bug fixes
+26434 prevent exchange binding recovery from swapping source and
+ destination (since 3.3.0)
+26428 ensure pom.xml lists ASL 2.0 (since 3.4.0)
+
+
+.NET client
+-----------
+bug fixes
+26439 ensure attempt to open a channel on a closed connection fails
+ immediately (since 1.0.0)
+26435 fix typos in documentation and remove references to immediate
+ publishing (since 3.3.0)
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply install
+the new version. All configuration and persistent message data is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install the new
+version on all the nodes and follow the instructions at
+https://www.rabbitmq.com/clustering.html#upgrading .
+
+To upgrade RabbitMQ from release 2.1.0, first upgrade to 2.1.1 (all data will be
+retained), and then to the current version as described above.
+
+When upgrading from RabbitMQ versions prior to 2.1.0, the existing data will be
+moved to a backup location and a fresh, empty database will be created. A
+warning is recorded in the logs. If your RabbitMQ installation contains
+important data then we recommend you contact support at rabbitmq.com for
+assistance with the upgrade.
diff --git a/release-notes/README-3.4.2.txt b/release-notes/README-3.4.2.txt
new file mode 100644
index 0000000000..701207c4aa
--- /dev/null
+++ b/release-notes/README-3.4.2.txt
@@ -0,0 +1,66 @@
+Release: RabbitMQ 3.4.2
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+25788 prevent HA queue synchronisation from taking quadratic time when
+ there are many messages on disk (since 3.1.0)
+26474 prevent false positive detection of partial partitions (since 3.4.0)
+26460 prevent badarg in rabbit_diagnostics:looks_stuck/1 (since 3.3.5)
+26417 ensure rabbitmqctl does not get falsely disconnected from the
+ broker when net_ticktime has been reduced (since 1.0.0)
+26449 fix garbled inequivalent argument error messages (since 3.4.0)
+26468 fix removal of unmirrored queues as part of "rabbitmqctl
+ forget_cluster_node --offline" (since 3.4.0)
+26470 improve reliability of mirror promotion as part of
+ "rabbitmqctl forget_cluster_node --offline" (since 3.4.0)
+26367 ensure dead letter exchange arguments are checked for equivalence
+ on queue declaration (since 3.1.4)
+
+building and packaging
+----------------------
+bug fixes
+26441 fix rabbitmqctl on the OS X standalone release (since 3.4.0)
+
+
+management plugin
+-----------------
+bug fixes
+26472 prevent management agent crashing when log location set to 'tty'
+ (since 3.4.0)
+26451 make sure web UI disk chart says "disk free" not "disk used" (since 3.4.0)
+26455 fix race condition rendering page (since 2.1.0)
+26464 fix drop at the end of data rate charts (since 3.2.0)
+
+
+shovel plugin
+-------------
+bug fixes
+26452 make sure auto-delete shovels remove their record from shovel status
+ when deleting (since 3.3.0)
+26454 fix autodelete shovel behaviour when started on an empty
+ queue (since 3.3.0)
+26453 prevent shovel-management HTTP API returning 500 if queried just as
+ dynamic shovel is being deleted (since 3.3.0)
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply install
+the new version. All configuration and persistent message data is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install the new
+version on all the nodes and follow the instructions at
+https://www.rabbitmq.com/clustering.html#upgrading .
+
+To upgrade RabbitMQ from release 2.1.0, first upgrade to 2.1.1 (all data will be
+retained), and then to the current version as described above.
+
+When upgrading from RabbitMQ versions prior to 2.1.0, the existing data will be
+moved to a backup location and a fresh, empty database will be created. A
+warning is recorded in the logs. If your RabbitMQ installation contains
+important data then we recommend you contact support at rabbitmq.com for
+assistance with the upgrade.
diff --git a/release-notes/README-3.4.3.txt b/release-notes/README-3.4.3.txt
new file mode 100644
index 0000000000..d5f5b1294c
--- /dev/null
+++ b/release-notes/README-3.4.3.txt
@@ -0,0 +1,107 @@
+Release: RabbitMQ 3.4.3
+
+Security Fixes
+==============
+
+management plugin
+-----------------
+26515 prevent XSS attack in table key names (since 2.4.0)
+ (thanks to Robert Fitzpatrick)
+ (CVE-2015-0862)
+26516 prevent XSS attack in policy names (since 3.4.0)
+ (thanks to Robert Fitzpatrick)
+ (CVE-2015-0862)
+26517 prevent XSS attack in client details in the connections list
+ (CVE-2015-0862)
+26518 prevent XSS attack in user names in the vhosts list or the vhost names
+ in the user list (since 2.4.0)
+ (CVE-2015-0862)
+26520 prevent XSS attack in the cluster name (since 3.3.0)
+ (CVE-2015-0862)
+
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+26490 in autoheal mode, ensure the leader doesn't stop before the winner tells
+      it to (since 3.3.0)
+26491 in autoheal mode, prevent a race in Mnesia by waiting for Mnesia
+ shutdown on all losing nodes (since 3.1.0)
+26478 fix startup or rabbitmqctl failures when RABBITMQ_CTL_ERL_ARGS is set
+ (since 3.4.0)
+26498 fix queue crash with assertion failure in rare circumstances
+ (since 3.4.0)
+26081 improve error message when creating a cluster with mismatched Erlang
+26446 improve error message when a plugin is incompatible with current Erlang
+26265 ensure that plugin modules are picked before other third-party modules
+26503 support ssl's verify_fun from Erlang R14B+ (since 3.2.0)
+26502 fix 'backing_queue_status' duplication in /api/queues REST API (since
+ 3.4.0)
+
+enhancements
+26493 add top_memory_use and top_binary_refs diagnostic tools
+
+
+federation management plugin
+----------------------------
+bug fixes
+26519 fix double HTML escaping in upstream names (since 2.4.0)
+
+
+shovel management plugin
+------------------------
+bug fixes
+26519 fix double HTML escaping in dynamic shovel names (since 2.4.0)
+
+
+tracing plugin
+--------------
+bug fixes
+26519 fix double HTML escaping in tracing log file names (since 2.4.0)
+
+
+AMQP 1.0 plugin
+---------------
+bug fixes
+26486 use env(1) in codegen.py to find python(1) while building (since 3.1.0)
+
+
+MQTT plugin
+-----------
+bug fixes
+26482 ensure full exception details are logged (since 3.3.5)
+
+
+java client
+-----------
+bug fixes
+26492 fix off-by-one error in PerfTest --cmessages count
+
+
+.net client
+-----------
+bug fixes
+26501 make automatic recovery non-blocking to ensure user-defined handlers are
+ not delayed (since 1.0.0)
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply install
+the new version. All configuration and persistent message data is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install the new
+version on all the nodes and follow the instructions at
+https://www.rabbitmq.com/clustering.html#upgrading .
+
+To upgrade RabbitMQ from release 2.1.0, first upgrade to 2.1.1 (all data will be
+retained), and then to the current version as described above.
+
+When upgrading from RabbitMQ versions prior to 2.1.0, the existing data will be
+moved to a backup location and a fresh, empty database will be created. A
+warning is recorded in the logs. If your RabbitMQ installation contains
+important data then we recommend you contact support at rabbitmq.com for
+assistance with the upgrade.
diff --git a/release-notes/README-3.4.4.txt b/release-notes/README-3.4.4.txt
new file mode 100644
index 0000000000..df09e707ca
--- /dev/null
+++ b/release-notes/README-3.4.4.txt
@@ -0,0 +1,104 @@
+Release: RabbitMQ 3.4.4
+
+You can find RabbitMQ change log at https://www.rabbitmq.com/changelog.html.
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+26564 ensure that a mirrored queue declaration only returns when all mirrors
+ are running (since 2.6.0)
+26549 Failure to start if AMQP port > 45535 (since 3.3.0)
+26570 policy change on idle queue might not be reported in a timely manner
+ (since 3.0.0)
+26558 rabbitmq-plugins should respect RABBITMQ_CTL_ERL_ARGS (since 3.4.0)
+26562 rabbitmq-env uses "--fqdn" which is specific to net-tools' (i.e. Linux)
+ hostname(1) (since 3.4.0)
+26144 Windows scripts should respect RABBITMQ_NODE_ONLY (since 1.0.0)
+
+
+building & packaging
+--------------------
+bug fixes
+26443 Mnesia directory world-readable on deb / RPM (since 1.0.0)
+26584 Windows installer should install new version after uninstalling
+ the existing one (since 1.0.0)
+
+
+management plugin
+-----------------
+bug fixes
+26533 Specifying SSL version list for Mochiweb causes
+ rabbit_mgmt_external_stats to crash (since 2.6.0)
+26541 Overview page doesn't work with IE <= 8 (since 3.4.0)
+
+
+Java client
+-----------
+bug fixes
+26552 Bindings for non-durable queues are not recovered (since 3.3.0)
+26580 WorkPool.WorkQueue still deadlock-prone (since 3.4.0)
+26523 PerfTest --cmessages off-by-one error (fail to ack last message)
+ (since 3.4.3)
+
+
+.NET client
+-----------
+bug fixes
+26588 API reference generator should work with .NET 4.0+ (since 1.0.0)
+26590 .NET client .msi installer should work with WiX 4.0 (since 1.0.0)
+
+dependency change
+The client now requires .NET 4.0.
+
+
+MQTT plugin
+-----------
+bug fixes
+26567 Last Will and Testament should be sent in case of keep-alive timeout
+ (since 3.0.0)
+26589 MQTT processor should link its channel (since 3.0.0)
+
+
+STOMP plugin
+------------
+bug fixes
+26553 Unexpected authorisation errors may result in client connections staying open
+26282 Improve error messages for STOMP connection failures
+ (TLS issues, abrupt TCP connection closures) (since 3.3.3)
+26559 STOMP reader should handle system messages (since 1.4.0)
+
+
+AMQP 1.0 plugin
+---------------
+bug fixes
+26587 Failure to create resources when producing / consuming not well
+ handled (since 3.1.0)
+
+
+LDAP plugin
+-----------
+bug fixes
+26528 [LDAP] template replacement should escape \ and & (since 2.3.0)
+
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply install
+the new version. All configuration and persistent message data is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install the new
+version on all the nodes and follow the instructions at
+https://www.rabbitmq.com/clustering.html#upgrading .
+
+To upgrade RabbitMQ from release 2.1.0, first upgrade to 2.1.1 (all data will be
+retained), and then to the current version as described above.
+
+When upgrading from RabbitMQ versions prior to 2.1.0, the existing data will be
+moved to a backup location and a fresh, empty database will be created. A
+warning is recorded in the logs. If your RabbitMQ installation contains
+important data then we recommend you contact support at rabbitmq.com for
+assistance with the upgrade.
diff --git a/release-notes/README-3.5.0.txt b/release-notes/README-3.5.0.txt
new file mode 100644
index 0000000000..6368bcab6c
--- /dev/null
+++ b/release-notes/README-3.5.0.txt
@@ -0,0 +1,170 @@
+Release: RabbitMQ 3.5.0
+
+Release Highlights
+==================
+
+server
+------
+bug fixes
+26527 Prevent huge GM / mirror memory consumption under load by adding flow
+ control to messages via GM (since 2.6.0)
+26636 Fix inconsistencies and hangs when a node comes back online before its
+      disappearance is fully handled (since 3.1.0)
+26622 Ensure channels don't deliver confirms while a pause mode is in effect
+      (since 3.3.5)
+26628 When using autoheal, ensure the leader waits for the winner to finish
+ the autoheal process (since 3.3.0)
+26467 Fix promotion of offline mirrors, in particular if the mirror crashed
+ (since 3.4.0)
+26631 Work around a possible hang in Erlang's "global" (since 3.4.2)
+26614 Ensure rabbitmqctl.bat exits with code 1 if ERLANG_HOME is incorrect
+ (since 1.0.0)
+26426 Ensure epmd is restarted on Windows if it ends up running as a normal
+ user and thus getting killed on logout (since 1.0.0)
+26595 Fix a crash when querying SSL certificate info while the connection is
+ closing (since 2.1.1)
+26610 Restore the timeout error message while waiting for other cluster nodes
+ (since 3.4.0)
+26477 Only send 'user_authentication_success' event if
+ rabbit_reader:auth_phase/2 accepts the user (since 3.3.0)
+
+enhancements
+26183 Move priority queues from an external plugin to the broker
+26327 Embed messages smaller than a configurable size in the queue index
+26457 Add read buffer cache to improve on-disk messages consumption
+26543 Improve I/O performance by reading or writing several file segments
+ in one operation
+26465 New "pause_if_all_down" partition handling mode
+26463 Ensure new mirrors are started when old ones go down when ha-mode=exactly
+26469 Support separate authentication/authorisation backends
+26475 Add "rabbitmqctl rename_cluster_node"
+25430 Further limit queue's journal size to avoid excessive memory use
+26545 Prohibit deletion of amq.* exchanges
+26393 Add more info to "user_authentication_*" events
+26444 Improve performance parsing AMQP tables / arrays
+26602 Add routing decision information to firehose messages
+26615 Notify systemd when RabbitMQ is started, if "sd_notify" is available
+26603 Improve unacked messages requeueing performance in priority queues
+26427 Silence connection errors from load balancer sanity checks
+26471 Log when HiPE is enabled
+
+feature removal
+26257 Remove support for the legacy (2.x compatible) form of the
+ "cluster_nodes" configuration directive
+
+
+management plugin
+-----------------
+bug fixes
+26613 Fix exception on the node details page if the node goes
+ online or offline while viewing (since 3.4.0)
+
+enhancements
+26522 Provide statistics about accesses to message store and queue index
+24781 Provide statistics about file I/O
+24921 rabbitmqadmin: Support Python 3
+25652 Add a "move messages" UI
+26561 Show per-queue disk message read/write rates
+26598 Show cross-cluster networking statistics
+26621 Display a warning when the management database is overloaded
+24700 Support if-unused and if-empty for queue / exchange deletion
+
+
+LDAP plugin
+-----------
+bug fixes
+26601 Ensure tag_queries respects other_bind setting
+
+
+MQTT plugin
+-----------
+enhancements
+26278 Support authentication via SSL certificate
+
+
+Web-STOMP plugin
+----------------
+enhancements
+26504 Add configuration parameter for cowboy connection pool size
+
+
+tracing plugin
+--------------
+enhancements
+26619 Improve how logs are written to disk to increase performance
+26620 Allow tracing plugin to truncate message bodies to increase performance
+
+
+java client
+-----------
+bug fixes
+26576 Make sure Channel#abort ignores IOExceptions as the docs say
+
+enhancements
+26571 Undeprecate QueueingConsumer
+26617 Dynamically calculate number of consumer work service executor threads
+
+feature removal
+26007 Remove deprecated ConnectionFactory#getNumConsumerThreads,
+ ConnectionFactory#setNumConsumerThreads, BasicProperties setters (in
+ favour of BasicProperties.Builder) and Channel#recoveryAsync
+
+
+.net client
+-----------
+bug fixes
+26508 Synchronise SessionManager Count method (since 3.3.5)
+
+enhancements
+24699 Add a unit test to ensure channels are notified when a connection is
+ closed
+26329 Dispatch consumer methods concurrently
+26420 Move the .NET guide to www.rabbitmq.com
+26459 Use timer for heartbeats to reduce the number of threads and memory
+ consumption
+26483 Add ISubscription and IQueueingBasicConsumer interfaces
+26505 Upgrade to Visual Studio 2013 project files
+26507 Use a static exchange types array instead of creating a new list each
+ time (since 3.3.5)
+26509 Switch to auto-properties (since 3.3.5)
+26510 Use a separate lock object in BlockingCell (since 3.3.5)
+26511 Assorted doc string and member name prefix changes (since 3.3.5)
+26512 Use EventHandler<T> and similar instead of homebrew event handler
+ classes (since 3.3.5)
+26513 Improve how authentication method names and URI schemas are compared in
+ ConnectionFactory (since 3.3.5)
+26514 Use TryParse instead of Parse in PrimitiveParser (since 3.3.5)
+26534 Remove MSI installer
+26550 Support TLS connections without client certificates
+
+
+building and packaging
+----------------------
+bug fixes
+26539 Use "exec" to run rabbitmq-server in rabbitmq-script-wrapper to ensure
+ signals are correctly propagated (since 2.8.3)
+26524 Improve error message when build dependencies are missing on Mac OS X
+ (since 3.1.0)
+26525 Do not install rabbitmq.config.example if DOC_INSTALL_DIR is unset
+ (since 3.2.0)
+26526 Replace GNU patch specific "--no-backup-if-mismatch" by a portable
+ combination of patch(1) and find(1) (since 3.4.0)
+
+
+Upgrading
+=========
+To upgrade a non-clustered RabbitMQ from release 2.1.1 or later, simply install
+the new version. All configuration and persistent message data is retained.
+
+To upgrade a clustered RabbitMQ from release 2.1.1 or later, install the new
+version on all the nodes and follow the instructions at
+https://www.rabbitmq.com/clustering.html#upgrading .
+
+To upgrade RabbitMQ from release 2.1.0, first upgrade to 2.1.1 (all data will be
+retained), and then to the current version as described above.
+
+When upgrading from RabbitMQ versions prior to 2.1.0, the existing data will be
+moved to a backup location and a fresh, empty database will be created. A
+warning is recorded in the logs. If your RabbitMQ installation contains
+important data then we recommend you contact support at rabbitmq.com for
+assistance with the upgrade.
diff --git a/release-notes/README.md b/release-notes/README.md
new file mode 100644
index 0000000000..3756ac7386
--- /dev/null
+++ b/release-notes/README.md
@@ -0,0 +1,16 @@
+# Release Notes
+
+This directory contains the source of release notes starting with RabbitMQ `3.5.7`
+as well as archived notes for earlier releases.
+
+The notes are published together with releases [on GitHub](https://github.com/rabbitmq/rabbitmq-server/releases)
+and most commonly discovered via the [change log page](https://rabbitmq.com/changelog.html).
+
+This may include WIP notes for unreleased versions or preview releases.
+
+## Archived Pre-GitHub Notes
+
+This directory contains release notes for releases from `1.1.x` through `3.5.0`,
+which pre-date the migration to GitHub. They are preserved both for historical
+purposes and to keep the [change log](https://rabbitmq.com/changelog.html) links functional,
+so their names were not modified to match the `X.Y.Z.md` pattern.
diff --git a/scripts/bazel/kill_orphaned_ct_run.sh b/scripts/bazel/kill_orphaned_ct_run.sh
new file mode 100755
index 0000000000..db53073bdd
--- /dev/null
+++ b/scripts/bazel/kill_orphaned_ct_run.sh
@@ -0,0 +1,7 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
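+# Find the PIDs of any orphaned common_test runs ("ct_run ... erl" processes);
+# "grep -v awk" keeps the awk process itself out of the match.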
+pids=$(ps aux | grep -v awk | awk '/ct_run.*erl/ {print $2}')
+
+set -x
+kill $pids
diff --git a/scripts/bazel/rabbitmq-run.sh b/scripts/bazel/rabbitmq-run.sh
new file mode 100644
index 0000000000..f9494bd696
--- /dev/null
+++ b/scripts/bazel/rabbitmq-run.sh
@@ -0,0 +1,192 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+rmq_realpath() {
+ local path=$1
+
+ if [ -d "$path" ]; then
+ cd "$path" && pwd
+ elif [ -f "$path" ]; then
+ cd "$(dirname "$path")" && echo $(pwd)/$(basename "$path")
+ else
+ echo "$path"
+ fi
+}
+
+if [ -z ${TEST_SRCDIR+x} ]; then
+BASE_DIR=$PWD
+else
+BASE_DIR=$TEST_SRCDIR/$TEST_WORKSPACE
+fi
+
+if [ $1 = "-C" ]; then
+ cd $2
+ shift 2
+fi
+
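+# Recognised subcommands below select the action to run; any other argument
+# (typically KEY=VALUE) is exported into the environment as-is.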
+for arg in "$@"; do
+ case $arg in
+ run-broker)
+ CMD="$arg"
+ ;;
+ start-background-broker)
+ CMD="$arg"
+ ;;
+ stop-node)
+ CMD="$arg"
+ ;;
+ set-resource-alarm)
+ CMD="$arg"
+ ;;
+ clear-resource-alarm)
+ CMD="$arg"
+ ;;
+ *)
+ export "$arg"
+ ;;
+ esac
+done
+
+DEFAULT_PLUGINS_DIR=${BASE_DIR}/{RABBITMQ_HOME}/plugins
+if [ ! -z ${EXTRA_PLUGINS_DIR+x} ]; then
+ DEFAULT_PLUGINS_DIR=${DEFAULT_PLUGINS_DIR}:${EXTRA_PLUGINS_DIR}
+fi
+
+TEST_TMPDIR=${TEST_TMPDIR:=${TMPDIR}/rabbitmq-test-instances}
+RABBITMQ_SCRIPTS_DIR="$(rmq_realpath ${BASE_DIR}/{RABBITMQ_HOME}/sbin)"
+RABBITMQ_PLUGINS=${RABBITMQ_SCRIPTS_DIR}/rabbitmq-plugins
+RABBITMQ_SERVER=${RABBITMQ_SCRIPTS_DIR}/rabbitmq-server
+RABBITMQCTL=${RABBITMQ_SCRIPTS_DIR}/rabbitmqctl
+
+export RABBITMQ_SCRIPTS_DIR RABBITMQCTL RABBITMQ_PLUGINS RABBITMQ_SERVER
+
+HOSTNAME="$(hostname -s)"
+
+RABBITMQ_NODENAME=${RABBITMQ_NODENAME:=rabbit@${HOSTNAME}}
+RABBITMQ_NODENAME_FOR_PATHS=${RABBITMQ_NODENAME_FOR_PATHS:=${RABBITMQ_NODENAME}}
+NODE_TMPDIR=${TEST_TMPDIR}/${RABBITMQ_NODENAME_FOR_PATHS}
+
+RABBITMQ_BASE=${NODE_TMPDIR}
+RABBITMQ_PID_FILE=${NODE_TMPDIR}/${RABBITMQ_NODENAME_FOR_PATHS}.pid
+RABBITMQ_LOG_BASE=${NODE_TMPDIR}/log
+RABBITMQ_MNESIA_BASE=${NODE_TMPDIR}/mnesia
+RABBITMQ_MNESIA_DIR=${RABBITMQ_MNESIA_BASE}/${RABBITMQ_NODENAME_FOR_PATHS}
+RABBITMQ_QUORUM_DIR=${RABBITMQ_MNESIA_DIR}/quorum
+RABBITMQ_STREAM_DIR=${RABBITMQ_MNESIA_DIR}/stream
+RABBITMQ_PLUGINS_DIR=${RABBITMQ_PLUGINS_DIR:=${DEFAULT_PLUGINS_DIR}}
+RABBITMQ_PLUGINS_EXPAND_DIR=${NODE_TMPDIR}/plugins
+RABBITMQ_FEATURE_FLAGS_FILE=${NODE_TMPDIR}/feature_flags
+RABBITMQ_ENABLED_PLUGINS_FILE=${NODE_TMPDIR}/enabled_plugins
+
+RABBITMQ_SERVER_START_ARGS="${RABBITMQ_SERVER_START_ARGS:=-ra wal_sync_method sync}"
+
+# Enable colourful debug logging by default
+# To change this, set RABBITMQ_LOG to info, notice, warning etc.
+RABBITMQ_LOG=${RABBITMQ_LOG:='debug,+color'}
+export RABBITMQ_LOG
+
+if [ -z ${LEAVE_PLUGINS_DISABLED+x} ]; then
+ RABBITMQ_ENABLED_PLUGINS=${RABBITMQ_ENABLED_PLUGINS:=ALL}
+else
+ RABBITMQ_ENABLED_PLUGINS=${RABBITMQ_ENABLED_PLUGINS:=}
+fi
+
+mkdir -p ${TEST_TMPDIR}
+
+mkdir -p ${RABBITMQ_LOG_BASE}
+mkdir -p ${RABBITMQ_MNESIA_BASE}
+mkdir -p ${RABBITMQ_PLUGINS_EXPAND_DIR}
+
+export \
+ RABBITMQ_NODENAME \
+ RABBITMQ_NODE_IP_ADDRESS \
+ RABBITMQ_BASE \
+ RABBITMQ_PID_FILE \
+ RABBITMQ_LOG_BASE \
+ RABBITMQ_MNESIA_BASE \
+ RABBITMQ_MNESIA_DIR \
+ RABBITMQ_QUORUM_DIR \
+ RABBITMQ_STREAM_DIR \
+ RABBITMQ_FEATURE_FLAGS_FILE \
+ RABBITMQ_PLUGINS_DIR \
+ RABBITMQ_PLUGINS_EXPAND_DIR \
+ RABBITMQ_SERVER_START_ARGS \
+ RABBITMQ_ENABLED_PLUGINS \
+ RABBITMQ_ENABLED_PLUGINS_FILE
+
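+# Write a minimal Erlang-term config file for the node, pointing the Ra
+# (quorum queue) and Osiris (stream) data directories at this node's
+# temporary directory.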
+write_config_file() {
+cat << EOF > ${RABBITMQ_CONFIG_FILE}
+%% vim:ft=erlang:
+
+[
+ {rabbit, [
+ {loopback_users, []}
+ ]},
+ {rabbitmq_management, [
+ ]},
+ {rabbitmq_mqtt, [
+ ]},
+ {rabbitmq_stomp, [
+ ]},
+ {ra, [
+ {data_dir, "${RABBITMQ_QUORUM_DIR}"},
+ {wal_sync_method, sync}
+ ]},
+ {osiris, [
+ {data_dir, "${RABBITMQ_STREAM_DIR}"}
+ ]}
+].
+EOF
+}
+
+case $CMD in
+ run-broker)
+ export RABBITMQ_ALLOW_INPUT=true
+ export RABBITMQ_CONFIG_FILE=${TEST_TMPDIR}/test.config
+ write_config_file
+ ${RABBITMQ_SCRIPTS_DIR}/rabbitmq-server
+ ;;
+ start-background-broker)
+ RMQCTL_WAIT_TIMEOUT=${RMQCTL_WAIT_TIMEOUT:=60}
+
+ ${RABBITMQ_SCRIPTS_DIR}/rabbitmq-server \
+ > ${RABBITMQ_LOG_BASE}/startup_log \
+ 2> ${RABBITMQ_LOG_BASE}/startup_err &
+
+ # rabbitmqctl wait shells out to 'ps', which is broken in the bazel macOS
+ # sandbox (https://github.com/bazelbuild/bazel/issues/7448)
+ # adding "--spawn_strategy=local" to the invocation is a workaround
+ ${RABBITMQ_SCRIPTS_DIR}/rabbitmqctl \
+ -n ${RABBITMQ_NODENAME} \
+ wait \
+ --timeout ${RMQCTL_WAIT_TIMEOUT} \
+ ${RABBITMQ_PID_FILE}
+
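+    # Final check: attach a hidden Erlang node and rpc into the broker to
+    # confirm it reports itself as running.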
+ {ERLANG_HOME}/bin/erl \
+ -noinput \
+ -eval "true = rpc:call('${RABBITMQ_NODENAME}', rabbit, is_running, []), halt()." \
+ -sname {SNAME} \
+ -hidden
+ ;;
+ stop-node)
+ pid=$(test -f $RABBITMQ_PID_FILE && cat $RABBITMQ_PID_FILE); \
+ test "$pid" && \
+ kill -TERM "$pid" && \
+ echo "waiting for process to exit" && \
+ while ps -p "$pid" >/dev/null 2>&1; do sleep 1; done
+ ;;
+ set-resource-alarm)
+ ERL_LIBS="${BASE_DIR}/{ERL_LIBS}" \
+ ${RABBITMQ_SCRIPTS_DIR}/rabbitmqctl -n ${RABBITMQ_NODENAME} \
+ eval "rabbit_alarm:set_alarm({{resource_limit, ${SOURCE}, node()}, []})."
+ ;;
+ clear-resource-alarm)
+ ERL_LIBS="${BASE_DIR}/{ERL_LIBS}" \
+ ${RABBITMQ_SCRIPTS_DIR}/rabbitmqctl -n ${RABBITMQ_NODENAME} \
+ eval "rabbit_alarm:clear_alarm({resource_limit, ${SOURCE}, node()})."
+ ;;
+ *)
+ echo "rabbitmq-run does not support $CMD"
+ exit 1
+ ;;
+esac
diff --git a/scripts/rabbitmq-server-ha.ocf b/scripts/rabbitmq-server-ha.ocf
deleted file mode 100755
index fd9a3c4b29..0000000000
--- a/scripts/rabbitmq-server-ha.ocf
+++ /dev/null
@@ -1,2423 +0,0 @@
-#!/bin/sh
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# See usage() function below for more details ...
-#
-# Note that the script uses an external file to setup RabbitMQ policies
-# so make sure to create it from an example shipped with the package.
-#
-#######################################################################
-# Initialization:
-
-: ${OCF_FUNCTIONS_DIR=${OCF_ROOT}/lib/heartbeat}
-. ${OCF_FUNCTIONS_DIR}/ocf-shellfuncs
-
-#######################################################################
-
-# Fill in some defaults if no values are specified
-
-PATH=/sbin:/usr/sbin:/bin:/usr/bin
-
-OCF_RESKEY_binary_default="/usr/sbin/rabbitmq-server"
-OCF_RESKEY_ctl_default="/usr/sbin/rabbitmqctl"
-OCF_RESKEY_debug_default=false
-OCF_RESKEY_username_default="rabbitmq"
-OCF_RESKEY_groupname_default="rabbitmq"
-OCF_RESKEY_admin_user_default="guest"
-OCF_RESKEY_admin_password_default="guest"
-OCF_RESKEY_definitions_dump_file_default="/etc/rabbitmq/definitions"
-OCF_RESKEY_pid_file_default="/var/run/rabbitmq/pid"
-OCF_RESKEY_log_dir_default="/var/log/rabbitmq"
-OCF_RESKEY_mnesia_base_default="/var/lib/rabbitmq/mnesia"
-OCF_RESKEY_mnesia_schema_base_default="/var/lib/rabbitmq"
-OCF_RESKEY_host_ip_default="127.0.0.1"
-OCF_RESKEY_node_port_default=5672
-OCF_RESKEY_default_vhost_default="/"
-OCF_RESKEY_erlang_cookie_default=false
-OCF_RESKEY_erlang_cookie_file_default="/var/lib/rabbitmq/.erlang.cookie"
-OCF_RESKEY_use_fqdn_default=false
-OCF_RESKEY_fqdn_prefix_default=""
-OCF_RESKEY_max_rabbitmqctl_timeouts_default=3
-OCF_RESKEY_policy_file_default="/usr/local/sbin/set_rabbitmq_policy"
-OCF_RESKEY_rmq_feature_health_check_default=true
-OCF_RESKEY_rmq_feature_local_list_queues_default=true
-OCF_RESKEY_limit_nofile_default=65535
-OCF_RESKEY_avoid_using_iptables_default=false
-
-: ${HA_LOGTAG="lrmd"}
-: ${HA_LOGFACILITY="daemon"}
-: ${OCF_RESKEY_binary=${OCF_RESKEY_binary_default}}
-: ${OCF_RESKEY_ctl=${OCF_RESKEY_ctl_default}}
-: ${OCF_RESKEY_debug=${OCF_RESKEY_debug_default}}
-: ${OCF_RESKEY_username=${OCF_RESKEY_username_default}}
-: ${OCF_RESKEY_groupname=${OCF_RESKEY_groupname_default}}
-: ${OCF_RESKEY_admin_user=${OCF_RESKEY_admin_user_default}}
-: ${OCF_RESKEY_admin_password=${OCF_RESKEY_admin_password_default}}
-: ${OCF_RESKEY_definitions_dump_file=${OCF_RESKEY_definitions_dump_file_default}}
-: ${OCF_RESKEY_log_dir=${OCF_RESKEY_log_dir_default}}
-: ${OCF_RESKEY_mnesia_base=${OCF_RESKEY_mnesia_base_default}}
-: ${OCF_RESKEY_mnesia_schema_base=${OCF_RESKEY_mnesia_schema_base_default}}
-: ${OCF_RESKEY_pid_file=${OCF_RESKEY_pid_file_default}}
-: ${OCF_RESKEY_node_port=${OCF_RESKEY_node_port_default}}
-: ${OCF_RESKEY_default_vhost=${OCF_RESKEY_default_vhost_default}}
-: ${OCF_RESKEY_erlang_cookie=${OCF_RESKEY_erlang_cookie_default}}
-: ${OCF_RESKEY_erlang_cookie_file=${OCF_RESKEY_erlang_cookie_file_default}}
-: ${OCF_RESKEY_use_fqdn=${OCF_RESKEY_use_fqdn_default}}
-: ${OCF_RESKEY_fqdn_prefix=${OCF_RESKEY_fqdn_prefix_default}}
-: ${OCF_RESKEY_max_rabbitmqctl_timeouts=${OCF_RESKEY_max_rabbitmqctl_timeouts_default}}
-: ${OCF_RESKEY_policy_file=${OCF_RESKEY_policy_file_default}}
-: ${OCF_RESKEY_rmq_feature_health_check=${OCF_RESKEY_rmq_feature_health_check_default}}
-: ${OCF_RESKEY_rmq_feature_local_list_queues=${OCF_RESKEY_rmq_feature_local_list_queues_default}}
-: ${OCF_RESKEY_limit_nofile=${OCF_RESKEY_limit_nofile_default}}
-: ${OCF_RESKEY_avoid_using_iptables=${OCF_RESKEY_avoid_using_iptables_default}}
-
-#######################################################################
-
-OCF_RESKEY_start_time_default=$((OCF_RESKEY_CRM_meta_timeout / 6000 + 2))
-: ${OCF_RESKEY_start_time=${OCF_RESKEY_start_time_default}}
-OCF_RESKEY_stop_time_default=${OCF_RESKEY_start_time_default}
-: ${OCF_RESKEY_stop_time=${OCF_RESKEY_stop_time_default}}
-OCF_RESKEY_command_timeout_default=""
-: ${OCF_RESKEY_command_timeout=${OCF_RESKEY_command_timeout_default}}
-TIMEOUT_ARG=$((OCF_RESKEY_CRM_meta_timeout / 6000 + 30))
-COMMAND_TIMEOUT="/usr/bin/timeout ${OCF_RESKEY_command_timeout} ${TIMEOUT_ARG}"
-RESOURCE_NAME=`echo $OCF_RESOURCE_INSTANCE | cut -d ":" -f 1`
-
-#######################################################################
-
-usage() {
- cat <<UEND
- usage: $0 (start|stop|validate-all|meta-data|status|monitor)
-
- $0 manages an ${OCF_RESKEY_binary} process as an HA resource
-
- The 'start' operation starts the RabbitMQ service.
- The 'stop' operation stops the RabbitMQ service.
- The 'validate-all' operation reports whether the parameters are valid.
- The 'meta-data' operation reports this RA's meta-data information.
- The 'status' operation reports whether the RabbitMQ service is running.
- The 'monitor' operation reports whether the RabbitMQ service seems to be working.
-
-UEND
-}
-
-meta_data() {
- # The EXTENDED_OCF_PARAMS parameter below does not exist by default
- # and hence is converted to an empty string unless overridden. It
- # could be used by an extension script to add new parameters. For
- # example see https://review.openstack.org/#/c/249180/10
-
- cat <<END
-<?xml version="1.0"?>
-<!DOCTYPE resource-agent SYSTEM "ra-api-1.dtd">
-<resource-agent name="${OCF_RESKEY_binary}">
-<version>1.0</version>
-
-<longdesc lang="en">
-Resource agent for ${OCF_RESKEY_binary}
-</longdesc>
-<shortdesc lang="en">Resource agent for ${OCF_RESKEY_binary}</shortdesc>
-<parameters>
-
-<parameter name="binary" unique="0" required="0">
-<longdesc lang="en">
-RabbitMQ binary
-</longdesc>
-<shortdesc lang="en">RabbitMQ binary</shortdesc>
-<content type="string" default="${OCF_RESKEY_binary_default}" />
-</parameter>
-
-<parameter name="ctl" unique="0" required="0">
-<longdesc lang="en">
-rabbitmqctl binary
-</longdesc>
-<shortdesc lang="en">rabbitmqctl binary</shortdesc>
-<content type="string" default="${OCF_RESKEY_ctl_default}" />
-</parameter>
-
-<parameter name="pid_file" unique="0" required="0">
-<longdesc lang="en">
-RabbitMQ PID file
-</longdesc>
-<shortdesc lang="en">RabbitMQ PID file</shortdesc>
-<content type="string" default="${OCF_RESKEY_pid_file_default}" />
-</parameter>
-
-<parameter name="log_dir" unique="0" required="0">
-<longdesc lang="en">
-RabbitMQ log directory
-</longdesc>
-<shortdesc lang="en">RabbitMQ log directory</shortdesc>
-<content type="string" default="${OCF_RESKEY_log_dir_default}" />
-</parameter>
-
-<parameter name="username" unique="0" required="0">
-<longdesc lang="en">
-RabbitMQ user name
-</longdesc>
-<shortdesc lang="en">RabbitMQ user name</shortdesc>
-<content type="string" default="${OCF_RESKEY_username_default}" />
-</parameter>
-
-<parameter name="groupname" unique="0" required="0">
-<longdesc lang="en">
-RabbitMQ group name
-</longdesc>
-<shortdesc lang="en">RabbitMQ group name</shortdesc>
-<content type="string" default="${OCF_RESKEY_groupname_default}" />
-</parameter>
-
-<parameter name="admin_user" unique="0" required="0">
-<longdesc lang="en">
-RabbitMQ default admin user for API
-</longdesc>
-<shortdesc lang="en">RabbitMQ admin user</shortdesc>
-<content type="string" default="${OCF_RESKEY_admin_user_default}" />
-</parameter>
-
-<parameter name="admin_password" unique="0" required="0">
-<longdesc lang="en">
-RabbitMQ default admin user password for API
-</longdesc>
-<shortdesc lang="en">RabbitMQ admin password</shortdesc>
-<content type="string" default="${OCF_RESKEY_admin_password_default}" />
-</parameter>
-
-<parameter name="definitions_dump_file" unique="0" required="0">
-<longdesc lang="en">
-RabbitMQ default definitions dump file
-</longdesc>
-<shortdesc lang="en">RabbitMQ definitions dump file</shortdesc>
-<content type="string" default="${OCF_RESKEY_definitions_dump_file}" />
-</parameter>
-
-<parameter name="command_timeout" unique="0" required="0">
-<longdesc lang="en">
-Arguments for the timeout command used to terminate issued commands (value is auto-evaluated)
-</longdesc>
-<shortdesc lang="en">Arguments for timeout wrapping command</shortdesc>
-<content type="string" default="${OCF_RESKEY_command_timeout_default}" />
-</parameter>
-
-<parameter name="start_time" unique="0" required="0">
-<longdesc lang="en">
-Timeout for starting the rabbitmq server
-</longdesc>
-<shortdesc lang="en">Timeout for starting the rabbitmq server</shortdesc>
-<content type="string" default="${OCF_RESKEY_start_time_default}" />
-</parameter>
-
-<parameter name="stop_time" unique="0" required="0">
-<longdesc lang="en">
-Timeout for stopping the rabbitmq server
-</longdesc>
-<shortdesc lang="en">Timeout for stopping the rabbitmq server</shortdesc>
-<content type="string" default="${OCF_RESKEY_stop_time_default}" />
-</parameter>
-
-<parameter name="debug" unique="0" required="0">
-<longdesc lang="en">
-The debug flag for the agent (${OCF_RESKEY_binary}) instance.
-When enabled, rmq-* files are created in the /tmp/ directory to log
-some operations and ENV values used inside the OCF script.
-</longdesc>
-<shortdesc lang="en">AMQP server (${OCF_RESKEY_binary}) debug flag</shortdesc>
-<content type="boolean" default="${OCF_RESKEY_debug_default}" />
-</parameter>
-
-<parameter name="mnesia_base" unique="0" required="0">
-<longdesc lang="en">
-Base directory for storing Mnesia files
-</longdesc>
-<shortdesc lang="en">Base directory for storing Mnesia files</shortdesc>
-<content type="boolean" default="${OCF_RESKEY_mnesia_base_default}" />
-</parameter>
-
-<parameter name="mnesia_schema_base" unique="0" required="0">
-<longdesc lang="en">
-Parent directory for Mnesia schema directory
-</longdesc>
-<shortdesc lang="en">Parent directory for Mnesia schema directory</shortdesc>
-<content type="string" default="${OCF_RESKEY_mnesia_schema_base_default}" />
-</parameter>
-
-<parameter name="host_ip" unique="0" required="0">
-<longdesc lang="en">
-${OCF_RESKEY_binary} should listen on this IP address
-</longdesc>
-<shortdesc lang="en">${OCF_RESKEY_binary} should listen on this IP address</shortdesc>
-<content type="boolean" default="${OCF_RESKEY_host_ip_default}" />
-</parameter>
-
-<parameter name="node_port" unique="0" required="0">
-<longdesc lang="en">
-${OCF_RESKEY_binary} should listen on this port
-</longdesc>
-<shortdesc lang="en">${OCF_RESKEY_binary} should listen on this port</shortdesc>
-<content type="boolean" default="${OCF_RESKEY_node_port_default}" />
-</parameter>
-
-<parameter name="default_vhost" unique="0" required="0">
-<longdesc lang="en">
-Default virtual host used for monitoring if a node is fully synchronized with
-the rest of the cluster. In normal operation, the resource agent will wait for
-queues from this virtual host on this node to be synchronized elsewhere before
-stopping RabbitMQ. This also means queues in other virtual hosts may not be
-fully synchronized on stop operations.
-</longdesc>
-<shortdesc lang="en">Default virtual host used for waiting for synchronization</shortdesc>
-<content type="string" default="${OCF_RESKEY_default_vhost_default}" />
-</parameter>
-
-<parameter name="erlang_cookie" unique="0" required="0">
-<longdesc lang="en">
-Erlang cookie for clustering. If specified, it will be updated at mnesia reset
-</longdesc>
-<shortdesc lang="en">Erlang cookie</shortdesc>
-<content type="boolean" default="${OCF_RESKEY_erlang_cookie_default}" />
-</parameter>
-
-<parameter name="erlang_cookie_file" unique="0" required="0">
-<longdesc lang="en">
-Erlang cookie file path where the cookie will be put, if requested
-</longdesc>
-<shortdesc lang="en">Erlang cookie file</shortdesc>
-<content type="boolean" default="${OCF_RESKEY_erlang_cookie_file_default}" />
-</parameter>
-
-<parameter name="use_fqdn" unique="0" required="0">
-<longdesc lang="en">
-Whether to use the FQDN or the short name for the rabbitmq node
-</longdesc>
-<shortdesc lang="en">Use FQDN</shortdesc>
-<content type="boolean" default="${OCF_RESKEY_use_fqdn_default}" />
-</parameter>
-
-<parameter name="fqdn_prefix" unique="0" required="0">
-<longdesc lang="en">
-Optional FQDN prefix for RabbitMQ nodes in the cluster.
-An FQDN prefix can be specified to host multiple RabbitMQ instances on a node,
-or when RabbitMQ runs on a dedicated network/interface.
-</longdesc>
-<shortdesc lang="en">FQDN prefix</shortdesc>
-<content type="string" default="${OCF_RESKEY_fqdn_prefix_default}" />
-</parameter>
-
-<parameter name="max_rabbitmqctl_timeouts" unique="0" required="0">
-<longdesc lang="en">
-If rabbitmqctl times out during a monitor call, the timeout is ignored
-unless it is the Nth timeout in a row, where N is the value of this parameter.
-If too many timeouts happen in a row, the monitor call will return an error.
-</longdesc>
-<shortdesc lang="en">Fail only if that many rabbitmqctl timeouts in a row occurred</shortdesc>
-<content type="string" default="${OCF_RESKEY_max_rabbitmqctl_timeouts_default}" />
-</parameter>
-
-<parameter name="policy_file" unique="0" required="0">
-<longdesc lang="en">
-A path to the shell script used to set up RabbitMQ policies
-</longdesc>
-<shortdesc lang="en">A policy file path</shortdesc>
-<content type="string" default="${OCF_RESKEY_policy_file_default}" />
-</parameter>
-
-<parameter name="rmq_feature_health_check" unique="0" required="0">
-<longdesc lang="en">
-Since RabbitMQ 3.6.4, list_queues/list_channels-based monitoring should
-be replaced with the "node_health_check" command, as it creates no network
-load at all.
-</longdesc>
-<shortdesc lang="en">Use node_health_check for monitoring</shortdesc>
-<content type="boolean" default="${OCF_RESKEY_rmq_feature_health_check_default}" />
-</parameter>
-
-<parameter name="rmq_feature_local_list_queues" unique="0" required="0">
-<longdesc lang="en">
-For RabbitMQ versions that implement the --local flag for list_queues, this
-can greatly reduce network overhead in cases when a node is
-stopped/demoted.
-</longdesc>
-<shortdesc lang="en">Use --local option for list_queues</shortdesc>
-<content type="boolean" default="${OCF_RESKEY_rmq_feature_local_list_queues_default}" />
-</parameter>
-
-<parameter name="limit_nofile" unique="0" required="0">
-<longdesc lang="en">
-Soft and hard limit for NOFILE
-</longdesc>
-<shortdesc lang="en">NOFILE limit</shortdesc>
-<content type="string" default="${OCF_RESKEY_limit_nofile_default}" />
-</parameter>
-
-<parameter name="avoid_using_iptables" unique="0" required="0">
-<longdesc lang="en">
-When set to true, the iptables calls that block client access become
-no-ops. This is useful when running inside containers.
-</longdesc>
-<shortdesc lang="en">Disable iptables use entirely</shortdesc>
-<content type="boolean" default="${OCF_RESKEY_avoid_using_iptables_default}" />
-</parameter>
-
-$EXTENDED_OCF_PARAMS
-
-</parameters>
-
-<actions>
-<action name="start" timeout="20" />
-<action name="stop" timeout="20" />
-<action name="status" timeout="20" />
-<action name="monitor" depth="0" timeout="30" interval="5" />
-<action name="monitor" depth="0" timeout="30" interval="3" role="Master"/>
-<action name="promote" timeout="30" />
-<action name="demote" timeout="30" />
-<action name="notify" timeout="20" />
-<action name="validate-all" timeout="5" />
-<action name="meta-data" timeout="5" />
-</actions>
-</resource-agent>
-END
-}
-
-
-MIN_MASTER_SCORE=100
-BEST_MASTER_SCORE=1000
-
-
-#######################################################################
-# Functions invoked by resource manager actions
-
-#TODO(bogdando) move proc_kill, proc_stop to shared OCF functions
-# to be shipped with HA cluster packages
-###########################################################
-# Attempts to kill a process with retries and checks procfs
-# to make sure the process is stopped.
-#
-# Globals:
-# LL
-# Arguments:
-# $1 - pid of the process to try and kill
-# $2 - service name used for logging and match-based kill, if the pid is "none"
-# $3 - signal to use, defaults to SIGTERM
-# $4 - number of retries, defaults to 5
-# $5 - time to sleep between retries, defaults to 2
-# Returns:
-# 0 - if successful
-# 1 - if process is still running according to procfs
-# 2 - if invalid parameters passed in
-###########################################################
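-# Usage sketch (illustrative values): kill a beam process by pid with SIGTERM,
-# retrying up to 5 times and sleeping 2 seconds between attempts:
-#   proc_kill "${pid}" "beam.smp" SIGTERM 5 2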
-proc_kill()
-{
- local pid="${1}"
- local service_name="${2}"
- local signal="${3:-SIGTERM}"
- local count="${4:-5}"
- local process_sleep="${5:-2}"
- local LH="${LL} proc_kill():"
- local pgrp="$(ps -o pgid= ${pid} 2>/dev/null | tr -d '[[:space:]]')"
-
- if [ "${pid}" -a "${pgrp}" = "1" ] ; then
- ocf_log err "${LH} shall not kill by the bad pid 1 (init)!"
- return 2
- fi
-
- if [ "${pid}" = "none" ]; then
- local matched
- matched="$(pgrep -fla ${service_name})"
- if [ -z "${matched}" ] ; then
- ocf_log info "${LH} cannot find any processes matching the ${service_name}, considering target process to be already dead"
- return 0
- fi
- ocf_log debug "${LH} no pid provided, will try the ${service_name}, matched list: ${matched}"
- while [ $count -gt 0 ]; do
- if [ -z "${matched}" ]; then
- break
- else
- matched="$(pgrep -fla ${service_name})"
- ocf_log debug "${LH} Stopping ${service_name} with ${signal}..."
- ocf_run pkill -f -"${signal}" "${service_name}"
- fi
- sleep $process_sleep
- count=$(( count-1 ))
- done
- pgrep -f "${service_name}" > /dev/null
- if [ $? -ne 0 ] ; then
- ocf_log debug "${LH} Stopped ${service_name} with ${signal}"
- return 0
- else
- ocf_log warn "${LH} Failed to stop ${service_name} with ${signal}"
- return 1
- fi
- else
- # pid is not none
- while [ $count -gt 0 ]; do
- if [ ! -d "/proc/${pid}" ]; then
- break
- else
- ocf_log debug "${LH} Stopping ${service_name} with ${signal}..."
- ocf_run pkill -"${signal}" -g "${pgrp}"
- fi
- sleep $process_sleep
- count=$(( count-1 ))
- done
-
- # Check if the process ended after the last sleep
- if [ ! -d "/proc/${pid}" ] ; then
- ocf_log debug "${LH} Stopped ${service_name} with ${signal}"
- return 0
- fi
-
- ocf_log warn "${LH} Failed to stop ${service_name} with ${signal}"
- return 1
- fi
-}
-
-###########################################################
-# Attempts to kill a process with the given pid or pid file
-# using proc_kill and will retry with sigkill if sigterm is
-# unsuccessful.
-#
-# Globals:
-# OCF_ERR_GENERIC
-# OCF_SUCCESS
-# LL
-# Arguments:
-# $1 - pidfile or pid or 'none', if stopping by the name matching
-# $2 - service name used for logging or for the fallback stopping method
-# $3 - stop process timeout (in sec), used to determine how many times we try
-# SIGTERM and an upper limit on how long this function should try and
-# stop the process. Defaults to 15.
-# Returns:
-# OCF_SUCCESS - if successful
-# OCF_ERR_GENERIC - if process is still running according to procfs
-###########################################################
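-# Usage sketch (illustrative values): stop by pid file with a fallback to
-# name matching and a 15 second stop timeout:
-#   proc_stop "${OCF_RESKEY_pid_file}" "beam.smp" 15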
-proc_stop()
-{
- local pid_param="${1}"
- local service_name="${2}"
- local timeout="${3:-15}"
- local LH="${LL} proc_stop():"
- local i
- local pid
- local pidfile
- if [ "${pid_param}" = "none" ] ; then
- pid="none"
- else
- # check if we were given just a number
- echo "${pid_param}" | egrep -q '^[0-9]+$'
- if [ $? -eq 0 ]; then
- pid="${pid_param}"
- elif [ -e "${pid_param}" ]; then # check if passed in a pid file
- pidfile="${pid_param}"
- pid=$(cat "${pidfile}" 2>/dev/null | tr -s " " "\n" | sort -u)
- else
- ocf_log warn "${LH} pid param ${pid_param} is not a file or a number, try match by ${service_name}"
- pid="none"
- fi
- fi
- # number of times to try a SIGTERM is (timeout - 5 seconds) / 2 seconds
- local stop_count=$(( ($timeout-5)/2 ))
-
- # make sure we stop at least once
- if [ $stop_count -le 0 ]; then
- stop_count=1
- fi
-
- if [ -z "${pid}" ] ; then
- ocf_log warn "${LH} unable to get PID from ${pidfile}, try match by ${service_name}"
- pid="none"
- fi
-
- if [ -n "${pid}" ]; then
- for i in ${pid} ; do
- [ "${i}" ] || break
- ocf_log info "${LH} Stopping ${service_name} by PID ${i}"
- proc_kill "${i}" "${service_name}" SIGTERM $stop_count
- if [ $? -ne 0 ]; then
- # SIGTERM failed, send a single SIGKILL
- proc_kill "${i}" "${service_name}" SIGKILL 1 2
- if [ $? -ne 0 ]; then
- ocf_log err "${LH} ERROR: could not stop ${service_name}"
- return "${OCF_ERR_GENERIC}"
- fi
- fi
- done
- fi
-
- # Remove the pid file here which will remove empty pid files as well
- if [ -n "${pidfile}" ]; then
- rm -f "${pidfile}"
- fi
-
- ocf_log info "${LH} Stopped ${service_name}"
- return "${OCF_SUCCESS}"
-}
-
-# Invokes the given command as the rabbitmq user, wrapped in the
-# timeout command.
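-# Usage sketch (illustrative timeout value), overriding the default
-# COMMAND_TIMEOUT with a 30 second limit:
-#   su_rabbit_cmd -t 30 "${OCF_RESKEY_ctl} stop_app"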
-su_rabbit_cmd() {
- local timeout
- if [ "$1" = "-t" ]; then
- timeout="/usr/bin/timeout ${OCF_RESKEY_command_timeout} $2"
- shift 2
- else
- timeout=$COMMAND_TIMEOUT
- fi
- local cmd="${1:-status}"
- local LH="${LL} su_rabbit_cmd():"
- local rc=1
- local user=$OCF_RESKEY_username
- local mail=/var/spool/mail/rabbitmq
- local pwd=/var/lib/rabbitmq
- local home=/var/lib/rabbitmq
-
- ocf_log debug "${LH} invoking a command: ${cmd}"
- su $user -s /bin/sh -c "USER=${user} MAIL=${mail} PWD=${pwd} HOME=${home} LOGNAME=${user} \
- ${timeout} ${cmd}"
- rc=$?
- ocf_log info "${LH} the invoked command exited ${rc}: ${cmd}"
- return $rc
-}
-
-now() {
- date -u +%s
-}
-
-set_limits() {
- local current_limit=$(su $OCF_RESKEY_username -s /bin/sh -c "ulimit -n")
- if [ ! -z $OCF_RESKEY_limit_nofile -a $OCF_RESKEY_limit_nofile -gt $current_limit ] ; then
- ulimit -n $OCF_RESKEY_limit_nofile
- fi
-}
-
-master_score() {
- local LH="${LL} master_score():"
- local score=$1
- if [ -z $score ] ; then
- score=0
- fi
- ocf_log info "${LH} Updating master score attribute with ${score}"
- ocf_run crm_master -N $THIS_PCMK_NODE -l reboot -v $score || return $OCF_ERR_GENERIC
- return $OCF_SUCCESS
-}
-
-# Return either FQDN or shortname, depending on OCF_RESKEY_use_fqdn.
-get_hostname() {
- if [ "${OCF_RESKEY_use_fqdn}" = 'false' ] ; then
- echo "$(hostname -s)"
- else
- echo "$(hostname -f)"
- fi
-}
-
-# Strip the FQDN to the shortname unless OCF_RESKEY_use_fqdn is set;
-# prepend the fqdn_prefix to the hostname.
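-# e.g. (illustrative, with an empty fqdn_prefix): "node-1.example.com" becomes
-# "node-1" when OCF_RESKEY_use_fqdn=false, and is left as-is otherwise.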
-process_fqdn() {
- if [ "${OCF_RESKEY_use_fqdn}" = 'false' ] ; then
- echo "${OCF_RESKEY_fqdn_prefix}$1" | awk -F. '{print $1}'
- else
- echo "${OCF_RESKEY_fqdn_prefix}$1"
- fi
-}
-
-# Return OCF_SUCCESS, if current host is in the list of given hosts.
-# Otherwise, return 10
-my_host() {
- local hostlist="$1"
- local hostname
- local hn
- local rc=10
- local LH="${LL} my_host():"
-
- hostname=$(process_fqdn $(get_hostname))
- ocf_log info "${LH} hostlist is: $hostlist"
- for host in $hostlist ; do
- hn=$(process_fqdn "${host}")
- ocf_log debug "${LH} comparing '$hostname' with '$hn'"
- if [ "${hostname}" = "${hn}" ] ; then
- rc=$OCF_SUCCESS
- break
- fi
- done
-
- return $rc
-}
-
-get_integer_node_attr() {
- local value
- value=$(crm_attribute -N $1 -l reboot --name "$2" --query 2>/dev/null | awk '{ split($3, vals, "="); if (vals[2] != "(null)") print vals[2] }')
- if [ $? -ne 0 -o -z "$value" ] ; then
- value=0
- fi
- echo $value
-}
-
-get_node_start_time() {
- get_integer_node_attr $1 'rabbit-start-time'
-}
-
-get_node_master_score() {
- get_integer_node_attr $1 "master-${RESOURCE_NAME}"
-}
-
-# Return the rabbit node name as either FQDN or shortname, depending on OCF_RESKEY_use_fqdn.
-rabbit_node_name() {
- echo "rabbit@$(process_fqdn $(ocf_attribute_target $1))"
-}
-
-rmq_setup_env() {
- local H
- local dir
- H="$(get_hostname)"
- export RABBITMQ_NODENAME=$(rabbit_node_name $H)
- export RABBITMQ_NODE_PORT=$OCF_RESKEY_node_port
- export RABBITMQ_PID_FILE=$OCF_RESKEY_pid_file
- MNESIA_FILES="${OCF_RESKEY_mnesia_base}/$(rabbit_node_name $H)"
- RMQ_START_TIME="${MNESIA_FILES}/ocf_server_start_time.txt"
- MASTER_FLAG_FILE="${MNESIA_FILES}/ocf_master_for_${OCF_RESOURCE_INSTANCE}"
- THIS_PCMK_NODE=$(ocf_attribute_target)
- TOTALVMEM=`free -mt | awk '/Total:/ {print $2}'`
- # check and make PID file dir
- local PID_DIR=$( dirname $OCF_RESKEY_pid_file )
- if [ ! -d ${PID_DIR} ] ; then
- mkdir -p ${PID_DIR}
- chown -R ${OCF_RESKEY_username}:${OCF_RESKEY_groupname} ${PID_DIR}
- chmod 755 ${PID_DIR}
- fi
-
- # Regardless of whether we just created the directory or it
- # already existed, check whether it is writable by the configured
- # user
- for dir in ${PID_DIR} "${OCF_RESKEY_mnesia_base}" "${OCF_RESKEY_log_dir}"; do
- if test -e ${dir}; then
- local files
- files=$(su -s /bin/sh - $OCF_RESKEY_username -c "find ${dir} ! -writable")
- if [ "${files}" ]; then
- ocf_log warn "Directory ${dir} is not writable by ${OCF_RESKEY_username}, chowning."
- chown -R ${OCF_RESKEY_username}:${OCF_RESKEY_groupname} "${dir}"
- fi
- fi
- done
-
- export LL="${OCF_RESOURCE_INSTANCE}[$$]:"
- update_cookie
-}
-
-# Return a RabbitMQ node to its virgin state.
-# For reset and force_reset to succeed the RabbitMQ application must have been stopped.
-# If the app cannot be stopped, beam will be killed and mnesia files will be removed.
-reset_mnesia() {
- local LH="${LL} reset_mnesia():"
- local make_amnesia=false
- local rc=$OCF_ERR_GENERIC
-
- # check status of a beam process
- get_status
- rc=$?
- if [ $rc -eq 0 ] ; then
- # beam is running
- # check status of rabbit app and stop it, if it is running
- get_status rabbit
- rc=$?
- if [ $rc -eq 0 ] ; then
- # rabbit app is running, have to stop it
- ocf_log info "${LH} Stopping RMQ-app prior to reset the mnesia."
- stop_rmq_server_app
- rc=$?
- if [ $rc -ne 0 ] ; then
- ocf_log warn "${LH} RMQ-app can't be stopped."
- make_amnesia=true
- fi
- fi
-
- if ! $make_amnesia ; then
- # rabbit app is not running, reset mnesia
- ocf_log info "${LH} Execute reset with timeout: ${TIMEOUT_ARG}"
- su_rabbit_cmd "${OCF_RESKEY_ctl} reset"
- rc=$?
- if [ $rc -ne 0 ] ; then
- ocf_log info "${LH} Execute force_reset with timeout: ${TIMEOUT_ARG}"
- su_rabbit_cmd "${OCF_RESKEY_ctl} force_reset"
- rc=$?
- if [ $rc -ne 0 ] ; then
- ocf_log warn "${LH} Mnesia couldn't cleaned, even by force-reset command."
- make_amnesia=true
- fi
- fi
- fi
- else
- # there is no beam running
- make_amnesia=true
- ocf_log warn "${LH} There is no Beam process running."
- fi
-
- # remove mnesia files, if required
- if $make_amnesia ; then
- kill_rmq_and_remove_pid
- ocf_run rm -rf "${MNESIA_FILES}"
- mnesia_schema_location="${OCF_RESKEY_mnesia_schema_base}/Mnesia.$(rabbit_node_name $(get_hostname))"
- ocf_run rm -rf "$mnesia_schema_location"
- ocf_log warn "${LH} Mnesia files appear corrupted and have been removed from ${MNESIA_FILES} and $mnesia_schema_location"
- fi
- # always return OCF SUCCESS
- return $OCF_SUCCESS
-}
-
-
-block_client_access()
-{
- # When OCF_RESKEY_avoid_using_iptables is true iptables calls are noops
- if [ "${OCF_RESKEY_avoid_using_iptables}" == 'true' ] ; then
- return $OCF_SUCCESS
- fi
- # do not add a temporary RMQ blocking rule if one already exists;
- # otherwise, try to add a blocking rule with a maximum of 5 retries
- local tries=5
- until $(iptables -nvL --wait | grep -q 'temporary RMQ block') || [ $tries -eq 0 ]; do
- tries=$((tries-1))
- iptables --wait -I INPUT -p tcp -m tcp --dport ${OCF_RESKEY_node_port} -m state --state NEW,RELATED,ESTABLISHED \
- -m comment --comment 'temporary RMQ block' -j REJECT --reject-with tcp-reset
- sleep 1
- done
- if [ $tries -eq 0 ]; then
- return $OCF_ERR_GENERIC
- else
- return $OCF_SUCCESS
- fi
-}
-
-unblock_client_access()
-{
- # When OCF_RESKEY_avoid_using_iptables is true iptables calls are noops
- if [ "${OCF_RESKEY_avoid_using_iptables}" == 'true' ] ; then
- return
- fi
- # remove all temporary RMQ blocking rules, even if more than one exists
- for i in $(iptables -nvL --wait --line-numbers | awk '/temporary RMQ block/ {print $1}'); do
- iptables --wait -D INPUT -p tcp -m tcp --dport ${OCF_RESKEY_node_port} -m state --state NEW,RELATED,ESTABLISHED \
- -m comment --comment 'temporary RMQ block' -j REJECT --reject-with tcp-reset
- done
-}
-
-get_nodes__base(){
- local infotype=''
- local rc=$OCF_ERR_GENERIC
- local c_status
-
- if [ "$1" = 'nodes' ]
- then
- infotype='db_nodes'
- elif [ "$1" = 'running' ]
- then
- infotype='running_db_nodes'
- fi
- c_status=`${OCF_RESKEY_ctl} eval "mnesia:system_info(${infotype})." 2>/dev/null`
- rc=$?
- if [ $rc -ne 0 ] ; then
- echo ''
- return $OCF_ERR_GENERIC
- fi
- # translate line like '{running_nodes,['rabbit@node-1','rabbit@node-2','rabbit@node-3']},' to node_list
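- # e.g. (illustrative node names) that line is echoed as:
- #   "rabbit@node-1 rabbit@node-2 rabbit@node-3"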
- echo $(echo "${c_status}" | awk -F, '{ for (i=1;i<=NF;i++) { if ($i ~ /@/) { gsub(/[\[\]}{]/,"",$i); print $i; } }}' | tr -d "\'")
- return $OCF_SUCCESS
-}
-
-get_nodes() {
- echo $(get_nodes__base nodes)
- return $?
-}
-
-get_running_nodes() {
- echo $(get_nodes__base running)
- return $?
-}
-
-# Get alive cluster nodes in the visible partition, except the specified one
-get_alive_pacemaker_nodes_but()
-{
- if [ -z "$1" ]; then
- echo `crm_node -l -p | sed -e '/(null)/d'`
- else
- echo `crm_node -l -p | sed -e "s/${1}//g" | sed -e '/(null)/d'`
- fi
-}
-
-# Get the current master. If a parameter is provided,
-# do not check the node with that name.
-get_master_name_but()
-{
- local node
- for node in $(get_alive_pacemaker_nodes_but "$@")
- do
- ocf_log info "${LH} looking if $node is master"
-
- if is_master $node; then
- ocf_log info "${LH} master is $node"
- echo $node
- break
- fi
- done
-}
-
-# Evaluates some Erlang code on the current node
-erl_eval() {
- local fmt="${1:?}"
- shift
-
- $COMMAND_TIMEOUT ${OCF_RESKEY_ctl} eval "$(printf "$fmt" "$@")"
-}
-
-# Returns 0 if we are clustered with the provided node
-is_clustered_with()
-{
- local LH="${LH}: is_clustered_with: "
- local node_name
- local rc
- node_name=$(rabbit_node_name $1)
-
- local seen_as_running
- seen_as_running=$(erl_eval "lists:member('%s', rabbit_mnesia:cluster_nodes(running))." "$node_name")
- rc=$?
- if [ "$rc" -ne 0 ]; then
- ocf_log err "${LH} Failed to check whether '$node_name' is considered running by us"
- # We had a transient local error; that doesn't mean the remote node is
- # not part of the cluster, so ignore this
- elif [ "$seen_as_running" != true ]; then
- ocf_log info "${LH} Node $node_name is not running, considering it not clustered with us"
- return 1
- fi
-
- local seen_as_partitioned
- seen_as_partitioned=$(erl_eval "lists:member('%s', rabbit_node_monitor:partitions())." "$node_name")
- rc=$?
- if [ "$rc" -ne 0 ]; then
- ocf_log err "${LH} Failed to check whether '$node_name' is partitioned with us"
- # We had a transient local error; that doesn't mean the remote node is
- # partitioned with us, so ignore this
- elif [ "$seen_as_partitioned" != false ]; then
- ocf_log info "${LH} Node $node_name is partitioned from us"
- return 1
- fi
-
- return $?
-}
-
-
-check_need_join_to() {
- local join_to
- local node
- local running_nodes
- local rc=$OCF_ERR_GENERIC
-
- rc=0
- join_to=$(rabbit_node_name $1)
- running_nodes=$(get_running_nodes)
- for node in $running_nodes ; do
- if [ "${join_to}" = "${node}" ] ; then
- rc=1
- break
- fi
- done
-
- return $rc
-}
-
-# Update erlang cookie, if it has been specified
-update_cookie() {
- local cookie_file_content
- if [ "${OCF_RESKEY_erlang_cookie}" != 'false' ] ; then
- if [ -f "${OCF_RESKEY_erlang_cookie_file}" ]; then
- # First line of cookie file without newline
- cookie_file_content=$(head -n1 "${OCF_RESKEY_erlang_cookie_file}" | perl -pe chomp)
- fi
- # As there is a brief period of time when the file is empty
- # (shell redirection has already opened and truncated the file,
- # and echo hasn't finished its job), we only do this write
- # when the cookie has changed.
- if [ "${OCF_RESKEY_erlang_cookie}" != "${cookie_file_content}" ]; then
- echo "${OCF_RESKEY_erlang_cookie}" > "${OCF_RESKEY_erlang_cookie_file}"
- fi
- # And these are idempotent operations, so we don't have to
- # check any preconditions for running them.
- chown ${OCF_RESKEY_username}:${OCF_RESKEY_groupname} "${OCF_RESKEY_erlang_cookie_file}"
- chmod 600 "${OCF_RESKEY_erlang_cookie_file}"
- fi
- return $OCF_SUCCESS
-}
-
-# Stop rmq beam process by pid and by rabbit node name match. Returns SUCCESS/ERROR
-kill_rmq_and_remove_pid() {
- local LH="${LL} kill_rmq_and_remove_pid():"
- # Stop the rabbitmq-server by its pidfile, use the name matching as a fallback,
- # and ignore the exit code
- proc_stop "${OCF_RESKEY_pid_file}" "beam.*${RABBITMQ_NODENAME}" "${OCF_RESKEY_stop_time}"
- # Ensure the beam.smp stopped by the rabbit node name matching as well
- proc_stop none "beam.*${RABBITMQ_NODENAME}" "${OCF_RESKEY_stop_time}"
- if [ $? -eq 0 ] ; then
- return $OCF_SUCCESS
- else
- return $OCF_ERR_GENERIC
- fi
-}
-
-trim_var(){
- local string="$*"
- echo ${string%% }
-}
-
-action_validate() {
- # todo(sv): validate some incoming parameters
- OCF_RESKEY_CRM_meta_notify_post=$(trim_var $OCF_RESKEY_CRM_meta_notify_post)
- OCF_RESKEY_CRM_meta_notify_pre=$(trim_var $OCF_RESKEY_CRM_meta_notify_pre)
- OCF_RESKEY_CRM_meta_notify_start=$(trim_var $OCF_RESKEY_CRM_meta_notify_start)
- OCF_RESKEY_CRM_meta_notify_stop=$(trim_var $OCF_RESKEY_CRM_meta_notify_stop)
- OCF_RESKEY_CRM_meta_notify_start_resource=$(trim_var $OCF_RESKEY_CRM_meta_notify_start_resource)
- OCF_RESKEY_CRM_meta_notify_stop_resource=$(trim_var $OCF_RESKEY_CRM_meta_notify_stop_resource)
- OCF_RESKEY_CRM_meta_notify_active_resource=$(trim_var $OCF_RESKEY_CRM_meta_notify_active_resource)
- OCF_RESKEY_CRM_meta_notify_inactive_resource=$(trim_var $OCF_RESKEY_CRM_meta_notify_inactive_resource)
- OCF_RESKEY_CRM_meta_notify_start_uname=$(trim_var $OCF_RESKEY_CRM_meta_notify_start_uname)
- OCF_RESKEY_CRM_meta_notify_stop_uname=$(trim_var $OCF_RESKEY_CRM_meta_notify_stop_uname)
- OCF_RESKEY_CRM_meta_notify_active_uname=$(trim_var $OCF_RESKEY_CRM_meta_notify_active_uname)
- OCF_RESKEY_CRM_meta_notify_master_resource=$(trim_var $OCF_RESKEY_CRM_meta_notify_master_resource)
- OCF_RESKEY_CRM_meta_notify_master_uname=$(trim_var $OCF_RESKEY_CRM_meta_notify_master_uname)
- OCF_RESKEY_CRM_meta_notify_demote_resource=$(trim_var $OCF_RESKEY_CRM_meta_notify_demote_resource)
- OCF_RESKEY_CRM_meta_notify_demote_uname=$(trim_var $OCF_RESKEY_CRM_meta_notify_demote_uname)
- OCF_RESKEY_CRM_meta_notify_slave_resource=$(trim_var $OCF_RESKEY_CRM_meta_notify_slave_resource)
- OCF_RESKEY_CRM_meta_notify_slave_uname=$(trim_var $OCF_RESKEY_CRM_meta_notify_slave_uname)
- OCF_RESKEY_CRM_meta_notify_promote_resource=$(trim_var $OCF_RESKEY_CRM_meta_notify_promote_resource)
- OCF_RESKEY_CRM_meta_notify_promote_uname=$(trim_var $OCF_RESKEY_CRM_meta_notify_promote_uname)
- return $OCF_SUCCESS
-}
-
-update_rabbit_start_time_if_rc() {
- local nowtime
- local rc=$1
- if [ $rc -eq 0 ]; then
- nowtime="$(now)"
- ocf_log info "${LH} Rabbit app started successfully. Updating start time attribute with ${nowtime}"
- ocf_run crm_attribute -N $THIS_PCMK_NODE -l reboot --name 'rabbit-start-time' --update "${nowtime}"
- fi
-}
-
-join_to_cluster() {
- local node="$1"
- local rmq_node
- local rc=$OCF_ERR_GENERIC
- local LH="${LL} join_to_cluster():"
-
- ocf_log info "${LH} start."
-
- rmq_node=$(rabbit_node_name $node)
- ocf_log info "${LH} Joining to cluster by node '${rmq_node}'."
- get_status rabbit
- rc=$?
- if [ $rc -eq $OCF_SUCCESS ] ; then
- ocf_log info "${LH} rabbitmq app will be stopped."
- stop_rmq_server_app
- rc=$?
- if [ $rc -ne 0 ] ; then
- ocf_log err "${LH} Can't stop rabbitmq app by stop_app command. Stopping."
- action_stop
- return $OCF_ERR_GENERIC
- fi
- fi
- ocf_log info "${LH} Execute join_cluster with timeout: ${TIMEOUT_ARG}"
- su_rabbit_cmd "${OCF_RESKEY_ctl} join_cluster $rmq_node"
- rc=$?
- if [ $rc -ne 0 ] ; then
- ocf_log err "${LH} Can't join to cluster by node '${rmq_node}'. Stopping."
- action_stop
- return $OCF_ERR_GENERIC
- fi
- sleep 2
- try_to_start_rmq_app
- rc=$?
- if [ $rc -ne 0 ] ; then
- ocf_log err "${LH} Can't start RMQ app after join to cluster. Stopping."
- action_stop
- return $OCF_ERR_GENERIC
- else
- update_rabbit_start_time_if_rc 0
- ocf_log info "${LH} Joined to cluster succesfully."
- fi
-
- ocf_log info "${LH} end."
- return $rc
-}
-
-unjoin_nodes_from_cluster() {
- # node names of the nodes where the pcs resource is being stopped
- local nodelist="$1"
- local hostname
- local nodename
- local rc=$OCF_ERR_GENERIC
- local rnode
- # nodes in rabbit cluster db
- local nodes_in_cluster
- local LH="${LL} unjoin_nodes_from_cluster():"
-
- nodes_in_cluster=$(get_nodes)
- rc=$?
- if [ $rc -ne 0 ] ; then
- # no nodes in node list, nothing to do
- return $OCF_SUCCESS
- fi
-
- # unjoin all cluster nodes which are being stopped (i.e. received post-stop notify), except *this* node
- # before unjoining the nodes, make sure they were disconnected from *this* node
- for hostname in $nodelist ; do
- nodename=$(rabbit_node_name $hostname)
- if [ "${nodename}" = "${RABBITMQ_NODENAME}" ] ; then
- continue
- fi
- for rnode in $nodes_in_cluster ; do
- if [ "${nodename}" = "${rnode}" ] ; then
- # disconnect node being unjoined from this node
- ocf_run ${OCF_RESKEY_ctl} eval "disconnect_node(list_to_atom(\"${nodename}\"))." 2>&1
- rc=$?
- if [ $rc -eq $OCF_SUCCESS ] ; then
- ocf_log info "${LH} node '${nodename}' disconnected succesfully."
- else
- ocf_log info "${LH} disconnecting node '${nodename}' failed."
- fi
-
- # unjoin node
- # when the rabbit node went down, its status
- # remains 'running' for a while, so a few retries are required
- local tries=0
- until [ $tries -eq 5 ]; do
- tries=$((tries+1))
- if is_clustered_with $nodename; then
- ocf_log info "${LH} the ${nodename} is alive and cannot be kicked from the cluster yet"
- else
- break
- fi
- sleep 10
- done
- ocf_log info "${LH} Execute forget_cluster_node with timeout: ${TIMEOUT_ARG}"
- su_rabbit_cmd "${OCF_RESKEY_ctl} forget_cluster_node ${nodename}"
- rc=$?
- if [ $rc -eq 0 ] ; then
- ocf_log info "${LH} node '${nodename}' unjoined succesfully."
- else
- ocf_log warn "${LH} unjoining node '${nodename}' failed."
- fi
- fi
- done
- done
- return $OCF_SUCCESS
-}
-
-# Stop RMQ beam server process. Returns SUCCESS/ERROR
-stop_server_process() {
- local pid
- local rc=$OCF_ERR_GENERIC
- local LH="${LL} stop_server_process():"
-
- pid=$(cat ${OCF_RESKEY_pid_file})
- rc=$?
- if [ $rc -ne 0 ] ; then
- # Try to stop without known PID
- ocf_log err "${LH} RMQ-server process PIDFILE was not found!"
- su_rabbit_cmd "${OCF_RESKEY_ctl} stop >> \"${OCF_RESKEY_log_dir}/shutdown_log\" 2>&1"
- if [ $? -eq 0 ] ; then
- ocf_log info "${LH} RMQ-server process stopped succesfully, although there was no PIDFILE found."
- ocf_log info "${LH} grant a graceful termintation window ${OCF_RESKEY_stop_time} to end its beam"
- sleep "${OCF_RESKEY_stop_time}"
- else
- kill_rmq_and_remove_pid
- fi
- elif [ "${pid}" ] ; then
- # Try to stop gracefully by known PID
- ocf_log info "${LH} Execute stop with timeout: ${TIMEOUT_ARG}"
- su_rabbit_cmd "${OCF_RESKEY_ctl} stop ${OCF_RESKEY_pid_file} >> \"${OCF_RESKEY_log_dir}/shutdown_log\" 2>&1"
- [ $? -eq 0 ] && ocf_log info "${LH} RMQ-server process (PID=${pid}) stopped successfully."
- fi
-
- # Ensure there is no beam process and pidfile left
- pgrep -f "beam.*${RABBITMQ_NODENAME}" > /dev/null
- rc=$?
- if [ -f ${OCF_RESKEY_pid_file} -o $rc -eq 0 ] ; then
- ocf_log warn "${LH} The pidfile or beam's still exist, forcing the RMQ-server cleanup"
- kill_rmq_and_remove_pid
- return $?
- else
- return $OCF_SUCCESS
- fi
-}
-
-# Stop RMQ-app. Return OCF_SUCCESS, if the app was stopped,
-# otherwise return OCF_ERR_GENERIC
-stop_rmq_server_app() {
- local rc=$OCF_ERR_GENERIC
-
- # if the beam process isn't running, then rabbit app is stopped as well
- get_status
- rc=$?
- if [ $rc -ne 0 ] ; then
- return $OCF_SUCCESS
- fi
-
- # stop the app
- ocf_log info "${LH} Execute stop_app with timeout: ${TIMEOUT_ARG}"
- su_rabbit_cmd "${OCF_RESKEY_ctl} stop_app >> \"${OCF_RESKEY_log_dir}/shutdown_log\" 2>&1"
- rc=$?
- if [ $rc -ne 0 ] ; then
- ocf_log err "${LH} RMQ-server app cannot be stopped."
- return $OCF_ERR_GENERIC
- fi
-
- get_status rabbit
- rc=$?
- if [ $rc -ne $OCF_SUCCESS ] ; then
- ocf_log info "${LH} RMQ-server app stopped succesfully."
- rc=$OCF_SUCCESS
- else
- ocf_log err "${LH} RMQ-server app cannot be stopped."
- rc=$OCF_ERR_GENERIC
- fi
-
- return $rc
-}
-
-start_beam_process() {
- local command
- local rc=$OCF_ERR_GENERIC
- local ts_end
- local pf_end
- local pid
- local LH="${LL} start_beam_process():"
-
- # remove old PID-file if it exists
- if [ -f "${OCF_RESKEY_pid_file}" ] ; then
- ocf_log warn "${LH} found old PID-file '${OCF_RESKEY_pid_file}'."
- pid=$(cat ${OCF_RESKEY_pid_file})
- if [ "${pid}" -a -d "/proc/${pid}" ] ; then
- ocf_run cat /proc/${pid}/cmdline | grep -c 'bin/beam' > /dev/null 2>&1
- rc=$?
- if [ $rc -eq $OCF_SUCCESS ] ; then
- ocf_log warn "${LH} found beam process with PID=${pid}, killing...'."
- ocf_run kill -TERM $pid
- else
- ocf_log err "${LH} found unknown process with PID=${pid} from '${OCF_RESKEY_pid_file}'."
- return $OCF_ERR_GENERIC
- fi
- fi
- ocf_run rm -f $OCF_RESKEY_pid_file
- fi
-
- [ -f /etc/default/rabbitmq-server ] && . /etc/default/rabbitmq-server
-
- # RabbitMQ requires high soft and hard limits for NOFILE
- set_limits
-
- # run beam process
- command="${OCF_RESKEY_binary} >> \"${OCF_RESKEY_log_dir}/startup_log\" 2>/dev/null"
- RABBITMQ_NODE_ONLY=1 su rabbitmq -s /bin/sh -c "${command}"&
- ts_end=$(( $(now) + ${OCF_RESKEY_start_time} ))
- rc=$OCF_ERR_GENERIC
- while [ $(now) -lt ${ts_end} ]; do
- # waiting for normal start of beam
- pid=0
- pf_end=$(( $(now) + 3 ))
- while [ $(now) -lt ${pf_end} ]; do
- # waiting for OCF_RESKEY_pid_file of beam process
- if [ -f "${OCF_RESKEY_pid_file}" ] ; then
- pid=$(cat ${OCF_RESKEY_pid_file})
- break
- fi
- sleep 1
- done
- if [ "${pid}" != "0" -a -d "/proc/${pid}" ] ; then
- rc=$OCF_SUCCESS
- break
- fi
- sleep 2
- done
- if [ $rc -ne $OCF_SUCCESS ]; then
- if [ "${pid}" = "0" ] ; then
- ocf_log warn "${LH} PID-file '${OCF_RESKEY_pid_file}' not found"
- fi
- ocf_log err "${LH} RMQ-runtime (beam) didn't start succesfully (rc=${rc})."
- fi
-
- return $rc
-}
-
-check_plugins() {
- # Check if it's safe to load plugins and if we need to do so. The logic is:
- # if (EnabledPlugins > 0) and (ActivePlugins == 0), then it's safe to load.
- # If we have at least one active plugin, then it's not safe to re-load them,
- # because plugins:setup() would remove existing dependency plugins in plugins_expand_dir.
- ${OCF_RESKEY_ctl} eval '{ok, EnabledFile} = application:get_env(rabbit, enabled_plugins_file), EnabledPlugins = rabbit_plugins:read_enabled(EnabledFile), ActivePlugins = rabbit_plugins:active(), if length(EnabledPlugins)>0 -> if length(ActivePlugins)==0 -> erlang:error("need_to_load_plugins"); true -> false end; true -> false end.'
- return $?
-}
-
-load_plugins() {
- check_plugins
- local rc=$?
- if [ $rc -eq 0 ] ; then
- return 0
- else
- ${OCF_RESKEY_ctl} eval 'ToBeLoaded = rabbit_plugins:setup(), ok = app_utils:load_applications(ToBeLoaded), StartupApps = app_utils:app_dependency_order(ToBeLoaded,false), app_utils:start_applications(StartupApps).'
- return $?
- fi
-}
-
-list_active_plugins() {
- local list
- list=`${OCF_RESKEY_ctl} eval 'rabbit_plugins:active().'`
- echo "${list}"
-}
-
-try_to_start_rmq_app() {
- local startup_log="${1:-${OCF_RESKEY_log_dir}/startup_log}"
- local rc=$OCF_ERR_GENERIC
- local LH="${LL} try_to_start_rmq_app():"
-
- get_status
- rc=$?
- if [ $rc -ne $OCF_SUCCESS ] ; then
- ocf_log info "${LH} RMQ-runtime (beam) not started, starting..."
- start_beam_process
- rc=$?
- if [ $rc -ne $OCF_SUCCESS ]; then
- ocf_log err "${LH} Failed to start beam - returning from the function"
- return $OCF_ERR_GENERIC
- fi
- fi
-
-
- if [ -z "${startup_log}" ] ; then
- startup_log="${OCF_RESKEY_log_dir}/startup_log"
- fi
-
- ocf_log info "${LH} begin."
- ocf_log info "${LH} Execute start_app with timeout: ${TIMEOUT_ARG}"
- su_rabbit_cmd "${OCF_RESKEY_ctl} start_app >>${startup_log} 2>&1"
- rc=$?
- if [ $rc -eq 0 ] ; then
- ocf_log info "${LH} start_app was successful."
- ocf_log info "${LH} waiting for start to finish with timeout: ${TIMEOUT_ARG}"
- su_rabbit_cmd "${OCF_RESKEY_ctl} wait ${OCF_RESKEY_pid_file}"
- rc=$?
- if [ $rc -ne 0 ] ; then
- ocf_log err "${LH} RMQ-server app failed to wait for start."
- return $OCF_ERR_GENERIC
- fi
- rc=$OCF_SUCCESS
- # Load the enabled plugins
- ocf_log info "${LH} start plugins."
- load_plugins
- local mrc=$?
- if [ $mrc -eq 0 ] ; then
- local mlist
- mlist=`list_active_plugins`
- ocf_log info "${LH} Starting plugins: ${mlist}"
- else
- ocf_log info "${LH} Starting plugins: failed."
- fi
- else
- ocf_log info "${LH} start_app failed."
- rc=$OCF_ERR_GENERIC
- fi
- return $rc
-}
-
-start_rmq_server_app() {
- local rc=$OCF_ERR_GENERIC
- local startup_log="${OCF_RESKEY_log_dir}/startup_log"
- local startup_output
- local LH="${LL} start_rmq_server_app():"
- local a
-
- # We are performing the initial start check.
- # We are not ready to provide service yet,
- # so clients should not have access.
-
-
- ocf_log info "${LH} begin."
- # Safe-unblock the rules, if there are any
- unblock_client_access
- # Apply the blocking rule
- block_client_access
- rc=$?
- if [ $rc -eq $OCF_SUCCESS ]; then
- ocf_log info "${LH} blocked access to RMQ port"
- else
- ocf_log err "${LH} cannot block access to RMQ port!"
- return $OCF_ERR_GENERIC
- fi
- get_status
- rc=$?
- if [ $rc -ne $OCF_SUCCESS ] ; then
- ocf_log info "${LH} RMQ-runtime (beam) not started, starting..."
- start_beam_process
- rc=$?
- if [ $rc -ne $OCF_SUCCESS ]; then
- unblock_client_access
- ocf_log info "${LH} unblocked access to RMQ port"
- return $OCF_ERR_GENERIC
- fi
- fi
-
- ocf_log info "${LH} RMQ-server app not started, starting..."
- try_to_start_rmq_app "$startup_log"
- rc=$?
- if [ $rc -eq $OCF_SUCCESS ] ; then
- # rabbitmq-server started successfully as the master of the cluster
- master_score $MIN_MASTER_SCORE
- stop_rmq_server_app
- rc=$?
- if [ $rc -ne 0 ] ; then
- ocf_log err "${LH} RMQ-server app can't be stopped. Beam will be killed."
- kill_rmq_and_remove_pid
- unblock_client_access
- ocf_log info "${LH} unblocked access to RMQ port"
- return $OCF_ERR_GENERIC
- fi
- else
- # error while starting the RMQ-server app
- ocf_log warn "${LH} RMQ-server app can't start without Mnesia cleaning."
- for a in $(seq 1 10) ; do
- rc=$OCF_ERR_GENERIC
- reset_mnesia || break
- try_to_start_rmq_app "$startup_log"
- rc=$?
- if [ $rc -eq $OCF_SUCCESS ]; then
- stop_rmq_server_app
- rc=$?
- if [ $rc -eq $OCF_SUCCESS ]; then
- ocf_log info "${LH} RMQ-server app Mnesia cleaned successfully."
- rc=$OCF_SUCCESS
- master_score $MIN_MASTER_SCORE
- break
- else
- ocf_log err "${LH} RMQ-server app can't be stopped during Mnesia cleaning. Beam will be killed."
- kill_rmq_and_remove_pid
- unblock_client_access
- ocf_log info "${LH} unblocked access to RMQ port"
- return $OCF_ERR_GENERIC
- fi
- fi
- done
- fi
- if [ $rc -eq $OCF_ERR_GENERIC ] ; then
- ocf_log err "${LH} RMQ-server can't be started while many tries. Beam will be killed."
- kill_rmq_and_remove_pid
- fi
- ocf_log info "${LH} end."
- unblock_client_access
- ocf_log info "${LH} unblocked access to RMQ port"
- return $rc
-}
-
-# Check the status of the rabbit beam process, or of the rabbit app if the 'rabbit' arg is specified.
-# By default, test whether the kernel app is running; otherwise consider it "not running".
-get_status() {
- local what="${1:-kernel}"
- local rc=$OCF_NOT_RUNNING
- local LH="${LL} get_status():"
- local body
- local beam_running
-
- body=$( ${COMMAND_TIMEOUT} ${OCF_RESKEY_ctl} eval 'rabbit_misc:which_applications().' 2>&1 )
- rc=$?
-
- pgrep -f "beam.*${RABBITMQ_NODENAME}" > /dev/null
- beam_running=$?
- # report not running only if the which_applications() reported an error AND the beam is not running
- if [ $rc -ne 0 -a $beam_running -ne 0 ] ; then
- ocf_log info "${LH} failed with code ${rc}. Command output: ${body}"
- return $OCF_NOT_RUNNING
- # return a generic error, if there were errors and beam is found running
- elif [ $rc -ne 0 ] ; then
- ocf_log info "${LH} found the beam process running but failed with code ${rc}. Command output: ${body}"
- return $OCF_ERR_GENERIC
- fi
-
- # try to parse the which_applications() output only if it exited w/o errors
- if [ "${what}" -a $rc -eq 0 ] ; then
- rc=$OCF_NOT_RUNNING
- echo "$body" | grep "\{${what}," > /dev/null 2>&1 && rc=$OCF_SUCCESS
-
- if [ $rc -ne $OCF_SUCCESS ] ; then
- ocf_log info "${LH} app ${what} was not found in command output: ${body}"
- fi
- fi
-
- [ $rc -ne $OCF_SUCCESS ] && rc=$OCF_NOT_RUNNING
- return $rc
-}
-
-action_status() {
- local rc=$OCF_ERR_GENERIC
-
- get_status
- rc=$?
- return $rc
-}
-
-# return 0, if given node has a master attribute in CIB,
-# otherwise, return 1
-is_master() {
- local result
- result=`crm_attribute -N "${1}" -l reboot --name 'rabbit-master' --query 2>/dev/null |\
- awk '{print $3}' | awk -F "=" '{print $2}' | sed -e '/(null)/d'`
- if [ "${result}" != 'true' ] ; then
- return 1
- fi
- return 0
-}
-
-# Verify whether su_rabbit_cmd exited because of a timeout by checking its return code.
-# If it did not, return 0. If it did AND it is the
-# $OCF_RESKEY_max_rabbitmqctl_timeouts'th timeout in a row,
-# return 2 to signal get_monitor that it should
-# exit with an error. Otherwise return 1 to signal that there was a timeout,
-# but it should be ignored. Timeouts for different operations are tracked
-# separately. The second argument is used to distinguish them.
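-# Usage sketch (illustrative): check_timeouts $? "rabbit_list_queues_timeouts" "list_queues",
-# then treat 0 as "no timeout", 1 as "timed out, ignore", 2 as "fail the resource".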
-check_timeouts() {
- local op_rc=$1
- local timeouts_attr_name=$2
- local op_name=$3
-
- # 75 is EX_TEMPFAIL from sysexits, and is used by rabbitmqctl to signal
- # a timeout.
- if [ $op_rc -ne 124 -a $op_rc -ne 137 -a $op_rc -ne 75 ]; then
- ocf_update_private_attr $timeouts_attr_name 0
- return 0
- fi
-
- local count
- count=$(ocf_get_private_attr $timeouts_attr_name 0)
-
- count=$((count+1))
- # There is a slight chance that this piece of code will be executed twice simultaneously.
- # As a result, $timeouts_attr_name's value will be one less than it should be. But we don't need
- # a precise calculation here.
- ocf_update_private_attr $timeouts_attr_name $count
-
- if [ $count -lt $OCF_RESKEY_max_rabbitmqctl_timeouts ]; then
- ocf_log warn "${LH} 'rabbitmqctl $op_name' timed out $count of max. $OCF_RESKEY_max_rabbitmqctl_timeouts time(s) in a row. Doing nothing for now."
- return 1
- else
- ocf_log err "${LH} 'rabbitmqctl $op_name' timed out $count of max. $OCF_RESKEY_max_rabbitmqctl_timeouts time(s) in a row and is not responding. The resource is failed."
- return 2
- fi
-}
-
-wait_sync() {
- local wait_time=$1
- local queues
- local opt_arg=""
-
- if [ "$OCF_RESKEY_rmq_feature_local_list_queues" = "true" ]; then
- opt_arg="--local"
- fi
-
- queues="${COMMAND_TIMEOUT} ${OCF_RESKEY_ctl} -p ${OCF_RESKEY_default_vhost} list_queues $opt_arg name state"
-
- su_rabbit_cmd -t "${wait_time}" "sh -c \"while ${queues} | grep -q 'syncing,'; \
- do sleep 2; done\""
-
- return $?
-}
-
-get_monitor() {
- local rc=$OCF_ERR_GENERIC
- local LH="${LL} get_monitor():"
- local status_master=1
- local rabbit_running
- local name
- local node
- local node_start_time
- local nowtime
- local partitions_report
- local node_partitions
-
- ocf_log info "${LH} CHECK LEVEL IS: ${OCF_CHECK_LEVEL}"
- get_status
- rc=$?
- if [ $rc -eq $OCF_NOT_RUNNING ] ; then
- ocf_log info "${LH} get_status() returns ${rc}."
- ocf_log info "${LH} ensuring this slave does not get promoted."
- master_score 0
- return $OCF_NOT_RUNNING
- elif [ $rc -eq $OCF_SUCCESS ] ; then
- ocf_log info "${LH} get_status() returns ${rc}."
- ocf_log info "${LH} also checking if we are master."
- get_status rabbit
- rabbit_running=$?
- is_master $THIS_PCMK_NODE
- status_master=$?
- ocf_log info "${LH} master attribute is ${status_master}"
- if [ $status_master -eq 0 -a $rabbit_running -eq $OCF_SUCCESS ]
- then
- ocf_log info "${LH} We are the running master"
- rc=$OCF_RUNNING_MASTER
- elif [ $status_master -eq 0 -a $rabbit_running -ne $OCF_SUCCESS ] ; then
- ocf_log err "${LH} We are the master and RMQ-runtime (beam) is not running. this is a failure"
- exit $OCF_FAILED_MASTER
- fi
- fi
- get_status rabbit
- rabbit_running=$?
- ocf_log info "${LH} checking if rabbit app is running"
-
- if [ $rc -eq $OCF_RUNNING_MASTER ]; then
- if [ $rabbit_running -eq $OCF_SUCCESS ]; then
- ocf_log info "${LH} rabbit app is running and is master of cluster"
- else
- ocf_log err "${LH} we are the master and rabbit app is not running. This is a failure"
- exit $OCF_FAILED_MASTER
- fi
- else
- start_time=$((180 + $(ocf_get_private_attr 'rabbit-start-phase-1-time' 0)))
- restart_order_time=$((60 + $(ocf_get_private_attr 'rabbit-ordered-to-restart' 0)))
- nowtime=$(now)
-
- # If we started more than 3 minutes ago, and
- # we got an order to restart less than 1 minute ago
- if [ $nowtime -lt $restart_order_time ]; then
- if [ $nowtime -gt $start_time ]; then
- ocf_log err "${LH} failing because we have received an order to restart from the master"
- stop_server_process
- rc=$OCF_ERR_GENERIC
- else
- ocf_log warn "${LH} received an order to restart from the master, ignoring it because we have just started"
- fi
- fi
- fi
-
- if [ $rc -eq $OCF_ERR_GENERIC ]; then
- ocf_log err "${LH} get_status() returns generic error ${rc}"
- ocf_log info "${LH} ensuring this slave does not get promoted."
- master_score 0
- return $OCF_ERR_GENERIC
- fi
-
- # Recounting our master score
- ocf_log info "${LH} preparing to update master score for node"
- local our_start_time
- local new_score
- local node_start_time
- local node_score
-
- our_start_time=$(get_node_start_time $THIS_PCMK_NODE)
-
- if [ $our_start_time -eq 0 ]; then
- new_score=$MIN_MASTER_SCORE
- else
- new_score=$BEST_MASTER_SCORE
- for node in $(get_alive_pacemaker_nodes_but $THIS_PCMK_NODE)
- do
- node_start_time=$(get_node_start_time $node)
- node_score=$(get_node_master_score $node)
-
- ocf_log info "${LH} comparing us (start time: $our_start_time, score: $new_score) with $node (start time: $node_start_time, score: $node_score)"
- if [ $node_start_time -ne 0 -a $node_score -ne 0 -a $node_start_time -lt $our_start_time ]; then
- new_score=$((node_score - 10 < new_score ? node_score - 10 : new_score ))
- elif [ $node_start_time -ne 0 -a $node_score -ne 0 -a $node_start_time -eq $our_start_time ]; then
- # Do not get promoted if the other node is already master and we have the same start time
- if is_master $node; then
- new_score=$((node_score - 10 < new_score ? node_score - 10 : new_score ))
- fi
- fi
- done
- fi
-
- if [ "$new_score" -ne "$(get_node_master_score $THIS_PCMK_NODE)" ]; then
- master_score $new_score
- fi
- ocf_log info "${LH} our start time is $our_start_time and score is $new_score"
-
- # Skip all other checks if rabbit app is not running
- if [ $rabbit_running -ne $OCF_SUCCESS ]; then
- ocf_log info "${LH} RabbitMQ is not running, get_monitor function ready to return ${rc}"
- return $rc
- fi
-
- # rc can be SUCCESS or RUNNING_MASTER, don't touch it unless there
- # is some error uncovered by node_health_check
- if ! node_health_check; then
- rc=$OCF_ERR_GENERIC
- fi
-
- if [ $rc -eq $OCF_RUNNING_MASTER ] ; then
- # If we are the master and healthy, perform various
- # connectivity checks for other nodes in the cluster.
- # Order a member to restart if something fishy happens with it.
- # All cross-node checks MUST happen only here.
-
- partitions_report="$(partitions_report)"
-
- for node in $(get_alive_pacemaker_nodes_but $THIS_PCMK_NODE); do
- # Restart node if we don't consider ourselves clustered with it
- if ! is_clustered_with $node; then
- ocf_log warn "${LH} node $node is not connected with us"
- order_node_restart "$node"
- continue
- fi
-
- # Restart node if it has any unresolved partitions
- node_partitions=$(grep_partitions_report $node "$partitions_report")
- if [ ! -z "$node_partitions" ]; then
- ocf_log warn "${LH} Node $node thinks that it is partitoned with $node_partitions"
- order_node_restart "$node"
- continue
- fi
- done
- fi
-
- ocf_log info "${LH} get_monitor function ready to return ${rc}"
- return $rc
-}
-
-order_node_restart() {
- local node=${1:?}
- ocf_log warn "${LH} Ordering node '$node' to restart"
- ocf_update_private_attr 'rabbit-ordered-to-restart' "$(now)" "$node"
-}
-
-# Checks whether the node is mentioned somewhere in the report returned by
-# partitions_report()
-grep_partitions_report() {
- local node="${1:?}"
- local report="${2:?}"
- local rabbit_node
- rabbit_node=$(rabbit_node_name "$node")
- echo "$report" | grep "PARTITIONED $rabbit_node:" | sed -e 's/^[^:]\+: //'
-}
-
-# Report partitions (if any) from the viewpoint of every running node in the cluster.
-# It is a parseable/grepable version of `rabbitmqctl cluster_status`.
-#
-# If a node sees a partition, the report will contain a line like:
-# PARTITIONED node-name: comma-separated list of nodes that node-name considers itself partitioned with
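-# For example (illustrative node names):
-#   PARTITIONED rabbit@node-1: rabbit@node-2, rabbit@node-3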
-partitions_report() {
- $COMMAND_TIMEOUT xargs -0 ${OCF_RESKEY_ctl} eval <<EOF
-RpcTimeout = 10,
-
-Nodes = rabbit_mnesia:cluster_nodes(running),
-
-{Replies, _BadNodes} = gen_server:multi_call(Nodes, rabbit_node_monitor, partitions, RpcTimeout * 1000),
-
-lists:foreach(fun ({_, []}) -> ok;
- ({Node, Partitions}) ->
- PartitionsStr = string:join([atom_to_list(Part) || Part <- Partitions],
- ", "),
- io:format("PARTITIONED ~s: ~s~n",
- [Node, PartitionsStr])
- end, Replies),
-
-ok.
-EOF
-}
-
-# Check if the rabbitmqctl control plane is alive.
-node_health_check() {
- local rc
- if [ "$OCF_RESKEY_rmq_feature_health_check" = true ]; then
- node_health_check_local
- rc=$?
- else
- node_health_check_legacy
- rc=$?
- fi
- return $rc
-}
-
-node_health_check_local() {
- local LH="${LH} node_health_check_local():"
- local rc
- local rc_timeouts
-
- # Give node_health_check some time to handle timeout by itself.
- # By using internal rabbitmqctl timeouts, we allow it to print
- # more useful diagnostics
- local timeout=$((TIMEOUT_ARG - 2))
- su_rabbit_cmd "${OCF_RESKEY_ctl} node_health_check -t $timeout"
- rc=$?
-
- check_timeouts $rc "rabbit_node_health_check_timeouts" "node_health_check"
- rc_timeouts=$?
-
- if [ "$rc_timeouts" -eq 2 ]; then
- master_score 0
- ocf_log info "${LH} node_health_check timed out, retry limit reached"
- return $OCF_ERR_GENERIC
- elif [ "$rc_timeouts" -eq 1 ]; then
- ocf_log info "${LH} node_health_check timed out, going to retry"
- return $OCF_SUCCESS
- fi
-
- if [ "$rc" -ne 0 ]; then
- ocf_log err "${LH} rabbitmqctl node_health_check exited with errors."
- return $OCF_ERR_GENERIC
- else
- return $OCF_SUCCESS
- fi
-}
-
-node_health_check_legacy() {
- local rc_alive
- local timeout_alive
- su_rabbit_cmd "${OCF_RESKEY_ctl} list_channels > /dev/null 2>&1"
- rc_alive=$?
- [ $rc_alive -eq 137 -o $rc_alive -eq 124 ] && ocf_log err "${LH} 'rabbitmqctl list_channels' timed out, per-node explanation: $(enhanced_list_channels)"
- check_timeouts $rc_alive "rabbit_list_channels_timeouts" "list_channels"
- timeout_alive=$?
-
- if [ $timeout_alive -eq 2 ]; then
- master_score 0
- return $OCF_ERR_GENERIC
- elif [ $timeout_alive -eq 0 ]; then
- if [ $rc_alive -ne 0 ]; then
- ocf_log err "${LH} rabbitmqctl list_channels exited with errors."
- rc=$OCF_ERR_GENERIC
- fi
- fi
-
- # Check for memory alarms on this Master or Slave node.
- # If an alarm is found, reset it
- # and restart the resource, as it likely means a dead-end situation
- # where the RabbitMQ cluster is running with publishing blocked because
- # the high memory watermark has been exceeded.
- local alarms
- local rc_alarms
- local timeout_alarms
- alarms=`su_rabbit_cmd "${OCF_RESKEY_ctl} -q eval 'rabbit_alarm:get_alarms().'"`
- rc_alarms=$?
- check_timeouts $rc_alarms "rabbit_get_alarms_timeouts" "get_alarms"
- timeout_alarms=$?
-
- if [ $timeout_alarms -eq 2 ]; then
- master_score 0
- return $OCF_ERR_GENERIC
-
- elif [ $timeout_alarms -eq 0 ]; then
- if [ $rc_alarms -ne 0 ]; then
- ocf_log err "${LH} rabbitmqctl get_alarms exited with errors."
- rc=$OCF_ERR_GENERIC
-
- elif [ -n "${alarms}" ]; then
- for node in ${alarms}; do
- name=`echo ${node} | perl -n -e "m/memory,'(?<n>\S+)+'/ && print \"$+{n}\n\""`
- if [ "${name}" = "${RABBITMQ_NODENAME}" ] ; then
- ocf_log err "${LH} Found raised memory alarm. Erasing the alarm and restarting."
- su_rabbit_cmd "${OCF_RESKEY_ctl} set_vm_memory_high_watermark 10 > /dev/null 2>&1"
- rc=$OCF_ERR_GENERIC
- break
- fi
- done
- fi
- fi
-
- if ! is_cluster_status_ok ; then
- rc=$OCF_ERR_GENERIC
- fi
-
- # Check if the list of all queues is available.
- # Also report some queue stats and total virtual memory.
- local queues
- local rc_queues
- local timeout_queues
- queues=`su_rabbit_cmd "${OCF_RESKEY_ctl} -q -p ${OCF_RESKEY_default_vhost} list_queues memory messages consumer_utilisation"`
- rc_queues=$?
- check_timeouts $rc_queues "rabbit_list_queues_timeouts" "list_queues"
- timeout_queues=$?
-
- if [ $timeout_queues -eq 2 ]; then
- master_score 0
- return $OCF_ERR_GENERIC
-
- elif [ $timeout_queues -eq 0 ]; then
- if [ $rc_queues -ne 0 ]; then
- ocf_log err "${LH} rabbitmqctl list_queues exited with errors."
- rc=$OCF_ERR_GENERIC
-
- elif [ -n "${queues}" ]; then
- local q_c
- q_c=`printf %b "${queues}\n" | wc -l`
- local mem
- mem=`printf %b "${queues}\n" | awk -v sum=0 '{sum+=$1} END {print (sum/1048576)}'`
- local mes
- mes=`printf %b "${queues}\n" | awk -v sum=0 '{sum+=$2} END {print sum}'`
- local c_u
- c_u=`printf %b "${queues}\n" | awk -v sum=0 -v cnt=${q_c} '{sum+=$3} END {print (sum+1)/(cnt+1)}'`
- local status
- status=`echo $(su_rabbit_cmd "${OCF_RESKEY_ctl} -q status")`
- ocf_log info "${LH} RabbitMQ is running ${q_c} queues consuming ${mem}m of ${TOTALVMEM}m total, with ${mes} queued messages, average consumer utilization ${c_u}"
- ocf_log info "${LH} RabbitMQ status: ${status}"
- fi
- fi
-
- return $rc
-}
-
-ocf_get_private_attr() {
- local attr_name="${1:?}"
- local attr_default_value="${2:?}"
- local nodename="${3:-$THIS_PCMK_NODE}"
- local count
- count=$(attrd_updater -p --name "$attr_name" --node "$nodename" --query)
- if [ $? -ne 0 ]; then
- echo $attr_default_value
- else
- echo "$count" | awk -vdef_val="$attr_default_value" '{ gsub(/"/, "", $3); split($3, vals, "="); if (vals[2] != "") print vals[2]; else print def_val }'
- fi
-}
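
The awk one-liner above extracts the attribute value from attrd_updater's query output, falling back to the supplied default when the query fails or the value is empty. A rough Python equivalent, assuming a query output line of the form name="..." host="..." value="..." (the attribute name and value below are made up):

    def parse_private_attr(query_output, default_value):
        fields = query_output.split()
        if len(fields) < 3:
            return default_value
        third = fields[2].replace('"', '')       # e.g. value=1619430000
        value = third.partition('=')[2]
        return value if value else default_value

    line = 'name="rabbit-start-time" host="node-1" value="1619430000"'
    print(parse_private_attr(line, '0'))   # 1619430000
    print(parse_private_attr('', '0'))     # 0
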
-
-ocf_update_private_attr() {
- local attr_name="${1:?}"
- local attr_value="${2:?}"
- local nodename="${3:-$THIS_PCMK_NODE}"
- ocf_run attrd_updater -p --name "$attr_name" --node "$nodename" --update "$attr_value"
-}
-
-rabbitmqctl_with_timeout_check() {
- local command="${1:?}"
- local timeout_attr_name="${2:?}"
-
- su_rabbit_cmd "${OCF_RESKEY_ctl} $command"
- local rc=$?
-
- check_timeouts $rc $timeout_attr_name "$command"
- local has_timed_out=$?
-
- case "$has_timed_out" in
- 0)
- return $rc;;
- 1)
- return 0;;
- 2)
- return 1;;
- esac
-}
-
-is_cluster_status_ok() {
- local LH="${LH}: is_cluster_status_ok:"
- rabbitmqctl_with_timeout_check cluster_status rabbit_cluster_status_timeouts > /dev/null 2>&1
-}
-
-action_monitor() {
- local rc=$OCF_ERR_GENERIC
- local LH="${LL} monitor:"
- ocf_log debug "${LH} action start."
- if [ "${OCF_RESKEY_debug}" = 'true' ] ; then
- d=`date '+%Y%m%d %H:%M:%S'`
- echo $d >> /tmp/rmq-monitor.log
- env >> /tmp/rmq-monitor.log
- echo "$d [monitor] start='${OCF_RESKEY_CRM_meta_notify_start_uname}' stop='${OCF_RESKEY_CRM_meta_notify_stop_uname}' active='${OCF_RESKEY_CRM_meta_notify_active_uname}' inactive='${OCF_RESKEY_CRM_meta_notify_inactive_uname}'" >> /tmp/rmq-ocf.log
- fi
- get_monitor
- rc=$?
- ocf_log debug "${LH} role: ${OCF_RESKEY_CRM_meta_role}"
- ocf_log debug "${LH} result: $rc"
- ocf_log debug "${LH} action end."
- return $rc
-}
-
-
-action_start() {
- local rc=$OCF_ERR_GENERIC
- local LH="${LL} start:"
- local nowtime
-
- if [ "${OCF_RESKEY_debug}" = 'true' ] ; then
- d=`date '+%Y%m%d %H:%M:%S'`
- echo $d >> /tmp/rmq-start.log
- env >> /tmp/rmq-start.log
- echo "$d [start] start='${OCF_RESKEY_CRM_meta_notify_start_uname}' stop='${OCF_RESKEY_CRM_meta_notify_stop_uname}' active='${OCF_RESKEY_CRM_meta_notify_active_uname}' inactive='${OCF_RESKEY_CRM_meta_notify_inactive_uname}'" >> /tmp/rmq-ocf.log
- fi
-
- ocf_log info "${LH} action begin."
-
- get_status
- rc=$?
- if [ $rc -eq $OCF_SUCCESS ] ; then
- ocf_log warn "${LH} RMQ-runtime (beam) already started."
- return $OCF_SUCCESS
- fi
-
- local attrs_to_zero="rabbit_list_channels_timeouts rabbit_get_alarms_timeouts rabbit_list_queues_timeouts rabbit_cluster_status_timeouts rabbit_node_health_check_timeouts"
- local attr_name_to_reset
- for attr_name_to_reset in $attrs_to_zero; do
- ocf_update_private_attr $attr_name_to_reset 0
- done
-
- nowtime=$(now)
- ocf_log info "${LH} Setting phase 1 start time to $nowtime"
- ocf_update_private_attr 'rabbit-start-phase-1-time' "$nowtime"
- ocf_log info "${LH} Deleting start time attribute"
- ocf_run crm_attribute -N $THIS_PCMK_NODE -l reboot --name 'rabbit-start-time' --delete
- ocf_log info "${LH} Deleting master attribute"
- ocf_run crm_attribute -N $THIS_PCMK_NODE -l reboot --name 'rabbit-master' --delete
-
- ocf_log info "${LH} RMQ going to start."
- start_rmq_server_app
- rc=$?
- if [ $rc -eq $OCF_SUCCESS ] ; then
- ocf_log info "${LH} RMQ prepared for start successfully."
- fi
-
- ocf_log info "${LH} action end."
- return $rc
-}
-
-
-action_stop() {
- local rc=$OCF_ERR_GENERIC
- local LH="${LL} stop:"
-
- if [ "${OCF_RESKEY_debug}" = 'true' ] ; then
- d=$(date '+%Y%m%d %H:%M:%S')
- echo $d >> /tmp/rmq-stop.log
- env >> /tmp/rmq-stop.log
- echo "$d [stop] start='${OCF_RESKEY_CRM_meta_notify_start_uname}' stop='${OCF_RESKEY_CRM_meta_notify_stop_uname}' active='${OCF_RESKEY_CRM_meta_notify_active_uname}' inactive='${OCF_RESKEY_CRM_meta_notify_inactive_uname}'" >> /tmp/rmq-ocf.log
- fi
-
- ocf_log info "${LH} action begin."
-
- ocf_log info "${LH} Deleting master attribute"
- ocf_run crm_attribute -N $THIS_PCMK_NODE -l reboot --name 'rabbit-master' --delete
- master_score 0
- ocf_log info "${LH} Deleting start time attribute"
- ocf_run crm_attribute -N $THIS_PCMK_NODE -l reboot --name 'rabbit-start-time' --delete
-
- # Wait for synced state first
- ocf_log info "${LH} waiting $((OCF_RESKEY_stop_time/2)) to sync"
- wait_sync $((OCF_RESKEY_stop_time/2))
-
- ocf_log info "${LH} RMQ-runtime (beam) going down."
- stop_server_process
-
- if [ $? -ne $OCF_SUCCESS ] ; then
- ocf_log err "RMQ-runtime (beam) couldn't be stopped and will likely become unmanaged. Take care of it manually!"
- ocf_log info "${LH} action end."
- exit $OCF_ERR_GENERIC
- fi
-
- ocf_log info "${LH} RMQ-runtime (beam) not running."
- ocf_log info "${LH} action end."
- return $OCF_SUCCESS
-}
-
-#######################################################################
-# Enhanced list_channels:
-# - nodes are processed in parallel
-# - report contains information about which nodes timed out
-#
-# 'list_channels' is used as a health check for the current node, but it
-# actually checks the overall health of all nodes in the cluster. There were
-# some bugs where only one (non-local) channel became stuck, yet the OCF
-# script wrongfully killed the local node.
-#
-# Hopefully all such bugs are fixed, but if not - this will allow such
-# conditions to be detected.
-#
-# The somewhat strange implementation is due to the following reasons:
-# - ability to support older versions of RabbitMQ which have reached
-# end-of-life with a single version of the script
-# - zero dependencies - for older versions this functionality could be
-# implemented as a plugin, but that would require installing the plugin
-enhanced_list_channels() {
- # One second less than timeout of su_rabbit_cmd
- local timeout=$((${TIMEOUT_ARG:-5} - 1))
-
- su_rabbit_cmd "xargs -0 ${OCF_RESKEY_ctl} eval" <<EOF
-SecondsToCompletion = $timeout,
-
-%% Milliseconds since unix epoch
-Now = fun() ->
- {Mega, Secs, Micro} = os:timestamp(),
- Mili = Micro div 1000,
- Mili + 1000 * (Secs + 1000000 * Mega)
- end,
-
-%% We shouldn't continue execution past this time
-ShouldEndAt = Now() + SecondsToCompletion * 1000,
-
-%% How many milliseconds we still have
-Timeout = fun() ->
- case ShouldEndAt - Now() of
- Past when Past =< 0 ->
- 0;
- Timeout ->
- Timeout
- end
- end,
-
-%% Y combinator (fixed-point combinator) - for defining anonymous recursive functions
-Y = fun(F) ->
- (fun (X) -> F(fun(Y) -> (X(X))(Y) end) end)(
- fun (X) -> F(fun(Y) -> (X(X))(Y) end) end)
- end,
-
-Parent = self(),
-
-ListChannels = Y(fun(Rec) ->
- fun (({Node, [], OkChannelsCount})) ->
- Parent ! {Node, ok, OkChannelsCount};
- ({Node, [Chan|Rest], OkChannelsCount}) ->
- case catch rpc:call(Node, rabbit_channel, info, [Chan], Timeout()) of
- Infos when is_list(Infos) ->
- Rec({Node, Rest, OkChannelsCount + 1});
- {badrpc, {'EXIT', {noproc, _}}} ->
- %% Channel became dead before we could request its status, don't care
- Rec({Node, Rest, OkChannelsCount});
- Err ->
- Parent ! {Node, Err, OkChannelsCount}
- end
- end
- end),
-
-SingleNodeListing = fun(Node) ->
- case catch rpc:call(Node, pg_local, get_members, [rabbit_channels], Timeout()) of
- LocalChannels when is_list(LocalChannels) ->
- ListChannels({Node, LocalChannels, 0});
- Err ->
- Parent ! {Node, Err, 0}
- end
- end,
-
-AllNodes = rabbit_mnesia:cluster_nodes(running),
-[ spawn(fun() -> SingleNodeListing(Node) end) || Node <- AllNodes ],
-
-WaitForNodes = Y(fun(Rec) ->
- fun ({[], Acc}) ->
- Acc;
- ({RemainingNodes, Acc}) ->
- receive
- {Node, _Status, _ChannelCount} = Smth ->
- RemainingNodes1 = lists:delete(Node, RemainingNodes),
- Rec({RemainingNodes1, [Smth|Acc]})
- after Timeout() + 100 ->
- Acc
- end
- end
- end),
-
-Result = WaitForNodes({AllNodes, []}),
-
-ExpandedResult = [ case lists:keysearch(Node, 1, Result) of
- {value, NodeResult} ->
- NodeResult;
- false ->
- {Node, no_data_collected, 0}
- end || Node <- AllNodes ],
-
-ExpandedResult.
-EOF
-}
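
In short: one checker process is spawned per running node, each walks that node's channels over rpc with whatever time remains before the shared deadline, and the parent collects per-node results until the deadline passes, reporting nodes that never answered as no_data_collected; the Y combinator is there so the anonymous funs can recurse without relying on named funs. A rough Python sketch of the same deadline-bounded fan-out/collect pattern; channels_of and check_channel are hypothetical stand-ins for the rpc calls:

    import queue
    import threading
    import time

    def enhanced_list_channels(nodes, channels_of, check_channel, budget_s):
        deadline = time.monotonic() + budget_s
        replies = queue.Queue()

        def remaining():
            return max(deadline - time.monotonic(), 0.0)

        def probe(node):
            ok = 0
            for chan in channels_of(node):
                # give each check only the time left before the deadline
                if remaining() <= 0 or not check_channel(node, chan, remaining()):
                    replies.put((node, "error", ok))
                    return
                ok += 1
            replies.put((node, "ok", ok))

        for node in nodes:
            threading.Thread(target=probe, args=(node,), daemon=True).start()

        results = {}
        while len(results) < len(nodes):
            try:
                node, status, count = replies.get(timeout=remaining() + 0.1)
            except queue.Empty:
                break
            results[node] = (status, count)
        # nodes that never answered are still reported, like no_data_collected above
        return [(n,) + results.get(n, ("no_data_collected", 0)) for n in nodes]
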
-
-#######################################################################
-# Join the cluster and return OCF_SUCCESS if joined.
-# Return 10 if the node is trying to join itself or the destination is empty.
-# Return OCF_ERR_GENERIC if it cannot join.
-jjj_join () {
- local join_to="$1"
- local rc=$OCF_ERR_GENERIC
- local LH="${LL} jjj_join:"
-
- my_host ${join_to}
- rc=$?
- ocf_log debug "${LH} node='${join_to}' rc='${rc}'"
-
- # Check whether we are joining ourselves
- # or the master host is not given
- if [ $rc -ne 0 -a "${join_to}" ] ; then
- ocf_log info "${LH} Joining to cluster by node '${join_to}'"
- join_to_cluster "${join_to}"
- rc=$?
- if [ $rc -ne $OCF_SUCCESS ] ; then
- ocf_log err "${LH} Failed to join the cluster. Mnesia will be reset."
- reset_mnesia
- rc=$OCF_ERR_GENERIC
- fi
- fi
- return $rc
-}
-
-action_notify() {
- local rc_join=$OCF_SUCCESS
- local rc=$OCF_ERR_GENERIC
- local rc2=$OCF_ERR_GENERIC
- local LH="${LL} notify:"
- local nodelist
-
- if [ "${OCF_RESKEY_debug}" = 'true' ] ; then
- d=`date '+%Y%m%d %H:%M:%S'`
- echo $d >> /tmp/rmq-notify.log
- env >> /tmp/rmq-notify.log
- echo "$d [notify] ${OCF_RESKEY_CRM_meta_notify_type}-${OCF_RESKEY_CRM_meta_notify_operation} promote='${OCF_RESKEY_CRM_meta_notify_promote_uname}' demote='${OCF_RESKEY_CRM_meta_notify_demote_uname}' master='${OCF_RESKEY_CRM_meta_notify_master_uname}' slave='${OCF_RESKEY_CRM_meta_notify_slave_uname}' start='${OCF_RESKEY_CRM_meta_notify_start_uname}' stop='${OCF_RESKEY_CRM_meta_notify_stop_uname}' active='${OCF_RESKEY_CRM_meta_notify_active_uname}' inactive='${OCF_RESKEY_CRM_meta_notify_inactive_uname}'" >> /tmp/rmq-ocf.log
- fi
-
- if [ "${OCF_RESKEY_CRM_meta_notify_type}" = 'post' ] ; then
- # POST- anything notify section
- case "$OCF_RESKEY_CRM_meta_notify_operation" in
- promote)
- ocf_log info "${LH} post-promote begin."
-
- rc=$OCF_SUCCESS
-
- # Do nothing if the list of nodes being promoted is reported as empty.
- # Delegate recovery, if needed, to the "running out of the cluster" monitor's logic
- if [ -z "${OCF_RESKEY_CRM_meta_notify_promote_uname}" ] ; then
- ocf_log warn "${LH} there are no nodes to join to reported on post-promote. Nothing to do."
-
- elif my_host "${OCF_RESKEY_CRM_meta_notify_promote_uname}"; then
- ocf_log info "${LH} ignoring post-promote of self"
-
- elif is_clustered_with "${OCF_RESKEY_CRM_meta_notify_promote_uname}"; then
- if get_status rabbit; then
- ocf_log info "${LH} we are already clustered with master - ${OCF_RESKEY_CRM_meta_notify_promote_uname}. Nothing to do."
- else
- ocf_log info "${LH} we are already clustered with master - ${OCF_RESKEY_CRM_meta_notify_promote_uname}. We only need to start the app."
-
- try_to_start_rmq_app
- rc2=$?
- update_rabbit_start_time_if_rc $rc2
- fi
-
- else
- # Note: this should fail when Mnesia is inconsistent.
- # For example, when the "old" master is processing the promotion of the new one.
- # Later this ex-master node will rejoin the cluster at post-start.
- jjj_join "${OCF_RESKEY_CRM_meta_notify_promote_uname}"
- rc=$?
- if [ $rc -eq $OCF_ERR_GENERIC ] ; then
- ocf_log err "${LH} Failed to join the cluster on post-promote. The resource will be restarted."
- fi
- fi
-
- ocf_log info "${LH} post-promote end."
- return $rc
- ;;
- start)
- ocf_log info "${LH} post-start begin."
- # Do nothing if the list of nodes being started or running is reported as empty
- # Delegate recovery, if needed, to the "running out of the cluster" monitor's logic
- if [ -z "${OCF_RESKEY_CRM_meta_notify_start_uname}" -a -z "${OCF_RESKEY_CRM_meta_notify_active_uname}" ] ; then
- ocf_log warn "${LH} I'm the last man standing and I must survive!"
- ocf_log info "${LH} post-start end."
- return $OCF_SUCCESS
- fi
- # check whether this event came from this host
- my_host "${OCF_RESKEY_CRM_meta_notify_start_uname}"
- rc=$?
- # Do nothing, if there is no master reported
- # Delegate recovery, if needed, to the "running out of the cluster" monitor's logic
- if [ -z "${OCF_RESKEY_CRM_meta_notify_master_uname}" ] ; then
- ocf_log warn "${LH} there are no nodes to join to reported on post-start. Nothing to do."
- ocf_log info "${LH} post-start end."
- return $OCF_SUCCESS
- fi
- if [ $rc -eq $OCF_SUCCESS ] ; then
- # Now we need to:
- # a. join to the cluster if we are not joined yet
- # b. start the RabbitMQ application, which is always
- # stopped after start action finishes
- check_need_join_to ${OCF_RESKEY_CRM_meta_notify_master_uname}
- rc_join=$?
- if [ $rc_join -eq $OCF_SUCCESS ]; then
- ocf_log warn "${LH} Going to join node ${OCF_RESKEY_CRM_meta_notify_master_uname}"
- jjj_join "${OCF_RESKEY_CRM_meta_notify_master_uname}"
- rc2=$?
- else
- ocf_log warn "${LH} We are already clustered with node ${OCF_RESKEY_CRM_meta_notify_master_uname}"
-
- try_to_start_rmq_app
- rc2=$?
- update_rabbit_start_time_if_rc $rc2
- fi
- if [ -s "${OCF_RESKEY_definitions_dump_file}" ] ; then
- ocf_log info "File ${OCF_RESKEY_definitions_dump_file} exists"
- ocf_run curl --silent --show-error --request POST --user $OCF_RESKEY_admin_user:$OCF_RESKEY_admin_password $OCF_RESKEY_host_ip:15672/api/definitions --header "Content-Type:application/json" --data @$OCF_RESKEY_definitions_dump_file
- rc=$?
- if [ $rc -eq $OCF_SUCCESS ] ; then
- ocf_log info "RMQ definitions were imported successfully."
- else
- ocf_log err "RMQ definitions failed to import."
- fi
- fi
- if [ $rc2 -eq $OCF_ERR_GENERIC ] ; then
- ocf_log warn "${LH} Failed to join the cluster on post-start. The resource will be restarted."
- ocf_log info "${LH} post-start end."
- return $OCF_ERR_GENERIC
- fi
- fi
- ocf_log info "${LH} post-start end."
- ;;
- stop)
- # If rabbitmq-server stops on any other node, we should remove it from the cluster (as an ordinary operation)
- ocf_log info "${LH} post-stop begin."
- # Report not running, if there are no nodes being stopped reported
- if [ -z "${OCF_RESKEY_CRM_meta_notify_stop_uname}" ] ; then
- ocf_log warn "${LH} there are no nodes being stopped reported on post-stop. The resource will be restarted."
- ocf_log info "${LH} post-stop end."
- return $OCF_ERR_GENERIC
- fi
- my_host "${OCF_RESKEY_CRM_meta_notify_stop_uname}"
- rc=$?
- if [ $rc -ne $OCF_SUCCESS ] ; then
- # Wait for synced state first
- ocf_log info "${LH} waiting $((OCF_RESKEY_stop_time/2)) to sync"
- wait_sync $((OCF_RESKEY_stop_time/2))
- # On other nodes processing the post-stop, make sure the stopped node will be forgotten
- unjoin_nodes_from_cluster "${OCF_RESKEY_CRM_meta_notify_stop_uname}"
- else
- # On the nodes being stopped, reset the master score
- ocf_log info "${LH} resetting the master score."
- master_score 0
- fi
- # always returns OCF_SUCCESS
- ocf_log info "${LH} post-stop end."
- ;;
- *) ;;
- esac
- fi
-
- return $OCF_SUCCESS
-}
-
-
-action_promote() {
- local rc=$OCF_ERR_GENERIC
- local LH="${LL} promote:"
-
- if [ "${OCF_RESKEY_debug}" = 'true' ] ; then
- d=$(date '+%Y%m%d %H:%M:%S')
- echo $d >> /tmp/rmq-promote.log
- env >> /tmp/rmq-promote.log
- echo "$d [promote] start='${OCF_RESKEY_CRM_meta_notify_start_uname}' stop='${OCF_RESKEY_CRM_meta_notify_stop_uname}' active='${OCF_RESKEY_CRM_meta_notify_active_uname}' inactive='${OCF_RESKEY_CRM_meta_notify_inactive_uname}'" >> /tmp/rmq-ocf.log
- fi
-
- ocf_log info "${LH} action begin."
-
- get_monitor
- rc=$?
- ocf_log info "${LH} get_monitor returns ${rc}"
- case "$rc" in
- "$OCF_SUCCESS")
- # Running as slave. Normal, expected behavior.
- ocf_log info "${LH} Resource is currently running as Slave"
- # rabbitmqctl start_app if needed
- get_status rabbit
- rc=$?
- ocf_log info "${LH} Updating cluster master attribute"
- ocf_run crm_attribute -N $THIS_PCMK_NODE -l reboot --name 'rabbit-master' --update 'true'
- if [ $rc -ne $OCF_SUCCESS ] ; then
- ocf_log info "${LH} RMQ app is not started. Starting..."
- start_rmq_server_app
- rc=$?
- if [ $rc -eq 0 ] ; then
- try_to_start_rmq_app
- rc=$?
- if [ $rc -ne 0 ] ; then
- ocf_log err "${LH} Can't start RMQ app. Master resource is failed."
- ocf_log info "${LH} action end."
- exit $OCF_FAILED_MASTER
- fi
-
- [ -f "${OCF_RESKEY_policy_file}" ] && . "${OCF_RESKEY_policy_file}"
-
- update_rabbit_start_time_if_rc $rc
-
- ocf_log info "${LH} Checking master status"
- get_monitor
- rc=$?
- ocf_log info "${LH} Master status is $rc"
- if [ $rc = $OCF_RUNNING_MASTER ]
- then
- rc=$OCF_SUCCESS
- else
- ocf_log err "${LH} Master resource is failed."
- ocf_log info "${LH} action end."
- exit $OCF_FAILED_MASTER
- fi
- else
- ocf_log err "${LH} Can't start RMQ-runtime."
- rc=$OCF_ERR_GENERIC
- fi
- fi
- return $rc
- ;;
- "$OCF_RUNNING_MASTER")
- # Already a master. Unexpected, but not a problem.
- ocf_log warn "${LH} Resource is already running as Master"
- rc=$OCF_SUCCESS
- ;;
-
- "$OCF_FAILED_MASTER")
- # Master failed.
- ocf_log err "${LH} Master resource is failed and not running"
- ocf_log info "${LH} action end."
- exit $OCF_FAILED_MASTER
- ;;
-
- "$OCF_NOT_RUNNING")
- # Currently not running.
- ocf_log err "${LH} Resource is currently not running"
- rc=$OCF_NOT_RUNNING
- ;;
- *)
- # Failed resource. Let the cluster manager recover.
- ocf_log err "${LH} Unexpected error, cannot promote"
- ocf_log info "${LH} action end."
- exit $rc
- ;;
- esac
-
- # transform slave RMQ-server to master
-
- ocf_log info "${LH} action end."
- return $rc
-}
-
-
-action_demote() {
- local LH="${LL} demote:"
- ocf_log info "${LH} action begin."
- ocf_run crm_attribute -N $THIS_PCMK_NODE -l reboot --name 'rabbit-master' --delete
- ocf_log info "${LH} action end."
- return $OCF_SUCCESS
-}
-#######################################################################
-
-rmq_setup_env
-
-case "$1" in
- meta-data) meta_data
- exit $OCF_SUCCESS;;
- usage|help) usage
- exit $OCF_SUCCESS;;
-esac
-
-# Anything except meta-data and help must pass validation
-action_validate || exit $?
-
-# What kind of method was invoked?
-case "$1" in
- start) action_start;;
- stop) action_stop;;
- status) action_status;;
- monitor) action_monitor;;
- validate) action_validate;;
- promote) action_promote;;
- demote) action_demote;;
- notify) action_notify;;
- validate-all) action_validate;;
- *) usage;;
-esac
-###
diff --git a/tools/BUILD.bazel b/tools/BUILD.bazel
new file mode 100644
index 0000000000..e90ac62a9b
--- /dev/null
+++ b/tools/BUILD.bazel
@@ -0,0 +1,5 @@
+load(":erlang_ls.bzl", "erlang_ls_config")
+
+erlang_ls_config(
+ name = "erlang_ls.config",
+)
diff --git a/tools/erlang_ls.bzl b/tools/erlang_ls.bzl
new file mode 100644
index 0000000000..073a83787a
--- /dev/null
+++ b/tools/erlang_ls.bzl
@@ -0,0 +1,38 @@
+load("@bazel-erlang//:erlang_home.bzl", "ErlangHomeProvider")
+
+def _impl(ctx):
+ out = ctx.actions.declare_file(ctx.label.name)
+
+ erlang_home = ctx.attr._erlang_home[ErlangHomeProvider].path
+
+ ctx.actions.write(
+ output = out,
+ content = """otp_path: {erlang_home}
+apps_dirs:
+ - deps/*
+ - deps/rabbit/apps/*
+deps_dirs:
+ - bazel-bin/external/*
+include_dirs:
+ - deps
+ - deps/*
+ - deps/*/include
+ - deps/*/src
+ - bazel-bin/external
+ - bazel-bin/external/*/include
+plt_path: bazel-bin/deps/rabbit/.base_plt.plt
+""".format(
+ erlang_home = erlang_home,
+ ),
+ )
+
+ return [
+ DefaultInfo(files = depset([out])),
+ ]
+
+erlang_ls_config = rule(
+ implementation = _impl,
+ attrs = {
+ "_erlang_home": attr.label(default = "@bazel-erlang//:erlang_home"),
+ },
+)
diff --git a/user-template.bazelrc b/user-template.bazelrc
new file mode 100644
index 0000000000..f92ee3238a
--- /dev/null
+++ b/user-template.bazelrc
@@ -0,0 +1,17 @@
+build --@bazel-erlang//:erlang_home=/Users/rabbitmq/kerl/24.0
+build --@bazel-erlang//:erlang_version=24.0
+build --//:elixir_home=/Users/rabbitmq/.kiex/elixirs/elixir-1.12.0/lib/elixir
+
+# rabbitmqctl wait shells out to 'ps', which is broken in the bazel macOS
+# sandbox (https://github.com/bazelbuild/bazel/issues/7448)
+# adding "--spawn_strategy=local" to the invocation is a workaround
+build --spawn_strategy=local
+
+# don't re-run flakes automatically on the local machine
+build --flaky_test_attempts=1
+
+build:buildbuddy --remote_header=x-buildbuddy-api-key=YOUR_API_KEY
+
+# cross compile for linux (if on macOS) with rbe
+build:rbe --host_cpu=k8
+build:rbe --cpu=k8
diff --git a/workflow_sources/base_image/workflow.yml b/workflow_sources/base_image/workflow.yml
deleted file mode 100644
index e1b32c708b..0000000000
--- a/workflow_sources/base_image/workflow.yml
+++ /dev/null
@@ -1,49 +0,0 @@
-#@ load("@ytt:data", "data")
-
-#@ def jobs_map_from_list(list_of_jobs):
-#@ jobs = {}
-#@ for job in list_of_jobs:
-#@ name = job['name']
-#@ jobs[name] = job
-#@ end
-#@ return jobs
-#@ end
-
-#@ def to_build_args(d):
-#@ return ",".join(['{0}={1}'.format(k,d[k]) for k in d.keys()])
-#@ end
-
-#@ def base_image_job(erlang_version):
-name: #@ ("ci-base-" + erlang_version).replace(".", "_")
-runs-on: ubuntu-18.04
-steps:
- - name: CHECKOUT REPOSITORY
- uses: actions/checkout@v2
- - name: #@ 'CREATE ERLANG+ELIXIR IMAGE ({})'.format(erlang_version)
- uses: docker/build-push-action@v1
- with:
- username: _json_key
- password: ${{ secrets.GCR_JSON_KEY }}
- registry: eu.gcr.io
- repository: cf-rabbitmq-core/erlang_elixir
- dockerfile: #@ 'ci/dockerfiles/{}/erlang_elixir'.format(erlang_version)
- tags: #@ erlang_version
- - name: #@ 'CREATE BASE CI IMAGE ({})'.format(erlang_version)
- uses: docker/build-push-action@v1
- with:
- username: _json_key
- password: ${{ secrets.GCR_JSON_KEY }}
- registry: eu.gcr.io
- repository: cf-rabbitmq-core/ci-base
- dockerfile: ci/dockerfiles/ci-base
- #@ build_args = {}
- #@ build_args["ERLANG_VERSION"] = erlang_version
- #@ build_args["SECONDARY_UMBRELLA_GITREFS"] = ' '.join(data.values.secondary_umbrella_gitrefs)
- build_args: #@ to_build_args(build_args)
- tags: #@ erlang_version
-#@ end
----
-name: Workflow Base Images
-#! https://github.com/k14s/ytt/issues/189
-a_magic_string_that_we_will_sed_to_on: [workflow_dispatch]
-jobs: #@ jobs_map_from_list([base_image_job(v) for v in data.values.erlang_versions])
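
The two ytt helpers defined at the top of this workflow template are plain data transforms: jobs_map_from_list() re-keys a list of job definitions by name so they can be emitted as a GitHub Actions jobs map, and to_build_args() flattens a dict into the comma-separated KEY=value string that docker/build-push-action expects. A quick Python sketch with made-up values:

    def jobs_map_from_list(list_of_jobs):
        return {job["name"]: job for job in list_of_jobs}

    def to_build_args(d):
        return ",".join("{0}={1}".format(k, d[k]) for k in d)

    jobs = [{"name": "ci-base-23_1", "runs-on": "ubuntu-18.04"}]
    print(jobs_map_from_list(jobs))
    # {'ci-base-23_1': {'name': 'ci-base-23_1', 'runs-on': 'ubuntu-18.04'}}
    print(to_build_args({"ERLANG_VERSION": "23.1", "SECONDARY_UMBRELLA_GITREFS": "v3.7.28 v3.8.9"}))
    # ERLANG_VERSION=23.1,SECONDARY_UMBRELLA_GITREFS=v3.7.28 v3.8.9
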
diff --git a/workflow_sources/base_values.yml b/workflow_sources/base_values.yml
deleted file mode 100644
index e40c75bb16..0000000000
--- a/workflow_sources/base_values.yml
+++ /dev/null
@@ -1,10 +0,0 @@
-#@data/values
----
-base_rmq_ref: master
-secondary_umbrella_gitrefs:
-- v3.7.28
-- v3.8.9
-#! these values are injected at template time from the cli,
-#! but must be declared first here
-erlang_versions: []
-erlang_version: null
diff --git a/workflow_sources/deps.yml b/workflow_sources/deps.yml
deleted file mode 100644
index c5ea3e5af6..0000000000
--- a/workflow_sources/deps.yml
+++ /dev/null
@@ -1,430 +0,0 @@
-#@data/values
----
-#@overlay/match missing_ok=True
-deps:
-- name: rabbit_common
- suites:
- - name: unit_priority_queue
- - name: unit
- - name: worker_pool
- - name: rabbit_env
-- name: rabbit
- suites:
- - name: amqqueue_backward_compatibility
- time: 63
- - name: backing_queue
- time: 320
- - name: channel_interceptor
- time: 103
- - name: channel_operation_timeout
- time: 104
- - name: cluster
- time: 85
- - name: cluster_rename
- time: 284
- - name: clustering_management
- time: 708
- - name: config_schema
- time: 81
- - name: confirms_rejects
- time: 138
- - name: consumer_timeout
- time: 123
- - name: crashing_queues
- time: 124
- - name: dead_lettering
- time: 173
- - name: definition_import
- time: 84
- - name: disconnect_detected_during_alarm
- time: 88
- - name: dynamic_ha
- time: 900
- - name: dynamic_qq
- time: 238
- - name: eager_sync
- time: 452
- - name: feature_flags
- time: 102
- - name: lazy_queue
- time: 125
- - name: list_consumers_sanity_check
- time: 86
- - name: list_queues_online_and_offline
- time: 85
- - name: maintenance_mode
- time: 185
- - name: many_node_ha
- time: 112
- - name: message_size_limit
- time: 78
- - name: metrics
- time: 160
- - name: mirrored_supervisor
- time: 82
- - name: msg_store
- time: 65
- - name: peer_discovery_classic_config
- time: 136
- - name: peer_discovery_dns
- time: 76
- - name: per_user_connection_channel_limit
- time: 69
- - name: per_user_connection_channel_limit_partitions
- time: 56
- - name: per_user_connection_channel_tracking
- time: 89
- - name: per_user_connection_tracking
- time: 132
- - name: per_vhost_connection_limit
- time: 252
- - name: per_vhost_connection_limit_partitions
- time: 123
- - name: per_vhost_msg_store
- time: 66
- - name: per_vhost_queue_limit
- time: 137
- - name: policy
- time: 86
- - name: priority_queue
- time: 243
- - name: priority_queue_recovery
- time: 90
- - name: product_info
- time: 93
- - name: proxy_protocol
- time: 78
- - name: publisher_confirms_parallel
- time: 126
- - name: queue_length_limits
- time: 84
- - name: queue_master_location
- time: 401
- - name: queue_parallel
- time: 260
- - name: queue_type
- time: 56
- - name: quorum_queue
- time: 687
- - name: rabbit_confirms
- time: 4
- - name: rabbit_core_metrics_gc
- time: 221
- - name: rabbit_fifo
- time: 79
- - name: rabbit_fifo_int
- time: 86
- - name: rabbit_fifo_prop
- time: 169
- - name: rabbit_fifo_v0
- time: 6
- - name: rabbit_msg_record
- time: 4
- - name: rabbit_stream_queue
- time: 592
- - name: rabbitmq_queues_cli_integration
- time: 85
- - name: rabbitmqctl_integration
- time: 100
- - name: rabbitmqctl_shutdown
- time: 67
- - name: signal_handling
- time: 164
- - name: simple_ha
- time: 426
- - name: single_active_consumer
- time: 78
- - name: sync_detection
- time: 125
- - name: term_to_binary_compat_prop
- time: 66
- - name: topic_permission
- time: 77
- - name: unit_access_control
- time: 90
- - name: unit_access_control_authn_authz_context_propagation
- time: 87
- - name: unit_access_control_credential_validation
- time: 74
- - name: unit_amqp091_content_framing
- time: 67
- - name: unit_amqp091_server_properties
- time: 95
- - name: unit_app_management
- time: 79
- - name: unit_cluster_formation_locking_mocks
- time: 56
- - name: unit_collections
- time: 61
- - name: unit_config_value_encryption
- time: 99
- - name: unit_connection_tracking
- time: 16
- - name: unit_credit_flow
- time: 81
- - name: unit_disk_monitor
- time: 80
- - name: unit_disk_monitor_mocks
- time: 76
- - name: unit_file_handle_cache
- time: 93
- - name: unit_gen_server2
- time: 80
- - name: unit_gm
- time: 65
- - name: unit_log_config
- time: 75
- - name: unit_log_management
- time: 104
- - name: unit_operator_policy
- time: 66
- - name: unit_pg_local
- time: 68
- - name: unit_plugin_directories
- time: 52
- - name: unit_plugin_versioning
- time: 69
- - name: unit_policy_validators
- time: 68
- - name: unit_priority_queue
- time: 70
- - name: unit_queue_consumers
- time: 73
- - name: unit_stats_and_metrics
- time: 88
- - name: unit_supervisor2
- time: 71
- - name: unit_vm_memory_monitor
- time: 81
- - name: upgrade_preparation
- time: 117
- - name: vhost
- time: 211
-- name: amqp_client
- skip_dialyzer: true
- suites:
- - name: unit
- - name: system
-- name: amqp10_client
- skip_tests: true
- suites:
- - name: system
- - name: msg
-- name: amqp10_common
- suites:
- - name: binary_generator
-- name: rabbitmq_amqp1_0
- skip_dialyzer: true
- skip_tests: true
- suites:
- - name: amqp10_client
- - name: proxy_protocol
- - name: command
- - name: unit
- - name: system
-- name: rabbitmq_auth_backend_cache
- skip_dialyzer: true
- suites:
- - name: rabbit_auth_backend_cache
- - name: rabbit_auth_cache
- - name: config_schema
-- name: rabbitmq_auth_backend_http
- skip_dialyzer: true
- suites:
- - name: unit
- - name: auth
- - name: config_schema
-- name: rabbitmq_auth_backend_ldap
- skip_dialyzer: true
- skip_tests: true
- suites:
- - name: unit
- - name: system
- - name: config_schema
-- name: rabbitmq_auth_backend_oauth2
- skip_dialyzer: true
- suites:
- - name: wildcard_match
- - name: unit
- - name: scope
- - name: system
- - name: add_uaa_key_command
-- name: rabbitmq_auth_mechanism_ssl
- suites: []
-- name: rabbitmq_aws
- skip_dialyzer: true
- suites: []
-- name: rabbitmq_cli
- skip_dialyzer: true
- suites: []
-- name: rabbitmq_codegen
- skip_xref: true
- skip_dialyzer: true
- skip_tests: true
- suites: []
-- name: rabbitmq_consistent_hash_exchange
- skip_dialyzer: true
- suites:
- - name: rabbit_exchange_type_consistent_hash
-- name: rabbitmq_event_exchange
- suites:
- - name: unit
- - name: system
- - name: config_schema
-- name: rabbitmq_federation
- skip_dialyzer: true
- suites:
- - name: unit_inbroker
- - name: restart_federation_link_command
- - name: rabbit_federation_status
- - name: federation_status_command
- - name: unit
- - name: queue
- - name: exchange
-- name: rabbitmq_federation_management
- suites:
- - name: federation_mgmt
-- name: rabbitmq_jms_topic_exchange
- skip_dialyzer: true
- suites:
- - name: sjx_evaluation
- - name: rjms_topic_selector
- - name: rjms_topic_selector_unit
-- name: rabbitmq_management
- skip_dialyzer: true
- suites:
- - name: cache
- - name: stats
- - name: rabbit_mgmt_rabbitmqadmin
- - name: listener_config
- - name: rabbit_mgmt_test_db
- - name: rabbit_mgmt_only_http
- - name: rabbit_mgmt_stats
- - name: rabbit_mgmt_test_unit
- - name: rabbit_mgmt_http
- - name: clustering_prop
- - name: clustering
- - name: config_schema
-- name: rabbitmq_management_agent
- skip_dialyzer: true
- suites:
- - name: metrics
- - name: exometer_slide
- - name: rabbit_mgmt_gc
- - name: rabbit_mgmt_slide
-- name: rabbitmq_mqtt
- skip_dialyzer: true
- skip_tests: true
- suites:
- - name: util
- - name: retainer
- - name: proxy_protocol
- - name: command
- - name: reader
- - name: mqtt_machine
- - name: processor
- - name: java
- - name: auth
- - name: cluster
- - name: config_schema
-- name: rabbitmq_peer_discovery_common
- suites:
- - name: config_schema
-- name: rabbitmq_peer_discovery_aws
- skip_dialyzer: true
- skip_tests: true
- suites:
- - name: rabbitmq_peer_discovery_aws
- - name: integration
- - name: config_schema
-- name: rabbitmq_peer_discovery_k8s
- skip_dialyzer: true
- suites:
- - name: rabbitmq_peer_discovery_k8s
- - name: config_schema
-- name: rabbitmq_peer_discovery_consul
- skip_dialyzer: true
- suites:
- - name: rabbitmq_peer_discovery_consul
- - name: config_schema
-- name: rabbitmq_peer_discovery_etcd
- skip_dialyzer: true
- skip_tests: true
- suites:
- - name: unit
- - name: system
- - name: config_schema
-- name: rabbitmq_prometheus
- skip_dialyzer: true
- suites:
- - name: rabbit_prometheus_http
- - name: config_schema
-- name: rabbitmq_random_exchange
- suites: []
-- name: rabbitmq_recent_history_exchange
- skip_dialyzer: true
- suites:
- - name: system
-- name: rabbitmq_sharding
- skip_dialyzer: true
- suites: []
-- name: rabbitmq_shovel
- skip_dialyzer: true
- suites:
- - name: amqp10_shovel
- - name: configuration
- - name: dynamic
- - name: parameters
- - name: config
- - name: shovel_status_command
- - name: delete_shovel_command
- - name: amqp10_dynamic
- - name: amqp10
-- name: rabbitmq_shovel_management
- suites:
- - name: http
-- name: rabbitmq_stomp
- skip_dialyzer: true
- suites:
- - name: util
- - name: proxy_protocol
- - name: amqqueue
- - name: command
- - name: topic
- - name: frame
- - name: python
- - name: connections
- - name: config_schema
-- name: rabbitmq_top
- suites: []
-- name: rabbitmq_tracing
- suites:
- - name: rabbit_tracing
-- name: rabbitmq_trust_store
- skip_dialyzer: true
- suites:
- - name: system
- - name: config_schema
-- name: rabbitmq_web_dispatch
- skip_dialyzer: true
- suites:
- - name: rabbit_web_dispatch
- - name: rabbit_web_dispatch_unit
-- name: rabbitmq_web_mqtt
- skip_dialyzer: true
- suites:
- - name: proxy_protocol
- - name: config_schema
-- name: rabbitmq_web_mqtt_examples
- suites: []
-- name: rabbitmq_web_stomp
- skip_dialyzer: true
- suites:
- - name: proxy_protocol
- - name: raw_websocket
- - name: cowboy_websocket
- - name: unit
- - name: config_schema
- - name: amqp_stomp
-- name: rabbitmq_web_stomp_examples
- suites: []
diff --git a/workflow_sources/test/ct.lib.yml b/workflow_sources/test/ct.lib.yml
deleted file mode 100644
index 8e6faa96e8..0000000000
--- a/workflow_sources/test/ct.lib.yml
+++ /dev/null
@@ -1,131 +0,0 @@
-#@ load("@ytt:data", "data")
-#@ load("@ytt:assert", "assert")
-#@ load("util.star", "is_unique", "group_by_one", "to_build_args")
-#@ load("helpers.star", "ci_image_tag", "ci_dep_image")
-
-#@ def checks_job(dep):
-name: #@ dep.name + "-checks"
-needs: [prepare]
-runs-on: ubuntu-18.04
-outputs:
- step_start: ${{ steps.buildevents.outputs.step_start }}
-#@yaml/text-templated-strings
-steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - name: CHECKOUT REPOSITORY
- uses: actions/checkout@v2
- - name: VALIDATE KNOWN CT SUITES
- env:
- project: #@ dep.name
- #@ suite_names = [suite.name for suite in dep.suites]
- #@ None if is_unique(suite_names) else assert.fail('{} suite names are not unique'.format(dep.name))
- run: |
- ci/scripts/validate-workflow.sh (@= " ".join(suite_names) @)
- - name: RUN CHECKS
- uses: docker/build-push-action@v1
- with:
- username: _json_key
- password: ${{ secrets.GCR_JSON_KEY }}
- registry: eu.gcr.io
- repository: cf-rabbitmq-core/ci-(@= dep.name @)
- dockerfile: ci/dockerfiles/ci-dep
- #@ build_args = {}
- #@ build_args["IMAGE_TAG"] = ci_image_tag()
- #@ build_args["BUILDEVENT_APIKEY"] = '${{ secrets.HONEYCOMB_API_KEY }}'
- #@ build_args["project"] = dep.name
- build_args: #@ to_build_args(build_args)
- tags: (@= ci_image_tag() @)
-#@ end
-
-#@ def ct_suites_job(dep, group):
-name: #@ dep.name + "-ct-" + group["name"]
-needs:
- - prepare
- - #@ dep.name + "-checks"
-runs-on: ubuntu-18.04
-#@yaml/text-templated-strings
-steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- run: |
- gcloud auth configure-docker
- docker pull (@= ci_dep_image(dep.name) @)
- #@ for suite in group["suites"]:
- - name: #@ "RUN ct-" + suite
- run: |
- mkdir ct-(@= suite @)-logs && chmod 777 ct-(@= suite @)-logs
- docker run \
- --env project=(@= dep.name @) \
- --env CT_SUITE=(@= suite @) \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}/ct-(@= suite @)-logs:/workspace/ct-logs \
- (@= ci_dep_image(dep.name) @) \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- #! https://github.com/marketplace/actions/upload-artifact
- uses: actions/upload-artifact@v2-preview
- #! For whatever reason, "if: steps.(@= suite @).outcome == 'failure'" never executes,
- #! so just run always
- if: always()
- with:
- name: (@= dep.name @)-ct-(@= suite @)-logs
- path: "ct-(@= suite @)-logs/*-ct-logs-*.tar.xz"
- #@ if data.values.erlang_version == data.values.erlang_versions[0]:
- #@ for version in data.values.secondary_umbrella_gitrefs:
- #@ logs_dir = 'ct-{}-logs-{}'.format(suite, version)
- - name: #@ "RUN ct-{} [mixed {}]".format(suite, version)
- run: |
- mkdir (@= logs_dir @) && chmod 777 (@= logs_dir @)
- docker run \
- --env project=(@= dep.name @) \
- --env CT_SUITE=(@= suite @) \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env SECONDARY_UMBRELLA_VERSION=(@= version @) \
- --volume ${PWD}/(@= logs_dir @):/workspace/ct-logs \
- (@= ci_dep_image(dep.name) @) \
- /workspace/rabbitmq/ci/scripts/ct-suite.sh
- - name: #@ 'ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed {}]'.format(version)
- #! https://github.com/marketplace/actions/upload-artifact
- uses: actions/upload-artifact@v2-preview
- #! For whatever reason, "if: steps.(@= suite @).outcome == 'failure'" never executes,
- #! so just run always
- if: always()
- with:
- name: (@= dep.name @)-ct-(@= suite @)-logs-mixed-(@= version @)
- path: "(@= logs_dir @)/*-ct-logs-*.tar.xz"
- #@ end
- #@ end
- #@ end
-#@ end
-
-#@ def collect_job(dep):
-name: #@ dep.name
-needs: #@ [dep.name + "-checks"] + [dep.name + "-ct-" + group["name"] for group in group_by_one(dep.suites)]
-runs-on: ubuntu-18.04
-if: always()
-#@yaml/text-templated-strings
-steps:
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RECORD STEP FINISH
- run: |
- gcloud auth configure-docker
- docker pull (@= ci_dep_image(dep.name) @)
- docker run \
- --env project=(@= dep.name @) \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ needs.(@= dep.name @)-checks.outputs.step_start }} \
- (@= ci_dep_image(dep.name) @) \
- /workspace/rabbitmq/ci/scripts/collect.sh
-#@ end
\ No newline at end of file
diff --git a/workflow_sources/test/dep.star b/workflow_sources/test/dep.star
deleted file mode 100644
index a74b0bef99..0000000000
--- a/workflow_sources/test/dep.star
+++ /dev/null
@@ -1,22 +0,0 @@
-load("util.star", "group_by_one")
-load("rabbitmq_cli.lib.yml", "rabbitmq_cli_job")
-load("ct.lib.yml", "checks_job", "ct_suites_job", "collect_job")
-load("tests.lib.yml", "tests_job")
-
-def dep_jobs(dep):
- jobs = {}
- if not getattr(dep, "skip_tests", False):
- if dep.name == "rabbitmq_cli":
- jobs[dep.name] = rabbitmq_cli_job(dep)
- elif len(dep.suites) > 20:
- jobs[dep.name + "-checks"] = checks_job(dep)
- for group in group_by_one(dep.suites):
- jobs[dep.name + "-ct-" + group["name"]] = ct_suites_job(dep, group)
- end
- jobs[dep.name] = collect_job(dep)
- else:
- jobs[dep.name] = tests_job(dep)
- end
- end
- return jobs
-end
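
So a dep with a handful of suites becomes a single job named after the dep, while a dep with more than 20 suites (such as rabbit in deps.yml above) is fanned out into <dep>-checks, one <dep>-ct-<suite> job per suite (via group_by_one), and a final <dep> collect job; rabbitmq_cli and skip_tests deps are special-cased. A small Python sketch of the resulting job names (the 25 synthetic suite names stand in for rabbit's real list):

    def job_names(dep_name, suite_names):
        # mirrors the size-based branching in dep_jobs(); special cases omitted
        if len(suite_names) > 20:
            return ([dep_name + "-checks"]
                    + [dep_name + "-ct-" + s for s in suite_names]
                    + [dep_name])
        return [dep_name]

    print(job_names("rabbitmq_web_dispatch",
                    ["rabbit_web_dispatch", "rabbit_web_dispatch_unit"]))
    # ['rabbitmq_web_dispatch']

    many = job_names("rabbit", ["suite%02d" % i for i in range(25)])
    print(many[0], many[1], many[-1])
    # rabbit-checks rabbit-ct-suite00 rabbit
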
diff --git a/workflow_sources/test/finish.lib.yml b/workflow_sources/test/finish.lib.yml
deleted file mode 100644
index 59b5823d18..0000000000
--- a/workflow_sources/test/finish.lib.yml
+++ /dev/null
@@ -1,82 +0,0 @@
-#@ load("@ytt:data", "data")
-#@ load("helpers.star", "ci_image")
-
-#@ def gcs_path():
-#@ c = ['monorepo_github_actions_conclusions']
-#@ c.append('${{ github.sha }}')
-#@ c.append('${{ github.workflow }}')
-#@ return '/'.join(c)
-#@ end
-
-#@ def finish_jobs(prepare_jobs_names):
-package-generic-unix:
- name: package-generic-unix
- needs: [prepare]
- runs-on: ubuntu-18.04
- #@yaml/text-templated-strings
- steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PACKAGE GENERIC UNIX
- run: |
- gcloud auth configure-docker
- docker pull (@= ci_image() @)
- mkdir PACKAGES && chmod 777 PACKAGES
- docker run \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/PACKAGES:/workspace/rabbitmq/PACKAGES \
- (@= ci_image() @) \
- ci/scripts/package_generic_unix.sh
- - name: UPLOAD GENERIC UNIX PACKAGE ARTIFACT
- #! https://github.com/marketplace/actions/upload-artifact
- uses: actions/upload-artifact@v2-preview
- with:
- name: rabbitmq-server-generic-unix.tar.xz
- path: "PACKAGES/rabbitmq-server-generic-unix-*.tar.xz"
- - name: RECORD STEP FINISH
- if: always()
- run: |
- docker run \
- --env project=packaging \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- (@= ci_image() @) \
- ci/scripts/collect.sh
-finish:
- name: finish
- needs: #@ prepare_jobs_names + [dep.name for dep in data.values.deps if not getattr(dep, "skip_tests", False)] + ['package-generic-unix']
- runs-on: ubuntu-18.04
- if: always()
- #@yaml/text-templated-strings
- steps:
- - uses: technote-space/workflow-conclusion-action@v1
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RECORD BUILD FINISH
- run: |
- echo -n "${{ env.WORKFLOW_CONCLUSION }}" > conclusion
-
- gsutil cp conclusion \
- 'gs://(@= gcs_path() @)'
-
- gcloud auth configure-docker
- docker pull (@= ci_image() @)
- docker run \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env BUILD_START=${{ needs.prepare.outputs.build_start }} \
- --env BUILD_RESULT=${{ env.WORKFLOW_CONCLUSION }} \
- (@= ci_image() @) \
- ci/scripts/finish.sh
-#@ end
\ No newline at end of file
diff --git a/workflow_sources/test/helpers.star b/workflow_sources/test/helpers.star
deleted file mode 100644
index 17b3b1d0e4..0000000000
--- a/workflow_sources/test/helpers.star
+++ /dev/null
@@ -1,13 +0,0 @@
-load("@ytt:data", "data")
-
-def ci_image_tag():
- return "erlang-" + data.values.erlang_version + "-rabbitmq-${{ github.sha }}"
-end
-
-def ci_image():
- return "eu.gcr.io/cf-rabbitmq-core/ci:" + ci_image_tag()
-end
-
-def ci_dep_image(dep_name):
- return "eu.gcr.io/cf-rabbitmq-core/ci-" + dep_name + ":" + ci_image_tag()
-end
diff --git a/workflow_sources/test/prepare.lib.yml b/workflow_sources/test/prepare.lib.yml
deleted file mode 100644
index 5c97b7eb0b..0000000000
--- a/workflow_sources/test/prepare.lib.yml
+++ /dev/null
@@ -1,143 +0,0 @@
-#@ load("@ytt:data", "data")
-#@ load("util.star", "to_build_args")
-#@ load("helpers.star", "ci_image", "ci_image_tag")
-
-#@ def prepare_jobs():
-prepare:
- name: prepare
- runs-on: ubuntu-18.04
- outputs:
- build_start: ${{ steps.buildevents.outputs.build_start }}
- branch_or_tag_name: ${{ steps.buildevents.outputs.branch_or_tag_name }}
- #@yaml/text-templated-strings
- steps:
- - name: RECORD BUILD START
- id: buildevents
- run: |
- echo "::set-output name=build_start::$(date +%s)"
- branch_or_tag_name=${GITHUB_REF#refs/*/}
- echo "::set-output name=branch_or_tag_name::$branch_or_tag_name"
- - name: CHECKOUT REPOSITORY
- uses: actions/checkout@v2
- - name: PREPARE BUILD IMAGE
- uses: docker/build-push-action@v1
- with:
- username: _json_key
- password: ${{ secrets.GCR_JSON_KEY }}
- registry: eu.gcr.io
- repository: cf-rabbitmq-core/ci
- dockerfile: ci/dockerfiles/ci
- #@ build_args = {}
- #@ build_args["ERLANG_VERSION"] = data.values.erlang_version
- #@ build_args["GITHUB_RUN_ID"] = '${{ github.run_id }}'
- #@ build_args["BUILDEVENT_APIKEY"] = '${{ secrets.HONEYCOMB_API_KEY }}'
- #@ build_args["GITHUB_SHA"] = '${{ github.sha }}'
- #@ build_args["base_rmq_ref"] = data.values.base_rmq_ref
- #@ build_args["current_rmq_ref"] = '${{ steps.buildevents.outputs.branch_or_tag_name }}'
- #@ build_args["RABBITMQ_VERSION"] = data.values.base_rmq_ref.replace('master', '3.9.x').replace('.x', '.0')
- build_args: #@ to_build_args(build_args)
- tags: (@= ci_image_tag() @)
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RECORD STEP FINISH
- run: |
- docker run \
- --env project=prepare \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.build_start }} \
- (@= ci_image() @) \
- ci/scripts/collect.sh
-xref:
- name: xref
- runs-on: ubuntu-18.04
- #@yaml/text-templated-strings
- steps:
- - name: RECORD XREF START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - name: CHECKOUT REPOSITORY
- uses: actions/checkout@v2
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- #@ base_image = "eu.gcr.io/cf-rabbitmq-core/ci-base:" + data.values.erlang_version
- run: |
- gcloud auth configure-docker
- docker pull (@= base_image @)
- #@ for dep in [d for d in data.values.deps if not getattr(d, "skip_xref", False)]:
- - name: RUN XREF (@= dep.name @)
- run: |
- docker run \
- --env project=(@= dep.name @) \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- (@= base_image @) \
- ci/scripts/xref.sh
- #@ end
- - name: RECORD STEP FINISH
- if: always()
- run: |
- docker run \
- --env project=xref \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ci/scripts:/workspace/rabbitmq/ci/scripts \
- --workdir /workspace/rabbitmq \
- (@= base_image @) \
- ci/scripts/collect.sh
-
-#@ if/end data.values.erlang_version == data.values.erlang_versions[-1]:
-dialyze:
- name: dialyze
- runs-on: ubuntu-18.04
- #@yaml/text-templated-strings
- steps:
- - name: RECORD DIALYZE START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - name: CHECKOUT REPOSITORY
- uses: actions/checkout@v2
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: PULL IMAGE
- #@ base_image = "eu.gcr.io/cf-rabbitmq-core/ci-base:" + data.values.erlang_version
- run: |
- gcloud auth configure-docker
- docker pull (@= base_image @)
- #@ for dep in [d for d in data.values.deps if not getattr(d, "skip_dialyzer", False)]:
- - name: RUN DIALYZE (@= dep.name @)
- run: |
- docker run \
- --env project=(@= dep.name @) \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --volume ${PWD}:/workspace/rabbitmq \
- --workdir /workspace/rabbitmq \
- (@= base_image @) \
- ci/scripts/dialyze.sh
- #@ end
- - name: RECORD STEP FINISH
- if: always()
- run: |
- docker run \
- --env project=dialyze \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ci/scripts:/workspace/rabbitmq/ci/scripts \
- --workdir /workspace/rabbitmq \
- (@= base_image @) \
- ci/scripts/collect.sh
-#@ end
\ No newline at end of file
diff --git a/workflow_sources/test/rabbitmq_cli.lib.yml b/workflow_sources/test/rabbitmq_cli.lib.yml
deleted file mode 100644
index b0b6b73484..0000000000
--- a/workflow_sources/test/rabbitmq_cli.lib.yml
+++ /dev/null
@@ -1,39 +0,0 @@
-#@ load("@ytt:data", "data")
-#@ load("helpers.star", "ci_image")
-
-#@ def rabbitmq_cli_job(dep):
-name: #@ dep.name
-needs:
- - prepare
-runs-on: ubuntu-18.04
-#@yaml/text-templated-strings
-steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull (@= ci_image() @)
- mkdir broker-logs && chmod 777 broker-logs
- docker run \
- --env project=(@= dep.name @) \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/broker-logs:/workspace/broker-logs \
- (@= ci_image() @) \
- ci/scripts/rabbitmq_cli.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- #! https://github.com/marketplace/actions/upload-artifact
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: (@= dep.name @)-broker-logs
- path: "broker-logs/broker-logs.tar.xz"
-#@ end
\ No newline at end of file
diff --git a/workflow_sources/test/tests.lib.yml b/workflow_sources/test/tests.lib.yml
deleted file mode 100644
index 054144588f..0000000000
--- a/workflow_sources/test/tests.lib.yml
+++ /dev/null
@@ -1,40 +0,0 @@
-#@ load("@ytt:data", "data")
-#@ load("util.star", "to_build_args")
-#@ load("helpers.star", "ci_image")
-
-#@ def tests_job(dep):
-name: #@ dep.name
-needs: [prepare]
-runs-on: ubuntu-18.04
-#@yaml/text-templated-strings
-steps:
- - name: RECORD STEP START
- id: buildevents
- run: |
- echo "::set-output name=step_start::$(date +%s)"
- - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
- with:
- service_account_key: ${{ secrets.GCR_JSON_KEY }}
- export_default_credentials: true
- - name: RUN TESTS
- run: |
- gcloud auth configure-docker
- docker pull (@= ci_image() @)
- mkdir ct-logs && chmod 777 ct-logs
- docker run \
- --env project=(@= dep.name @) \
- --env GITHUB_RUN_ID=${{ github.run_id }} \
- --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
- --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
- --volume ${PWD}/ct-logs:/workspace/ct-logs \
- --oom-kill-disable \
- (@= ci_image() @) \
- ci/scripts/tests.sh
- - name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
- #! https://github.com/marketplace/actions/upload-artifact
- uses: actions/upload-artifact@v2-preview
- if: failure()
- with:
- name: (@= dep.name @)-ct-logs
- path: "ct-logs/*-ct-logs-*.tar.xz"
-#@ end
\ No newline at end of file
diff --git a/workflow_sources/test/util.star b/workflow_sources/test/util.star
deleted file mode 100644
index 2c6d348564..0000000000
--- a/workflow_sources/test/util.star
+++ /dev/null
@@ -1,58 +0,0 @@
-def is_unique(l):
- return len(l) == len(set(l))
-end
-
-def merge(dicts):
- r = {}
- for d in dicts:
- r.update(**d)
- end
- return r
-end
-
-def name(suites):
- if len(suites) == 1:
- return suites[0].name
- else:
- return suites[0].name + "-plus-" + str(len(suites) - 1) + "-more"
- end
-end
-
-def sum(ints):
- s = 0
- for i in ints:
- s += i
- end
- return s
-end
-
-def partition(target, groups, suites):
- if len(suites) == 0:
- return groups
- end
- group = []
- rest = []
- for suite in sorted(suites, key=lambda suite: suite.time):
- if sum([suite2.time for suite2 in group]) + suite.time <= target:
- group.append(suite)
- else:
- rest.append(suite)
- end
- end
- return partition(target, groups + [group], rest)
-end
-
-def group_by_time(suites):
- longest = max([suite.time for suite in suites])
- groups = partition(longest, [], suites)
- return [{"name": name(suites), "suites": [suite.name for suite in suites]} for suites in groups]
-end
-
-# Used when we don't actually want multiple ct-suites per job
-def group_by_one(suites):
- return [{"name": suite.name, "suites": [suite.name]} for suite in suites]
-end
-
-def to_build_args(d):
- return ",".join(['{0}={1}'.format(k,d[k]) for k in d.keys()])
-end
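
group_by_time() is a greedy scheduler: it takes the longest single suite time as the per-group budget, then repeatedly fills one group with suites in ascending time order until the next suite would exceed the budget, recursing on the leftovers; group_by_one() is the degenerate one-suite-per-group variant that dep.star actually uses. A Python sketch of the grouping, using a few suite times from deps.yml above:

    def partition(target, groups, suites):
        # same greedy fill as above: smallest first, until the budget would be exceeded
        if not suites:
            return groups
        group, rest = [], []
        for suite in sorted(suites, key=lambda s: s["time"]):
            if sum(s["time"] for s in group) + suite["time"] <= target:
                group.append(suite)
            else:
                rest.append(suite)
        return partition(target, groups + [group], rest)

    suites = [{"name": "cluster", "time": 85},
              {"name": "policy", "time": 86},
              {"name": "unit_access_control", "time": 90},
              {"name": "vhost", "time": 211},
              {"name": "quorum_queue", "time": 687}]
    budget = max(s["time"] for s in suites)        # 687, the longest single suite
    print([[s["name"] for s in g] for g in partition(budget, [], suites)])
    # [['cluster', 'policy', 'unit_access_control', 'vhost'], ['quorum_queue']]
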
diff --git a/workflow_sources/test/workflow.yml b/workflow_sources/test/workflow.yml
deleted file mode 100644
index c3746b1944..0000000000
--- a/workflow_sources/test/workflow.yml
+++ /dev/null
@@ -1,16 +0,0 @@
-#@ load("@ytt:data", "data")
-#@ load("@ytt:template", "template")
-#@ load("util.star", "merge")
-#@ load("prepare.lib.yml", "prepare_jobs")
-#@ load("dep.star", "dep_jobs")
-#@ load("finish.lib.yml", "finish_jobs")
-
-#@ prepare = prepare_jobs()
-#@ deps = [dep_jobs(dep) for dep in data.values.deps]
-#@ finish = finish_jobs([prepare[k]['name'] for k in prepare])
-
----
-name: #@ "Test - Erlang " + data.values.erlang_version
-#! https://github.com/k14s/ytt/issues/189
-a_magic_string_that_we_will_sed_to_on: push
-jobs: #@ merge([prepare] + deps + [finish])
diff --git a/workspace_helpers.bzl b/workspace_helpers.bzl
new file mode 100644
index 0000000000..8e6516f360
--- /dev/null
+++ b/workspace_helpers.bzl
@@ -0,0 +1,279 @@
+load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
+load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository", "new_git_repository")
+load("@bazel-erlang//:github.bzl", "github_bazel_erlang_lib")
+load("@bazel-erlang//:hex_archive.bzl", "hex_archive")
+load("@bazel-erlang//:hex_pm.bzl", "hex_pm_bazel_erlang_lib")
+load("//:rabbitmq.bzl", "APP_VERSION")
+
+def rabbitmq_external_deps(rabbitmq_workspace = "@rabbitmq-server"):
+ hex_pm_bazel_erlang_lib(
+ name = "accept",
+ version = "0.3.5",
+ sha256 = "11b18c220bcc2eab63b5470c038ef10eb6783bcb1fcdb11aa4137defa5ac1bb8",
+ )
+
+ github_bazel_erlang_lib(
+ name = "aten",
+ org = "rabbitmq",
+ sha256 = "f27453bfa270538e1b48a9111206847e19a5ad51b4ded4f03fcb0184fbb555be",
+ ref = "v0.5.7",
+ version = "0.5.7",
+ )
+
+ hex_pm_bazel_erlang_lib(
+ name = "base64url",
+ version = "1.0.1",
+ sha256 = "f9b3add4731a02a9b0410398b475b33e7566a695365237a6bdee1bb447719f5c",
+ )
+
+ new_git_repository(
+ name = "bats",
+ remote = "https://github.com/sstephenson/bats",
+ tag = "v0.4.0",
+ build_file = rabbitmq_workspace + "//:BUILD.bats",
+ )
+
+ hex_pm_bazel_erlang_lib(
+ name = "cowboy",
+ first_srcs = [
+ "src/cowboy_stream.erl",
+ "src/cowboy_middleware.erl",
+ "src/cowboy_sub_protocol.erl",
+ ],
+ version = "2.8.0",
+ sha256 = "4643e4fba74ac96d4d152c75803de6fad0b3fa5df354c71afdd6cbeeb15fac8a",
+ deps = [
+ "@cowlib//:bazel_erlang_lib",
+ "@ranch//:bazel_erlang_lib",
+ ],
+ )
+
+ hex_pm_bazel_erlang_lib(
+ name = "cowlib",
+ version = "2.9.1",
+ sha256 = "e4175dc240a70d996156160891e1c62238ede1729e45740bdd38064dad476170",
+ )
+
+ github_bazel_erlang_lib(
+ repo = "credentials-obfuscation",
+ name = "credentials_obfuscation",
+ org = "rabbitmq",
+ sha256 = "a5cecd861334a8a5fb8c9b108a74c83ba0041653c53c523bb97f70dbefa30fe3",
+ ref = "v2.4.0",
+ version = "2.4.0",
+ )
+
+ github_bazel_erlang_lib(
+ name = "ct_helper",
+ org = "extend",
+ )
+
+ hex_pm_bazel_erlang_lib(
+ name = "cuttlefish",
+ version = "3.0.1",
+ sha256 = "3feff3ae4ed1f0ca6df87ac89235068fbee9242ee85d2ac17fb1b8ce0e30f1a6",
+ )
+
+ hex_pm_bazel_erlang_lib(
+ name = "eetcd",
+ version = "0.3.3",
+ sha256 = "8fb280156ddd1b7b34d0f446c5711832385bff512c05378dcea8362f4f5060d6",
+ runtime_deps = [
+ "@gun//:bazel_erlang_lib",
+ ],
+ )
+
+ http_archive(
+ name = "emqttc",
+ urls = ["https://github.com/rabbitmq/emqttc/archive/remove-logging.zip"],
+ strip_prefix = "emqttc-remove-logging",
+ build_file_content = """load("@bazel-erlang//:bazel_erlang_lib.bzl", "erlang_lib")
+
+erlang_lib(
+ app_name = "emqttc",
+ erlc_opts = [
+ "+warn_export_all",
+ "+warn_unused_import",
+ ],
+)
+""",
+ )
+
+ hex_pm_bazel_erlang_lib(
+ name = "enough",
+ version = "0.1.0",
+ sha256 = "0460c7abda5f5e0ea592b12bc6976b8a5c4b96e42f332059cd396525374bf9a1",
+ )
+
+ hex_pm_bazel_erlang_lib(
+ name = "gen_batch_server",
+ version = "0.8.6",
+ sha256 = "b78679349168f27d7047f3283c9d766760b234d98c762aca9a1907f4ee3fd406",
+ )
+
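+ # runtime_deps are applications a library needs at run time only; deps (as
+ # on cowboy above) are also placed on the compile path.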
+ hex_pm_bazel_erlang_lib(
+ name = "gun",
+ version = "1.3.3",
+ sha256 = "3106ce167f9c9723f849e4fb54ea4a4d814e3996ae243a1c828b256e749041e0",
+ first_srcs = [
+ "src/gun_content_handler.erl",
+ ],
+ runtime_deps = [
+ "@cowlib//:bazel_erlang_lib",
+ ],
+ erlc_opts = [
+ "+debug_info",
+ "+warn_export_vars",
+ "+warn_shadow_vars",
+ "+warn_obsolete_guard",
+ ],
+ )
+
+ http_archive(
+ name = "inet_tcp_proxy",
+ build_file = rabbitmq_workspace + "//:BUILD.inet_tcp_proxy",
+ strip_prefix = "inet_tcp_proxy-master",
+ urls = ["https://github.com/rabbitmq/inet_tcp_proxy/archive/master.zip"],
+ )
+
+ github_bazel_erlang_lib(
+ name = "jose",
+ repo = "erlang-jose",
+ org = "potatosalad",
+ ref = "2b1d66b5f4fbe33cb198149a8cb23895a2c877ea",
+ version = "2b1d66b5f4fbe33cb198149a8cb23895a2c877ea",
+ first_srcs = [
+ "src/jose_block_encryptor.erl",
+ "src/jwk/jose_jwk_use_enc.erl",
+ "src/jwk/jose_jwk_use_sig.erl",
+ "src/jwk/jose_jwk_oct.erl",
+ ],
+ sha256 = "7816f39d00655f2605cfac180755e97e268dba86c2f71037998ff63792ca727b",
+ )
+
+ hex_pm_bazel_erlang_lib(
+ name = "jsx",
+ version = "3.1.0",
+ sha256 = "0c5cc8fdc11b53cc25cf65ac6705ad39e54ecc56d1c22e4adb8f5a53fb9427f3",
+ )
+
+ github_bazel_erlang_lib(
+ name = "meck",
+ org = "eproxus",
+ )
+
+ hex_pm_bazel_erlang_lib(
+ name = "observer_cli",
+ version = "1.7.1",
+ sha256 = "4ccafaaa2ce01b85ddd14591f4d5f6731b4e13b610a70fb841f0701178478280",
+ )
+
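+ # osiris and ra (below) track a branch rather than a pinned tag or sha256,
+ # so each fetch follows the current branch head.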
+ git_repository(
+ name = "osiris",
+ branch = "main",
+ remote = "https://github.com/rabbitmq/osiris.git",
+ )
+
+ github_bazel_erlang_lib(
+ name = "prometheus",
+ repo = "prometheus.erl",
+ org = "deadtrickster",
+ ref = "06425c21a39c1564164f1cc3fe5bdfa8b23b1f78",
+ version = "06425c21a39c1564164f1cc3fe5bdfa8b23b1f78",
+ first_srcs = [
+ "src/prometheus_collector.erl",
+ "src/prometheus_format.erl",
+ "src/prometheus_instrumenter.erl",
+ "src/prometheus_metric.erl",
+ ],
+ deps = [
+ "@quantile_estimator//:bazel_erlang_lib",
+ ],
+ )
+
+ github_bazel_erlang_lib(
+ name = "proper",
+ first_srcs = [
+ "src/vararg.erl",
+ "src/proper_target.erl",
+ ],
+ org = "manopapad",
+ )
+
+ hex_pm_bazel_erlang_lib(
+ name = "quantile_estimator",
+ version = "0.2.1",
+ erlc_opts = [
+ "+debug_info",
+ ],
+ sha256 = "282a8a323ca2a845c9e6f787d166348f776c1d4a41ede63046d72d422e3da946",
+ )
+
+ git_repository(
+ name = "ra",
+ branch = "main",
+ remote = "https://github.com/rabbitmq/ra.git",
+ )
+
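+ # ranch is fetched as a raw hex.pm archive and built with the checked-in
+ # BUILD.ranch file instead of the target that hex_pm_bazel_erlang_lib would
+ # generate from its attributes.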
+ hex_archive(
+ name = "ranch",
+ version = "2.1.0",
+ sha256 = "244ee3fa2a6175270d8e1fc59024fd9dbc76294a321057de8f803b1479e76916",
+ build_file = rabbitmq_workspace + "//:BUILD.ranch",
+ )
+
+ hex_pm_bazel_erlang_lib(
+ name = "recon",
+ version = "2.5.1",
+ sha256 = "5721c6b6d50122d8f68cccac712caa1231f97894bab779eff5ff0f886cb44648",
+ )
+
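+ # extra_apps lists additional runtime OTP applications (sasl, crypto) for
+ # seshat's generated .app file.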
+ github_bazel_erlang_lib(
+ name = "seshat",
+ org = "rabbitmq",
+ ref = "main",
+ version = "main",
+ extra_apps = [
+ "sasl",
+ "crypto",
+ ],
+ )
+
+ hex_pm_bazel_erlang_lib(
+ name = "stdout_formatter",
+ version = "0.2.4",
+ sha256 = "51f1df921b0477275ea712763042155dbc74acc75d9648dbd54985c45c913b29",
+ )
+
+ github_bazel_erlang_lib(
+ name = "syslog",
+ org = "schlagert",
+ sha256 = "01c31c31d4d28e564da0660bdb69725ba37173fca5b3228829b8f3f416f9e486",
+ ref = "4.0.0",
+ version = "4.0.0",
+ first_srcs = [
+ "src/syslog_logger.erl",
+ ],
+ )
+
+ hex_pm_bazel_erlang_lib(
+ name = "sysmon_handler",
+ version = "1.3.0",
+ sha256 = "922cf0dd558b9fdb1326168373315b52ed6a790ba943f6dcbd9ee22a74cebdef",
+ )
+
+ hex_pm_bazel_erlang_lib(
+ name = "systemd",
+ version = "0.6.1",
+ sha256 = "8ec5ed610a5507071cdb7423e663e2452a747a624bb8a58582acd9491ccad233",
+ deps = [
+ "@enough//:bazel_erlang_lib",
+ ],
+ )
+
+ new_git_repository(
+ name = "trust_store_http",
+ remote = "https://github.com/rabbitmq/trust-store-http.git",
+ branch = "master",
+ build_file = rabbitmq_workspace + "//:BUILD.trust_store_http",
+ )